diff --git a/dataset_info/Agilex_Cobot_Magic_erase_board.yaml b/dataset_info/Agilex_Cobot_Magic_erase_board.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6a2a7e544ba23eea2a4b3ffc46bb9b797d8587b1 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_erase_board.yaml @@ -0,0 +1,520 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_erase_board +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: education + level2: school + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: whiteboard + level1: stationery + level2: whiteboard + level3: null + level4: null + level5: null +- object_name: green_whiteboard_Eraser + level1: stationery + level2: green_whiteboard_Eraser + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- wipe off the handwriting on the whiteboard. 
+sub_tasks: +- subtask: Place the eraser with the left gripper + subtask_index: 0 +- subtask: end + subtask_index: 1 +- subtask: Grasp the eraser with the left gripper + subtask_index: 2 +- subtask: Grasp the eraser and wipe the blackboard with the right gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Grasp the eraser and wipe the blackboard with the left gripper + subtask_index: 5 +- subtask: Place the eraser with the right gripper + subtask_index: 6 +- subtask: Grasp the eraser with the right gripper + subtask_index: 7 +- subtask: 'null' + subtask_index: 8 +atomic_actions: +- grasp +- lift +- lower +- wipe +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 50 + total_frames: 24688 + fps: 30 + total_tasks: 9 + total_videos: 150 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 230.75 MB +frame_num: 24688 +dataset_size: 230.75 MB +data_structure: 'Agilex_Cobot_Magic_erase_board_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- 
eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- backup + + | |-- data + + | | `-- chunk-000 + + | `-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (38 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:49 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + 
video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + 
names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial 
Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' 
+version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_fold_towel_grey_tray.yaml b/dataset_info/Agilex_Cobot_Magic_fold_towel_grey_tray.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6cdab5e056a6667503f191f7fd91dc04cc548eae --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_fold_towel_grey_tray.yaml @@ -0,0 +1,504 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_fold_towel_grey_tray +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: green_tray + level1: kitchen_supplies + level2: green_tray + level3: null + level4: null + level5: null +- object_name: grey_square_towel + level1: daily_necessities + level2: grey_square_towel + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use both grippers to hold the edge of the towel and fold it forward.,use the right + gripper to hold the right edge and fold it again,use a gripper to place the folded + towel on the tray. +sub_tasks: +- subtask: Fold the grey towel from left to right with left gripper + subtask_index: 0 +- subtask: Place the folded grey towel on the tray with the right gripper + subtask_index: 1 +- subtask: Abnormal + subtask_index: 2 +- subtask: Fold the grey towel from right to left with right gripper + subtask_index: 3 +- subtask: Fold the grey towel upwards + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Place the folded grey towel on the tray with the left gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 +atomic_actions: +- grasp +- lift +- lower +- fold +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 50 + total_frames: 53308 + fps: 30 + total_tasks: 8 + total_videos: 150 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 724.56 MB +frame_num: 53308 +dataset_size: 724.56 MB +data_structure: 'Agilex_Cobot_Magic_fold_towel_grey_tray_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... 
(38 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:49 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - 
right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + 
names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_fold_towel_yellow_tray.yaml b/dataset_info/Agilex_Cobot_Magic_fold_towel_yellow_tray.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..1e45819db888a7203a2f4b62063661de2917f5ac --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_fold_towel_yellow_tray.yaml @@ -0,0 +1,504 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_fold_towel_yellow_tray +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: green_tray + level1: kitchen_supplies + level2: green_tray + level3: null + level4: null + level5: null +- object_name: yellow_square_towel + level1: daily_necessities + level2: yellow_square_towel + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use both grippers to hold the edge of the towel and fold it forward.,use the right + gripper to hold the right edge and fold it again,use a gripper to place the folded + towel on the tray. 
+sub_tasks: +- subtask: Abnormal + subtask_index: 0 +- subtask: Fold the yellow towel upwards + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: Fold the yellow towel from right to left with right gripper + subtask_index: 3 +- subtask: Fold the yellow towel from left to right with left gripper + subtask_index: 4 +- subtask: Place the folded yellow towel on the tray with the left gripper + subtask_index: 5 +- subtask: Place the folded yellow towel on the tray with the right gripper + subtask_index: 6 +- subtask: 'null' + subtask_index: 7 +atomic_actions: +- grasp +- lift +- lower +- fold +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 49 + total_frames: 48960 + fps: 30 + total_tasks: 8 + total_videos: 147 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 552.42 MB +frame_num: 48960 +dataset_size: 552.42 MB +data_structure: 'Agilex_Cobot_Magic_fold_towel_yellow_tray_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + 
+ | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (37 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:48 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - 
left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - 
right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_move_mouse.yaml b/dataset_info/Agilex_Cobot_Magic_move_mouse.yaml new file mode 100644 index 0000000000000000000000000000000000000000..a573e62356f2c48e8d393304aa10ffd593467b0e --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_move_mouse.yaml @@ -0,0 +1,515 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_move_mouse +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office & workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: mouse + level1: appliances + level2: mouse + level3: null + level4: null + level5: null +- object_name: mouse_pad + level1: appliances + level2: mouse_pad + level3: null + level4: null + level5: null +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- the right gripper organize the mouse on the mouse pad. 
+sub_tasks: +- subtask: Grasp the mouse with the left gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Grasp the mouse with the right gripper + subtask_index: 2 +- subtask: Place the mouse on the mouse mat with the right gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Place the mouse on the mouse mat with the left gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 112 + total_frames: 49737 + fps: 30 + total_tasks: 7 + total_videos: 336 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 386.89 MB +frame_num: 49737 +dataset_size: 386.89 MB +data_structure: 'Agilex_Cobot_Magic_move_mouse_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- 
scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- backup + + | |-- data + + | | `-- chunk-000 + + | `-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (100 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:111 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + 
- 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - 
right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_move_object_beige_tablecloth.yaml b/dataset_info/Agilex_Cobot_Magic_move_object_beige_tablecloth.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9a6cd5b998c9db6e233f7ae590375237589e6f94 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_move_object_beige_tablecloth.yaml @@ -0,0 +1,675 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_move_object_beige_tablecloth +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: commercial & convenience + level2: supermarket + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: white_table_cloths + level1: laboratory_supplies + level2: white_table_cloths + level3: null + level4: null + level5: null +- object_name: waffle + level1: food + level2: waffle + level3: null + level4: null + level5: null +- object_name: green_lemon + level1: food + level2: green_lemon + level3: null + level4: null + level5: null +- object_name: eggplant + level1: food + level2: eggplant + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: food + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: chocolate + level1: food + level2: chocolate + level3: null + level4: null + level5: null +- object_name: mango + level1: food + level2: mango + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: food + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: mint_candy + level1: food + level2: mint_candy + level3: null + level4: null + level5: null +- object_name: mangosteen + level1: food + level2: mangosteen + level3: null + level4: null + level5: null +- object_name: orange + level1: food + level2: orange + level3: null + level4: null + level5: null +- object_name: bread + level1: food + level2: bread + level3: null + level4: null + level5: null +- object_name: banana + level1: food + level2: banana + level3: null + level4: Fruit cake + level5: null +- object_name: cake + level1: food + level2: cake + level3: null + level4: null + level5: null +- object_name: beef_cheeseburger + level1: food + level2: beef_cheeseburger + level3: null + level4: null + level5: null +- object_name: bowl + level1: kitchen_supplies + level2: bowl + level3: null + level4: null + level5: null +- object_name: pan + level1: kitchen_supplies + level2: pan + level3: null + level4: null + level5: null +- object_name: small_teapot + level1: 
kitchen_supplies + level2: small_teapot + level3: null + level4: null + level5: null +- object_name: small_teacup + level1: kitchen_supplies + level2: small_teacup + level3: null + level4: null + level5: null +- object_name: paper_ball + level1: trash + level2: paper_ball + level3: null + level4: null + level5: null +- object_name: brown_square_towel + level1: daily_necessities + level2: brown_square_towel + level3: null + level4: null + level5: null +- object_name: black_cylindrical_pen_holder + level1: stationery + level2: black_cylindrical_pen_holder + level3: null + level4: null + level5: null +- object_name: pink_long_towel + level1: daily_necessities + level2: pink_long_towel + level3: null + level4: null + level5: null +- object_name: whiteboard_eraser + level1: stationery + level2: whiteboard_eraser + level3: null + level4: null + level5: null +- object_name: mentholatum_facial_cleanser + level1: daily_necessities + level2: mentholatum_facial_cleanser + level3: null + level4: null + level5: null +- object_name: duck + level1: toys + level2: duck + level3: null + level4: null + level5: null +- object_name: compass + level1: stationery + level2: compass + level3: null + level4: null + level5: null +- object_name: bowl + level1: kitchen_supplies + level2: bowl + level3: null + level4: null + level5: null +- object_name: blue_long_towel + level1: daily_necessities + level2: blue_long_towel + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- the gripper move the object. 
+sub_tasks: +- subtask: Grasp the XX with the right gripper + subtask_index: 0 +- subtask: Place the XX on the table with the left gripper + subtask_index: 1 +- subtask: Place the XX on the table with the right gripper + subtask_index: 2 +- subtask: Grasp the XX with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 199 + total_frames: 103966 + fps: 30 + total_tasks: 6 + total_videos: 597 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 1.34 GB +frame_num: 103966 +dataset_size: 1.34 GB +data_structure: 'Agilex_Cobot_Magic_move_object_beige_tablecloth_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- 
subtask_annotations.jsonl + + |-- backup + + | |-- data + + | | `-- chunk-000 + + | `-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (187 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:198 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - 
left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - 
right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_organize_test_tube.yaml b/dataset_info/Agilex_Cobot_Magic_organize_test_tube.yaml new file mode 100644 index 0000000000000000000000000000000000000000..fd12adab78fa6a9366d31d2207843a32ab4ede17 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_organize_test_tube.yaml @@ -0,0 +1,497 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_organize_test_tube +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: other + level2: laboratory + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: test_tube_rack + level1: laboratory_supplies + level2: test_tube_rack + level3: null + level4: null + level5: null +- object_name: clear_test_tube + level1: laboratory_supplies + level2: clear_test_tube + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. 
+task_instruction: +- Put the test tube on the table back onto the test tube rack. +sub_tasks: +- subtask: Place the test tube on the test tube rack with the left gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Pick up the test tube with the right gripper + subtask_index: 2 +- subtask: Move the test tube from the right gripper to the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 197 + total_frames: 159747 + fps: 30 + total_tasks: 6 + total_videos: 591 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 1.92 GB +frame_num: 159747 +dataset_size: 1.92 GB +data_structure: 'Agilex_Cobot_Magic_organize_test_tube_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- 
gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (185 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:196 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - 
left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + 
eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_pour_drink_bottle_cup.yaml b/dataset_info/Agilex_Cobot_Magic_pour_drink_bottle_cup.yaml new file mode 100644 index 0000000000000000000000000000000000000000..fd652059e6d93e030a8edf0e411c723a3a26a83e --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_pour_drink_bottle_cup.yaml @@ -0,0 +1,532 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_pour_drink_bottle_cup +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: cup + level1: kitchen_supplies + level2: cup + level3: null + level4: null + level5: null +- object_name: water_bottle + level1: beverages + level2: water_bottle + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. 
+task_instruction: +- use a gripper to hold the bottle of the drink and pour it into a random cup. +sub_tasks: +- subtask: Pour the cranberry juice into the cyan cup with the right gripper + subtask_index: 0 +- subtask: Pour the cranberry juice into the paper cup with the right gripper + subtask_index: 1 +- subtask: end + subtask_index: 2 +- subtask: Pour the cranberry juice into the White Plastic Cup with the left gripper + subtask_index: 3 +- subtask: Place the mineral water bottle on the table with the left gripper + subtask_index: 4 +- subtask: Pour the mineral water into the paper cup with the left gripper + subtask_index: 5 +- subtask: Grasp the mineral water bottle with the left gripper + subtask_index: 6 +- subtask: Pour the cranberry juice into the cyan cup with the left gripper + subtask_index: 7 +- subtask: Pour the cranberry juice into the White Plastic Cup with the right gripper + subtask_index: 8 +- subtask: Abnormal + subtask_index: 9 +- subtask: Pour the mineral water into the glass with the right gripper + subtask_index: 10 +- subtask: Place the cranberry juice bottle on the table with the right gripper + subtask_index: 11 +- subtask: Pour the cranberry juice into the paper cup with the left gripper + subtask_index: 12 +- subtask: Grasp the cranberry juice bottle with the right gripper + subtask_index: 13 +- subtask: Pour the mineral water into the cyan cup with the left gripper + subtask_index: 14 +- subtask: Grasp the cranberry juice bottle with the left gripper + subtask_index: 15 +- subtask: Place the cranberry juice bottle on the table with the left gripper + subtask_index: 16 +- subtask: Place the mineral water bottle on the table with the right gripper + subtask_index: 17 +- subtask: Grasp the mineral water bottle with the right gripper + subtask_index: 18 +- subtask: Pour the mineral water into the cyan cup with the right gripper + subtask_index: 19 +- subtask: Pour the mineral water into the paper cup with the right gripper + 
subtask_index: 20 +- subtask: Pour the mineral water into the glass with the left gripper + subtask_index: 21 +- subtask: 'null' + subtask_index: 22 +atomic_actions: +- grasp +- lift +- pour +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 195 + total_frames: 201530 + fps: 30 + total_tasks: 23 + total_videos: 585 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 1.58 GB +frame_num: 201530 +dataset_size: 1.58 GB +data_structure: 'Agilex_Cobot_Magic_pour_drink_bottle_cup_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- 
episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (183 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:194 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - 
right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + 
eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_storage_object_left.yaml b/dataset_info/Agilex_Cobot_Magic_storage_object_left.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..0319c368f0e7f934225913bcc753b9b03f35812b --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_storage_object_left.yaml @@ -0,0 +1,551 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_storage_object_left +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: brown_basket + level1: home_storage + level2: brown_basket + level3: null + level4: null + level5: null +- object_name: mango + level1: food + level2: mango + level3: null + level4: null + level5: null +- object_name: green_lemon + level1: food + level2: green_lemon + level3: null + level4: null + level5: null +- object_name: rubik's_cube + level1: toys + level2: rubik's_cube + level3: null + level4: null + level5: null +- object_name: whiteboard_erasers + level1: stationery + level2: whiteboard_erasers + level3: null + level4: null + level5: null +- object_name: bathing_in_flowers + level1: daily_necessities + level2: bathing_in_flowers + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use the left gripper to grab items from the table and place them in the basket. 
+sub_tasks: +- subtask: Grasp the Rubik's Cube with the left gripper + subtask_index: 0 +- subtask: Place the Rubik's Cube into the basket with the left gripper + subtask_index: 1 +- subtask: Grasp the apple rubber puff with the left gripper + subtask_index: 2 +- subtask: Grasp the mango with the right gripper + subtask_index: 3 +- subtask: Grasp the Shower puff with the left gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Place the apple rubber puff into the basket with the left gripper + subtask_index: 6 +- subtask: Place the Shower puff into the basket with the left gripper + subtask_index: 7 +- subtask: Grasp the mango with the left gripper + subtask_index: 8 +- subtask: Place the mango into the basket with the left gripper + subtask_index: 9 +- subtask: Place the chalkboard eraser into the basket with the left gripper + subtask_index: 10 +- subtask: Grasp the chalkboard eraser with the left gripper + subtask_index: 11 +- subtask: 'null' + subtask_index: 12 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 100 + total_frames: 26742 + fps: 30 + total_tasks: 13 + total_videos: 300 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 360.15 MB +frame_num: 26742 +dataset_size: 360.15 MB +data_structure: 'Agilex_Cobot_Magic_storage_object_left_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- backup + + | |-- data + + | | `-- chunk-000 + + | `-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- 
... (88 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:99 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - 
right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + 
names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_storage_peach_left.yaml b/dataset_info/Agilex_Cobot_Magic_storage_peach_left.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..e5efc67d63e8c59dd5497ed8c147ed8f1e484c21 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_storage_peach_left.yaml @@ -0,0 +1,511 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_storage_peach_left +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: brown_basket + level1: home_storage + level2: brown_basket + level3: null + level4: null + level5: null +- object_name: peach + level1: food + level2: peach + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- put the peach in the basket with your left . 
+sub_tasks: +- subtask: End + subtask_index: 0 +- subtask: Place the peach in the basket with left gripper + subtask_index: 1 +- subtask: Grasp the peach with left gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 99 + total_frames: 21979 + fps: 30 + total_tasks: 5 + total_videos: 297 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 264.71 MB +frame_num: 21979 +dataset_size: 264.71 MB +data_structure: 'Agilex_Cobot_Magic_storage_peach_left_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- backup + + | |-- data + + | | `-- chunk-000 + + | `-- meta + + | |-- episodes.jsonl + + | |-- 
episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (87 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:98 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad 
+ - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - 
left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Agilex_Cobot_Magic_storage_peach_right.yaml b/dataset_info/Agilex_Cobot_Magic_storage_peach_right.yaml new file mode 100644 index 0000000000000000000000000000000000000000..dd836e05259cf5847532d1d647e4bb792f21a5c5 --- /dev/null +++ b/dataset_info/Agilex_Cobot_Magic_storage_peach_right.yaml @@ -0,0 +1,511 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Agilex_Cobot_Magic_storage_peach_right +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: brown_basket + level1: home_storage + level2: brown_basket + level3: null + level4: null + level5: null +- object_name: peach + level1: food + level2: peach + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. 
+task_instruction: +- put the peach in the basket with right arm. +sub_tasks: +- subtask: Grasp the peach with right gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Place the peach in the basket with right gripper + subtask_index: 2 +- subtask: Abnormal + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 +atomic_actions: +- grasp +- lift +- lower +robot_name: +- Agilex_Cobot_Magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 99 + total_frames: 25876 + fps: 30 + total_tasks: 5 + total_videos: 297 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 277.18 MB +frame_num: 25876 +dataset_size: 277.18 MB +data_structure: 'Agilex_Cobot_Magic_storage_peach_right_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- backup + + | |-- data + + | | 
`-- chunk-000 + + | `-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (87 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:98 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - 
left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + 
- 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/Airbot_MMK2_click_pen.yaml b/dataset_info/Airbot_MMK2_click_pen.yaml new file mode 100644 index 0000000000000000000000000000000000000000..d9264e7b9e5e64c5dc750d79fba3bcd2554f3edc --- /dev/null +++ b/dataset_info/Airbot_MMK2_click_pen.yaml @@ -0,0 +1,441 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Airbot_MMK2_click_pen +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: education + level2: school + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: furniture + level2: table + level3: null + level4: null + level5: null +- object_name: block + level1: toy + level2: block + level3: null + level4: null + level5: null +- object_name: pen + level1: stationery + level2: pen + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- pick up the pen with your hand, press the pen switch and then place it on the table. 
+sub_tasks: +- subtask: End + subtask_index: 0 +- subtask: Lift the pen with the right gripper + subtask_index: 1 +- subtask: Grasp the pen with the right gripper + subtask_index: 2 +- subtask: Place the pen on the table with the right gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: Press the pen switch with the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 +atomic_actions: +- garsp +- pick +- place +- pressbutton +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 100 + total_frames: 30984 + fps: 30 + total_tasks: 7 + total_videos: 400 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 1.12 GB +frame_num: 30984 +dataset_size: 1.12 GB +data_structure: "Airbot_MMK2_click_pen_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n\ + | |-- eef_direction_annotation.jsonl\n| |-- 
eef_velocity_annotation.jsonl\n\ + | |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n\ + | |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n|\ + \ `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ + | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ + \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n\ + | |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| \ + \ |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n\ + | `-- ... (88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ + | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ + \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:99 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + 
info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - 
right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an 
extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: 
data/chunk-{id}/episode_{id}.parquet
+video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4
diff --git a/dataset_info/Airbot_MMK2_move_block.yaml b/dataset_info/Airbot_MMK2_move_block.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..45d23ddc624d7027c3404f47b33e9ef7ffe3a081
--- /dev/null
+++ b/dataset_info/Airbot_MMK2_move_block.yaml
@@ -0,0 +1,432 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Airbot_MMK2_move_block
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: household
+  level2: bedroom
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects:
+- object_name: square_building_blocks
+  level1: toys
+  level2: square_building_blocks
+  level3: null
+  level4: null
+  level5: null
+- object_name: early_education_toys
+  level1: toys
+  level2: early_education_toys
+  level3: null
+  level4: null
+  level5: null
+task_operation_type: Due to some reasons, this dataset temporarily cannot provide
+  the operation type information.
+task_instruction:
+- put the square blocks into the circular toy. 
+sub_tasks:
+- subtask: Place the yellow block in the blue circle with the right gripper
+  subtask_index: 0
+- subtask: Grasp the yellow block with the left gripper
+  subtask_index: 1
+- subtask: Grasp the yellow block with the right gripper
+  subtask_index: 2
+- subtask: Place the yellow block in the blue circle with the left gripper
+  subtask_index: 3
+- subtask: End
+  subtask_index: 4
+- subtask: 'null'
+  subtask_index: 5
+atomic_actions:
+- grasp
+- pick
+- place
+robot_name:
+- Airbot_MMK2
+end_effector_type: five_finger_gripper
+tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation
+  type information.
+sensor_list:
+- cam_head_rgb
+- cam_left_wrist_rgb
+- cam_right_wrist_rgb
+- cam_front_rgb
+came_info:
+  cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p
+  cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1,
+    pix_fmt=yuv420p
+  cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1,
+    pix_fmt=yuv420p
+  cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p
+depth_enabled: false
+coordinate_definition: right-hand-frame
+joint_rotation_dim: radian
+end_rotation_dim: end_rotation_dim
+end_translation_dim: end_translation_dim
+annotations:
+- eef_acc_mag_annotation.jsonl
+- eef_direction_annotation.jsonl
+- eef_velocity_annotation.jsonl
+- gripper_activity_annotation.jsonl
+- gripper_mode_annotation.jsonl
+- scene_annotations.jsonl
+- subtask_annotations.jsonl
+statistics:
+  total_episodes: 50
+  total_frames: 7264
+  fps: 30
+  total_tasks: 6
+  total_videos: 200
+  total_chunks: 1
+  chunks_size: 1000
+  state_dim: 36
+  action_dim: 36
+  camera_views: 4
+  dataset_size: 300.47 MB
+frame_num: 7264
+dataset_size: 300.47 MB
+data_structure: "Airbot_MMK2_move_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n\
+  | |-- eef_direction_annotation.jsonl\n| |-- 
eef_velocity_annotation.jsonl\n\ + | |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n\ + | |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n|\ + \ `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ + | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ + \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n\ + | |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| \ + \ |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n\ + | `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ + | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ + \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:49 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + 
info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - 
right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an 
extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: 
data/chunk-{id}/episode_{id}.parquet
+video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4
diff --git a/dataset_info/Airbot_MMK2_open_door_left.yaml b/dataset_info/Airbot_MMK2_open_door_left.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5d7b50f4ee2b1b7100575cb6230c4c1300665d56
--- /dev/null
+++ b/dataset_info/Airbot_MMK2_open_door_left.yaml
@@ -0,0 +1,420 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Airbot_MMK2_open_door_left
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: scene_level1
+  level2: scene_level2
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects:
+- object_name: cabinet
+  level1: furniture
+  level2: cabinet
+  level3: null
+  level4: null
+  level5: null
+task_operation_type: Due to some reasons, this dataset temporarily cannot provide
+  the operation type information.
+task_instruction:
+- open the cabinet. 
+sub_tasks: +- subtask: Touch the door with the left gripper + subtask_index: 0 +- subtask: Open the door with the left gripper + subtask_index: 1 +- subtask: End + subtask_index: 2 +- subtask: 'null' + subtask_index: 3 +atomic_actions: +- open +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 49 + total_frames: 6418 + fps: 30 + total_tasks: 4 + total_videos: 196 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 188.62 MB +frame_num: 6418 +dataset_size: 188.62 MB +data_structure: "Airbot_MMK2_open_door_left_qced_hardlink/\n|-- annotations\n| |--\ + \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ + \ eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| \ + \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n\ + |-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- 
episode_000001.parquet\n\ + | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ + \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n\ + | |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| \ + \ |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n\ + | `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ + | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ + \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:48 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - 
left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: 
null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4
diff --git a/dataset_info/Airbot_MMK2_open_door_right.yaml b/dataset_info/Airbot_MMK2_open_door_right.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..8e37a8d845015a083e4051dee83b1d19bcd39aa0
--- /dev/null
+++ b/dataset_info/Airbot_MMK2_open_door_right.yaml
@@ -0,0 +1,425 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Airbot_MMK2_open_door_right
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: other
+  level2: laboratory
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects:
+- object_name: cabinet
+  level1: home_storage
+  level2: cabinet
+  level3: null
+  level4: null
+  level5: null
+task_operation_type: Due to some reasons, this dataset temporarily cannot provide
+  the operation type information.
+task_instruction:
+- open the cabinet. 
+sub_tasks: +- subtask: End + subtask_index: 0 +- subtask: Open the door with the right gripper + subtask_index: 1 +- subtask: Touch the door handle with the right gripper + subtask_index: 2 +- subtask: Touch the door with the right gripper + subtask_index: 3 +- subtask: Abnormal + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- open +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 98 + total_frames: 31753 + fps: 30 + total_tasks: 6 + total_videos: 392 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 1.03 GB +frame_num: 31753 +dataset_size: 1.03 GB +data_structure: "Airbot_MMK2_open_door_right_qced_hardlink/\n|-- annotations\n| \ + \ |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| \ + \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- 
scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (86 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ + \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ + \ `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:97 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + 
video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - 
right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 diff --git a/dataset_info/Airbot_MMK2_storage_and_take_cake_plate.yaml b/dataset_info/Airbot_MMK2_storage_and_take_cake_plate.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1886340e7b5b3bc7e5e52f817d10aff55e9b2552 --- /dev/null +++ b/dataset_info/Airbot_MMK2_storage_and_take_cake_plate.yaml @@ -0,0 +1,435 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Airbot_MMK2_storage_and_take_cake_plate +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: kitchen + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: cake + level1: bread + level2: cake + level3: null + level4: null + level5: null +- object_name: plate + level1: kitchen_supplies + level2: plate + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- put the cake into the plate with left hand and take it out with right hand. 
+sub_tasks: +- subtask: Place the cake on the table with the right gripper + subtask_index: 0 +- subtask: Grasp the cake with the left gripper + subtask_index: 1 +- subtask: Static + subtask_index: 2 +- subtask: Place the cake into the white plate with the left gripper + subtask_index: 3 +- subtask: Grasp the cake on the plate with the right gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 50 + total_frames: 9782 + fps: 30 + total_tasks: 7 + total_videos: 200 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 487.34 MB +frame_num: 9782 +dataset_size: 487.34 MB +data_structure: "Airbot_MMK2_storage_and_take_cake_plate_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ 
+ \ |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ + \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ + \ `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:49 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - 
channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - 
right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: 
This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: 
data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4 diff --git a/dataset_info/Airbot_MMK2_storage_block.yaml b/dataset_info/Airbot_MMK2_storage_block.yaml new file mode 100644 index 0000000000000000000000000000000000000000..ebf658474d20d27663a628ac9eb191123701c265 --- /dev/null +++ b/dataset_info/Airbot_MMK2_storage_block.yaml @@ -0,0 +1,477 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Airbot_MMK2_storage_block +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: bedroom + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: rectangular_building_blocks + level1: building_blocks + level2: rectangular_building_blocks + level3: null + level4: null + level5: null +- object_name: pink_plate + level1: plates + level2: pink_plate + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- put the building blocks into the plate by hand. 
+sub_tasks: +- subtask: Place the green rectangular build blocks into the white plate with the + right gripper + subtask_index: 0 +- subtask: Grasp the green cylindrical build blocks with the left gripper + subtask_index: 1 +- subtask: Place the red cube build blocks into the plate with the left gripper + subtask_index: 2 +- subtask: Place the green rectangular build blocks into the pink plate with the right + gripper + subtask_index: 3 +- subtask: Place the green cube build blocks into the plate with the right gripper + subtask_index: 4 +- subtask: Place the green cylindrical build blocks into the cardboard box with the + left gripper + subtask_index: 5 +- subtask: Place the red rectangular build blocks into the pink plate with the left + gripper + subtask_index: 6 +- subtask: Grasp the purple cube build blocks with the left gripper + subtask_index: 7 +- subtask: Grasp the blue cube build blocks with the right gripper + subtask_index: 8 +- subtask: Abnormal + subtask_index: 9 +- subtask: Grasp the green cube build blocks with the right gripper + subtask_index: 10 +- subtask: Place the purple cube build blocks into the white plate with the left gripper + subtask_index: 11 +- subtask: Place the red rectangular build blocks into the white plate with the left + gripper + subtask_index: 12 +- subtask: Place the red cube build blocks into the white plate with the right gripper + subtask_index: 13 +- subtask: Grasp the red cylindrical build blocks with the right gripper + subtask_index: 14 +- subtask: Place the blue cube build blocks on the purple cube build blocks with the + right gripper + subtask_index: 15 +- subtask: Place the green cube build blocks into the pink plate with the left gripper + subtask_index: 16 +- subtask: Grasp the green rectangular build blocks with the right gripper + subtask_index: 17 +- subtask: End + subtask_index: 18 +- subtask: Place the red cylindrical build blocks into the cardboard box with the + right gripper + subtask_index: 19 +- 
subtask: Grasp the red cube build blocks with the left gripper + subtask_index: 20 +- subtask: Grasp the red cube build blocks with the right gripper + subtask_index: 21 +- subtask: Grasp the red rectangular build blocks with the left gripper + subtask_index: 22 +- subtask: Grasp the green cube build blocks with the left gripper + subtask_index: 23 +- subtask: 'null' + subtask_index: 24 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 262 + total_frames: 55260 + fps: 30 + total_tasks: 25 + total_videos: 1048 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 2.17 GB +frame_num: 55260 +dataset_size: 2.17 GB +data_structure: "Airbot_MMK2_storage_block_qced_hardlink/\n|-- annotations\n| |--\ + \ eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |--\ + \ eef_velocity_annotation.jsonl\n| |-- 
gripper_activity_annotation.jsonl\n| \ + \ |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n\ + |-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n\ + | |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| \ + \ |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n\ + | |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| \ + \ |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n\ + | `-- ... (250 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n\ + | |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |--\ + \ observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n\ + \ |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:261 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + 
video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - 
right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot 
and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4
diff --git a/dataset_info/Airbot_MMK2_storage_block_both_hands.yaml b/dataset_info/Airbot_MMK2_storage_block_both_hands.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..d4ce797f27e8884c2408693d00ce8c202ef3c55b
--- /dev/null
+++ b/dataset_info/Airbot_MMK2_storage_block_both_hands.yaml
@@ -0,0 +1,434 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Airbot_MMK2_storage_block_both_hands
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: household
+  level2: living_room
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects:
+- object_name: square_building_blocks
+  level1: toys
+  level2: square_building_blocks
+  level3: null
+  level4: null
+  level5: null
+- object_name: cream_storage_basket
+  level1: home_storage
+  level2: cream_storage_basket
+  level3: null
+  level4: null
+  level5: null
+task_operation_type: Due to some reasons, this dataset temporarily cannot provide
+  the operation type information.
+task_instruction:
+- pick up the building blocks with both hands simultaneously and put them into the
+  white storage box.
+sub_tasks: +- subtask: Grasp the red block with the left gripper + subtask_index: 0 +- subtask: End + subtask_index: 1 +- subtask: Grasp the orange block with the right gripper + subtask_index: 2 +- subtask: Place the red block into the white basket with the left gripper + subtask_index: 3 +- subtask: Place the orange block into the white basket with the right gripper + subtask_index: 4 +- subtask: 'null' + subtask_index: 5 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Airbot_MMK2 +end_effector_type: five_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +- cam_front_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_front_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 49 + total_frames: 3769 + fps: 30 + total_tasks: 6 + total_videos: 196 + total_chunks: 1 + chunks_size: 1000 + state_dim: 36 + action_dim: 36 + camera_views: 4 + dataset_size: 104.08 MB +frame_num: 3769 +dataset_size: 104.08 MB +data_structure: "Airbot_MMK2_storage_block_both_hands_qced_hardlink/\n|-- annotations\n\ + | |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n|\ + \ |-- 
eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n\ + | |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `--\ + \ subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n\ + | |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| \ + \ |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n\ + | |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| \ + \ |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n\ + | `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n\ + | |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `--\ + \ tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n\ + \ |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n\ + \ `-- observation.images.cam_right_wrist_rgb" +splits: + train: 0:48 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_front_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + 
info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + action: + dtype: float32 + shape: + - 36 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_hand_joint_1_rad + - left_hand_joint_2_rad + - left_hand_joint_3_rad + - left_hand_joint_4_rad + - left_hand_joint_5_rad + - left_hand_joint_6_rad + - left_hand_joint_7_rad + - left_hand_joint_8_rad + - left_hand_joint_9_rad + - left_hand_joint_10_rad + - left_hand_joint_11_rad + - left_hand_joint_12_rad + - right_hand_joint_1_rad + - right_hand_joint_2_rad + - right_hand_joint_3_rad + - right_hand_joint_4_rad + - right_hand_joint_5_rad + - 
right_hand_joint_6_rad + - right_hand_joint_7_rad + - right_hand_joint_8_rad + - right_hand_joint_9_rad + - right_hand_joint_10_rad + - right_hand_joint_11_rad + - right_hand_joint_12_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an 
extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: 
data/chunk-{id}/episode_{id}.parquet
+video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4
diff --git a/dataset_info/Galaxea_R1_Lite_classify_object_four.yaml b/dataset_info/Galaxea_R1_Lite_classify_object_four.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..7758a0200ac187c652f88c7bdbd34b98db8366ff
--- /dev/null
+++ b/dataset_info/Galaxea_R1_Lite_classify_object_four.yaml
@@ -0,0 +1,639 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Galaxea_R1_Lite_classify_object_four
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: commercial_convenience
+  level2: supermarket
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects: +- object_name: brown_basket + level1: baskets + level2: brown_basket + level3: null + level4: null + level5: null +- object_name: yellow_basket + level1: baskets + level2: yellow_basket + level3: null + level4: null + level5: null +- object_name: any_fruits + level1: fruits + level2: any_fruits + level3: null + level4: null + level5: null +- object_name: any_vegetables + level1: vegetables + level2: any_vegetables + level3: null + level4: null + level5: null +- object_name: any_snacks + level1: snacks + level2: any_snacks + level3: null + level4: null + level5: null +- object_name: any_bread + level1: bread + level2: any_bread + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- place the food in the right basket with the right gripper, and place the non food + items in the left basket with the left gripper. +sub_tasks: +- subtask: Grasp the potato chips and put it in the left basket + subtask_index: 0 +- subtask: Grasp the mineral water and put it in the right basket + subtask_index: 1 +- subtask: Grasp the rubiks cube and put it in the left basket + subtask_index: 2 +- subtask: Grasp the waffle and put it in the right basket + subtask_index: 3 +- subtask: Grasp the soft cleanser and put it in the left basket + subtask_index: 4 +- subtask: Grasp the back scratcher and put it in the left basket + subtask_index: 5 +- subtask: Grasp the apple and put it in the right basket + subtask_index: 6 +- subtask: End + subtask_index: 7 +- subtask: Grasp the white eraser and put it in the left basket + subtask_index: 8 +- subtask: Grasp the square chewing gum and put it in the right basket + subtask_index: 9 +- subtask: Grasp the power strip and put it in the left basket + subtask_index: 10 +- subtask: Grasp the green lemon and put it in the right basket + subtask_index: 11 +- subtask: Grasp the coke and put it in the right basket + 
subtask_index: 12 +- subtask: Grasp the cleaning agent and put it in the left basket + subtask_index: 13 +- subtask: Grasp the soda water and put it in the right basket + subtask_index: 14 +- subtask: Grasp the spoon and put it in the left basket + subtask_index: 15 +- subtask: Grasp the duck toys and put it in the left basket + subtask_index: 16 +- subtask: Grasp the triangle cake and put it in the right basket + subtask_index: 17 +- subtask: Grasp the cookie and put it in the right basket + subtask_index: 18 +- subtask: Grasp the yellow cake and put it in the right basket + subtask_index: 19 +- subtask: Grasp the shower sphere and put it in the left basket + subtask_index: 20 +- subtask: Grasp the compass and put it in the left basket + subtask_index: 21 +- subtask: Grasp the orange and put it in the right basket + subtask_index: 22 +- subtask: Grasp the broom and put it in the left basket + subtask_index: 23 +- subtask: Grasp the back scratcher and put it in the right basket + subtask_index: 24 +- subtask: Grasp the ballpoint pen and put it in the left basket + subtask_index: 25 +- subtask: Grasp the round bread and put it in the right basket + subtask_index: 26 +- subtask: Grasp the egg yolk pastry and put it in the right basket + subtask_index: 27 +- subtask: Grasp the soap and put it in the left basket + subtask_index: 28 +- subtask: Grasp the washing liquid and put it in the left basket + subtask_index: 29 +- subtask: Grasp the hard cleanser and put it in the left basket + subtask_index: 30 +- subtask: Grasp the milk and put it in the right basket + subtask_index: 31 +- subtask: Grasp the black marker and put it in the left basket + subtask_index: 32 +- subtask: Grasp the banana and put it in the right basket + subtask_index: 33 +- subtask: Grasp the can and put it in the left basket + subtask_index: 34 +- subtask: Grasp the black glass cup and put it in the left basket + subtask_index: 35 +- subtask: Grasp the brush and put it in the left basket + 
subtask_index: 36 +- subtask: Grasp the bath ball and put it in the left basket + subtask_index: 37 +- subtask: Grasp the blue towel and put it in the left basket + subtask_index: 38 +- subtask: Grasp the peeler and put it in the left basket + subtask_index: 39 +- subtask: Grasp the brown towel and put it in the left basket + subtask_index: 40 +- subtask: Grasp the peach and put it in the right basket + subtask_index: 41 +- subtask: Grasp the tea cup and put it in the left basket + subtask_index: 42 +- subtask: Grasp the round bread and put it in the left basket + subtask_index: 43 +- subtask: Grasp the chocolate and put it in the right basket + subtask_index: 44 +- subtask: Grasp the grey towel and put it in the left basket + subtask_index: 45 +- subtask: Grasp the canned cola and put it in the right basket + subtask_index: 46 +- subtask: Grasp the tape and put it in the left basket + subtask_index: 47 +- subtask: Grasp the bread slice and put it in the right basket + subtask_index: 48 +- subtask: Grasp the glasses case and put it in the left basket + subtask_index: 49 +- subtask: Grasp the triangle cake and put it in the left basket + subtask_index: 50 +- subtask: Grasp the peach doll and put it in the right basket + subtask_index: 51 +- subtask: Grasp the blue cup and put it in the left basket + subtask_index: 52 +- subtask: Grasp the pen container and put it in the left basket + subtask_index: 53 +- subtask: Grasp the red duck and put it in the left basket + subtask_index: 54 +- subtask: Grasp the long bread and put it in the right basket + subtask_index: 55 +- subtask: Grasp the yogurt and put it in the right basket + subtask_index: 56 +- subtask: Grasp the potato chips and put it in the right basket + subtask_index: 57 +- subtask: Grasp the can and put it in the right basket + subtask_index: 58 +- subtask: Grasp the egg beater and put it in the right basket + subtask_index: 59 +- subtask: Place the cookie in the center of the table + subtask_index: 60 +- 
subtask: Grasp the square chewing gum and put it in the left basket + subtask_index: 61 +- subtask: Grasp the ad milk and put it in the right basket + subtask_index: 62 +- subtask: Grasp the detergent and put it in the left basket + subtask_index: 63 +- subtask: Grasp the yellow duck and put it in the left basket + subtask_index: 64 +- subtask: Grasp the blue marker and put it in the left basket + subtask_index: 65 +- subtask: 'null' + subtask_index: 66 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Galaxea_R1_Lite +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_left_rgb +- cam_head_right_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 191 + total_frames: 153386 + fps: 30 + total_tasks: 67 + total_videos: 764 + total_chunks: 1 + chunks_size: 1000 + state_dim: 14 + action_dim: 14 + camera_views: 4 + dataset_size: 8.39 GB +frame_num: 153386 +dataset_size: 8.39 GB +data_structure: 'Galaxea_R1_Lite_classify_object_four_qced_hardlink/ + + |-- annotations + + | |-- 
eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (179 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_left_rgb + + | |-- observation.images.cam_head_right_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:190 +features: + observation.images.cam_head_left_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_head_right_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + 
video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + action: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - 
left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4
+video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4
diff --git a/dataset_info/Galaxea_R1_Lite_classify_object_three.yaml b/dataset_info/Galaxea_R1_Lite_classify_object_three.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..59356c9bb156ae5614e16d4cc48dab67ff402b09
--- /dev/null
+++ b/dataset_info/Galaxea_R1_Lite_classify_object_three.yaml
@@ -0,0 +1,633 @@
+task_categories:
+- robotics
+language:
+- en
+tags:
+- RoboCOIN
+- LeRobot
+license: apache-2.0
+configs:
+- config_name: default
+  data_files: data/chunk-{id}/episode_{id}.parquet
+extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper
+  in your research/publications—see the "Citation" section for details. You agree
+  to not use the dataset to conduct experiments that cause harm to human subjects.
+extra_gated_fields:
+  Company/Organization:
+    type: text
+    description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher"
+  Country:
+    type: country
+    description: e.g., "Germany", "China", "United States"
+codebase_version: v2.1
+dataset_name: Galaxea_R1_Lite_classify_object_three
+dataset_uuid: 00000000-0000-0000-0000-000000000000
+scene_type:
+  level1: commercial_convenience
+  level2: supermarket
+  level3: null
+  level4: null
+  level5: null
+env_type: Due to some reasons, this dataset temporarily cannot provide the environment
+  type information.
+objects:
+- object_name: brown_basket
+  level1: baskets
+  level2: brown_basket
+  level3: null
+  level4: null
+  level5: null
+- object_name: yellow_basket
+  level1: baskets
+  level2: yellow_basket
+  level3: null
+  level4: null
+  level5: null
+- object_name: any_fruits
+  level1: fruits
+  level2: any_fruits
+  level3: null
+  level4: null
+  level5: null
+- object_name: any_vegetables
+  level1: vegetables
+  level2: any_vegetables
+  level3: null
+  level4: null
+  level5: null
+- object_name: any_snacks
+  level1: snacks
+  level2: any_snacks
+  level3: null
+  level4: null
+  level5: null
+- object_name: any_bread
+  level1: bread
+  level2: any_bread
+  level3: null
+  level4: null
+  level5: null
+task_operation_type: Due to some reasons, this dataset temporarily cannot provide
+  the operation type information.
+task_instruction:
+- place the food in the right basket with the right gripper, and place the non food
+  items in the left basket with the left gripper.
+sub_tasks:
+- subtask: Grasp the rubiks cube and put it in the left basket
+  subtask_index: 0
+- subtask: Place the tape in the center of the table
+  subtask_index: 1
+- subtask: Grasp the soft cleanser and put it in the left basket
+  subtask_index: 2
+- subtask: Grasp the back scratcher and put it in the left basket
+  subtask_index: 3
+- subtask: Grasp the apple and put it in the right basket
+  subtask_index: 4
+- subtask: Grasp the yellow marker and put it in the left basket
+  subtask_index: 5
+- subtask: End
+  subtask_index: 6
+- subtask: Grasp the white eraser and put it in the left basket
+  subtask_index: 7
+- subtask: Grasp the power strip and put it in the left basket
+  subtask_index: 8
+- subtask: Grasp the square chewing gum and put it in the right basket
+  subtask_index: 9
+- subtask: Grasp the cleaning agent and put it in the left basket
+  subtask_index: 10
+- subtask: Grasp the blue marker pen and put it in the right basket
+  subtask_index: 11
+- subtask: Grasp the soda water and put it in the right 
basket + subtask_index: 12 +- subtask: Grasp the spoon and put it in the left basket + subtask_index: 13 +- subtask: Grasp the duck toys and put it in the left basket + subtask_index: 14 +- subtask: Grasp the blue marker pen and put it in the left basket + subtask_index: 15 +- subtask: Grasp the shampoo and put it in the left basket + subtask_index: 16 +- subtask: Grasp the triangle cake and put it in the right basket + subtask_index: 17 +- subtask: Grasp the brown plate and put it in the left basket + subtask_index: 18 +- subtask: Grasp the cookie and put it in the right basket + subtask_index: 19 +- subtask: Grasp the yellow cake and put it in the right basket + subtask_index: 20 +- subtask: Grasp the shower sphere and put it in the left basket + subtask_index: 21 +- subtask: Grasp the orange and put it in the right basket + subtask_index: 22 +- subtask: Grasp the compass and put it in the left basket + subtask_index: 23 +- subtask: Grasp the round bread and put it in the right basket + subtask_index: 24 +- subtask: Grasp the lemon and put it in the right basket + subtask_index: 25 +- subtask: Grasp the egg yolk pastry and put it in the right basket + subtask_index: 26 +- subtask: Grasp the soap and put it in the left basket + subtask_index: 27 +- subtask: Grasp the washing liquid and put it in the left basket + subtask_index: 28 +- subtask: Grasp the hard cleanser and put it in the left basket + subtask_index: 29 +- subtask: Grasp the milk and put it in the right basket + subtask_index: 30 +- subtask: Grasp the black marker and put it in the left basket + subtask_index: 31 +- subtask: Grasp the banana and put it in the right basket + subtask_index: 32 +- subtask: Grasp the black glass cup and put it in the left basket + subtask_index: 33 +- subtask: Grasp the blue marker and put it in the right basket + subtask_index: 34 +- subtask: Grasp the bath ball and put it in the left basket + subtask_index: 35 +- subtask: Abnormal + subtask_index: 36 +- subtask: Grasp 
the peeler and put it in the left basket + subtask_index: 37 +- subtask: Grasp the brown towel and put it in the left basket + subtask_index: 38 +- subtask: Grasp the peach and put it in the right basket + subtask_index: 39 +- subtask: Grasp the tea cup and put it in the left basket + subtask_index: 40 +- subtask: Grasp the brush and put it in the left basket + subtask_index: 41 +- subtask: Grasp the chocolate and put it in the right basket + subtask_index: 42 +- subtask: Grasp the grey towel and put it in the left basket + subtask_index: 43 +- subtask: Place the peach doll in the center of the table + subtask_index: 44 +- subtask: Grasp the rubiks cube and put it in the right basket + subtask_index: 45 +- subtask: Grasp the tape and put it in the left basket + subtask_index: 46 +- subtask: Grasp the bread slice and put it in the right basket + subtask_index: 47 +- subtask: Grasp the glasses case and put it in the left basket + subtask_index: 48 +- subtask: Grasp the soda water and put it in the left basket + subtask_index: 49 +- subtask: Grasp the peach doll and put it in the right basket + subtask_index: 50 +- subtask: Grasp the blue cup and put it in the left basket + subtask_index: 51 +- subtask: Grasp the spoon and put it in the right basket + subtask_index: 52 +- subtask: Grasp the pen container and put it in the left basket + subtask_index: 53 +- subtask: Grasp the red duck and put it in the left basket + subtask_index: 54 +- subtask: Grasp the glasses case and put it in the right basket + subtask_index: 55 +- subtask: Grasp the long bread and put it in the right basket + subtask_index: 56 +- subtask: Grasp the yogurt and put it in the right basket + subtask_index: 57 +- subtask: Grasp the potato chips and put it in the right basket + subtask_index: 58 +- subtask: Grasp the can and put it in the right basket + subtask_index: 59 +- subtask: Grasp the long bread and put it in the left basket + subtask_index: 60 +- subtask: Grasp the yellow duck and put it in 
the left basket + subtask_index: 61 +- subtask: Grasp the coke and put it in the right basket + subtask_index: 62 +- subtask: 'null' + subtask_index: 63 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Galaxea_R1_Lite +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_left_rgb +- cam_head_right_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 197 + total_frames: 134891 + fps: 30 + total_tasks: 64 + total_videos: 788 + total_chunks: 1 + chunks_size: 1000 + state_dim: 14 + action_dim: 14 + camera_views: 4 + dataset_size: 7.32 GB +frame_num: 134891 +dataset_size: 7.32 GB +data_structure: 'Galaxea_R1_Lite_classify_object_three_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- 
episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (185 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_left_rgb + + | |-- observation.images.cam_head_right_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:196 +features: + observation.images.cam_head_left_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_head_right_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + 
video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + action: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + 
eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 diff --git a/dataset_info/Galaxea_R1_Lite_mix_color.yaml b/dataset_info/Galaxea_R1_Lite_mix_color.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..4136a5cf3fa00dbcb342f3fc28a162803cb66e3e --- /dev/null +++ b/dataset_info/Galaxea_R1_Lite_mix_color.yaml @@ -0,0 +1,542 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Galaxea_R1_Lite_mix_color +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: blue_pigment + level1: materials + level2: blue_pigment + level3: null + level4: null + level5: null +- object_name: red_pigment + level1: materials + level2: red_pigment + level3: null + level4: null + level5: null +- object_name: white_pigment + level1: materials + level2: white_pigment + level3: null + level4: null + level5: null +- object_name: test_tube_rack + level1: holding_utensils + level2: test_tube_rack + level3: null + level4: null + level5: null +- object_name: large_test_tubes + level1: laboratory_supplies + level2: large_test_tubes + level3: null + level4: null + level5: null +- object_name: beaker + level1: holding_utensils + level2: beaker + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- pick up the test tube with red pigment the test tube with blue pigment and the test + tube with white pigment by grippers and pour them into the beaker. 
+sub_tasks: +- subtask: Pour the red reagent into the graduated cylinder and place the test tube + into the paper cup + subtask_index: 0 +- subtask: Pour the orange reagent into the graduated cylinder and place the test + tube into the paper cup + subtask_index: 1 +- subtask: Grasp the red reagent with the left gripper + subtask_index: 2 +- subtask: Pour the white reagent into the graduated cylinder with the right gripper + subtask_index: 3 +- subtask: Grasp the red reagent with the right gripper + subtask_index: 4 +- subtask: Pour the blue reagent into the graduated cylinder and place the test tube + into the paper cup + subtask_index: 5 +- subtask: Pour the blue reagent into the graduated cylinder with the left gripper + subtask_index: 6 +- subtask: Pour the red reagent into the graduated cylinder with the left gripper + subtask_index: 7 +- subtask: End + subtask_index: 8 +- subtask: Place the test tube into the paper cup with the right gripper + subtask_index: 9 +- subtask: Place the test tube into the paper cup with the left gripper + subtask_index: 10 +- subtask: Grasp the blue reagent with the left gripper + subtask_index: 11 +- subtask: Pour the red reagent into the graduated cylinder with the right gripper + subtask_index: 12 +- subtask: Grasp the white reagent with the right gripper + subtask_index: 13 +- subtask: Pour the white reagent into the graduated cylinder and place the test tube + into the paper cup + subtask_index: 14 +- subtask: 'null' + subtask_index: 15 +atomic_actions: +- grasp +- pick +- place +- pour +robot_name: +- Galaxea_R1_Lite +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_left_rgb +- cam_head_right_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=360x640x3, resolution=640x360, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=360x640x3, resolution=640x360, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 50 + total_frames: 79584 + fps: 30 + total_tasks: 16 + total_videos: 200 + total_chunks: 1 + chunks_size: 1000 + state_dim: 14 + action_dim: 14 + camera_views: 4 + dataset_size: 2.99 GB +frame_num: 79584 +dataset_size: 2.99 GB +data_structure: 'Galaxea_R1_Lite_Galaxea_R1_Lite_mix_color_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... 
(38 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_left_rgb + + | |-- observation.images.cam_head_right_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:49 +features: + observation.images.cam_head_left_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_head_right_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 360 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 360 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 360 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 360 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - 
right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + action: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + 
shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 diff --git a/dataset_info/Galaxea_R1_Lite_mix_color_large_test_tube.yaml b/dataset_info/Galaxea_R1_Lite_mix_color_large_test_tube.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..245c084aab462f11e75a91037940497033790230 --- /dev/null +++ b/dataset_info/Galaxea_R1_Lite_mix_color_large_test_tube.yaml @@ -0,0 +1,530 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Galaxea_R1_Lite_mix_color_large_test_tube +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: office_workspace + level2: office + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: red_pigment + level1: materials + level2: red_pigment + level3: null + level4: null + level5: null +- object_name: blue_pigment + level1: materials + level2: blue_pigment + level3: null + level4: null + level5: null +- object_name: yellow_pigment + level1: materials + level2: yellow_pigment + level3: null + level4: null + level5: null +- object_name: test_tube_rack + level1: holding_utensils + level2: test_tube_rack + level3: null + level4: null + level5: null +- object_name: large_test_tubes + level1: laboratory_supplies + level2: large_test_tubes + level3: null + level4: null + level5: null +- object_name: beaker + level1: holding_utensils + level2: beaker + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- pick up a test tube with pigment on left test tube rack and a test tube with pigment + on right test tube rack by grippers and pour them into the beaker. 
+sub_tasks: +- subtask: Place the test tube into the paper cup with the left gripper + subtask_index: 0 +- subtask: Pour the blue reagent into the graduated cylinder with the left gripper + subtask_index: 1 +- subtask: Pour the red reagent into the graduated cylinder with the left gripper + subtask_index: 2 +- subtask: Pour the red reagent into the graduated cylinder with the right gripper + subtask_index: 3 +- subtask: Grasp the blue reagent with the left gripper + subtask_index: 4 +- subtask: End + subtask_index: 5 +- subtask: Place the test tube into the paper cup with the right gripper + subtask_index: 6 +- subtask: Grasp the yellow reagent with the right gripper + subtask_index: 7 +- subtask: Pour the yellow reagent into the graduated cylinder with the right gripper + subtask_index: 8 +- subtask: Grasp the red reagent with the left gripper + subtask_index: 9 +- subtask: Grasp the red reagent with the right gripper + subtask_index: 10 +- subtask: 'null' + subtask_index: 11 +atomic_actions: +- grasp +- pick +- place +- pour +robot_name: +- Galaxea_R1_Lite +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_left_rgb +- cam_head_right_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: end_rotation_dim +end_translation_dim: end_translation_dim +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 121 + total_frames: 131656 + fps: 30 + total_tasks: 12 + total_videos: 484 + total_chunks: 1 + chunks_size: 1000 + state_dim: 14 + action_dim: 14 + camera_views: 4 + dataset_size: 4.70 GB +frame_num: 131656 +dataset_size: 4.70 GB +data_structure: 'Galaxea_R1_Lite_mix_color_large_test_tube_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | 
`-- ... (109 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_left_rgb + + | |-- observation.images.cam_head_right_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:120 +features: + observation.images.cam_head_left_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_head_right_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - 
right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + action: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - 
right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n  title={RoboCOIN: An Open-Sourced Bimanual\ \ Robotic Data Collection for Integrated Manipulation},\n  author={Shihan Wu, Xuecheng\ \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n  journal={arXiv preprint arXiv:2511.17441},\n\ \  url = {https://arxiv.org/abs/2511.17441},\n  year={2025},\n  }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 diff --git a/dataset_info/Galaxea_R1_Lite_storage_object_brown_bowl.yaml b/dataset_info/Galaxea_R1_Lite_storage_object_brown_bowl.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..151ab9b5c791e78ab48054af392f2258eea0361d --- /dev/null +++ b/dataset_info/Galaxea_R1_Lite_storage_object_brown_bowl.yaml @@ -0,0 +1,798 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Galaxea_R1_Lite_storage_object_brown_bowl +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: brown_bowl + level1: plastic_bowl + level2: brown_bowl + level3: null + level4: null + level5: null +- object_name: banana + level1: fruits + level2: banana + level3: null + level4: null + level5: null +- object_name: bathing_in_flowers + level1: daily_chemical_products + level2: bathing_in_flowers + level3: null + level4: null + level5: null +- object_name: blue_cup + level1: cups + level2: blue_cup + level3: null + level4: null + level5: null +- object_name: blue_pot + level1: cookware + level2: blue_pot + level3: null + level4: null + level5: null +- object_name: toast_slices + level1: bread + level2: toast_slices + level3: null + level4: null + level5: null +- object_name: brown_towel + level1: towels + level2: brown_towel + level3: null + level4: null + level5: null +- object_name: can + level1: snacks + level2: can + level3: null + level4: null + level5: null +- object_name: coke(slim_can) + level1: beverages + level2: coke(slim_can) + level3: null + level4: null + level5: null +- object_name: potato_chips + level1: snacks + level2: potato_chips + level3: null + level4: null + level5: null +- object_name: chocolate + level1: snacks + level2: chocolate + level3: null + level4: null + level5: null +- object_name: compass + level1: rulers + level2: compass + level3: null + level4: null + level5: null +- object_name: block_pillar + level1: building_blocks + level2: block_pillar + level3: null + level4: null + level5: null +- object_name: egg_beater + level1: spoons_and_spatulas + level2: egg_beater + level3: null + level4: null + level5: null +- object_name: eraser + level1: erasers + level2: eraser + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: snacks + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: mentholatum_facial_cleanser + level1: daily_chemical_products + level2: mentholatum_facial_cleanser + level3: null + level4: null + level5: null +- object_name: 
green_lemon + level1: fruits + level2: green_lemon + level3: null + level4: null + level5: null +- object_name: peach + level1: fruits + level2: peach + level3: null + level4: null + level5: null +- object_name: power_strip + level1: electrical_control_equipment + level2: power_strip + level3: null + level4: null + level5: null +- object_name: round_bread + level1: bread + level2: round_bread + level3: null + level4: null + level5: null +- object_name: mentholatum_facial_cleanser + level1: daily_chemical_products + level2: mentholatum_facial_cleanser + level3: null + level4: null + level5: null +- object_name: square_building_blocks + level1: building_blocks + level2: square_building_blocks + level3: null + level4: null + level5: null +- object_name: tape + level1: stationery + level2: tape + level3: null + level4: null + level5: null +- object_name: cake + level1: bread + level2: cake + level3: null + level4: null + level5: null +- object_name: duck + level1: doll + level2: duck + level3: null + level4: null + level5: null +- object_name: ambrosial_yogurt + level1: beverages + level2: ambrosial_yogurt + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use a gripper to pick the target object and place on the brown bowl. 
+sub_tasks: +- subtask: Grasp the blue pot with the left gripper + subtask_index: 0 +- subtask: Place the coke in the bowl with the left gripper + subtask_index: 1 +- subtask: Grasp the plugboard with the left gripper + subtask_index: 2 +- subtask: Place the back scratcher in the bowl with the right gripper + subtask_index: 3 +- subtask: Place the plugboard in the bowl with the right gripper + subtask_index: 4 +- subtask: Place the compasses in the bowl with the right gripper + subtask_index: 5 +- subtask: Grasp the potato chips with the right gripper + subtask_index: 6 +- subtask: Grasp the banana with the left gripper + subtask_index: 7 +- subtask: Place the square chewing gum in the bowl with the right gripper + subtask_index: 8 +- subtask: Place the round wooden block in the bowl with the right gripper + subtask_index: 9 +- subtask: Place the tin in the bowl with the left gripper + subtask_index: 10 +- subtask: Grasp the compasses with the right gripper + subtask_index: 11 +- subtask: Grasp the duck toy with the left gripper + subtask_index: 12 +- subtask: Place the potato chips in the bowl with the left gripper + subtask_index: 13 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 14 +- subtask: Place the chocolate cake in the bowl with the right gripper + subtask_index: 15 +- subtask: Grasp the back scratcher with the right gripper + subtask_index: 16 +- subtask: Place the square wooden block in the bowl with the right gripper + subtask_index: 17 +- subtask: Grasp the chocolate cake with the right gripper + subtask_index: 18 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 19 +- subtask: Grasp the plugboard with the right gripper + subtask_index: 20 +- subtask: Grasp the yogurt with the right gripper + subtask_index: 21 +- subtask: Grasp the tin with the left gripper + subtask_index: 22 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 23 +- subtask: Place the square wooden block in the bowl 
with the left gripper + subtask_index: 24 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 25 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 26 +- subtask: Place the potato chips in the bowl with the right gripper + subtask_index: 27 +- subtask: Place the duck toy in the bowl with the left gripper + subtask_index: 28 +- subtask: Grasp the hard facial cleanser with the right gripper + subtask_index: 29 +- subtask: Place the green lemon in the bowl with the right gripper + subtask_index: 30 +- subtask: Grasp the peach with the left gripper + subtask_index: 31 +- subtask: Place the peach in the bowl with the left gripper + subtask_index: 32 +- subtask: Place the soft facial cleanser in the bowl with the right gripper + subtask_index: 33 +- subtask: Place the shower sphere in the bowl with the left gripper + subtask_index: 34 +- subtask: Place the banana in the bowl with the right gripper + subtask_index: 35 +- subtask: Place the shower sphere in the bowl with the right gripper + subtask_index: 36 +- subtask: Grasp the green lemon with the right gripper + subtask_index: 37 +- subtask: Place the brown towel in the bowl with the right gripper + subtask_index: 38 +- subtask: Place the blackboard erasure in the bowl with the left gripper + subtask_index: 39 +- subtask: Grasp the potato chips with the left gripper + subtask_index: 40 +- subtask: Grasp the duck toy with the right gripper + subtask_index: 41 +- subtask: End + subtask_index: 42 +- subtask: Place the blue cup in the bowl with the right gripper + subtask_index: 43 +- subtask: Grasp the blackboard erasure with the left gripper + subtask_index: 44 +- subtask: Grasp the coke with the left gripper + subtask_index: 45 +- subtask: Grasp the round wooden block with the left gripper + subtask_index: 46 +- subtask: Place the round wooden block in the bowl with the left gripper + subtask_index: 47 +- subtask: Place the banana in the bowl with the left gripper + 
subtask_index: 48 +- subtask: Place the chocolate cake in the bowl with the left gripper + subtask_index: 49 +- subtask: Place the tape in the bowl with the right gripper + subtask_index: 50 +- subtask: Grasp the compasses with the left gripper + subtask_index: 51 +- subtask: Place the chocolate in the bowl with the left gripper + subtask_index: 52 +- subtask: Grasp the blue pot with the right gripper + subtask_index: 53 +- subtask: Place the hard facial cleanser in the bowl with the left gripper + subtask_index: 54 +- subtask: Grasp the round bread with the right gripper + subtask_index: 55 +- subtask: Grasp the chocolate cake with the left gripper + subtask_index: 56 +- subtask: Place the hard facial cleanser in the bowl with the right gripper + subtask_index: 57 +- subtask: Place the round bread in the bowl with the right gripper + subtask_index: 58 +- subtask: Grasp the tape with the right gripper + subtask_index: 59 +- subtask: Place the tape in the bowl with the left gripper + subtask_index: 60 +- subtask: Grasp the coke with the right gripper + subtask_index: 61 +- subtask: Grasp the tape with the left gripper + subtask_index: 62 +- subtask: Place the duck toy in the bowl with the right gripper + subtask_index: 63 +- subtask: Grasp the square chewing gum with the right gripper + subtask_index: 64 +- subtask: Place the blue pot in the bowl with the left gripper + subtask_index: 65 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 66 +- subtask: Place the plugboard in the bowl with the left gripper + subtask_index: 67 +- subtask: Place the coke in the bowl with the right gripper + subtask_index: 68 +- subtask: Place the round bread in the bowl with the left gripper + subtask_index: 69 +- subtask: Place the blue cup in the bowl with the left gripper + subtask_index: 70 +- subtask: Place the blue pot in the bowl with the right gripper + subtask_index: 71 +- subtask: Grasp the round bread with the left gripper + subtask_index: 72 +- 
subtask: Grasp the chocolate with the left gripper + subtask_index: 73 +- subtask: Grasp the square wooden block with the left gripper + subtask_index: 74 +- subtask: Place the yogurt in the bowl with the right gripper + subtask_index: 75 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 76 +- subtask: Grasp the soft facial cleanser with the right gripper + subtask_index: 77 +- subtask: Place the compasses in the bowl with the left gripper + subtask_index: 78 +- subtask: Grasp the square wooden block with the right gripper + subtask_index: 79 +- subtask: Place the brown towel in the bowl with the left gripper + subtask_index: 80 +- subtask: Grasp the round wooden block with the right gripper + subtask_index: 81 +- subtask: Grasp the banana with the right gripper + subtask_index: 82 +- subtask: 'null' + subtask_index: 83 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Galaxea_R1_Lite +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_left_rgb +- cam_head_right_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 101 + total_frames: 23706 + fps: 30 + total_tasks: 84 + total_videos: 404 + total_chunks: 1 + chunks_size: 1000 + state_dim: 14 + action_dim: 14 + camera_views: 4 + dataset_size: 909.34 MB +frame_num: 23706 +dataset_size: 909.34 MB +data_structure: 'Galaxea_R1_Lite_storage_object_brown_bowl_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + 
| `-- ... (89 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_left_rgb + + | |-- observation.images.cam_head_right_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:100 +features: + observation.images.cam_head_left_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_head_right_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - 
right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + action: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - 
right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n  title={RoboCOIN: An Open-Sourced Bimanual\ \ Robotic Data Collection for Integrated Manipulation},\n  author={Shihan Wu, Xuecheng\ \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n  journal={arXiv preprint arXiv:2511.17441},\n\ \  url = {https://arxiv.org/abs/2511.17441},\n  year={2025},\n  }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 diff --git a/dataset_info/Galaxea_R1_Lite_storage_object_brown_plate.yaml b/dataset_info/Galaxea_R1_Lite_storage_object_brown_plate.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..b10fcc523af00e832830561386971604d201f272 --- /dev/null +++ b/dataset_info/Galaxea_R1_Lite_storage_object_brown_plate.yaml @@ -0,0 +1,810 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Galaxea_R1_Lite_storage_object_brown_plate +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: brown_plate + level1: plates + level2: brown_plate + level3: null + level4: null + level5: null +- object_name: banana + level1: fruits + level2: banana + level3: null + level4: null + level5: null +- object_name: bathing_in_flowers + level1: daily_chemical_products + level2: bathing_in_flowers + level3: null + level4: null + level5: null +- object_name: blue_cup + level1: cups + level2: blue_cup + level3: null + level4: null + level5: null +- object_name: blue_pot + level1: cookware + level2: blue_pot + level3: null + level4: null + level5: null +- object_name: toast_slices + level1: bread + level2: toast_slices + level3: null + level4: null + level5: null +- object_name: brown_towel + level1: towels + level2: brown_towel + level3: null + level4: null + level5: null +- object_name: can + level1: snacks + level2: can + level3: null + level4: null + level5: null +- object_name: coke(slim_can) + level1: beverages + level2: coke(slim_can) + level3: null + level4: null + level5: null +- object_name: potato_chips + level1: snacks + level2: potato_chips + level3: null + level4: null + level5: null +- object_name: chocolate + level1: snacks + level2: chocolate + level3: null + level4: null + level5: null +- object_name: compass + level1: rulers + level2: compass + level3: null + level4: null + level5: null +- object_name: block_pillar + level1: building_blocks + level2: block_pillar + level3: null + level4: null + level5: null +- object_name: egg_beater + level1: spoons_and_spatulas + level2: egg_beater + level3: null + level4: null + level5: null +- object_name: erasers + level1: stationery + level2: erasers + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: snacks + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: mentholatum_facial_cleanser + level1: daily_chemical_products + level2: mentholatum_facial_cleanser + level3: null + level4: null + level5: null +- object_name: 
green_lemon + level1: fruits + level2: green_lemon + level3: null + level4: null + level5: null +- object_name: peach + level1: fruits + level2: peach + level3: null + level4: null + level5: null +- object_name: power_strip + level1: electrical_control_equipment + level2: power_strip + level3: null + level4: null + level5: null +- object_name: round_bread + level1: bread + level2: round_bread + level3: null + level4: null + level5: null +- object_name: mentholatum_facial_cleanser + level1: daily_chemical_products + level2: mentholatum_facial_cleanser + level3: null + level4: null + level5: null +- object_name: square_building_blocks + level1: building_blocks + level2: square_building_blocks + level3: null + level4: null + level5: null +- object_name: tape + level1: stationery + level2: tape + level3: null + level4: null + level5: null +- object_name: cake + level1: bread + level2: cake + level3: null + level4: null + level5: null +- object_name: duck + level1: doll + level2: duck + level3: null + level4: null + level5: null +- object_name: ambrosial_yogurt + level1: beverages + level2: ambrosial_yogurt + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use a gripper to pick the target object and place on the brown plate. 
+sub_tasks: +- subtask: Place the blue pot on the brown plate with the right gripper + subtask_index: 0 +- subtask: Grasp the blue pot with the left gripper + subtask_index: 1 +- subtask: Grasp the plugboard with the left gripper + subtask_index: 2 +- subtask: Place the soft facial cleanser on the brown plate with the right gripper + subtask_index: 3 +- subtask: Place the back scratcher on the brown plate with the right gripper + subtask_index: 4 +- subtask: Place the blackboard erasure on the brown plate with the left gripper + subtask_index: 5 +- subtask: Grasp the potato chips with the right gripper + subtask_index: 6 +- subtask: Grasp the banana with the left gripper + subtask_index: 7 +- subtask: Place the coke on the brown plate with the right gripper + subtask_index: 8 +- subtask: Place the chocolate on the brown plate with the left gripper + subtask_index: 9 +- subtask: Place the duck toy on the brown plate with the right gripper + subtask_index: 10 +- subtask: Grasp the compasses with the right gripper + subtask_index: 11 +- subtask: Place the peach on the brown plate with the left gripper + subtask_index: 12 +- subtask: Grasp the duck toy with the left gripper + subtask_index: 13 +- subtask: Place the round wooden block on the brown plate with the right gripper + subtask_index: 14 +- subtask: Grasp the blue cup with the left gripper + subtask_index: 15 +- subtask: Place the green lemon on the brown plate with the left gripper + subtask_index: 16 +- subtask: Place the shower sphere on the brown plate with the left gripper + subtask_index: 17 +- subtask: Place the shower sphere on the brown plate with the right gripper + subtask_index: 18 +- subtask: Grasp the back scratcher with the right gripper + subtask_index: 19 +- subtask: Grasp the square chewing gum with the left gripper + subtask_index: 20 +- subtask: Grasp the chocolate cake with the right gripper + subtask_index: 21 +- subtask: Place the yogurt on the brown plate with the right gripper + 
subtask_index: 22 +- subtask: Place the banana on the brown plate with the left gripper + subtask_index: 23 +- subtask: Grasp the shower sphere with the left gripper + subtask_index: 24 +- subtask: Place the brown towel on the brown plate with the left gripper + subtask_index: 25 +- subtask: Grasp the plugboard with the right gripper + subtask_index: 26 +- subtask: Grasp the yogurt with the right gripper + subtask_index: 27 +- subtask: Place the blue cup on the brown plate with the right gripper + subtask_index: 28 +- subtask: Place the plugboard on the brown plate with the right gripper + subtask_index: 29 +- subtask: Grasp the brown towel with the left gripper + subtask_index: 30 +- subtask: Grasp the hard facial cleanser with the left gripper + subtask_index: 31 +- subtask: Place the duck toy on the brown plate with the left gripper + subtask_index: 32 +- subtask: Place the round bread on the brown plate with the right gripper + subtask_index: 33 +- subtask: Grasp the brown towel with the right gripper + subtask_index: 34 +- subtask: Place the bread slice on the brown plate with the left gripper + subtask_index: 35 +- subtask: Grasp the hard facial cleanser with the right gripper + subtask_index: 36 +- subtask: Place the chocolate cake on the brown plate with the right gripper + subtask_index: 37 +- subtask: Grasp the peach with the left gripper + subtask_index: 38 +- subtask: Place the tin on the brown plate with the right gripper + subtask_index: 39 +- subtask: Place the tape on the brown plate with the right gripper + subtask_index: 40 +- subtask: Place the blackboard erasure on the brown plate with the right gripper + subtask_index: 41 +- subtask: Grasp the bread slice with the right gripper + subtask_index: 42 +- subtask: Place the potato chips on the brown plate with the right gripper + subtask_index: 43 +- subtask: Grasp the potato chips with the left gripper + subtask_index: 44 +- subtask: Place the tape on the brown plate with the left gripper + 
subtask_index: 45 +- subtask: Grasp the duck toy with the right gripper + subtask_index: 46 +- subtask: End + subtask_index: 47 +- subtask: Grasp the blackboard erasure with the left gripper + subtask_index: 48 +- subtask: Grasp the round wooden block with the left gripper + subtask_index: 49 +- subtask: Place the brown towel on the brown plate with the right gripper + subtask_index: 50 +- subtask: Place the blue cup on the brown plate with the left gripper + subtask_index: 51 +- subtask: Place the compasses on the brown plate with the right gripper + subtask_index: 52 +- subtask: Grasp the compasses with the left gripper + subtask_index: 53 +- subtask: Place the compasses on the brown plate with the left gripper + subtask_index: 54 +- subtask: Grasp the blue pot with the right gripper + subtask_index: 55 +- subtask: Grasp the round bread with the right gripper + subtask_index: 56 +- subtask: Grasp the chocolate cake with the left gripper + subtask_index: 57 +- subtask: Place the potato chips on the brown plate with the left gripper + subtask_index: 58 +- subtask: Place the plugboard on the brown plate with the left gripper + subtask_index: 59 +- subtask: Place the square chewing gum on the brown plate with the right gripper + subtask_index: 60 +- subtask: Place the banana on the brown plate with the right gripper + subtask_index: 61 +- subtask: Grasp the tin with the right gripper + subtask_index: 62 +- subtask: Place the hard facial cleanser on the brown plate with the left gripper + subtask_index: 63 +- subtask: Place the square wooden block on the brown plate with the left gripper + subtask_index: 64 +- subtask: Place the square chewing gum on the brown plate with the left gripper + subtask_index: 65 +- subtask: Grasp the tape with the right gripper + subtask_index: 66 +- subtask: Grasp the coke with the right gripper + subtask_index: 67 +- subtask: Grasp the tape with the left gripper + subtask_index: 68 +- subtask: Grasp the square chewing gum with the right 
gripper + subtask_index: 69 +- subtask: Grasp the shower sphere with the right gripper + subtask_index: 70 +- subtask: Place the square wooden block on the brown plate with the right gripper + subtask_index: 71 +- subtask: Place the hard facial cleanser on the brown plate with the right gripper + subtask_index: 72 +- subtask: Grasp the bread slice with the left gripper + subtask_index: 73 +- subtask: Grasp the blackboard erasure with the right gripper + subtask_index: 74 +- subtask: Place the bread slice on the brown plate with the right gripper + subtask_index: 75 +- subtask: Grasp the round bread with the left gripper + subtask_index: 76 +- subtask: Grasp the chocolate with the left gripper + subtask_index: 77 +- subtask: Grasp the square wooden block with the left gripper + subtask_index: 78 +- subtask: Grasp the blue cup with the right gripper + subtask_index: 79 +- subtask: Grasp the soft facial cleanser with the right gripper + subtask_index: 80 +- subtask: Place the chocolate cake on the brown plate with the left gripper + subtask_index: 81 +- subtask: Grasp the square wooden block with the right gripper + subtask_index: 82 +- subtask: Grasp the green lemon with the left gripper + subtask_index: 83 +- subtask: Place the round wooden block on the brown plate with the left gripper + subtask_index: 84 +- subtask: Place the round bread on the brown plate with the left gripper + subtask_index: 85 +- subtask: Place the blue pot on the brown plate with the left gripper + subtask_index: 86 +- subtask: Grasp the round wooden block with the right gripper + subtask_index: 87 +- subtask: Grasp the banana with the right gripper + subtask_index: 88 +- subtask: 'null' + subtask_index: 89 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Galaxea_R1_Lite +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. 
+sensor_list: +- cam_head_left_rgb +- cam_head_right_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_left_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_head_right_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 102 + total_frames: 16390 + fps: 30 + total_tasks: 90 + total_videos: 408 + total_chunks: 1 + chunks_size: 1000 + state_dim: 14 + action_dim: 14 + camera_views: 4 + dataset_size: 615.40 MB +frame_num: 16390 +dataset_size: 615.40 MB +data_structure: 'Galaxea_R1_Lite_storage_object_brown_plate_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + 
| `-- ... (90 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_left_rgb + + | |-- observation.images.cam_head_right_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:101 +features: + observation.images.cam_head_left_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_head_right_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 720 + - 1280 + - 3 + names: + - height + - width + - channels + info: + video.height: 720 + video.width: 1280 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - 
right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + action: + dtype: float32 + shape: + - 14 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - left_gripper_open + - right_gripper_open + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - 
right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. 
+license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4 diff --git a/dataset_info/Realman_RMC-AIDA-L_arrange_flowers.yaml b/dataset_info/Realman_RMC-AIDA-L_arrange_flowers.yaml new file mode 100644 index 
0000000000000000000000000000000000000000..0f82bc179eb41422f651d8803a5cbe8fe0e34356 --- /dev/null +++ b/dataset_info/Realman_RMC-AIDA-L_arrange_flowers.yaml @@ -0,0 +1,499 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: Realman_RMC-AIDA-L_arrange_flowers +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: household + level2: living_room + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. +objects: +- object_name: table + level1: furniture + level2: table + level3: null + level4: null + level5: null +- object_name: flower + level1: plant + level2: flower + level3: null + level4: null + level5: null +- object_name: vase + level1: container + level2: vase + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- the right gripper grasp the flowers and insert them into the vase. 
+sub_tasks: +- subtask: Grasp the pink flower with the right gripper + subtask_index: 0 +- subtask: Abnormal + subtask_index: 1 +- subtask: Place the pink flower into the vase with the right gripper + subtask_index: 2 +- subtask: End + subtask_index: 3 +- subtask: 'null' + subtask_index: 4 +atomic_actions: +- grasp +- pick +- place +robot_name: +- Realman_RMC-AIDA-L +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 60 + total_frames: 22483 + fps: 30 + total_tasks: 5 + total_videos: 180 + total_chunks: 1 + chunks_size: 1000 + state_dim: 28 + action_dim: 28 + camera_views: 3 + dataset_size: 465.54 MB +frame_num: 22483 +dataset_size: 465.54 MB +data_structure: 'Realman_RMC-AIDA-L_arrange_flowers_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- 
episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (48 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:59 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 28 + names: + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_arm_joint_7_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - 
right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_arm_joint_7_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 28 + names: + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_arm_joint_7_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_arm_joint_7_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + shape: + - 5 + dtype: int32 + scene_annotation: + names: null + shape: + - 1 + dtype: int32 + eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z 
+ - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + shape: + - 12 + dtype: float32 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + shape: + - 2 + dtype: int32 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + shape: + - 2 + dtype: int32 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + shape: + - 2 + dtype: int32 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + shape: + - 2 + dtype: float32 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + shape: + - 2 + dtype: int32 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + shape: + - 2 + dtype: int32 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. 
+homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: 
videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4 +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/dataset_info/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.yaml b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.yaml new file mode 100644 index 0000000000000000000000000000000000000000..3cc4e4bf0906092380f41991429cc33ec828a338 --- /dev/null +++ b/dataset_info/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.yaml @@ -0,0 +1,592 @@ +task_categories: +- robotics +language: +- en +tags: +- RoboCOIN +- LeRobot +license: apache-2.0 +configs: +- config_name: default + data_files: data/chunk-{id}/episode_{id}.parquet +extra_gated_prompt: By accessing this dataset, you agree to cite the associated paper + in your research/publications—see the "Citation" section for details. You agree + to not use the dataset to conduct experiments that cause harm to human subjects. +extra_gated_fields: + Company/Organization: + type: text + description: e.g., "ETH Zurich", "Boston Dynamics", "Independent Researcher" + Country: + type: country + description: e.g., "Germany", "China", "United States" +codebase_version: v2.1 +dataset_name: agilex_cobot_magic_pass_object_left_to_right_black_tablecloth +dataset_uuid: 00000000-0000-0000-0000-000000000000 +scene_type: + level1: commercial_convenience + level2: supermarket + level3: null + level4: null + level5: null +env_type: Due to some reasons, this dataset temporarily cannot provide the environment + type information. 
+objects: +- object_name: table + level1: home_storage + level2: table + level3: null + level4: null + level5: null +- object_name: ambrosial_yogurt + level1: food + level2: ambrosial_yogurt + level3: null + level4: null + level5: null +- object_name: banana + level1: food + level2: banana + level3: null + level4: null + level5: null +- object_name: long_bread + level1: food + level2: long_bread + level3: null + level4: null + level5: null +- object_name: milk + level1: food + level2: milk + level3: null + level4: null + level5: null +- object_name: yogurt + level1: food + level2: yogurt + level3: null + level4: null + level5: null +- object_name: grape + level1: food + level2: grape + level3: null + level4: null + level5: null +- object_name: ham_sausage + level1: food + level2: ham_sausage + level3: null + level4: null + level5: null +- object_name: eggplant + level1: food + level2: eggplant + level3: null + level4: null + level5: null +- object_name: chewing_gum + level1: food + level2: chewing_gum + level3: null + level4: null + level5: null +- object_name: eyeglass_case + level1: laboratory_supplies + level2: eyeglass_case + level3: null + level4: null + level5: null +- object_name: rubik's_cube + level1: toys + level2: rubik's_cube + level3: null + level4: null + level5: null +- object_name: purple_trash_bag + level1: trash + level2: purple_trash_bag + level3: null + level4: null + level5: null +- object_name: cleanser + level1: daily_necessities + level2: cleanser + level3: null + level4: null + level5: null +- object_name: bathing_in_flowers + level1: daily_necessities + level2: bathing_in_flowers + level3: null + level4: null + level5: null +- object_name: whiteboard_eraser + level1: stationery + level2: whiteboard_eraser + level3: null + level4: null + level5: null +- object_name: candle + level1: daily_necessities + level2: candle + level3: null + level4: null + level5: null +- object_name: black_table_cloths + level1: laboratory_supplies + level2: 
black_table_cloths + level3: null + level4: null + level5: null +task_operation_type: Due to some reasons, this dataset temporarily cannot provide + the operation type information. +task_instruction: +- use the left gripper to pick up the item and transfer it from the left gripper to + the right gripper. +sub_tasks: +- subtask: Grasp the XX with the right gripper + subtask_index: 0 +- subtask: Place the XX on the table with the left gripper + subtask_index: 1 +- subtask: Place the XX on the table with the right gripper + subtask_index: 2 +- subtask: Grasp the XX with the left gripper + subtask_index: 3 +- subtask: End + subtask_index: 4 +- subtask: Pass the xx to the right gripper + subtask_index: 5 +- subtask: 'null' + subtask_index: 6 +atomic_actions: +- grasp +- lift +- lower +- handover +- takeover +robot_name: +- agilex_cobot_magic +end_effector_type: two_finger_gripper +tele_type: Due to some reasons, this dataset temporarily cannot provide the teleoperation + type information. +sensor_list: +- cam_head_rgb +- cam_left_wrist_rgb +- cam_right_wrist_rgb +came_info: + cam_head_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p + cam_left_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p + cam_right_wrist_rgb: dtype=video, shape=480x640x3, resolution=640x480, codec=av1, + pix_fmt=yuv420p +depth_enabled: false +coordinate_definition: right-hand-frame +joint_rotation_dim: radian +end_rotation_dim: radian +end_translation_dim: meter +annotations: +- eef_acc_mag_annotation.jsonl +- eef_direction_annotation.jsonl +- eef_velocity_annotation.jsonl +- gripper_activity_annotation.jsonl +- gripper_mode_annotation.jsonl +- scene_annotations.jsonl +- subtask_annotations.jsonl +statistics: + total_episodes: 98 + total_frames: 60078 + fps: 30 + total_tasks: 7 + total_videos: 294 + total_chunks: 1 + chunks_size: 1000 + state_dim: 26 + action_dim: 26 + camera_views: 3 + dataset_size: 1.18 GB +frame_num: 60078 
+dataset_size: 1.18 GB +data_structure: 'Agilex_Cobot_Magic_pass_object_left_to_right_black_tablecloth_qced_hardlink/ + + |-- annotations + + | |-- eef_acc_mag_annotation.jsonl + + | |-- eef_direction_annotation.jsonl + + | |-- eef_velocity_annotation.jsonl + + | |-- gripper_activity_annotation.jsonl + + | |-- gripper_mode_annotation.jsonl + + | |-- scene_annotations.jsonl + + | `-- subtask_annotations.jsonl + + |-- data + + | `-- chunk-000 + + | |-- episode_000000.parquet + + | |-- episode_000001.parquet + + | |-- episode_000002.parquet + + | |-- episode_000003.parquet + + | |-- episode_000004.parquet + + | |-- episode_000005.parquet + + | |-- episode_000006.parquet + + | |-- episode_000007.parquet + + | |-- episode_000008.parquet + + | |-- episode_000009.parquet + + | |-- episode_000010.parquet + + | `-- episode_000011.parquet + + | `-- ... (86 more entries) + + |-- meta + + | |-- episodes.jsonl + + | |-- episodes_stats.jsonl + + | |-- info.json + + | `-- tasks.jsonl + + |-- videos + + | `-- chunk-000 + + | |-- observation.images.cam_head_rgb + + | |-- observation.images.cam_left_wrist_rgb + + | `-- observation.images.cam_right_wrist_rgb + + |-- info.yaml + + `-- README.md' +splits: + train: 0:97 +features: + observation.images.cam_head_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_left_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.images.cam_right_wrist_rgb: + dtype: video + shape: + - 480 + - 640 + - 3 + names: + - height + - width + - channels + info: + video.height: 480 + 
video.width: 640 + video.codec: av1 + video.pix_fmt: yuv420p + video.is_depth_map: false + video.fps: 30 + video.channels: 3 + has_audio: false + observation.state: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + action: + dtype: float32 + shape: + - 26 + names: + - left_arm_joint_1_rad + - left_arm_joint_2_rad + - left_arm_joint_3_rad + - left_arm_joint_4_rad + - left_arm_joint_5_rad + - left_arm_joint_6_rad + - left_gripper_open + - left_eef_pos_x_m + - left_eef_pos_y_m + - left_eef_pos_z_m + - left_eef_rot_euler_x_rad + - left_eef_rot_euler_y_rad + - left_eef_rot_euler_z_rad + - right_arm_joint_1_rad + - right_arm_joint_2_rad + - right_arm_joint_3_rad + - right_arm_joint_4_rad + - right_arm_joint_5_rad + - right_arm_joint_6_rad + - right_gripper_open + - right_eef_pos_x_m + - right_eef_pos_y_m + - right_eef_pos_z_m + - right_eef_rot_euler_x_rad + - right_eef_rot_euler_y_rad + - right_eef_rot_euler_z_rad + timestamp: + dtype: float32 + shape: + - 1 + names: null + frame_index: + dtype: int64 + shape: + - 1 + names: null + episode_index: + dtype: int64 + shape: + - 1 + names: null + index: + dtype: int64 + shape: + - 1 + names: null + task_index: + dtype: int64 + shape: + - 1 + names: null + subtask_annotation: + names: null + dtype: int32 + shape: + - 5 + scene_annotation: + names: null + dtype: int32 + shape: + - 1 + 
eef_sim_pose_state: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_sim_pose_action: + names: + - left_eef_pos_x + - left_eef_pos_y + - left_eef_pos_z + - left_eef_rot_x + - left_eef_rot_y + - left_eef_rot_z + - right_eef_pos_x + - right_eef_pos_y + - right_eef_pos_z + - right_eef_rot_x + - right_eef_rot_y + - right_eef_rot_z + dtype: float32 + shape: + - 12 + eef_direction_state: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_direction_action: + names: + - left_eef_direction + - right_eef_direction + dtype: int32 + shape: + - 2 + eef_velocity_state: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_velocity_action: + names: + - left_eef_velocity + - right_eef_velocity + dtype: int32 + shape: + - 2 + eef_acc_mag_state: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + eef_acc_mag_action: + names: + - left_eef_acc_mag + - right_eef_acc_mag + dtype: int32 + shape: + - 2 + gripper_mode_state: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_mode_action: + names: + - left_gripper_mode + - right_gripper_mode + dtype: int32 + shape: + - 2 + gripper_activity_state: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_activity_action: + names: + - left_gripper_activity + - right_gripper_activity + dtype: int32 + shape: + - 2 + gripper_open_scale_state: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 + gripper_open_scale_action: + names: + - left_gripper_open_scale + - right_gripper_open_scale + dtype: float32 + shape: + - 2 +authors: + contributed_by: + - name: RoboCOIN Team at Beijing 
Academy of Artificial Intelligence (BAAI) +dataset_description: This dataset uses an extended format based on LeRobot and is + fully compatible with LeRobot. +homepage: https://flagopen.github.io/RoboCOIN/ +paper: https://arxiv.org/abs/2511.17441 +repository: https://github.com/FlagOpen/RoboCOIN +contact_info: For questions, issues, or feedback regarding this dataset, please contact + us. +support_info: For technical support, please open an issue on our GitHub repository. +license_details: apache-2.0 +citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\ + \ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng\ + \ Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu\ + \ Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang\ + \ Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin,\ + \ Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng\ + \ Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang\ + \ Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance\ + \ Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang,\ + \ Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie,\ + \ Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao\ + \ Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian,\ + \ Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong,\ + \ He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang,\ + \ Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n\ + \ url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n" +additional_citations: 'If you use this dataset, please also consider citing: + + LeRobot Framework: 
https://github.com/huggingface/lerobot + + ' +version_info: Initial Release +data_path: data/chunk-{id}/episode_{id}.parquet +video_path: videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id} +video_url: videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4 diff --git a/info/consolidated_datasets.json b/info/consolidated_datasets.json index a69663eb2b31154e991d6a51350835b52be786c1..d440aa048d5546d55376f6f66b6f19439b61d71a 100644 --- a/info/consolidated_datasets.json +++ b/info/consolidated_datasets.json @@ -2480,6 +2480,793 @@ "data_schema": "Airbot_MMK2_storage_cup_rubik_s_cube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": 
"Airbot_MMK2_storage_cup_rubik_s_cube_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Galaxea_R1_Lite_classify_object_four": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_classify_object_four", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "commercial_convenience", + "level2": "supermarket", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "brown_basket", + "level1": "baskets", + "level2": "brown_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yellow_basket", + "level1": "baskets", + "level2": "yellow_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_fruits", + "level1": "fruits", + "level2": "any_fruits", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_vegetables", + "level1": "vegetables", + "level2": "any_vegetables", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_snacks", + "level1": "snacks", + "level2": "any_snacks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_bread", + "level1": "bread", + "level2": "any_bread", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "place the food in the right basket with the right gripper, and place the non food items in the left basket with the left gripper." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the potato chips and put it in the left basket", + "subtask_index": 0 + }, + { + "subtask": "Grasp the mineral water and put it in the right basket", + "subtask_index": 1 + }, + { + "subtask": "Grasp the rubiks cube and put it in the left basket", + "subtask_index": 2 + }, + { + "subtask": "Grasp the waffle and put it in the right basket", + "subtask_index": 3 + }, + { + "subtask": "Grasp the soft cleanser and put it in the left basket", + "subtask_index": 4 + }, + { + "subtask": "Grasp the back scratcher and put it in the left basket", + "subtask_index": 5 + }, + { + "subtask": "Grasp the apple and put it in the right basket", + "subtask_index": 6 + }, + { + "subtask": "End", + "subtask_index": 7 + }, + { + "subtask": "Grasp the white eraser and put it in the left basket", + "subtask_index": 8 + }, + { + "subtask": "Grasp the square chewing gum and put it in the right basket", + "subtask_index": 9 + }, + { + "subtask": "Grasp the power strip and put it in the left basket", + "subtask_index": 10 + }, + { + "subtask": "Grasp the green lemon and put it in the right basket", + "subtask_index": 11 + }, + { + "subtask": "Grasp the coke and put it in the right basket", + "subtask_index": 12 + }, + { + "subtask": "Grasp the cleaning agent and put it in the left basket", + "subtask_index": 13 + }, + { + "subtask": "Grasp the soda water and put it in the right basket", + "subtask_index": 14 + }, + { + "subtask": "Grasp the spoon and put it in the left basket", + "subtask_index": 15 + }, + { + "subtask": "Grasp the duck toys and put it in the left basket", + "subtask_index": 16 + }, + { + "subtask": "Grasp the triangle cake and put it in the right basket", + "subtask_index": 17 + }, + { + "subtask": "Grasp the cookie and put it in the right basket", + "subtask_index": 18 + }, + { + "subtask": "Grasp the yellow cake and put it in the right basket", + "subtask_index": 19 + }, + { + "subtask": "Grasp the shower sphere and put it in 
the left basket", + "subtask_index": 20 + }, + { + "subtask": "Grasp the compass and put it in the left basket", + "subtask_index": 21 + }, + { + "subtask": "Grasp the orange and put it in the right basket", + "subtask_index": 22 + }, + { + "subtask": "Grasp the broom and put it in the left basket", + "subtask_index": 23 + }, + { + "subtask": "Grasp the back scratcher and put it in the right basket", + "subtask_index": 24 + }, + { + "subtask": "Grasp the ballpoint pen and put it in the left basket", + "subtask_index": 25 + }, + { + "subtask": "Grasp the round bread and put it in the right basket", + "subtask_index": 26 + }, + { + "subtask": "Grasp the egg yolk pastry and put it in the right basket", + "subtask_index": 27 + }, + { + "subtask": "Grasp the soap and put it in the left basket", + "subtask_index": 28 + }, + { + "subtask": "Grasp the washing liquid and put it in the left basket", + "subtask_index": 29 + }, + { + "subtask": "Grasp the hard cleanser and put it in the left basket", + "subtask_index": 30 + }, + { + "subtask": "Grasp the milk and put it in the right basket", + "subtask_index": 31 + }, + { + "subtask": "Grasp the black marker and put it in the left basket", + "subtask_index": 32 + }, + { + "subtask": "Grasp the banana and put it in the right basket", + "subtask_index": 33 + }, + { + "subtask": "Grasp the can and put it in the left basket", + "subtask_index": 34 + }, + { + "subtask": "Grasp the black glass cup and put it in the left basket", + "subtask_index": 35 + }, + { + "subtask": "Grasp the brush and put it in the left basket", + "subtask_index": 36 + }, + { + "subtask": "Grasp the bath ball and put it in the left basket", + "subtask_index": 37 + }, + { + "subtask": "Grasp the blue towel and put it in the left basket", + "subtask_index": 38 + }, + { + "subtask": "Grasp the peeler and put it in the left basket", + "subtask_index": 39 + }, + { + "subtask": "Grasp the brown towel and put it in the left basket", + "subtask_index": 40 + }, + { + 
"subtask": "Grasp the peach and put it in the right basket", + "subtask_index": 41 + }, + { + "subtask": "Grasp the tea cup and put it in the left basket", + "subtask_index": 42 + }, + { + "subtask": "Grasp the round bread and put it in the left basket", + "subtask_index": 43 + }, + { + "subtask": "Grasp the chocolate and put it in the right basket", + "subtask_index": 44 + }, + { + "subtask": "Grasp the grey towel and put it in the left basket", + "subtask_index": 45 + }, + { + "subtask": "Grasp the canned cola and put it in the right basket", + "subtask_index": 46 + }, + { + "subtask": "Grasp the tape and put it in the left basket", + "subtask_index": 47 + }, + { + "subtask": "Grasp the bread slice and put it in the right basket", + "subtask_index": 48 + }, + { + "subtask": "Grasp the glasses case and put it in the left basket", + "subtask_index": 49 + }, + { + "subtask": "Grasp the triangle cake and put it in the left basket", + "subtask_index": 50 + }, + { + "subtask": "Grasp the peach doll and put it in the right basket", + "subtask_index": 51 + }, + { + "subtask": "Grasp the blue cup and put it in the left basket", + "subtask_index": 52 + }, + { + "subtask": "Grasp the pen container and put it in the left basket", + "subtask_index": 53 + }, + { + "subtask": "Grasp the red duck and put it in the left basket", + "subtask_index": 54 + }, + { + "subtask": "Grasp the long bread and put it in the right basket", + "subtask_index": 55 + }, + { + "subtask": "Grasp the yogurt and put it in the right basket", + "subtask_index": 56 + }, + { + "subtask": "Grasp the potato chips and put it in the right basket", + "subtask_index": 57 + }, + { + "subtask": "Grasp the can and put it in the right basket", + "subtask_index": 58 + }, + { + "subtask": "Grasp the egg beater and put it in the right basket", + "subtask_index": 59 + }, + { + "subtask": "Place the cookie in the center of the table", + "subtask_index": 60 + }, + { + "subtask": "Grasp the square chewing gum and put it 
in the left basket", + "subtask_index": 61 + }, + { + "subtask": "Grasp the ad milk and put it in the right basket", + "subtask_index": 62 + }, + { + "subtask": "Grasp the detergent and put it in the left basket", + "subtask_index": 63 + }, + { + "subtask": "Grasp the yellow duck and put it in the left basket", + "subtask_index": 64 + }, + { + "subtask": "Grasp the blue marker and put it in the left basket", + "subtask_index": 65 + }, + { + "subtask": "null", + "subtask_index": 66 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 191, + "total_frames": 153386, + "fps": 30, + "total_tasks": 67, + "total_videos": 764, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 14, + "action_dim": 14, + 
"camera_views": 4, + "dataset_size": "8.39 GB" + }, + "frame_num": 153386, + "dataset_size": "8.39 GB", + "data_structure": "Galaxea_R1_Lite_classify_object_four_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (179 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:190" + }, + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + 
"observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": 
"int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": 
[ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, 
Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + }, "Agilex_Cobot_Magic_move_mouse_pen_khaki_tablecloth": { "task_categories": [ "robotics" @@ -5434,6 +6221,593 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, + "Agilex_Cobot_Magic_pour_drink_bottle_cup": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_pour_drink_bottle_cup", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "office_workspace", + "level2": "office", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cup", + "level1": "kitchen_supplies", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "water_Bottle", + "level1": "beverages", + "level2": "water_bottle", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use a gripper to hold the bottle of the drink and pour it into a random cup." 
+ ], + "sub_tasks": [ + { + "subtask": "Pour the cranberry juice into the cyan cup with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Pour the cranberry juice into the paper cup with the right gripper", + "subtask_index": 1 + }, + { + "subtask": "end", + "subtask_index": 2 + }, + { + "subtask": "Pour the cranberry juice into the White Plastic Cup with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the mineral water bottle on the table with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "Pour the mineral water into the paper cup with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "Grasp the mineral water bottle with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "Pour the cranberry juice into the cyan cup with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Pour the cranberry juice into the White Plastic Cup with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Abnormal", + "subtask_index": 9 + }, + { + "subtask": "Pour the mineral water into the glass with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Place the cranberry juice bottle on the table with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "Pour the cranberry juice into the paper cup with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Grasp the cranberry juice bottle with the right gripper", + "subtask_index": 13 + }, + { + "subtask": "Pour the mineral water into the cyan cup with the left gripper", + "subtask_index": 14 + }, + { + "subtask": "Grasp the cranberry juice bottle with the left gripper", + "subtask_index": 15 + }, + { + "subtask": "Place the cranberry juice bottle on the table with the left gripper", + "subtask_index": 16 + }, + { + "subtask": "Place the mineral water bottle on the table with the right gripper", + "subtask_index": 17 + }, + { + "subtask": "Grasp the mineral water bottle with the right gripper", + "subtask_index": 
18 + }, + { + "subtask": "Pour the mineral water into the cyan cup with the right gripper", + "subtask_index": 19 + }, + { + "subtask": "Pour the mineral water into the paper cup with the right gripper", + "subtask_index": 20 + }, + { + "subtask": "Pour the mineral water into the glass with the left gripper", + "subtask_index": 21 + }, + { + "subtask": "null", + "subtask_index": 22 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "pour", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 195, + "total_frames": 201530, + "fps": 30, + "total_tasks": 23, + "total_videos": 585, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "1.58 GB" + }, + "frame_num": 201530, + "dataset_size": "1.58 GB", + "data_structure": "Agilex_Cobot_Magic_pour_drink_bottle_cup_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- 
eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (183 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:194" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + 
"video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + 
"names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + 
"names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song 
Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "R1_Lite_take_and_place_the_portable_power_bank": { "path": "R1_Lite_take_and_place_the_portable_power_bank", "dataset_name": "take_and_place_the_portable_power_bank", @@ -8334,6 +9708,537 @@ "data_schema": "AIRBOT_MMK2_place_the_umbrella_and_the_ruler_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── 
episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "AIRBOT_MMK2_place_the_umbrella_and_the_ruler_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Agilex_Cobot_Magic_erase_board": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + 
"tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_erase_board", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "education", + "level2": "school", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard", + "level1": "stationery", + "level2": "whiteboard", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_whiteboard_Eraser", + "level1": "stationery", + "level2": "green_whiteboard_Eraser", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "wipe off the handwriting on the whiteboard." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the eraser with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "end", + "subtask_index": 1 + }, + { + "subtask": "Grasp the eraser with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Grasp the eraser and wipe the blackboard with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Abnormal", + "subtask_index": 4 + }, + { + "subtask": "Grasp the eraser and wipe the blackboard with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "Place the eraser with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the eraser with the right gripper", + "subtask_index": 7 + }, + { + "subtask": "null", + "subtask_index": 8 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower", + "wipe" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 50, + "total_frames": 24688, + "fps": 30, + "total_tasks": 9, + "total_videos": 150, + 
"total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "230.75 MB" + }, + "frame_num": 24688, + "dataset_size": "230.75 MB", + "data_structure": "Agilex_Cobot_Magic_erase_board_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + 
"right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + 
"left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + 
"contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider 
citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "Cobot_Magic_fold_clothes": { "path": "Cobot_Magic_fold_clothes", "dataset_name": "fold_clothes", @@ -14257,6 +16162,533 @@ "data_schema": "G1edu-u3_pick_apple_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "G1edu-u3_pick_apple_a_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── 
(...)" }, + "Agilex_Cobot_Magic_fold_towel_yellow_tray": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_fold_towel_yellow_tray", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_tray", + "level1": "kitchen_supplies", + "level2": "green_tray", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yellow_square_towel", + "level1": "daily_necessities", + "level2": "yellow_square_towel", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use both grippers to hold the edge of the towel and fold it forward.,use the right gripper to hold the right edge and fold it again,use a 
gripper to place the folded towel on the tray." + ], + "sub_tasks": [ + { + "subtask": "Abnormal", + "subtask_index": 0 + }, + { + "subtask": "Fold the yellow towel upwards", + "subtask_index": 1 + }, + { + "subtask": "End", + "subtask_index": 2 + }, + { + "subtask": "Fold the yellow towel from right to left with right gripper", + "subtask_index": 3 + }, + { + "subtask": "Fold the yellow towel from left to right with left gripper", + "subtask_index": 4 + }, + { + "subtask": "Place the folded yellow towel on the tray with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "Place the folded yellow towel on the tray with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "null", + "subtask_index": 7 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower", + "fold" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 49, + "total_frames": 48960, + "fps": 30, + "total_tasks": 8, + "total_videos": 147, + 
"total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "552.42 MB" + }, + "frame_num": 48960, + "dataset_size": "552.42 MB", + "data_structure": "Agilex_Cobot_Magic_fold_towel_yellow_tray_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:48" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + 
"has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + 
"names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": 
[ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai 
Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "AIRBOT_MMK2_storage_tissue_paper": { "path": "AIRBOT_MMK2_storage_tissue_paper", "dataset_name": "storage_tissue_paper", @@ -20632,6 +23064,514 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, + "Airbot_MMK2_click_pen": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the 
\"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_click_pen", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "education", + "level2": "school", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block", + "level1": "toy", + "level2": "block", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pen", + "level1": "stationery", + "level2": "pen", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up the pen with your hand, press the pen switch and then place it on the table." 
+ ], + "sub_tasks": [ + { + "subtask": "End", + "subtask_index": 0 + }, + { + "subtask": "Lift the pen with the right gripper", + "subtask_index": 1 + }, + { + "subtask": "Grasp the pen with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the pen on the table with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Abnormal", + "subtask_index": 4 + }, + { + "subtask": "Press the pen switch with the right gripper", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 + } + ], + "atomic_actions": [ + "garsp", + "pick", + "place", + "pressbutton" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 100, + "total_frames": 30984, + "fps": 30, + "total_tasks": 7, + "total_videos": 400, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + 
"action_dim": 36, + "camera_views": 4, + "dataset_size": "1.12 GB" + }, + "frame_num": 30984, + "dataset_size": "1.12 GB", + "data_structure": "Airbot_MMK2_click_pen_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:99" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + 
"dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + 
"right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + 
"right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo 
Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, "agilex_cobot_magic_pass_object_right_to_left_green_tablecloth": { "task_categories": [ "robotics" @@ -28491,6 +31431,505 @@ "data_schema": "R1_Lite_put_on_a_garbage_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "R1_Lite_put_on_a_garbage_bag_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Airbot_MMK2_storage_and_take_cake_plate": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": 
"data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_and_take_cake_plate", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "kitchen", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "cake", + "level1": "bread", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "plate", + "level1": "kitchen_supplies", + "level2": "plate", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the cake into the plate with left hand and take it out with right hand." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the cake on the table with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the cake with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Static", + "subtask_index": 2 + }, + { + "subtask": "Place the cake into the white plate with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "Grasp the cake on the plate with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "End", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 50, + "total_frames": 9782, + "fps": 30, + "total_tasks": 7, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, 
+ "action_dim": 36, + "camera_views": 4, + "dataset_size": "487.34 MB" + }, + "frame_num": 9782, + "dataset_size": "487.34 MB", + "data_structure": "Airbot_MMK2_storage_and_take_cake_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + 
"observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + 
"left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + 
"right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, 
Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, "leju_robot_hotel_services_f": { "path": "leju_robot_hotel_services_f", "dataset_name": "hotel_services_f", @@ -34880,6 +38319,501 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, + "Airbot_MMK2_storage_block_both_hands": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you 
agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_block_both_hands", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cream_storage_basket", + "level1": "home_storage", + "level2": "cream_storage_basket", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up the building blocks with both hands simultaneously and put them into the white storage box." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the red block with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Grasp the orange block with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the red block into the white basket with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the orange block into the white basket with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 49, + "total_frames": 3769, + "fps": 30, + "total_tasks": 6, + "total_videos": 196, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + 
"camera_views": 4, + "dataset_size": "104.08 MB" + }, + "frame_num": 3769, + "dataset_size": "104.08 MB", + "data_structure": "Airbot_MMK2_storage_block_both_hands_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:48" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": 
"video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + 
"right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + 
"right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo 
Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, "AIRBOT_MMK2_store_beauty_blender_and_building_blocks": { "path": "AIRBOT_MMK2_store_beauty_blender_and_building_blocks", "dataset_name": "store_beauty_blender_and_building_blocks", @@ -48936,6 +52870,1053 @@ "data_schema": "G1edu-u3_pick_up_the_bread_az_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── 
episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "G1edu-u3_pick_up_the_bread_az_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n └── observation.images.ego_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Galaxea_R1_Lite_storage_object_brown_plate": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_storage_object_brown_plate", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "brown_plate", + "level1": "plates", + "level2": "brown_plate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruits", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_chemical_products", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_cup", + "level1": "cups", + "level2": "blue_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_pot", + "level1": "cookware", + "level2": "blue_pot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "toast_slices", + "level1": "bread", + "level2": "toast_slices", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_towel", + "level1": "towels", + "level2": "brown_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "snacks", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coke(slim_can)", + "level1": "beverages", + "level2": 
"coke(slim_can)", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "potato_chips", + "level1": "snacks", + "level2": "potato_chips", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "snacks", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "rulers", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block_pillar", + "level1": "building_blocks", + "level2": "block_pillar", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "spoons_and_spatulas", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "erasers", + "level1": "stationery", + "level2": "erasers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "snacks", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "fruits", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruits", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "power_strip", + "level1": "electrical_control_equipment", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "round_bread", + "level1": "bread", + "level2": "round_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + 
"level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "square_building_blocks", + "level1": "building_blocks", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tape", + "level1": "stationery", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cake", + "level1": "bread", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "doll", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ambrosial_yogurt", + "level1": "beverages", + "level2": "ambrosial_yogurt", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use a gripper to pick the target object and place on the brown plate." + ], + "sub_tasks": [ + { + "subtask": "Place the blue pot on the brown plate with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the blue pot with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Grasp the plugboard with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the soft facial cleanser on the brown plate with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the back scratcher on the brown plate with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "Place the blackboard erasure on the brown plate with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "Grasp the potato chips with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the banana with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the coke on the brown plate with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Place the chocolate on the brown plate with the left 
gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the duck toy on the brown plate with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the compasses with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "Place the peach on the brown plate with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Grasp the duck toy with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Place the round wooden block on the brown plate with the right gripper", + "subtask_index": 14 + }, + { + "subtask": "Grasp the blue cup with the left gripper", + "subtask_index": 15 + }, + { + "subtask": "Place the green lemon on the brown plate with the left gripper", + "subtask_index": 16 + }, + { + "subtask": "Place the shower sphere on the brown plate with the left gripper", + "subtask_index": 17 + }, + { + "subtask": "Place the shower sphere on the brown plate with the right gripper", + "subtask_index": 18 + }, + { + "subtask": "Grasp the back scratcher with the right gripper", + "subtask_index": 19 + }, + { + "subtask": "Grasp the square chewing gum with the left gripper", + "subtask_index": 20 + }, + { + "subtask": "Grasp the chocolate cake with the right gripper", + "subtask_index": 21 + }, + { + "subtask": "Place the yogurt on the brown plate with the right gripper", + "subtask_index": 22 + }, + { + "subtask": "Place the banana on the brown plate with the left gripper", + "subtask_index": 23 + }, + { + "subtask": "Grasp the shower sphere with the left gripper", + "subtask_index": 24 + }, + { + "subtask": "Place the brown towel on the brown plate with the left gripper", + "subtask_index": 25 + }, + { + "subtask": "Grasp the plugboard with the right gripper", + "subtask_index": 26 + }, + { + "subtask": "Grasp the yogurt with the right gripper", + "subtask_index": 27 + }, + { + "subtask": "Place the blue cup on the brown plate with the right gripper", + "subtask_index": 28 + }, + { + "subtask": "Place the plugboard on the 
brown plate with the right gripper", + "subtask_index": 29 + }, + { + "subtask": "Grasp the brown towel with the left gripper", + "subtask_index": 30 + }, + { + "subtask": "Grasp the hard facial cleanser with the left gripper", + "subtask_index": 31 + }, + { + "subtask": "Place the duck toy on the brown plate with the left gripper", + "subtask_index": 32 + }, + { + "subtask": "Place the round bread on the brown plate with the right gripper", + "subtask_index": 33 + }, + { + "subtask": "Grasp the brown towel with the right gripper", + "subtask_index": 34 + }, + { + "subtask": "Place the bread slice on the brown plate with the left gripper", + "subtask_index": 35 + }, + { + "subtask": "Grasp the hard facial cleanser with the right gripper", + "subtask_index": 36 + }, + { + "subtask": "Place the chocolate cake on the brown plate with the right gripper", + "subtask_index": 37 + }, + { + "subtask": "Grasp the peach with the left gripper", + "subtask_index": 38 + }, + { + "subtask": "Place the tin on the brown plate with the right gripper", + "subtask_index": 39 + }, + { + "subtask": "Place the tape on the brown plate with the right gripper", + "subtask_index": 40 + }, + { + "subtask": "Place the blackboard erasure on the brown plate with the right gripper", + "subtask_index": 41 + }, + { + "subtask": "Grasp the bread slice with the right gripper", + "subtask_index": 42 + }, + { + "subtask": "Place the potato chips on the brown plate with the right gripper", + "subtask_index": 43 + }, + { + "subtask": "Grasp the potato chips with the left gripper", + "subtask_index": 44 + }, + { + "subtask": "Place the tape on the brown plate with the left gripper", + "subtask_index": 45 + }, + { + "subtask": "Grasp the duck toy with the right gripper", + "subtask_index": 46 + }, + { + "subtask": "End", + "subtask_index": 47 + }, + { + "subtask": "Grasp the blackboard erasure with the left gripper", + "subtask_index": 48 + }, + { + "subtask": "Grasp the round wooden block with the left 
gripper", + "subtask_index": 49 + }, + { + "subtask": "Place the brown towel on the brown plate with the right gripper", + "subtask_index": 50 + }, + { + "subtask": "Place the blue cup on the brown plate with the left gripper", + "subtask_index": 51 + }, + { + "subtask": "Place the compasses on the brown plate with the right gripper", + "subtask_index": 52 + }, + { + "subtask": "Grasp the compasses with the left gripper", + "subtask_index": 53 + }, + { + "subtask": "Place the compasses on the brown plate with the left gripper", + "subtask_index": 54 + }, + { + "subtask": "Grasp the blue pot with the right gripper", + "subtask_index": 55 + }, + { + "subtask": "Grasp the round bread with the right gripper", + "subtask_index": 56 + }, + { + "subtask": "Grasp the chocolate cake with the left gripper", + "subtask_index": 57 + }, + { + "subtask": "Place the potato chips on the brown plate with the left gripper", + "subtask_index": 58 + }, + { + "subtask": "Place the plugboard on the brown plate with the left gripper", + "subtask_index": 59 + }, + { + "subtask": "Place the square chewing gum on the brown plate with the right gripper", + "subtask_index": 60 + }, + { + "subtask": "Place the banana on the brown plate with the right gripper", + "subtask_index": 61 + }, + { + "subtask": "Grasp the tin with the right gripper", + "subtask_index": 62 + }, + { + "subtask": "Place the hard facial cleanser on the brown plate with the left gripper", + "subtask_index": 63 + }, + { + "subtask": "Place the square wooden block on the brown plate with the left gripper", + "subtask_index": 64 + }, + { + "subtask": "Place the square chewing gum on the brown plate with the left gripper", + "subtask_index": 65 + }, + { + "subtask": "Grasp the tape with the right gripper", + "subtask_index": 66 + }, + { + "subtask": "Grasp the coke with the right gripper", + "subtask_index": 67 + }, + { + "subtask": "Grasp the tape with the left gripper", + "subtask_index": 68 + }, + { + "subtask": "Grasp the 
square chewing gum with the right gripper", + "subtask_index": 69 + }, + { + "subtask": "Grasp the shower sphere with the right gripper", + "subtask_index": 70 + }, + { + "subtask": "Place the square wooden block on the brown plate with the right gripper", + "subtask_index": 71 + }, + { + "subtask": "Place the hard facial cleanser on the brown plate with the right gripper", + "subtask_index": 72 + }, + { + "subtask": "Grasp the bread slice with the left gripper", + "subtask_index": 73 + }, + { + "subtask": "Grasp the blackboard erasure with the right gripper", + "subtask_index": 74 + }, + { + "subtask": "Place the bread slice on the brown plate with the right gripper", + "subtask_index": 75 + }, + { + "subtask": "Grasp the round bread with the left gripper", + "subtask_index": 76 + }, + { + "subtask": "Grasp the chocolate with the left gripper", + "subtask_index": 77 + }, + { + "subtask": "Grasp the square wooden block with the left gripper", + "subtask_index": 78 + }, + { + "subtask": "Grasp the blue cup with the right gripper", + "subtask_index": 79 + }, + { + "subtask": "Grasp the soft facial cleanser with the right gripper", + "subtask_index": 80 + }, + { + "subtask": "Place the chocolate cake on the brown plate with the left gripper", + "subtask_index": 81 + }, + { + "subtask": "Grasp the square wooden block with the right gripper", + "subtask_index": 82 + }, + { + "subtask": "Grasp the green lemon with the left gripper", + "subtask_index": 83 + }, + { + "subtask": "Place the round wooden block on the brown plate with the left gripper", + "subtask_index": 84 + }, + { + "subtask": "Place the round bread on the brown plate with the left gripper", + "subtask_index": 85 + }, + { + "subtask": "Place the blue pot on the brown plate with the left gripper", + "subtask_index": 86 + }, + { + "subtask": "Grasp the round wooden block with the right gripper", + "subtask_index": 87 + }, + { + "subtask": "Grasp the banana with the right gripper", + "subtask_index": 88 + }, + 
{ + "subtask": "null", + "subtask_index": 89 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 102, + "total_frames": 16390, + "fps": 30, + "total_tasks": 90, + "total_videos": 408, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "615.40 MB" + }, + "frame_num": 16390, + "dataset_size": "615.40 MB", + "data_structure": "Galaxea_R1_Lite_storage_object_brown_plate_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- 
subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (90 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:101" + }, + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + 
"dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + 
"right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + 
], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan 
Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + }, "Cobot_Magic_move_beverage": { "path": "Cobot_Magic_move_beverage", "dataset_name": "move_beverage", @@ -49148,6 +54129,528 @@ "data_schema": "R1_Lite_put_the_shoes_into_the_shoe_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "R1_Lite_put_the_shoes_into_the_shoe_box_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── 
eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Agilex_Cobot_Magic_move_mouse": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_move_mouse", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "office & workspace", + "level2": "office", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "mouse", + "level1": "appliances", + "level2": "mouse", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mouse_pad", + "level1": "appliances", + "level2": "mouse_pad", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "the right gripper organize the mouse on the mouse pad." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the mouse with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Abnormal", + "subtask_index": 1 + }, + { + "subtask": "Grasp the mouse with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the mouse on the mouse mat with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "End", + "subtask_index": 4 + }, + { + "subtask": "Place the mouse on the mouse mat with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 112, + "total_frames": 49737, + "fps": 30, + "total_tasks": 7, + "total_videos": 336, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "386.89 MB" + }, + "frame_num": 49737, + "dataset_size": "386.89 MB", + 
"data_structure": "Agilex_Cobot_Magic_move_mouse_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (100 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:111" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + 
"observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + 
"frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + 
"gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, 
Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "G1edu-u3_put_the_tissue_box_al": { "path": "G1edu-u3_put_the_tissue_box_al", "dataset_name": "put_the_tissue_box_al", @@ -59814,11 +65317,503 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_diamond_ring", + "dataset_name": "Airbot_MMK2_storage_diamond_ring", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "bedroom", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "diamond_ring", + "level1": "daily_necessities", + "level2": "diamond_ring", + "level3": null, + "level4": null, + "level5": null + }, + { + 
"object_name": "flip_top_paper_boxes", + "level1": "packaging", + "level2": "flip_top_paper_boxes", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the ring into the box." + ], + "sub_tasks": [ + { + "subtask": "Place the diamond ring in the box with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Close the lid of the box with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Grasp the diamond ring with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "null", + "subtask_index": 4 + } + ], + "atomic_actions": [ + "grasp", + "place", + "pick", + "turn" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + 
"subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 50, + "total_frames": 9131, + "fps": 30, + "total_tasks": 5, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "426.97 MB" + }, + "frame_num": 9131, + "dataset_size": "426.97 MB", + "data_structure": "Airbot_MMK2_storage_diamond_ring_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + 
"left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + 
"dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + 
"right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n 
}\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, + "Airbot_MMK2_storage_block_BBs": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_block_BBs", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "household", - "level2": "bedroom", + "level1": "scene_level1", + "level2": "scene_level2", "level3": null, "level4": null, "level5": null @@ -59826,53 +65821,65 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "diamond_ring", - "level1": "daily_necessities", - "level2": "diamond_ring", - "level3": null, + "object_name": null, + "level1": "bb_pellets", + "level2": "ball", + "level3": "bb_pellets", "level4": null, "level5": null }, { - "object_name": "flip_top_paper_boxes", - "level1": 
"packaging", - "level2": "flip_top_paper_boxes", - "level3": null, + "object_name": null, + "level1": "bowl", + "level2": "bowl", + "level3": "bowl", + "level4": null, + "level5": null + }, + { + "object_name": null, + "level1": "building_blocks", + "level2": "rectangular_building_blocks", + "level3": "building_blocks", "level4": null, "level5": null } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the ring into the box." + "put the bb pellets and rectangular building blocks into the bowl." ], "sub_tasks": [ { - "subtask": "Place the diamond ring in the box with the right gripper", + "subtask": "Place the green cuboid block into the bowl with the right gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Grasp the green cuboid block with the right gripper", "subtask_index": 1 }, { - "subtask": "Close the lid of the box with the left gripper", + "subtask": "Grasp the bullet with the left gripper", "subtask_index": 2 }, { - "subtask": "Grasp the diamond ring with the right gripper", + "subtask": "Place the bullet into the bowl with the left gripper", "subtask_index": 3 }, { - "subtask": "null", + "subtask": "End", "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 } ], "atomic_actions": [ - "grasp", - "place", "pick", - "turn" + "clip", + "place", + "lift" ], "robot_name": [ "Airbot_MMK2" @@ -59906,23 +65913,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 50, - "total_frames": 9131, + "total_episodes": 48, + "total_frames": 8640, "fps": 30, - "total_tasks": 5, - "total_videos": 200, + "total_tasks": 6, + "total_videos": 192, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "426.97 MB" + "dataset_size": "282.66 MB" }, - "frame_num": 9131, - "dataset_size": "426.97 MB", - "data_structure": "Airbot_MMK2_storage_diamond_ring_qced_hardlink/\n|-- 
annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 8640, + "dataset_size": "282.66 MB", + "data_structure": "Airbot_MMK2_storage_block_BBs_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:49" + "train": "0:47" }, "features": { "observation.images.cam_head_rgb": { @@ -60276,7 +66283,133 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "Airbot_MMK2_storage_block_BBs": { + "RMC-AIDA-L_stir_coffee": { + "path": "RMC-AIDA-L_stir_coffee", + "dataset_name": "stir_coffee", + "robot_type": "", + "end_effector_type": [ + "two_finger_gripper" + ], + "scene_type": [], + "atomic_actions": [ + "grasp", + "take", + "put", + "move", + "stir" + ], + "tasks": "Stir the coffee in the cup with the right gripper", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cup", + "level1": "tableware", + "level2": "cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "spoon", + "level1": "tableware", + "level2": "spoon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tablecloth", + "level1": "clothing", + "level2": "tablecloth", + "level3": null, + "level4": null, + "level5": null + } + ], + "operation_platform_height": 77.2, + "frame_range": "0-555807", + "dataset_size": "3.6GB", + "statistics": { + "total_episodes": 767, + "total_frames": 555807, + "total_tasks": 8, + "total_videos": 2301, + "total_chunks": 1, + "chunks_size": 1000, + "fps": 30 + }, + "dataset_uuid": "ee676d19-f3d4-4c11-bb83-9a2474cf77c5", + "language": [ + "en", + "zh" + ], + "task_categories": [ + "robotics" + ], + "sub_tasks": 
[ + "Stir the coffee in the cup with the right gripper", + "Stir the coffee with right gripper", + "Abnormal", + "Move the cup with coffee to the center of view with the right gripper", + "Static", + "Move the cup in the center of the table with right gripper", + "Grasp the spoon with left gripper", + "Move the cup with coffee to the center of view with the left gripper", + "Stir the coffee in the cup with the left gripper", + "Grasp the spoon with right gripper", + "End", + "Stir the coffee with left gripper", + "Move the cup in the center of the table with left gripper", + "Pick up the spoon with the left gripper", + "Pick up the spoon with the right gripper", + "null" + ], + "annotations": { + "subtask_annotation": "auto_generated", + "scene_annotation": "auto_generated", + "eef_direction": "auto_generated", + "eef_velocity": "auto_generated", + "eef_acc_mag": "auto_generated", + "gripper_mode": "auto_generated", + "gripper_activity": "auto_generated" + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ], + "annotated_by": [ + { + "name": "RoboCOIN", + "url": "https://flagopen.github.io/RoboCOIN/", + "affiliation": "RoboCOIN Team" + } + ] + }, + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "license": "apache-2.0", + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, 
Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", + "depth_enabled": false, + "data_schema": "RMC-AIDA-L_stir_coffee_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": 
"RMC-AIDA-L_stir_coffee_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + }, + "Airbot_MMK2_stack_cubic_block": { "task_categories": [ "robotics" ], @@ -60306,11 +66439,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_storage_block_BBs", + "dataset_name": "Airbot_MMK2_stack_cubic_block", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { - "level1": "scene_level1", - "level2": "scene_level2", + "level1": "household", + "level2": "bedroom", "level3": null, "level4": null, "level5": null @@ -60318,65 +66451,76 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": null, - "level1": "bb_pellets", - "level2": "ball", - "level3": "bb_pellets", - "level4": null, - "level5": null - }, - { - "object_name": null, - "level1": "bowl", - "level2": "bowl", - "level3": "bowl", - "level4": null, - 
"level5": null - }, - { - "object_name": null, - "level1": "building_blocks", - "level2": "rectangular_building_blocks", - "level3": "building_blocks", + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", + "level3": null, "level4": null, "level5": null } ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "put the bb pellets and rectangular building blocks into the bowl." + "pick up the building blocks with left and right hands respectively and place them on the blocks." ], "sub_tasks": [ { - "subtask": "Place the green cuboid block into the bowl with the right gripper", + "subtask": "Grasp the blue build blocks with the right gripper", "subtask_index": 0 }, { - "subtask": "Grasp the green cuboid block with the right gripper", + "subtask": "End", "subtask_index": 1 }, { - "subtask": "Grasp the bullet with the left gripper", + "subtask": "Place the blue build blocks on the red build block with the right gripper", "subtask_index": 2 }, { - "subtask": "Place the bullet into the bowl with the left gripper", + "subtask": "Place the yellow build blocks on the orange build block with the right gripper", "subtask_index": 3 }, { - "subtask": "End", + "subtask": "Place the red build blocks on the center of the table with the left gripper", "subtask_index": 4 }, { - "subtask": "null", + "subtask": "Place the orange build blocks on the yellow build block with the right gripper", "subtask_index": 5 + }, + { + "subtask": "Grasp the blue build blocks with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the red build blocks with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Abnormal", + "subtask_index": 8 + }, + { + "subtask": "Grasp the orange build blocks with the right gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the blue build blocks on the red build block with the left gripper", + 
"subtask_index": 10 + }, + { + "subtask": "Grasp the yellow build blocks with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "null", + "subtask_index": 12 } ], "atomic_actions": [ + "grasp", "pick", - "clip", - "place", - "lift" + "place" ], "robot_name": [ "Airbot_MMK2" @@ -60410,23 +66554,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 48, - "total_frames": 8640, + "total_episodes": 162, + "total_frames": 48691, "fps": 30, - "total_tasks": 6, - "total_videos": 192, + "total_tasks": 13, + "total_videos": 648, "total_chunks": 1, "chunks_size": 1000, "state_dim": 36, "action_dim": 36, "camera_views": 4, - "dataset_size": "282.66 MB" + "dataset_size": "1.92 GB" }, - "frame_num": 8640, - "dataset_size": "282.66 MB", - "data_structure": "Airbot_MMK2_storage_block_BBs_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(36 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_front_rgb\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "frame_num": 48691, + "dataset_size": "1.92 GB", + "data_structure": "Airbot_MMK2_stack_cubic_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(150 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", "splits": { - "train": "0:47" + "train": "0:161" }, "features": { "observation.images.cam_head_rgb": { @@ -60780,22 +66924,20 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, - "RMC-AIDA-L_stir_coffee": { - "path": "RMC-AIDA-L_stir_coffee", - "dataset_name": "stir_coffee", + "G1edu-u3_bowl_storage_grape_singletry": { + "path": "G1edu-u3_bowl_storage_grape_singletry", + "dataset_name": "bowl_storage_grape_singletry", "robot_type": "", "end_effector_type": [ - "two_finger_gripper" + "three_finger_hand" ], "scene_type": [], "atomic_actions": [ "grasp", - "take", - "put", - "move", - "stir" + "pick", + "place" ], - "tasks": "Stir the coffee in the cup with the right gripper", + "tasks": "Grasp the grapes with left hand", "objects": [ { "object_name": "table", @@ -60806,43 +66948,35 @@ "level5": null }, { - "object_name": "cup", - "level1": "tableware", - "level2": "cup", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "spoon", - "level1": "tableware", - "level2": "spoon", + "object_name": "bread", + "level1": "food", + "level2": "bread", "level3": null, "level4": null, "level5": null }, { - "object_name": "tablecloth", - "level1": "clothing", - "level2": "tablecloth", + "object_name": "bowl", + "level1": "bowl", + "level2": "bowl", "level3": null, "level4": null, "level5": null } ], "operation_platform_height": 77.2, - "frame_range": "0-555807", - "dataset_size": "3.6GB", + "frame_range": "0-119619", + "dataset_size": "2.4GB", "statistics": { - "total_episodes": 767, - "total_frames": 555807, - 
"total_tasks": 8, - "total_videos": 2301, + "total_episodes": 242, + "total_frames": 119619, + "total_tasks": 1, + "total_videos": 726, "total_chunks": 1, "chunks_size": 1000, "fps": 30 }, - "dataset_uuid": "ee676d19-f3d4-4c11-bb83-9a2474cf77c5", + "dataset_uuid": "40947b8c-339b-414f-94d9-6d5f24520362", "language": [ "en", "zh" @@ -60851,21 +66985,12 @@ "robotics" ], "sub_tasks": [ - "Stir the coffee in the cup with the right gripper", - "Stir the coffee with right gripper", - "Abnormal", - "Move the cup with coffee to the center of view with the right gripper", + "Grasp the grapes with left hand", + "Grasp the bread with the left gripper", "Static", - "Move the cup in the center of the table with right gripper", - "Grasp the spoon with left gripper", - "Move the cup with coffee to the center of view with the left gripper", - "Stir the coffee in the cup with the left gripper", - "Grasp the spoon with right gripper", "End", - "Stir the coffee with left gripper", - "Move the cup in the center of the table with left gripper", - "Pick up the spoon with the left gripper", - "Pick up the spoon with the right gripper", + "Place the bread the bowl with the left gripper", + "Place the grapes in the plate with left hand", "null" ], "annotations": { @@ -60903,10 +67028,10 @@ ], "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian 
Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", "depth_enabled": false, - "data_schema": "RMC-AIDA-L_stir_coffee_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "RMC-AIDA-L_stir_coffee_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── 
gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "data_schema": "G1edu-u3_bowl_storage_grape_singletry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── 
episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", + "structure": "G1edu-u3_bowl_storage_grape_singletry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, - "Airbot_MMK2_stack_cubic_block": { + "Agilex_Cobot_Magic_fold_towel_grey_tray": { "task_categories": [ "robotics" ], @@ -60936,11 +67061,11 @@ } }, "codebase_version": "v2.1", - "dataset_name": "Airbot_MMK2_stack_cubic_block", + "dataset_name": "Agilex_Cobot_Magic_fold_towel_grey_tray", "dataset_uuid": "00000000-0000-0000-0000-000000000000", "scene_type": { "level1": "household", - "level2": "bedroom", + "level2": "kitchen", "level3": null, "level4": null, "level5": null @@ -60948,9 +67073,25 @@ "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", "objects": [ { - "object_name": "square_building_blocks", - "level1": "toys", - "level2": 
"square_building_blocks", + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_tray", + "level1": "kitchen_supplies", + "level2": "green_tray", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "grey_square_towel", + "level1": "daily_necessities", + "level2": "grey_square_towel", "level3": null, "level4": null, "level5": null @@ -60958,89 +67099,68 @@ ], "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", "task_instruction": [ - "pick up the building blocks with left and right hands respectively and place them on the blocks." + "use both grippers to hold the edge of the towel and fold it forward.,use the right gripper to hold the right edge and fold it again,use a gripper to place the folded towel on the tray." ], "sub_tasks": [ { - "subtask": "Grasp the blue build blocks with the right gripper", + "subtask": "Fold the grey towel from left to right with left gripper", "subtask_index": 0 }, { - "subtask": "End", + "subtask": "Place the folded grey towel on the tray with the right gripper", "subtask_index": 1 }, { - "subtask": "Place the blue build blocks on the red build block with the right gripper", + "subtask": "Abnormal", "subtask_index": 2 }, { - "subtask": "Place the yellow build blocks on the orange build block with the right gripper", + "subtask": "Fold the grey towel from right to left with right gripper", "subtask_index": 3 }, { - "subtask": "Place the red build blocks on the center of the table with the left gripper", + "subtask": "Fold the grey towel upwards", "subtask_index": 4 }, { - "subtask": "Place the orange build blocks on the yellow build block with the right gripper", + "subtask": "End", "subtask_index": 5 }, { - "subtask": "Grasp the blue build blocks with the left gripper", + "subtask": "Place the folded grey towel on the tray with the left 
gripper", "subtask_index": 6 }, - { - "subtask": "Grasp the red build blocks with the left gripper", - "subtask_index": 7 - }, - { - "subtask": "Abnormal", - "subtask_index": 8 - }, - { - "subtask": "Grasp the orange build blocks with the right gripper", - "subtask_index": 9 - }, - { - "subtask": "Place the blue build blocks on the red build block with the left gripper", - "subtask_index": 10 - }, - { - "subtask": "Grasp the yellow build blocks with the right gripper", - "subtask_index": 11 - }, { "subtask": "null", - "subtask_index": 12 + "subtask_index": 7 } ], "atomic_actions": [ "grasp", - "pick", - "place" + "lift", + "lower", + "fold" ], "robot_name": [ - "Airbot_MMK2" + "Agilex_Cobot_Magic" ], - "end_effector_type": "five_finger_gripper", + "end_effector_type": "two_finger_gripper", "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", "sensor_list": [ "cam_head_rgb", "cam_left_wrist_rgb", - "cam_right_wrist_rgb", - "cam_front_rgb" + "cam_right_wrist_rgb" ], "came_info": { "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", - "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" }, "depth_enabled": false, "coordinate_definition": "right-hand-frame", "joint_rotation_dim": "radian", - "end_rotation_dim": "end_rotation_dim", - "end_translation_dim": "end_translation_dim", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", "annotations": [ "eef_acc_mag_annotation.jsonl", "eef_direction_annotation.jsonl", @@ -61051,23 +67171,23 @@ "subtask_annotations.jsonl" ], "statistics": { - "total_episodes": 162, 
- "total_frames": 48691, + "total_episodes": 50, + "total_frames": 53308, "fps": 30, - "total_tasks": 13, - "total_videos": 648, + "total_tasks": 8, + "total_videos": 150, "total_chunks": 1, "chunks_size": 1000, - "state_dim": 36, - "action_dim": 36, - "camera_views": 4, - "dataset_size": "1.92 GB" + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "724.56 MB" }, - "frame_num": 48691, - "dataset_size": "1.92 GB", - "data_structure": "Airbot_MMK2_stack_cubic_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(150 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "frame_num": 53308, + "dataset_size": "724.56 MB", + "data_structure": "Agilex_Cobot_Magic_fold_towel_grey_tray_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", "splits": { - "train": "0:161" + "train": "0:49" }, "features": { "observation.images.cam_head_rgb": { @@ -61139,33 +67259,10 @@ "has_audio": false } }, - "observation.images.cam_front_rgb": { - "dtype": "video", - "shape": [ - 480, - 640, - 3 - ], - "names": [ - "height", - "width", - "channels" - ], - "info": { - "video.height": 480, - "video.width": 640, - "video.codec": "av1", - "video.pix_fmt": "yuv420p", - "video.is_depth_map": false, - "video.fps": 30, - "video.channels": 3, - "has_audio": false - } - }, "observation.state": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -61174,42 +67271,32 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - 
"right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "action": { "dtype": "float32", "shape": [ - 36 + 26 ], "names": [ "left_arm_joint_1_rad", @@ -61218,36 +67305,26 @@ "left_arm_joint_4_rad", "left_arm_joint_5_rad", "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", "right_arm_joint_1_rad", "right_arm_joint_2_rad", "right_arm_joint_3_rad", "right_arm_joint_4_rad", "right_arm_joint_5_rad", "right_arm_joint_6_rad", - "left_hand_joint_1_rad", - "left_hand_joint_2_rad", - "left_hand_joint_3_rad", - "left_hand_joint_4_rad", - "left_hand_joint_5_rad", - "left_hand_joint_6_rad", - "left_hand_joint_7_rad", - "left_hand_joint_8_rad", - "left_hand_joint_9_rad", - "left_hand_joint_10_rad", - "left_hand_joint_11_rad", - "left_hand_joint_12_rad", - "right_hand_joint_1_rad", - "right_hand_joint_2_rad", - "right_hand_joint_3_rad", - "right_hand_joint_4_rad", - "right_hand_joint_5_rad", - "right_hand_joint_6_rad", - "right_hand_joint_7_rad", - "right_hand_joint_8_rad", - "right_hand_joint_9_rad", - "right_hand_joint_10_rad", - "right_hand_joint_11_rad", - "right_hand_joint_12_rad" + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" ] }, "timestamp": { @@ -61287,17 +67364,17 @@ }, "subtask_annotation": { "names": null, + "dtype": "int32", "shape": [ 5 - ], - "dtype": "int32" + ] }, "scene_annotation": { "names": null, + "dtype": "int32", "shape": [ 1 - ], - "dtype": "int32" + ] }, "eef_sim_pose_state": { "names": [ @@ -61314,10 +67391,10 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" 
+ ] }, "eef_sim_pose_action": { "names": [ @@ -61334,70 +67411,130 @@ "right_eef_rot_y", "right_eef_rot_z" ], + "dtype": "float32", "shape": [ 12 - ], - "dtype": "float32" + ] }, "eef_direction_state": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_direction_action": { "names": [ "left_eef_direction", "right_eef_direction" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_state": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_velocity_action": { "names": [ "left_eef_velocity", "right_eef_velocity" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_state": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 - ], - "dtype": "int32" + ] }, "eef_acc_mag_action": { "names": [ "left_eef_acc_mag", "right_eef_acc_mag" ], + "dtype": "int32", "shape": [ 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" ], - "dtype": "int32" + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] } }, "authors": { @@ -61419,114 +67556,7 @@ "version_info": "Initial Release", 
"data_path": "data/chunk-{id}/episode_{id}.parquet", "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", - "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" - }, - "G1edu-u3_bowl_storage_grape_singletry": { - "path": "G1edu-u3_bowl_storage_grape_singletry", - "dataset_name": "bowl_storage_grape_singletry", - "robot_type": "", - "end_effector_type": [ - "three_finger_hand" - ], - "scene_type": [], - "atomic_actions": [ - "grasp", - "pick", - "place" - ], - "tasks": "Grasp the grapes with left hand", - "objects": [ - { - "object_name": "table", - "level1": "furniture", - "level2": "table", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bread", - "level1": "food", - "level2": "bread", - "level3": null, - "level4": null, - "level5": null - }, - { - "object_name": "bowl", - "level1": "bowl", - "level2": "bowl", - "level3": null, - "level4": null, - "level5": null - } - ], - "operation_platform_height": 77.2, - "frame_range": "0-119619", - "dataset_size": "2.4GB", - "statistics": { - "total_episodes": 242, - "total_frames": 119619, - "total_tasks": 1, - "total_videos": 726, - "total_chunks": 1, - "chunks_size": 1000, - "fps": 30 - }, - "dataset_uuid": "40947b8c-339b-414f-94d9-6d5f24520362", - "language": [ - "en", - "zh" - ], - "task_categories": [ - "robotics" - ], - "sub_tasks": [ - "Grasp the grapes with left hand", - "Grasp the bread with the left gripper", - "Static", - "End", - "Place the bread the bowl with the left gripper", - "Place the grapes in the plate with left hand", - "null" - ], - "annotations": { - "subtask_annotation": "auto_generated", - "scene_annotation": "auto_generated", - "eef_direction": "auto_generated", - "eef_velocity": "auto_generated", - "eef_acc_mag": "auto_generated", - "gripper_mode": "auto_generated", - "gripper_activity": "auto_generated" - }, - "authors": { - "contributed_by": [ - { - "name": "RoboCOIN", - "url": 
"https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ], - "annotated_by": [ - { - "name": "RoboCOIN", - "url": "https://flagopen.github.io/RoboCOIN/", - "affiliation": "RoboCOIN Team" - } - ] - }, - "homepage": "https://flagopen.github.io/RoboCOIN/", - "paper": "https://arxiv.org/abs/2511.17441", - "repository": "https://github.com/FlagOpen/RoboCOIN", - "license": "apache-2.0", - "tags": [ - "RoboCOIN", - "LeRobot" - ], - "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n }", - "depth_enabled": false, - "data_schema": "G1edu-u3_bowl_storage_grape_singletry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── 
gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", - "structure": "G1edu-u3_bowl_storage_grape_singletry_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_left_high/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist/\n ├── 
episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, "RMC-AIDA-L_fruit_storage": { "path": "RMC-AIDA-L_fruit_storage", @@ -67830,6 +73860,740 @@ "data_schema": "AIRBOT_MMK2_place_the_cake_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "AIRBOT_MMK2_place_the_cake_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── 
episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Agilex_Cobot_Magic_move_object_beige_tablecloth": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_move_object_beige_tablecloth", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "commercial & convenience", + "level2": "supermarket", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "white_table_cloths", + "level1": "laboratory_supplies", + "level2": "white_table_cloths", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "waffle", + "level1": "food", + "level2": "waffle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "food", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "food", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mango", + "level1": "food", + "level2": "mango", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": 
"chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mint_candy", + "level1": "food", + "level2": "mint_candy", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mangosteen", + "level1": "food", + "level2": "mangosteen", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "orange", + "level1": "food", + "level2": "orange", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bread", + "level1": "food", + "level2": "bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "food", + "level2": "banana", + "level3": null, + "level4": "Fruit cake", + "level5": null + }, + { + "object_name": "cake", + "level1": "food", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beef_cheeseburger", + "level1": "food", + "level2": "beef_cheeseburger", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "kitchen_supplies", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pan", + "level1": "kitchen_supplies", + "level2": "pan", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_teapot", + "level1": "kitchen_supplies", + "level2": "small_teapot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "small_teacup", + "level1": "kitchen_supplies", + "level2": "small_teacup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "paper_ball", + "level1": "trash", + "level2": "paper_ball", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_square_towel", + "level1": "daily_necessities", + "level2": "brown_square_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "black_cylindrical_pen_holder", + "level1": 
"stationery", + "level2": "black_cylindrical_pen_holder", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pink_long_towel", + "level1": "daily_necessities", + "level2": "pink_long_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_necessities", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "toys", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "stationery", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bowl", + "level1": "kitchen_supplies", + "level2": "bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_long_towel", + "level1": "daily_necessities", + "level2": "blue_long_towel", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "the gripper move the object." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the XX with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the XX on the table with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Place the XX on the table with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Grasp the XX with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "End", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 199, + "total_frames": 103966, + "fps": 30, + "total_tasks": 6, + "total_videos": 597, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "1.34 GB" + }, + "frame_num": 103966, + "dataset_size": "1.34 GB", + "data_structure": "Agilex_Cobot_Magic_move_object_beige_tablecloth_qced_hardlink/\n|-- 
annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (187 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:198" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + 
"names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": 
"int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + 
"shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai 
Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "Airbot_MMK2_take_block_both_hands": { "task_categories": [ "robotics" @@ -70325,6 +77089,501 @@ "data_schema": "R1_Lite_clean_the_sink_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── 
(...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "R1_Lite_clean_the_sink_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Airbot_MMK2_move_block": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_move_block", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "bedroom", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "square_building_blocks", + "level1": "toys", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "early_education_toys", + "level1": "toys", + "level2": "early_education_toys", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the square blocks into the circular toy." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the yellow block in the blue circle with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the yellow block with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Grasp the yellow block with the rightt gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the yellow block in the blue circle with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "End", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 50, + "total_frames": 7264, + "fps": 30, + "total_tasks": 6, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + 
"camera_views": 4, + "dataset_size": "300.47 MB" + }, + "frame_num": 7264, + "dataset_size": "300.47 MB", + "data_structure": "Airbot_MMK2_move_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + 
"shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + 
"right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + 
"right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, 
Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, "Tianqin_A2_box_storage_part": { "path": "Tianqin_A2_box_storage_part", "dataset_name": "box_storage_part", @@ -79585,6 +86844,650 @@ "data_schema": "leju_robot_hotel_services_ae_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── 
observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "leju_robot_hotel_services_ae_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "agilex_cobot_magic_pass_object_left_to_right_black_tablecloth": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", 
+ "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "agilex_cobot_magic_pass_object_left_to_right_black_tablecloth", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "commercial_convenience", + "level2": "supermarket", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ambrosial_yogurt", + "level1": "food", + "level2": "ambrosial_yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "food", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "long_bread", + "level1": "food", + "level2": "long_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "milk", + "level1": "food", + "level2": "milk", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yogurt", + "level1": "food", + "level2": "yogurt", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "grape", + "level1": "food", + "level2": "grape", + "level3": null, + "level4": null, + "level5": null + }, + { + 
"object_name": "ham_sausage", + "level1": "food", + "level2": "ham_sausage", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eggplant", + "level1": "food", + "level2": "eggplant", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "food", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eyeglass_case", + "level1": "laboratory_supplies", + "level2": "eyeglass_case", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "purple_trash_bag", + "level1": "trash", + "level2": "purple_trash_bag", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cleanser", + "level1": "daily_necessities", + "level2": "cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_eraser", + "level1": "stationery", + "level2": "whiteboard_eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "candle", + "level1": "daily_necessities", + "level2": "candle", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "black_table_cloths", + "level1": "laboratory_supplies", + "level2": "black_table_cloths", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use the left gripper to pick up the item and transfer it from the left gripper to the right gripper." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the XX with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the XX on the table with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Place the XX on the table with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Grasp the XX with the left gripper", + "subtask_index": 3 + }, + { + "subtask": "End", + "subtask_index": 4 + }, + { + "subtask": "Pass the xx to the right gripper", + "subtask_index": 5 + }, + { + "subtask": "null", + "subtask_index": 6 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower", + "handover", + "takeover" + ], + "robot_name": [ + "agilex_cobot_magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 98, + "total_frames": 60078, + "fps": 30, + "total_tasks": 7, + "total_videos": 294, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "1.18 GB" + }, + "frame_num": 60078, + 
"dataset_size": "1.18 GB", + "data_structure": "Agilex_Cobot_Magic_pass_object_left_to_right_black_tablecloth_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:97" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + 
"height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + 
"shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + 
] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai 
Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "Cobot_Magic_clear_the_desktop": { "path": "Cobot_Magic_clear_the_desktop", "dataset_name": "clear_the_desktop", @@ -84594,6 +92497,1029 @@ "data_schema": "G1edu-u3_food_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── 
episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "G1edu-u3_food_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.color_left_wrist/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.color_right_wrist/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Galaxea_R1_Lite_storage_object_brown_bowl": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the 
\"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_storage_object_brown_bowl", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "brown_bowl", + "level1": "plastic_bowl", + "level2": "brown_bowl", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "banana", + "level1": "fruits", + "level2": "banana", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_chemical_products", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_cup", + "level1": "cups", + "level2": "blue_cup", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_pot", + "level1": "cookware", + "level2": "blue_pot", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "toast_slices", + "level1": "bread", + "level2": "toast_slices", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_towel", + "level1": "towels", + "level2": "brown_towel", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "can", + "level1": "snacks", + "level2": "can", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "coke(slim_can)", + 
"level1": "beverages", + "level2": "coke(slim_can)", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "potato_chips", + "level1": "snacks", + "level2": "potato_chips", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chocolate", + "level1": "snacks", + "level2": "chocolate", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "compass", + "level1": "rulers", + "level2": "compass", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "block_pillar", + "level1": "building_blocks", + "level2": "block_pillar", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "egg_beater", + "level1": "spoons_and_spatulas", + "level2": "egg_beater", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "eraser", + "level1": "erasers", + "level2": "eraser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "chewing_gum", + "level1": "snacks", + "level2": "chewing_gum", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": "mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "fruits", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "fruits", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "power_strip", + "level1": "electrical_control_equipment", + "level2": "power_strip", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "round_bread", + "level1": "bread", + "level2": "round_bread", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mentholatum_facial_cleanser", + "level1": "daily_chemical_products", + "level2": 
"mentholatum_facial_cleanser", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "square_building_blocks", + "level1": "building_blocks", + "level2": "square_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "tape", + "level1": "stationery", + "level2": "tape", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "cake", + "level1": "bread", + "level2": "cake", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "duck", + "level1": "doll", + "level2": "duck", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "ambrosial_yogurt", + "level1": "beverages", + "level2": "ambrosial_yogurt", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use a gripper to pick the target object and place on the brown bowl." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the blue pot with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the coke in the bowl with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Grasp the plugboard with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the back scratcher in the bowl with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the plugboard in the bowl with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "Place the compasses in the bowl with the right gripper", + "subtask_index": 5 + }, + { + "subtask": "Grasp the potato chips with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the banana with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Place the square chewing gum in the bowl with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Place the round wooden block in the bowl with the right gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the tin in the bowl with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the compasses with the right gripper", + "subtask_index": 11 + }, + { + "subtask": "Grasp the duck toy with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Place the potato chips in the bowl with the left gripper", + "subtask_index": 13 + }, + { + "subtask": "Grasp the blue cup with the left gripper", + "subtask_index": 14 + }, + { + "subtask": "Place the chocolate cake in the bowl with the right gripper", + "subtask_index": 15 + }, + { + "subtask": "Grasp the back scratcher with the right gripper", + "subtask_index": 16 + }, + { + "subtask": "Place the square wooden block in the bowl with the right gripper", + "subtask_index": 17 + }, + { + "subtask": "Grasp the chocolate cake with the right gripper", + "subtask_index": 18 + }, + { + "subtask": "Grasp the shower sphere with the left gripper", + "subtask_index": 19 + }, + { + "subtask": "Grasp the 
plugboard with the right gripper", + "subtask_index": 20 + }, + { + "subtask": "Grasp the yogurt with the right gripper", + "subtask_index": 21 + }, + { + "subtask": "Grasp the tin with the left gripper", + "subtask_index": 22 + }, + { + "subtask": "Grasp the brown towel with the left gripper", + "subtask_index": 23 + }, + { + "subtask": "Place the square wooden block in the bowl with the left gripper", + "subtask_index": 24 + }, + { + "subtask": "Grasp the hard facial cleanser with the left gripper", + "subtask_index": 25 + }, + { + "subtask": "Grasp the brown towel with the right gripper", + "subtask_index": 26 + }, + { + "subtask": "Place the potato chips in the bowl with the right gripper", + "subtask_index": 27 + }, + { + "subtask": "Place the duck toy in the bowl with the left gripper", + "subtask_index": 28 + }, + { + "subtask": "Grasp the hard facial cleanser with the right gripper", + "subtask_index": 29 + }, + { + "subtask": "Place the green lemon in the bowl with the right gripper", + "subtask_index": 30 + }, + { + "subtask": "Grasp the peach with the left gripper", + "subtask_index": 31 + }, + { + "subtask": "Place the peach in the bowl with the left gripper", + "subtask_index": 32 + }, + { + "subtask": "Place the soft facial cleanser in the bowl with the right gripper", + "subtask_index": 33 + }, + { + "subtask": "Place the shower sphere in the bowl with the left gripper", + "subtask_index": 34 + }, + { + "subtask": "Place the banana in the bowl with the right gripper", + "subtask_index": 35 + }, + { + "subtask": "Place the shower sphere in the bowl with the right gripper", + "subtask_index": 36 + }, + { + "subtask": "Grasp the green lemon with the right gripper", + "subtask_index": 37 + }, + { + "subtask": "Place the brown towel in the bowl with the right gripper", + "subtask_index": 38 + }, + { + "subtask": "Place the blackboard erasure in the bowl with the left gripper", + "subtask_index": 39 + }, + { + "subtask": "Grasp the potato chips with the 
left gripper", + "subtask_index": 40 + }, + { + "subtask": "Grasp the duck toy with the right gripper", + "subtask_index": 41 + }, + { + "subtask": "End", + "subtask_index": 42 + }, + { + "subtask": "Place the blue cup in the bowl with the right gripper", + "subtask_index": 43 + }, + { + "subtask": "Grasp the blackboard erasure with the left gripper", + "subtask_index": 44 + }, + { + "subtask": "Grasp the coke with the left gripper", + "subtask_index": 45 + }, + { + "subtask": "Grasp the round wooden block with the left gripper", + "subtask_index": 46 + }, + { + "subtask": "Place the round wooden block in the bowl with the left gripper", + "subtask_index": 47 + }, + { + "subtask": "Place the banana in the bowl with the left gripper", + "subtask_index": 48 + }, + { + "subtask": "Place the chocolate cake in the bowl with the left gripper", + "subtask_index": 49 + }, + { + "subtask": "Place the tape in the bowl with the right gripper", + "subtask_index": 50 + }, + { + "subtask": "Grasp the compasses with the left gripper", + "subtask_index": 51 + }, + { + "subtask": "Place the chocolate in the bowl with the left gripper", + "subtask_index": 52 + }, + { + "subtask": "Grasp the blue pot with the right gripper", + "subtask_index": 53 + }, + { + "subtask": "Place the hard facial cleanser in the bowl with the left gripper", + "subtask_index": 54 + }, + { + "subtask": "Grasp the round bread with the right gripper", + "subtask_index": 55 + }, + { + "subtask": "Grasp the chocolate cake with the left gripper", + "subtask_index": 56 + }, + { + "subtask": "Place the hard facial cleanser in the bowl with the right gripper", + "subtask_index": 57 + }, + { + "subtask": "Place the round bread in the bowl with the right gripper", + "subtask_index": 58 + }, + { + "subtask": "Grasp the tape with the right gripper", + "subtask_index": 59 + }, + { + "subtask": "Place the tape in the bowl with the left gripper", + "subtask_index": 60 + }, + { + "subtask": "Grasp the coke with the right 
gripper", + "subtask_index": 61 + }, + { + "subtask": "Grasp the tape with the left gripper", + "subtask_index": 62 + }, + { + "subtask": "Place the duck toy in the bowl with the right gripper", + "subtask_index": 63 + }, + { + "subtask": "Grasp the square chewing gum with the right gripper", + "subtask_index": 64 + }, + { + "subtask": "Place the blue pot in the bowl with the left gripper", + "subtask_index": 65 + }, + { + "subtask": "Grasp the shower sphere with the right gripper", + "subtask_index": 66 + }, + { + "subtask": "Place the plugboard in the bowl with the left gripper", + "subtask_index": 67 + }, + { + "subtask": "Place the coke in the bowl with the right gripper", + "subtask_index": 68 + }, + { + "subtask": "Place the round bread in the bowl with the left gripper", + "subtask_index": 69 + }, + { + "subtask": "Place the blue cup in the bowl with the left gripper", + "subtask_index": 70 + }, + { + "subtask": "Place the blue pot in the bowl with the right gripper", + "subtask_index": 71 + }, + { + "subtask": "Grasp the round bread with the left gripper", + "subtask_index": 72 + }, + { + "subtask": "Grasp the chocolate with the left gripper", + "subtask_index": 73 + }, + { + "subtask": "Grasp the square wooden block with the left gripper", + "subtask_index": 74 + }, + { + "subtask": "Place the yogurt in the bowl with the right gripper", + "subtask_index": 75 + }, + { + "subtask": "Grasp the blue cup with the right gripper", + "subtask_index": 76 + }, + { + "subtask": "Grasp the soft facial cleanser with the right gripper", + "subtask_index": 77 + }, + { + "subtask": "Place the compasses in the bowl with the left gripper", + "subtask_index": 78 + }, + { + "subtask": "Grasp the square wooden block with the right gripper", + "subtask_index": 79 + }, + { + "subtask": "Place the brown towel in the bowl with the left gripper", + "subtask_index": 80 + }, + { + "subtask": "Grasp the round wooden block with the right gripper", + "subtask_index": 81 + }, + { + 
"subtask": "Grasp the banana with the right gripper", + "subtask_index": 82 + }, + { + "subtask": "null", + "subtask_index": 83 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 101, + "total_frames": 23706, + "fps": 30, + "total_tasks": 84, + "total_videos": 404, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "909.34 MB" + }, + "frame_num": 23706, + "dataset_size": "909.34 MB", + "data_structure": "Galaxea_R1_Lite_storage_object_brown_bowl_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- 
gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (89 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:100" + }, + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": 
false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + 
"left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + 
"names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, 
Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + }, "Cobot_Magic_cube_reset": { "path": "Cobot_Magic_cube_reset", "dataset_name": "cube_reset", @@ -88140,6 +97066,590 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, + "Galaxea_R1_Lite_mix_color": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_mix_color", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "office_workspace", + "level2": "office", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "blue_pigment", + "level1": "materials", + "level2": "blue_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "red_pigment", + "level1": "materials", + "level2": "red_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "white_pigment", + "level1": "materials", + "level2": "white_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "test_tube_rack", + "level1": "holding_utensils", + "level2": "test_tube_rack", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "large_test_tubes", + "level1": "laboratory_supplies", + "level2": "large_test_tubes", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beaker", + "level1": "laboratory_supplies", + "level2": "holding_utensils", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up the test tube with red pigment the test tube with blue pigment and the test tube with white pigment by grippers and pour them into the beaker." 
+ ], + "sub_tasks": [ + { + "subtask": "Pour the red reagent into the graduated cylinder and place the test tube into the paper cup", + "subtask_index": 0 + }, + { + "subtask": "Pour the orange reagent into the graduated cylinder and place the test tube into the paper cup", + "subtask_index": 1 + }, + { + "subtask": "Grasp the red reagent with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Pour the white reagent into the graduated cylinder with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Grasp the red reagent with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "Pour the blue reagent into the graduated cylinder and place the test tube into the paper cup", + "subtask_index": 5 + }, + { + "subtask": "Pour the blue reagent into the graduated cylinder with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "Pour the red reagent into the graduated cylinder with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "End", + "subtask_index": 8 + }, + { + "subtask": "Place the test tube into the paper cup with the right gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the test tube into the paper cup with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the blue reagent with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", + "subtask_index": 12 + }, + { + "subtask": "Grasp the white reagent with the right gripper", + "subtask_index": 13 + }, + { + "subtask": "Pour the white reagent into the graduated cylinder and place the test tube into the paper cup", + "subtask_index": 14 + }, + { + "subtask": "null", + "subtask_index": 15 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place", + "pour" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type 
information.", + "sensor_list": [ + "cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=360x640x3, resolution=640x360, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 50, + "total_frames": 79584, + "fps": 30, + "total_tasks": 16, + "total_videos": 200, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "2.99 GB" + }, + "frame_num": 79584, + "dataset_size": "2.99 GB", + "data_structure": "Galaxea_R1_Lite_Galaxea_R1_Lite_mix_color_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- 
episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (38 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:49" + }, + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 360, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 360, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 360, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 360, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + 
"observation.state": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", 
+ "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + 
"dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": 
"data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + }, "AIRBOT_MMK2_organize_books": { "path": "AIRBOT_MMK2_organize_books", "dataset_name": "organize_books", @@ -92728,6 +102238,781 @@ "data_schema": "leju_robot_moving_parts_u_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "leju_robot_moving_parts_u_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── 
(...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.camera_head_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.camera_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.camera_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Galaxea_R1_Lite_classify_object_three": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_classify_object_three", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "commercial_convenience", + "level2": "supermarket", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "brown_basket", + "level1": "baskets", + "level2": "brown_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yellow_basket", + "level1": "baskets", + "level2": "yellow_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_fruits", + "level1": "fruits", + "level2": "any_fruits", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_vegetables", + "level1": "vegetables", + "level2": "any_vegetables", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_snacks", + "level1": "snacks", + "level2": "any_snacks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "any_bread", + "level1": "foobreadd", + "level3": "any_bread", + "level2": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "place the food in the right basket with the right gripper, and place the non food items in the left basket with the left gripper." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the rubiks cube and put it in the left basket", + "subtask_index": 0 + }, + { + "subtask": "Place the tape in the center of the table", + "subtask_index": 1 + }, + { + "subtask": "Grasp the soft cleanser and put it in the left basket", + "subtask_index": 2 + }, + { + "subtask": "Grasp the back scratcher and put it in the left basket", + "subtask_index": 3 + }, + { + "subtask": "Grasp the apple and put it in the right basket", + "subtask_index": 4 + }, + { + "subtask": "Grasp the yellow marker and put it in the left basket", + "subtask_index": 5 + }, + { + "subtask": "End", + "subtask_index": 6 + }, + { + "subtask": "Grasp the white eraser and put it in the left basket", + "subtask_index": 7 + }, + { + "subtask": "Grasp the power strip and put it in the left basket", + "subtask_index": 8 + }, + { + "subtask": "Grasp the square chewing gum and put it in the right basket", + "subtask_index": 9 + }, + { + "subtask": "Grasp the cleaning agent and put it in the left basket", + "subtask_index": 10 + }, + { + "subtask": "Grasp the blue marker pen and put it in the right basket", + "subtask_index": 11 + }, + { + "subtask": "Grasp the soda water and put it in the right basket", + "subtask_index": 12 + }, + { + "subtask": "Grasp the spoon and put it in the left basket", + "subtask_index": 13 + }, + { + "subtask": "Grasp the duck toys and put it in the left basket", + "subtask_index": 14 + }, + { + "subtask": "Grasp the blue marker pen and put it in the left basket", + "subtask_index": 15 + }, + { + "subtask": "Grasp the shampoo and put it in the left basket", + "subtask_index": 16 + }, + { + "subtask": "Grasp the triangle cake and put it in the right basket", + "subtask_index": 17 + }, + { + "subtask": "Grasp the brown plate and put it in the left basket", + "subtask_index": 18 + }, + { + "subtask": "Grasp the cookie and put it in the right basket", + "subtask_index": 19 + }, + { + "subtask": "Grasp the yellow cake and put it in 
the right basket", + "subtask_index": 20 + }, + { + "subtask": "Grasp the shower sphere and put it in the left basket", + "subtask_index": 21 + }, + { + "subtask": "Grasp the orange and put it in the right basket", + "subtask_index": 22 + }, + { + "subtask": "Grasp the compass and put it in the left basket", + "subtask_index": 23 + }, + { + "subtask": "Grasp the round bread and put it in the right basket", + "subtask_index": 24 + }, + { + "subtask": "Grasp the lemon and put it in the right basket", + "subtask_index": 25 + }, + { + "subtask": "Grasp the egg yolk pastry and put it in the right basket", + "subtask_index": 26 + }, + { + "subtask": "Grasp the soap and put it in the left basket", + "subtask_index": 27 + }, + { + "subtask": "Grasp the washing liquid and put it in the left basket", + "subtask_index": 28 + }, + { + "subtask": "Grasp the hard cleanser and put it in the left basket", + "subtask_index": 29 + }, + { + "subtask": "Grasp the milk and put it in the right basket", + "subtask_index": 30 + }, + { + "subtask": "Grasp the black marker and put it in the left basket", + "subtask_index": 31 + }, + { + "subtask": "Grasp the banana and put it in the right basket", + "subtask_index": 32 + }, + { + "subtask": "Grasp the black glass cup and put it in the left basket", + "subtask_index": 33 + }, + { + "subtask": "Grasp the blue marker and put it in the right basket", + "subtask_index": 34 + }, + { + "subtask": "Grasp the bath ball and put it in the left basket", + "subtask_index": 35 + }, + { + "subtask": "Abnormal", + "subtask_index": 36 + }, + { + "subtask": "Grasp the peeler and put it in the left basket", + "subtask_index": 37 + }, + { + "subtask": "Grasp the brown towel and put it in the left basket", + "subtask_index": 38 + }, + { + "subtask": "Grasp the peach and put it in the right basket", + "subtask_index": 39 + }, + { + "subtask": "Grasp the tea cup and put it in the left basket", + "subtask_index": 40 + }, + { + "subtask": "Grasp the brush and put 
it in the left basket", + "subtask_index": 41 + }, + { + "subtask": "Grasp the chocolate and put it in the right basket", + "subtask_index": 42 + }, + { + "subtask": "Grasp the grey towel and put it in the left basket", + "subtask_index": 43 + }, + { + "subtask": "Place the peach doll in the center of the table", + "subtask_index": 44 + }, + { + "subtask": "Grasp the rubiks cube and put it in the right basket", + "subtask_index": 45 + }, + { + "subtask": "Grasp the tape and put it in the left basket", + "subtask_index": 46 + }, + { + "subtask": "Grasp the bread slice and put it in the right basket", + "subtask_index": 47 + }, + { + "subtask": "Grasp the glasses case and put it in the left basket", + "subtask_index": 48 + }, + { + "subtask": "Grasp the soda water and put it in the left basket", + "subtask_index": 49 + }, + { + "subtask": "Grasp the peach doll and put it in the right basket", + "subtask_index": 50 + }, + { + "subtask": "Grasp the blue cup and put it in the left basket", + "subtask_index": 51 + }, + { + "subtask": "Grasp the spoon and put it in the right basket", + "subtask_index": 52 + }, + { + "subtask": "Grasp the pen container and put it in the left basket", + "subtask_index": 53 + }, + { + "subtask": "Grasp the red duck and put it in the left basket", + "subtask_index": 54 + }, + { + "subtask": "Grasp the glasses case and put it in the right basket", + "subtask_index": 55 + }, + { + "subtask": "Grasp the long bread and put it in the right basket", + "subtask_index": 56 + }, + { + "subtask": "Grasp the yogurt and put it in the right basket", + "subtask_index": 57 + }, + { + "subtask": "Grasp the potato chips and put it in the right basket", + "subtask_index": 58 + }, + { + "subtask": "Grasp the can and put it in the right basket", + "subtask_index": 59 + }, + { + "subtask": "Grasp the long bread and put it in the left basket", + "subtask_index": 60 + }, + { + "subtask": "Grasp the yellow duck and put it in the left basket", + "subtask_index": 61 + 
}, + { + "subtask": "Grasp the coke and put it in the right basket", + "subtask_index": 62 + }, + { + "subtask": "null", + "subtask_index": 63 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 197, + "total_frames": 134891, + "fps": 30, + "total_tasks": 64, + "total_videos": 788, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "7.32 GB" + }, + "frame_num": 134891, + "dataset_size": "7.32 GB", + "data_structure": "Galaxea_R1_Lite_classify_object_three_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| 
|-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (185 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:196" + }, + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + 
"has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + 
"left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + 
"gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He 
Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + }, "RMC-AIDA-L_clean_table": { "path": "RMC-AIDA-L_clean_table", "dataset_name": "clean_table", @@ -95609,6 +105894,520 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" }, + "Agilex_Cobot_Magic_storage_peach_right": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_storage_peach_right", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_basket", + "level1": "home_storage", + "level2": "brown_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "food", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the peach in the basket with right arm." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the peach with right gripper", + "subtask_index": 0 + }, + { + "subtask": "End", + "subtask_index": 1 + }, + { + "subtask": "Place the peach in the basket with right gripper", + "subtask_index": 2 + }, + { + "subtask": "Abnormal", + "subtask_index": 3 + }, + { + "subtask": "null", + "subtask_index": 4 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 99, + "total_frames": 25876, + "fps": 30, + "total_tasks": 5, + "total_videos": 297, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "277.18 MB" + }, + "frame_num": 25876, + "dataset_size": "277.18 MB", + "data_structure": "Agilex_Cobot_Magic_storage_peach_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- 
gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:98" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": 
"av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + 
}, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": 
"int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian 
Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "Airbot_MMK2_storage_bell_pepper": { "task_categories": [ "robotics" @@ -105247,6 +116046,483 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, + "Airbot_MMK2_open_door_left": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_open_door_left", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "scene_level1", + "level2": "scene_level2", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "cabinet", + "level1": "furniture", + "level2": "cabinet", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "open the cabinet." 
+ ], + "sub_tasks": [ + { + "subtask": "Touch the door with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Open the door with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "End", + "subtask_index": 2 + }, + { + "subtask": "null", + "subtask_index": 3 + } + ], + "atomic_actions": [ + "open" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 49, + "total_frames": 6418, + "fps": 30, + "total_tasks": 4, + "total_videos": 196, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "188.62 MB" + }, + "frame_num": 6418, + "dataset_size": "188.62 MB", + "data_structure": "Airbot_MMK2_open_door_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- 
eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (37 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:48" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + 
"video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + 
"left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": 
[ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, 
Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, "Cobot_Magic_cut_banana": { "path": "Cobot_Magic_cut_banana", "dataset_name": "cut_banana", @@ -105517,6 +116793,574 @@ "data_schema": "AIRBOT_MMK2_item_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── 
episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "AIRBOT_MMK2_item_storage_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Galaxea_R1_Lite_mix_color_large_test_tube": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + 
], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Galaxea_R1_Lite_mix_color_large_test_tube", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "office_workspace", + "level2": "office", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "red_pigment", + "level1": "materials", + "level2": "red_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "blue_pigment", + "level1": "materials", + "level2": "blue_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "yellow_pigment", + "level1": "materials", + "level2": "yellow_pigment", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "test_tube_rack", + "level1": "holding_utensils", + "level2": "test_tube_rack", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "large_test_tubes", + "level1": "laboratory_supplies", + "level2": "large_test_tubes", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "beaker", + "level1": "holding_utensils", + "level2": "beaker", + "level3": null, + "level4": null, + "level5": null + } + 
], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "pick up a test tube with pigment on left test tube rack and a test tube with pigment on right test tube rack by grippers and pour them into the beaker." + ], + "sub_tasks": [ + { + "subtask": "Place the test tube into the paper cup with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Pour the blue reagent into the graduated cylinder with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Pour the red reagent into the graduated cylinder with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Pour the red reagent into the graduated cylinder with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Grasp the blue reagent with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "End", + "subtask_index": 5 + }, + { + "subtask": "Place the test tube into the paper cup with the right gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the yellow reagent with the right gripper", + "subtask_index": 7 + }, + { + "subtask": "Pour the yellow reagent into the graduated cylinder with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Grasp the red reagent with the left gripper", + "subtask_index": 9 + }, + { + "subtask": "Grasp the red reagent with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "null", + "subtask_index": 11 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place", + "pour" + ], + "robot_name": [ + "Galaxea_R1_Lite" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_left_rgb", + "cam_head_right_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_left_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", 
+ "cam_head_right_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=720x1280x3, resolution=1280x720, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 121, + "total_frames": 131656, + "fps": 30, + "total_tasks": 12, + "total_videos": 484, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 14, + "action_dim": 14, + "camera_views": 4, + "dataset_size": "4.70 GB" + }, + "frame_num": 131656, + "dataset_size": "4.70 GB", + "data_structure": "Galaxea_R1_Lite_mix_color_large_test_tube_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(109 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_left_rgb\n| |-- observation.images.cam_head_right_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:120" + }, + "features": { + "observation.images.cam_head_left_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_head_right_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 720, + 1280, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 720, + "video.width": 1280, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + 
"left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 14 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_gripper_open", + "right_gripper_open" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", 
+ "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is 
fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": 
"videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp4", + "video_url": "videos/chunk-000/observation.images.cam_head_left_rgb/episode_000000.mp4" + }, "R1_Lite_opening_and_closing_aalcony_sliding_doors": { "path": "R1_Lite_opening_and_closing_aalcony_sliding_doors", "dataset_name": "opening_and_closing_aalcony_sliding_doors", @@ -110810,6 +122654,520 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, + "Agilex_Cobot_Magic_storage_peach_left": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_storage_peach_left", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_basket", + "level1": "home_storage", + "level2": "brown_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "peach", + "level1": "food", + "level2": "peach", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the peach in the basket with your left ." 
+ ], + "sub_tasks": [ + { + "subtask": "End", + "subtask_index": 0 + }, + { + "subtask": "Place the peach in the basket with left gripper", + "subtask_index": 1 + }, + { + "subtask": "Grasp the peach with left gripper", + "subtask_index": 2 + }, + { + "subtask": "Abnormal", + "subtask_index": 3 + }, + { + "subtask": "null", + "subtask_index": 4 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 99, + "total_frames": 21979, + "fps": 30, + "total_tasks": 5, + "total_videos": 297, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "264.71 MB" + }, + "frame_num": 21979, + "dataset_size": "264.71 MB", + "data_structure": "Agilex_Cobot_Magic_storage_peach_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- 
gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (87 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:98" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": 
"av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + 
}, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": 
"int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian 
Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "R1_Lite_move_the_position_of_the_spoon": { "path": "R1_Lite_move_the_position_of_the_spoon", "dataset_name": "move_the_position_of_the_spoon", @@ -122257,6 +134615,524 @@ "data_schema": "AIRBOT_MMK2_screw_the_bottle_cap_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── 
observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "AIRBOT_MMK2_screw_the_bottle_cap_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_right_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_third_view/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Realman_RMC-AIDA-L_arrange_flowers": { + "task_categories": [ + 
"robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Realman_RMC-AIDA-L_arrange_flowers", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "living_room", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "furniture", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "flower", + "level1": "plant", + "level2": "flower", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "vise", + "level1": "container", + "level2": "vise", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "the right gripper grasp the flowers and insert them into the vase." 
+ ], + "sub_tasks": [ + { + "subtask": "Grasp the pink flower with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Abnormal", + "subtask_index": 1 + }, + { + "subtask": "Place the pink flower into the vase with with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "End", + "subtask_index": 3 + }, + { + "subtask": "null", + "subtask_index": 4 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Realman_RMC-AIDA-L" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 60, + "total_frames": 22483, + "fps": 30, + "total_tasks": 5, + "total_videos": 180, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 28, + "action_dim": 28, + "camera_views": 3, + "dataset_size": "465.54 MB" + }, + "frame_num": 22483, + "dataset_size": "465.54 MB", + "data_structure": "Realman_RMC-AIDA-L_arrange_flowers_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- 
eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (48 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:59" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, 
+ "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 28 + ], + "names": [ + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_arm_joint_7_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad", + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_arm_joint_7_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 28 + ], + "names": [ + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_arm_joint_7_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad", + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_arm_joint_7_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + 
"task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + 
"right_gripper_open_scale" + ], + "shape": [ + 2 + ], + "dtype": "float32" + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian 
Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "AIRBOT_MMK2_toy_storage": { "path": "AIRBOT_MMK2_toy_storage", "dataset_name": "toy_storage", @@ -133031,6 +145907,524 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, + "Agilex_Cobot_Magic_organize_test_tube": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_organize_test_tube", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "other", + "level2": "laboratory", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "test_tube_rack", + "level1": "laboratory_supplies", + "level2": "test_tube_rack", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "clear_test_tube", + "level1": "laboratory_supplies", + "level2": "clear_test_tube", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "Put the test tube on the table back onto the test tube rack." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the test tube on the test tube rack with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Abnormal", + "subtask_index": 1 + }, + { + "subtask": "Pick up the test tube with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Move the test tube from the right gripper to the left gripper", + "subtask_index": 3 + }, + { + "subtask": "End", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 197, + "total_frames": 159747, + "fps": 30, + "total_tasks": 6, + "total_videos": 591, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "1.92 GB" + }, + "frame_num": 159747, + "dataset_size": "1.92 GB", + "data_structure": 
"Agilex_Cobot_Magic_organize_test_tube_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (185 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:196" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 
480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": 
"int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + 
"left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance 
Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "AgiBot-g1_picks_up_parts_b": { "path": "AgiBot-g1_picks_up_parts_b", "dataset_name": "picks_up_parts_b", @@ -135874,6 +149268,577 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, + "Airbot_MMK2_storage_block": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_storage_block", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "household", + "level2": "bedroom", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "rectangular_building_blocks", + "level1": "building_blocks", + "level2": "rectangular_building_blocks", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "pink_plate", + "level1": "plates", + "level2": "pink_plate", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "put the building blocks into the plate by hand." 
+ ], + "sub_tasks": [ + { + "subtask": "Place the green rectangular build blocks into the white plate with the right gripper", + "subtask_index": 0 + }, + { + "subtask": "Grasp the green cylindrical build blocks with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Place the red cube build blocks into the plate with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Place the green rectangular build blocks into the pink plate with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Place the green cube build blocks into the plate with the right gripper", + "subtask_index": 4 + }, + { + "subtask": "Place the green cylindrical build blocks into the cardboard box with the left gripper", + "subtask_index": 5 + }, + { + "subtask": "Place the red rectangular build blocks into the pink plate with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "Grasp the purple cube build blocks with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Grasp the blue cube build blocks with the right gripper", + "subtask_index": 8 + }, + { + "subtask": "Abnormal", + "subtask_index": 9 + }, + { + "subtask": "Grasp the green cube build blocks with the right gripper", + "subtask_index": 10 + }, + { + "subtask": "Place the purple cube build blocks into the white plate with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "Place the red rectangular build blocks into the white plate with the left gripper", + "subtask_index": 12 + }, + { + "subtask": "Place the red cube build blocks into the white plate with the right gripper", + "subtask_index": 13 + }, + { + "subtask": "Grasp the red cylindrical build blocks with the right gripper", + "subtask_index": 14 + }, + { + "subtask": "Place the blue cube build blocks on the purple cube build blocks with the right gripper", + "subtask_index": 15 + }, + { + "subtask": "Place the green cube build blocks into the pink plate with the left gripper", + "subtask_index": 16 + }, + { + 
"subtask": "Grasp the green rectangular build blocks with the right gripper", + "subtask_index": 17 + }, + { + "subtask": "End", + "subtask_index": 18 + }, + { + "subtask": "Place the red cylindrical build blocks into the cardboard box with the right gripper", + "subtask_index": 19 + }, + { + "subtask": "Grasp the red cube build blocks with the left gripper", + "subtask_index": 20 + }, + { + "subtask": "Grasp the red cube build blocks with the right gripper", + "subtask_index": 21 + }, + { + "subtask": "Grasp the red rectangular build blocks with the left gripper", + "subtask_index": 22 + }, + { + "subtask": "Grasp the green cube build blocks with the left gripper", + "subtask_index": 23 + }, + { + "subtask": "null", + "subtask_index": 24 + } + ], + "atomic_actions": [ + "grasp", + "pick", + "place" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + 
"subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 262, + "total_frames": 55260, + "fps": 30, + "total_tasks": 25, + "total_videos": 1048, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "2.17 GB" + }, + "frame_num": 55260, + "dataset_size": "2.17 GB", + "data_structure": "Airbot_MMK2_storage_block_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(250 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:261" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + 
"left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + 
"dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + 
"right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n 
}\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, "G1edu-u3_put_the_cup_b": { "path": "G1edu-u3_put_the_cup_b", "dataset_name": "put_the_cup_b", @@ -143897,6 +157862,584 @@ "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" }, + "Agilex_Cobot_Magic_storage_object_left": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Agilex_Cobot_Magic_storage_object_left", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "office_workspace", + "level2": "office", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "table", + "level1": "home_storage", + "level2": "table", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "brown_basket", + "level1": "home_storage", + "level2": "brown_basket", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "mango", + "level1": "food", + "level2": "mango", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "green_lemon", + "level1": "food", + "level2": "green_lemon", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "rubik's_cube", + "level1": "toys", + "level2": "rubik's_cube", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "whiteboard_erasers", + "level1": "stationery", + "level2": "whiteboard_erasers", + "level3": null, + "level4": null, + "level5": null + }, + { + "object_name": "bathing_in_flowers", + "level1": "daily_necessities", + "level2": "bathing_in_flowers", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "use the left gripper to grab items from 
the table and place them in the basket." + ], + "sub_tasks": [ + { + "subtask": "Grasp the Rubik's Cube with the left gripper", + "subtask_index": 0 + }, + { + "subtask": "Place the Rubik's Cube into the basket with the left gripper", + "subtask_index": 1 + }, + { + "subtask": "Grasp the apple rubber puff with the left gripper", + "subtask_index": 2 + }, + { + "subtask": "Grasp the mango with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Grasp the Shower puff with the left gripper", + "subtask_index": 4 + }, + { + "subtask": "End", + "subtask_index": 5 + }, + { + "subtask": "Place the apple rubber puff into the basket with the left gripper", + "subtask_index": 6 + }, + { + "subtask": "Place the Shower puff into the basket with the left gripper", + "subtask_index": 7 + }, + { + "subtask": "Grasp the mango with the left gripper", + "subtask_index": 8 + }, + { + "subtask": "Place the mango into the basket with the left gripper", + "subtask_index": 9 + }, + { + "subtask": "Place the chalkboard eraser into the basket with the left gripper", + "subtask_index": 10 + }, + { + "subtask": "Grasp the chalkboard eraser with the left gripper", + "subtask_index": 11 + }, + { + "subtask": "null", + "subtask_index": 12 + } + ], + "atomic_actions": [ + "grasp", + "lift", + "lower" + ], + "robot_name": [ + "Agilex_Cobot_Magic" + ], + "end_effector_type": "two_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": 
"right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "radian", + "end_translation_dim": "meter", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 100, + "total_frames": 26742, + "fps": 30, + "total_tasks": 13, + "total_videos": 300, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 26, + "action_dim": 26, + "camera_views": 3, + "dataset_size": "360.15 MB" + }, + "frame_num": 26742, + "dataset_size": "360.15 MB", + "data_structure": "Agilex_Cobot_Magic_storage_object_left_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- backup\n| |-- data\n| | `-- chunk-000\n| `-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... 
(88 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n|-- videos\n| `-- chunk-000\n| |-- observation.images.cam_head_rgb\n| |-- observation.images.cam_left_wrist_rgb\n| `-- observation.images.cam_right_wrist_rgb\n|-- info.yaml\n`-- README.md", + "splits": { + "train": "0:99" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + 
"right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 26 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "left_gripper_open", + "left_eef_pos_x_m", + "left_eef_pos_y_m", + "left_eef_pos_z_m", + "left_eef_rot_euler_x_rad", + "left_eef_rot_euler_y_rad", + "left_eef_rot_euler_z_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "right_gripper_open", + "right_eef_pos_x_m", + "right_eef_pos_y_m", + "right_eef_pos_z_m", + "right_eef_rot_euler_x_rad", + "right_eef_rot_euler_y_rad", + "right_eef_rot_euler_z_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 5 + ] + }, + "scene_annotation": { + "names": null, + "dtype": "int32", + "shape": [ + 1 + ] + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + 
"left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "dtype": "float32", + "shape": [ + 12 + ] + }, + "eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_state": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_mode_action": { + "names": [ + "left_gripper_mode", + "right_gripper_mode" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_state": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_activity_action": { + "names": [ + "left_gripper_activity", + "right_gripper_activity" + ], + "dtype": "int32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_state": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + }, + "gripper_open_scale_action": { + "names": [ + "left_gripper_open_scale", + "right_gripper_open_scale" + ], + "dtype": "float32", + "shape": [ + 2 + ] + } + }, + "authors": { + 
"contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider 
citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_head_rgb/episode_000000.mp4" + }, "Cobot_Magic_clean_blackboard": { "path": "Cobot_Magic_clean_blackboard", "dataset_name": "clean_blackboard", @@ -150413,6 +164956,491 @@ "data_schema": "R1_Lite_clean_toilet_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)", "structure": "R1_Lite_clean_toilet_qced_hardlink/\n├── annotations/\n│ ├── eef_acc_mag_annotation.jsonl\n│ ├── eef_direction_annotation.jsonl\n│ ├── eef_velocity_annotation.jsonl\n│ ├── gripper_activity_annotation.jsonl\n│ ├── gripper_mode_annotation.jsonl\n│ └── (...)\n├── data/\n│ └── chunk-000/\n│ ├── episode_000000.parquet\n│ ├── 
episode_000001.parquet\n│ ├── episode_000002.parquet\n│ ├── episode_000003.parquet\n│ ├── episode_000004.parquet\n│ └── (...)\n├── meta/\n│ ├── episodes.jsonl\n│ ├── episodes_stats.jsonl\n│ ├── info.json\n│ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n ├── observation.images.cam_high_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n ├── observation.images.cam_left_wrist_rgb/\n │ ├── episode_000000.mp4\n │ ├── episode_000001.mp4\n │ ├── episode_000002.mp4\n │ ├── episode_000003.mp4\n │ ├── episode_000004.mp4\n │ └── (...)\n └── observation.images.cam_right_wrist_rgb/\n ├── episode_000000.mp4\n ├── episode_000001.mp4\n ├── episode_000002.mp4\n ├── episode_000003.mp4\n ├── episode_000004.mp4\n └── (...)" }, + "Airbot_MMK2_open_door_right": { + "task_categories": [ + "robotics" + ], + "language": [ + "en" + ], + "tags": [ + "RoboCOIN", + "LeRobot" + ], + "license": "apache-2.0", + "configs": [ + { + "config_name": "default", + "data_files": "data/chunk-{id}/episode_{id}.parquet" + } + ], + "extra_gated_prompt": "By accessing this dataset, you agree to cite the associated paper in your research/publications—see the \"Citation\" section for details. 
You agree to not use the dataset to conduct experiments that cause harm to human subjects.", + "extra_gated_fields": { + "Company/Organization": { + "type": "text", + "description": "e.g., \"ETH Zurich\", \"Boston Dynamics\", \"Independent Researcher\"" + }, + "Country": { + "type": "country", + "description": "e.g., \"Germany\", \"China\", \"United States\"" + } + }, + "codebase_version": "v2.1", + "dataset_name": "Airbot_MMK2_open_door_right", + "dataset_uuid": "00000000-0000-0000-0000-000000000000", + "scene_type": { + "level1": "other", + "level2": "laboratory", + "level3": null, + "level4": null, + "level5": null + }, + "env_type": "Due to some reasons, this dataset temporarily cannot provide the environment type information.", + "objects": [ + { + "object_name": "cabinet", + "level1": "home_storage", + "level2": "cabinet", + "level3": null, + "level4": null, + "level5": null + } + ], + "task_operation_type": "Due to some reasons, this dataset temporarily cannot provide the operation type information.", + "task_instruction": [ + "open the cabinet." 
+ ], + "sub_tasks": [ + { + "subtask": "End", + "subtask_index": 0 + }, + { + "subtask": "Open the door with the right gripper", + "subtask_index": 1 + }, + { + "subtask": "Touch the door handle with the right gripper", + "subtask_index": 2 + }, + { + "subtask": "Touch the door with the right gripper", + "subtask_index": 3 + }, + { + "subtask": "Abnormal", + "subtask_index": 4 + }, + { + "subtask": "null", + "subtask_index": 5 + } + ], + "atomic_actions": [ + "open" + ], + "robot_name": [ + "Airbot_MMK2" + ], + "end_effector_type": "five_finger_gripper", + "tele_type": "Due to some reasons, this dataset temporarily cannot provide the teleoperation type information.", + "sensor_list": [ + "cam_head_rgb", + "cam_left_wrist_rgb", + "cam_right_wrist_rgb", + "cam_front_rgb" + ], + "came_info": { + "cam_head_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_left_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_right_wrist_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p", + "cam_front_rgb": "dtype=video, shape=480x640x3, resolution=640x480, codec=av1, pix_fmt=yuv420p" + }, + "depth_enabled": false, + "coordinate_definition": "right-hand-frame", + "joint_rotation_dim": "radian", + "end_rotation_dim": "end_rotation_dim", + "end_translation_dim": "end_translation_dim", + "annotations": [ + "eef_acc_mag_annotation.jsonl", + "eef_direction_annotation.jsonl", + "eef_velocity_annotation.jsonl", + "gripper_activity_annotation.jsonl", + "gripper_mode_annotation.jsonl", + "scene_annotations.jsonl", + "subtask_annotations.jsonl" + ], + "statistics": { + "total_episodes": 98, + "total_frames": 31753, + "fps": 30, + "total_tasks": 6, + "total_videos": 392, + "total_chunks": 1, + "chunks_size": 1000, + "state_dim": 36, + "action_dim": 36, + "camera_views": 4, + "dataset_size": "1.03 GB" + }, + "frame_num": 31753, + "dataset_size": "1.03 GB", + 
"data_structure": "Airbot_MMK2_open_door_right_qced_hardlink/\n|-- annotations\n| |-- eef_acc_mag_annotation.jsonl\n| |-- eef_direction_annotation.jsonl\n| |-- eef_velocity_annotation.jsonl\n| |-- gripper_activity_annotation.jsonl\n| |-- gripper_mode_annotation.jsonl\n| |-- scene_annotations.jsonl\n| `-- subtask_annotations.jsonl\n|-- data\n| `-- chunk-000\n| |-- episode_000000.parquet\n| |-- episode_000001.parquet\n| |-- episode_000002.parquet\n| |-- episode_000003.parquet\n| |-- episode_000004.parquet\n| |-- episode_000005.parquet\n| |-- episode_000006.parquet\n| |-- episode_000007.parquet\n| |-- episode_000008.parquet\n| |-- episode_000009.parquet\n| |-- episode_000010.parquet\n| `-- episode_000011.parquet\n| `-- ... (86 more entries)\n|-- meta\n| |-- episodes.jsonl\n| |-- episodes_stats.jsonl\n| |-- info.json\n| `-- tasks.jsonl\n`-- videos\n `-- chunk-000\n |-- observation.images.cam_front_rgb\n |-- observation.images.cam_head_rgb\n |-- observation.images.cam_left_wrist_rgb\n `-- observation.images.cam_right_wrist_rgb", + "splits": { + "train": "0:97" + }, + "features": { + "observation.images.cam_head_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_left_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_right_wrist_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + 
"video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.images.cam_front_rgb": { + "dtype": "video", + "shape": [ + 480, + 640, + 3 + ], + "names": [ + "height", + "width", + "channels" + ], + "info": { + "video.height": 480, + "video.width": 640, + "video.codec": "av1", + "video.pix_fmt": "yuv420p", + "video.is_depth_map": false, + "video.fps": 30, + "video.channels": 3, + "has_audio": false + } + }, + "observation.state": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + "right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "action": { + "dtype": "float32", + "shape": [ + 36 + ], + "names": [ + "left_arm_joint_1_rad", + "left_arm_joint_2_rad", + "left_arm_joint_3_rad", + "left_arm_joint_4_rad", + "left_arm_joint_5_rad", + "left_arm_joint_6_rad", + "right_arm_joint_1_rad", + "right_arm_joint_2_rad", + "right_arm_joint_3_rad", + "right_arm_joint_4_rad", + 
"right_arm_joint_5_rad", + "right_arm_joint_6_rad", + "left_hand_joint_1_rad", + "left_hand_joint_2_rad", + "left_hand_joint_3_rad", + "left_hand_joint_4_rad", + "left_hand_joint_5_rad", + "left_hand_joint_6_rad", + "left_hand_joint_7_rad", + "left_hand_joint_8_rad", + "left_hand_joint_9_rad", + "left_hand_joint_10_rad", + "left_hand_joint_11_rad", + "left_hand_joint_12_rad", + "right_hand_joint_1_rad", + "right_hand_joint_2_rad", + "right_hand_joint_3_rad", + "right_hand_joint_4_rad", + "right_hand_joint_5_rad", + "right_hand_joint_6_rad", + "right_hand_joint_7_rad", + "right_hand_joint_8_rad", + "right_hand_joint_9_rad", + "right_hand_joint_10_rad", + "right_hand_joint_11_rad", + "right_hand_joint_12_rad" + ] + }, + "timestamp": { + "dtype": "float32", + "shape": [ + 1 + ], + "names": null + }, + "frame_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "episode_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "task_index": { + "dtype": "int64", + "shape": [ + 1 + ], + "names": null + }, + "subtask_annotation": { + "names": null, + "shape": [ + 5 + ], + "dtype": "int32" + }, + "scene_annotation": { + "names": null, + "shape": [ + 1 + ], + "dtype": "int32" + }, + "eef_sim_pose_state": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + "eef_sim_pose_action": { + "names": [ + "left_eef_pos_x", + "left_eef_pos_y", + "left_eef_pos_z", + "left_eef_rot_x", + "left_eef_rot_y", + "left_eef_rot_z", + "right_eef_pos_x", + "right_eef_pos_y", + "right_eef_pos_z", + "right_eef_rot_x", + "right_eef_rot_y", + "right_eef_rot_z" + ], + "shape": [ + 12 + ], + "dtype": "float32" + }, + 
"eef_direction_state": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_direction_action": { + "names": [ + "left_eef_direction", + "right_eef_direction" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_state": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_velocity_action": { + "names": [ + "left_eef_velocity", + "right_eef_velocity" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_state": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + }, + "eef_acc_mag_action": { + "names": [ + "left_eef_acc_mag", + "right_eef_acc_mag" + ], + "shape": [ + 2 + ], + "dtype": "int32" + } + }, + "authors": { + "contributed_by": [ + { + "name": "RoboCOIN Team at Beijing Academy of Artificial Intelligence (BAAI)" + } + ] + }, + "dataset_description": "This dataset uses an extended format based on LeRobot and is fully compatible with LeRobot.", + "homepage": "https://flagopen.github.io/RoboCOIN/", + "paper": "https://arxiv.org/abs/2511.17441", + "repository": "https://github.com/FlagOpen/RoboCOIN", + "contact_info": "For questions, issues, or feedback regarding this dataset, please contact us.", + "support_info": "For technical support, please open an issue on our GitHub repository.", + "license_details": "apache-2.0", + "citation_bibtex": "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu, Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang, Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai 
Fu, Ning Chen, Cheng Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu, Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng, Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang, YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang, Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025},\n }\n", + "additional_citations": "If you use this dataset, please also consider citing:\nLeRobot Framework: https://github.com/huggingface/lerobot\n", + "version_info": "Initial Release", + "data_path": "data/chunk-{id}/episode_{id}.parquet", + "video_path": "videos/chunk-{id}/observation.images.cam_left_wrist_rgb/episode_{id}.mp{id}", + "video_url": "videos/chunk-000/observation.images.cam_front_rgb/episode_000000.mp4" + }, "Cobot_Magic_move_plate": { "path": "Cobot_Magic_move_plate", "dataset_name": "move_plate", diff --git a/info/data_index.json b/info/data_index.json index 847f887c9fb6e61f7d4034c367a905fcb2200994..6413d8909e6d4c2d8b360e6f7fa07735f1cafe73 100644 --- a/info/data_index.json +++ b/info/data_index.json @@ -109,6 +109,7 @@ "Agilex_Cobot_Magic_close_drawer_top", "Agilex_Cobot_Magic_close_drawer_upper", "Agilex_Cobot_Magic_connect_block", + "Agilex_Cobot_Magic_erase_board", "Agilex_Cobot_Magic_erase_board_left", "Agilex_Cobot_Magic_erase_board_left_side", "Agilex_Cobot_Magic_erase_board_passing_right_to_left", @@ -117,29 +118,40 @@ "Agilex_Cobot_Magic_fold_shorts_khaki", "Agilex_Cobot_Magic_fold_towel", "Agilex_Cobot_Magic_fold_towel_blue_tray", + "Agilex_Cobot_Magic_fold_towel_grey_tray", 
"Agilex_Cobot_Magic_fold_towel_pink_tray", + "Agilex_Cobot_Magic_fold_towel_yellow_tray", + "Agilex_Cobot_Magic_move_mouse", "Agilex_Cobot_Magic_move_mouse_pen", "Agilex_Cobot_Magic_move_mouse_pen_black_tablecloth", "Agilex_Cobot_Magic_move_mouse_pen_green_tablecloth", "Agilex_Cobot_Magic_move_mouse_pen_khaki_tablecloth", "Agilex_Cobot_Magic_move_mouse_pen_red_tablecloth", + "Agilex_Cobot_Magic_move_object_beige_tablecloth", "Agilex_Cobot_Magic_move_object_black_tablecloth", "Agilex_Cobot_Magic_move_pencil_sharpener", "Agilex_Cobot_Magic_open_drawer_bottom", + "Agilex_Cobot_Magic_organize_test_tube", + "Agilex_Cobot_Magic_pour_drink_bottle_cup", "Agilex_Cobot_Magic_storage_bread_basket", "Agilex_Cobot_Magic_storage_fruit_bowl", "Agilex_Cobot_Magic_storage_lemon_mango", "Agilex_Cobot_Magic_storage_object", "Agilex_Cobot_Magic_storage_object_closest", "Agilex_Cobot_Magic_storage_object_closest_apple", + "Agilex_Cobot_Magic_storage_object_left", "Agilex_Cobot_Magic_storage_orange_basket_left", "Agilex_Cobot_Magic_storage_orange_basket_right", "Agilex_Cobot_Magic_storage_orange_white_bag", + "Agilex_Cobot_Magic_storage_peach_left", + "Agilex_Cobot_Magic_storage_peach_right", + "Airbot_MMK2_click_pen", "Airbot_MMK2_close_drawer", "Airbot_MMK2_close_lid", "Airbot_MMK2_cover_lid", "Airbot_MMK2_dial_number", "Airbot_MMK2_move_apple_orange_pomegranate", + "Airbot_MMK2_move_block", "Airbot_MMK2_move_block_both_hands", "Airbot_MMK2_move_block_gold_bar_models", "Airbot_MMK2_move_block_wet_wipes", @@ -155,6 +167,8 @@ "Airbot_MMK2_move_sword_doll", "Airbot_MMK2_move_tub", "Airbot_MMK2_move_umbrella_tissues", + "Airbot_MMK2_open_door_left", + "Airbot_MMK2_open_door_right", "Airbot_MMK2_open_laptop", "Airbot_MMK2_open_lid", "Airbot_MMK2_pass_paper_box", @@ -174,9 +188,12 @@ "Airbot_MMK2_stack_block", "Airbot_MMK2_stack_bowl", "Airbot_MMK2_stack_cubic_block", + "Airbot_MMK2_storage_and_take_cake_plate", "Airbot_MMK2_storage_apple_orange", "Airbot_MMK2_storage_bell_pepper", + 
"Airbot_MMK2_storage_block", "Airbot_MMK2_storage_block_BBs", + "Airbot_MMK2_storage_block_both_hands", "Airbot_MMK2_storage_block_tape_measure", "Airbot_MMK2_storage_bottle_part", "Airbot_MMK2_storage_bowl", @@ -348,13 +365,17 @@ "Galaxea_R1_Lite_arrange_baai_then_brain", "Galaxea_R1_Lite_change_baai_into_brain", "Galaxea_R1_Lite_classify_object_five", + "Galaxea_R1_Lite_classify_object_four", "Galaxea_R1_Lite_classify_object_green_tablecloth", "Galaxea_R1_Lite_classify_object_six", + "Galaxea_R1_Lite_classify_object_three", "Galaxea_R1_Lite_fold_towel_twice", "Galaxea_R1_Lite_mix_blue_yellow_large_test_tube", "Galaxea_R1_Lite_mix_blue_yellow_left_large_test_tube", "Galaxea_R1_Lite_mix_blue_yellow_left_small_test_tube", "Galaxea_R1_Lite_mix_blue_yellow_right", + "Galaxea_R1_Lite_mix_color", + "Galaxea_R1_Lite_mix_color_large_test_tube", "Galaxea_R1_Lite_mix_color_small_test_tube", "Galaxea_R1_Lite_mix_red_blue_large_test_tube", "Galaxea_R1_Lite_mix_red_blue_left_large_test_tube", @@ -370,6 +391,8 @@ "Galaxea_R1_Lite_pour_water_black_tablecloth", "Galaxea_R1_Lite_storage_object_blue_plate", "Galaxea_R1_Lite_storage_object_brown_basket", + "Galaxea_R1_Lite_storage_object_brown_bowl", + "Galaxea_R1_Lite_storage_object_brown_plate", "Galaxea_R1_Lite_storage_object_dish", "Galaxea_R1_Lite_storage_object_gray_plate", "Galaxea_R1_Lite_storage_object_pink_bowl", @@ -489,6 +512,7 @@ "RMC-AIDA-L_stack_baskets", "RMC-AIDA-L_stir_coffee", "RMC-AIDA-L_storage_bin_storage", + "Realman_RMC-AIDA-L_arrange_flowers", "Realman_RMC-AIDA-L_fold_towel", "Realman_RMC-AIDA-L_hang_clothes", "Realman_RMC-AIDA-L_storage_block_basket", @@ -514,6 +538,7 @@ "Tianqin_A2_box_storage_part", "Tianqin_A2_container_storage_graphics_card", "Tianqin_A2_place_the_paper_box", + "agilex_cobot_magic_pass_object_left_to_right_black_tablecloth", "agilex_cobot_magic_pass_object_left_to_right_green_tablecloth", "agilex_cobot_magic_pass_object_left_to_right_khaki_tablecloth", 
"agilex_cobot_magic_pass_object_right_to_left_black_tablecloth", @@ -581,5 +606,5 @@ "leju_robot_pass_the_cleaner_d", "leju_robot_pass_the_cleaner_e" ], - "count": 580 + "count": 605 } \ No newline at end of file diff --git a/thumbnails/Agilex_Cobot_Magic_erase_board.jpg b/thumbnails/Agilex_Cobot_Magic_erase_board.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2cc39f6719c6b2a040b15b8acd58cda42b54af8e --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_erase_board.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2cd87aad6ddabeaed297c99c04bccba446bee5cdd3b62d7de9f71af9ec464592 +size 12970 diff --git a/thumbnails/Agilex_Cobot_Magic_fold_towel_grey_tray.jpg b/thumbnails/Agilex_Cobot_Magic_fold_towel_grey_tray.jpg new file mode 100644 index 0000000000000000000000000000000000000000..4776fda2540393c1be6f75c828562d959dc69f3e --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_fold_towel_grey_tray.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:998b55b14e66f810920ffca0902e3bcb8c3414dde0268d2f1a9a5776e217589b +size 14968 diff --git a/thumbnails/Agilex_Cobot_Magic_fold_towel_yellow_tray.jpg b/thumbnails/Agilex_Cobot_Magic_fold_towel_yellow_tray.jpg new file mode 100644 index 0000000000000000000000000000000000000000..33b0c1b01a17ae58c8c0d2186cfbcc532e39a2da --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_fold_towel_yellow_tray.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4bd244d577290ec4e832425e7d409a31ac7073130fd3c780b14efff1bacb7120 +size 16277 diff --git a/thumbnails/Agilex_Cobot_Magic_move_mouse.jpg b/thumbnails/Agilex_Cobot_Magic_move_mouse.jpg new file mode 100644 index 0000000000000000000000000000000000000000..53611a5d22b653932cc8ecf703e76aec48014aa2 --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_move_mouse.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93884e2cd70fb2709d08fdcf3da961d5079e51ce17067d9f3bfbc8c6f6c13b47 +size 10091 
diff --git a/thumbnails/Agilex_Cobot_Magic_move_object_beige_tablecloth.jpg b/thumbnails/Agilex_Cobot_Magic_move_object_beige_tablecloth.jpg new file mode 100644 index 0000000000000000000000000000000000000000..85e7297f07fbc78806dc652af01ccbda8e3c5b60 --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_move_object_beige_tablecloth.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:826a5aee459629053daaf0dde5c6abafd4a9b379c56fd4f18686c992a889280b +size 19048 diff --git a/thumbnails/Agilex_Cobot_Magic_organize_test_tube.jpg b/thumbnails/Agilex_Cobot_Magic_organize_test_tube.jpg new file mode 100644 index 0000000000000000000000000000000000000000..21fc4781b35c14b805c3ecb2a36ead390f634332 --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_organize_test_tube.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:22dc88121580fe49f7aa96f4586454c4424c3469baed128339d984a0a22e1d2f +size 15725 diff --git a/thumbnails/Agilex_Cobot_Magic_pour_drink_bottle_cup.jpg b/thumbnails/Agilex_Cobot_Magic_pour_drink_bottle_cup.jpg new file mode 100644 index 0000000000000000000000000000000000000000..e4693a094af852bf72f1196b5d617904504c2225 --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_pour_drink_bottle_cup.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cd164d7a83678848050a7416b918f4933d75f055a660845fbd3c4abdc87e3c46 +size 11573 diff --git a/thumbnails/Agilex_Cobot_Magic_storage_object_left.jpg b/thumbnails/Agilex_Cobot_Magic_storage_object_left.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d22983d6295d8eddddc114dbe9daa2101b7bf1ce --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_storage_object_left.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:60c1be9c11cd557424b79bc577cb77e723b0dc665cd9d3366f95ad3c5c3b8cf9 +size 22455 diff --git a/thumbnails/Agilex_Cobot_Magic_storage_peach_left.jpg b/thumbnails/Agilex_Cobot_Magic_storage_peach_left.jpg new file mode 100644 
index 0000000000000000000000000000000000000000..6decd3744236f0637e80fe201c2d19b4e6bdcbb1 --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_storage_peach_left.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b4606f5b09cfbc65018c5c1845f721f66e08b3520c6b112513df21692f2080a9 +size 17299 diff --git a/thumbnails/Agilex_Cobot_Magic_storage_peach_right.jpg b/thumbnails/Agilex_Cobot_Magic_storage_peach_right.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b3e6ca5d56d7650782af45e6dddb2c8bedf93057 --- /dev/null +++ b/thumbnails/Agilex_Cobot_Magic_storage_peach_right.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4be5e692d962f63cba9bfe8aaf4624ae0590ece62f491aa3b56a4ae9b860c75d +size 16069 diff --git a/thumbnails/Airbot_MMK2_click_pen.jpg b/thumbnails/Airbot_MMK2_click_pen.jpg new file mode 100644 index 0000000000000000000000000000000000000000..cb5dbdf27e975bd261c6a4d7db9a366d6832ce73 --- /dev/null +++ b/thumbnails/Airbot_MMK2_click_pen.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e918d0d0ce66c258a10489ebb0c70fac9ee16bbf379514a857450b8f16692877 +size 34415 diff --git a/thumbnails/Airbot_MMK2_move_block.jpg b/thumbnails/Airbot_MMK2_move_block.jpg new file mode 100644 index 0000000000000000000000000000000000000000..2c770d6f4560df89e642789617b7ecd80894d74d --- /dev/null +++ b/thumbnails/Airbot_MMK2_move_block.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e4321e19a1494178526fbd5a90db93e2ae2f6e080e0246fa76f21225957f82a4 +size 48245 diff --git a/thumbnails/Airbot_MMK2_open_door_left.jpg b/thumbnails/Airbot_MMK2_open_door_left.jpg new file mode 100644 index 0000000000000000000000000000000000000000..99bd8ac9d0dd2645e38eccb3131a6cb20a582609 --- /dev/null +++ b/thumbnails/Airbot_MMK2_open_door_left.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d20405aae2afabeebe7706c964a672f50f443a35fdf64c81d4232b51708bc11f +size 26437 
diff --git a/thumbnails/Airbot_MMK2_open_door_right.jpg b/thumbnails/Airbot_MMK2_open_door_right.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0e678a94b67c3e4e09c1cf9ed72ee3e1b8810627 --- /dev/null +++ b/thumbnails/Airbot_MMK2_open_door_right.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:415ea198144acf328079be5cf688b8cfe467218b268328bd80a1c866113eb6d5 +size 26248 diff --git a/thumbnails/Airbot_MMK2_storage_and_take_cake_plate.jpg b/thumbnails/Airbot_MMK2_storage_and_take_cake_plate.jpg new file mode 100644 index 0000000000000000000000000000000000000000..dfbd9c27233aeccbbdcef3521d64156ac2d62f48 --- /dev/null +++ b/thumbnails/Airbot_MMK2_storage_and_take_cake_plate.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eb573f477d6d6c8b45d5c2a220faeaceabf1e46973ce2bd7dbc3b047b20bf048 +size 54705 diff --git a/thumbnails/Airbot_MMK2_storage_block.jpg b/thumbnails/Airbot_MMK2_storage_block.jpg new file mode 100644 index 0000000000000000000000000000000000000000..220d9a13887be48698697e28083fff227f6a84c3 --- /dev/null +++ b/thumbnails/Airbot_MMK2_storage_block.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ae948c870ab6664fb6641ac6871786432b95f6ed8c45a76db97db694f70c1f0d +size 42685 diff --git a/thumbnails/Airbot_MMK2_storage_block_both_hands.jpg b/thumbnails/Airbot_MMK2_storage_block_both_hands.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b53519220e2603431b502b7e8c329343a139c749 --- /dev/null +++ b/thumbnails/Airbot_MMK2_storage_block_both_hands.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6ebae87ba1d37d16efba9bc6b595848c980e9576a88e18c9b84d3b4e6366e988 +size 27674 diff --git a/thumbnails/Galaxea_R1_Lite_classify_object_four.jpg b/thumbnails/Galaxea_R1_Lite_classify_object_four.jpg new file mode 100644 index 0000000000000000000000000000000000000000..cfbe3784752e83588f135a2e67e35fc0356d7829 --- /dev/null +++ 
b/thumbnails/Galaxea_R1_Lite_classify_object_four.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d360407054fe88d0bfb3832370bf4e0179fcd66cb968d5bb917e4edb1c32ba99 +size 46352 diff --git a/thumbnails/Galaxea_R1_Lite_classify_object_three.jpg b/thumbnails/Galaxea_R1_Lite_classify_object_three.jpg new file mode 100644 index 0000000000000000000000000000000000000000..49892dd007409874599fce705593cf990c4e6a90 --- /dev/null +++ b/thumbnails/Galaxea_R1_Lite_classify_object_three.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58e85b421971c336fa386171d15de581017dcd6e58e5410648e95678db457b1c +size 50680 diff --git a/thumbnails/Galaxea_R1_Lite_mix_color.jpg b/thumbnails/Galaxea_R1_Lite_mix_color.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b090923e42a04c3cfd1f59b0b244f84dbe431421 --- /dev/null +++ b/thumbnails/Galaxea_R1_Lite_mix_color.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:663b50f2eade14f0604a8dfba9fcd0d7e0b28b9ac0afc8a793786876b4875f54 +size 44767 diff --git a/thumbnails/Galaxea_R1_Lite_mix_color_large_test_tube.jpg b/thumbnails/Galaxea_R1_Lite_mix_color_large_test_tube.jpg new file mode 100644 index 0000000000000000000000000000000000000000..fbf27a7c7c1876d67d7a666cae20dfe34254c63d --- /dev/null +++ b/thumbnails/Galaxea_R1_Lite_mix_color_large_test_tube.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f2c1e155c4b8c35f309770b4c596667aab6591876fa3ffcbaaefef82d92e8889 +size 29636 diff --git a/thumbnails/Galaxea_R1_Lite_storage_object_brown_bowl.jpg b/thumbnails/Galaxea_R1_Lite_storage_object_brown_bowl.jpg new file mode 100644 index 0000000000000000000000000000000000000000..74aa8bd97c82b583af378d6ba0d28e193c3d4b0c --- /dev/null +++ b/thumbnails/Galaxea_R1_Lite_storage_object_brown_bowl.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f942ceb3f572b9125d05b904a306cfc91c05ffc663363452afad2eb3fe4208fc +size 
37764 diff --git a/thumbnails/Galaxea_R1_Lite_storage_object_brown_plate.jpg b/thumbnails/Galaxea_R1_Lite_storage_object_brown_plate.jpg new file mode 100644 index 0000000000000000000000000000000000000000..b534fc8875c3526709c258d85f5990fd0141f142 --- /dev/null +++ b/thumbnails/Galaxea_R1_Lite_storage_object_brown_plate.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f6491d5c16c0cef5560b51ad7fbc8d46033b13bcb2531ab11a4086a77354409 +size 31733 diff --git a/thumbnails/Realman_RMC-AIDA-L_arrange_flowers.jpg b/thumbnails/Realman_RMC-AIDA-L_arrange_flowers.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0035f9344b9f698f8231c9d93a26171ec587fb60 --- /dev/null +++ b/thumbnails/Realman_RMC-AIDA-L_arrange_flowers.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cb56be483fd5dc79455e3363546512b33e0e898c03bf0f3dfb88ed2f8559520b +size 30621 diff --git a/thumbnails/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.jpg b/thumbnails/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.jpg new file mode 100644 index 0000000000000000000000000000000000000000..808f0c9d9b43944cf0273f6f4258ee6e53c98aff --- /dev/null +++ b/thumbnails/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ce37d8eac4a58ff04a754472889da2de41d8715fd77c95bb08d4d38d92974379 +size 15597 diff --git a/videos/Agilex_Cobot_Magic_erase_board.mp4 b/videos/Agilex_Cobot_Magic_erase_board.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..669794a64ed17d69b90069e88e6ca40f17105b74 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_erase_board.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b590a19b2f7354149951136b1e40e7d2aa55186db0b37870d1afbb665bf9395 +size 542798 diff --git a/videos/Agilex_Cobot_Magic_fold_towel_grey_tray.mp4 b/videos/Agilex_Cobot_Magic_fold_towel_grey_tray.mp4 new file mode 
100644 index 0000000000000000000000000000000000000000..7e6f823e62ec571112b231464df33b5cfdfb8299 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_fold_towel_grey_tray.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d72cbd3a7f5b51c4a0a47867b06dc012d73972afc6ee3f9d1d12fd39ea90405c +size 1399737 diff --git a/videos/Agilex_Cobot_Magic_fold_towel_yellow_tray.mp4 b/videos/Agilex_Cobot_Magic_fold_towel_yellow_tray.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..5005a3d2034c351ef47682ffb872939dc86cadfb --- /dev/null +++ b/videos/Agilex_Cobot_Magic_fold_towel_yellow_tray.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d9d5451468aa5b62861b841f9919aed33f0cfeb10eb831d358c2a3dac7bb3881 +size 1473341 diff --git a/videos/Agilex_Cobot_Magic_move_mouse.mp4 b/videos/Agilex_Cobot_Magic_move_mouse.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..439013ea126c9456000089845c221d8c98f54372 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_move_mouse.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:44ef31c2ca6051661a0bd2fbb3b7544abad74a50b6375c979d9d3e0893c481ca +size 477990 diff --git a/videos/Agilex_Cobot_Magic_move_object_beige_tablecloth.mp4 b/videos/Agilex_Cobot_Magic_move_object_beige_tablecloth.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..668e993efd7f88daffb0d2bcefa219652290bdaa --- /dev/null +++ b/videos/Agilex_Cobot_Magic_move_object_beige_tablecloth.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2a840fe623367bb6bcdf777c564aa6eedb713131d5dc19a482a35ff1586eb8ff +size 542654 diff --git a/videos/Agilex_Cobot_Magic_organize_test_tube.mp4 b/videos/Agilex_Cobot_Magic_organize_test_tube.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..fb6c7ec4a9566448b3482b70850f57b649858812 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_organize_test_tube.mp4 @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:f49722ce4d1a4589300e5daa534f74357980b413c3fcfb7ca2315ba9252233ed +size 1401829 diff --git a/videos/Agilex_Cobot_Magic_pour_drink_bottle_cup.mp4 b/videos/Agilex_Cobot_Magic_pour_drink_bottle_cup.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..9bcdc352d41bc40163569f6ae9a71b4f7c24a41e --- /dev/null +++ b/videos/Agilex_Cobot_Magic_pour_drink_bottle_cup.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67ab3b773f330d305bc136d8dc021b05c0ae2e2c20ff2fe2157530a968f59245 +size 1216066 diff --git a/videos/Agilex_Cobot_Magic_storage_object_left.mp4 b/videos/Agilex_Cobot_Magic_storage_object_left.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..fe9b68215efd85c75cb881a4c007415b2ad9ace3 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_storage_object_left.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ce009484f7b311621229f641ef1ad0d99726afd54b329d0ba785dab7ee970d67 +size 365089 diff --git a/videos/Agilex_Cobot_Magic_storage_peach_left.mp4 b/videos/Agilex_Cobot_Magic_storage_peach_left.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..ebf6830447b6df8647ee2c169b9a48dbf8c20b82 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_storage_peach_left.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d4f69a487bc481e5093ccdf8593328306249bef6c31417d16b0c13a4976d32da +size 406823 diff --git a/videos/Agilex_Cobot_Magic_storage_peach_right.mp4 b/videos/Agilex_Cobot_Magic_storage_peach_right.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..e740ffc2330e40ffe0ec44a1933f766a558f7557 --- /dev/null +++ b/videos/Agilex_Cobot_Magic_storage_peach_right.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:405ee0f61f02be9464a200d63dd7dc7b5d957e308e7e13d3f090963b91adc757 +size 462676 diff --git a/videos/Airbot_MMK2_click_pen.mp4 b/videos/Airbot_MMK2_click_pen.mp4 new file 
mode 100644 index 0000000000000000000000000000000000000000..9e84d9ac72b323fddca98c9312db06e0e872017d --- /dev/null +++ b/videos/Airbot_MMK2_click_pen.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fee8c262fc64c363e9cbe6af72cd52214a0f579f32a34814f61294cef938e913 +size 720746 diff --git a/videos/Airbot_MMK2_move_block.mp4 b/videos/Airbot_MMK2_move_block.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..bcf586e646b2c23bfb4ebcfb1283f6603ccec2d8 --- /dev/null +++ b/videos/Airbot_MMK2_move_block.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e4e1855bff4210b85dd1d5177f616f6d33ad0c6df09fd941ad8035498feaa615 +size 380634 diff --git a/videos/Airbot_MMK2_open_door_left.mp4 b/videos/Airbot_MMK2_open_door_left.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..8cbfc0808dfd2f9cae83427d3a861c961c19dddf --- /dev/null +++ b/videos/Airbot_MMK2_open_door_left.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9abfdb400cf2dba11a58d977906b7cbb752d39b7bdf62272506f0a71ec8743eb +size 211272 diff --git a/videos/Airbot_MMK2_open_door_right.mp4 b/videos/Airbot_MMK2_open_door_right.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..bd9c597f1f1981ca46343aaf95205a7bfc173501 --- /dev/null +++ b/videos/Airbot_MMK2_open_door_right.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:48a13b72ee4d06e555eebf397b2a3ca1017a91e275e81598d912b7c159a13b87 +size 1270156 diff --git a/videos/Airbot_MMK2_storage_and_take_cake_plate.mp4 b/videos/Airbot_MMK2_storage_and_take_cake_plate.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..d5421c69cc155c2a2ab953de52d9c1375d3f50b9 --- /dev/null +++ b/videos/Airbot_MMK2_storage_and_take_cake_plate.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b392b4b638db71835dd02e5070904d7e06e642652155e7124e66243d6737e19c +size 737585 diff --git 
a/videos/Airbot_MMK2_storage_block.mp4 b/videos/Airbot_MMK2_storage_block.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..95fee9042e48337fc561a776328028d6242f112c --- /dev/null +++ b/videos/Airbot_MMK2_storage_block.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ad2fc831a93c892d2a58df3b4f1fc32079fbea083284bea00a2edccd52fba2e +size 300212 diff --git a/videos/Airbot_MMK2_storage_block_both_hands.mp4 b/videos/Airbot_MMK2_storage_block_both_hands.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..104d81e1d427924bf7cb716528c75b9bfc2d8c49 --- /dev/null +++ b/videos/Airbot_MMK2_storage_block_both_hands.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d8265b56bd12a2b278632c0260ecc8178a52130597519433ba1aa20b4b457ce1 +size 214241 diff --git a/videos/Galaxea_R1_Lite_classify_object_four.mp4 b/videos/Galaxea_R1_Lite_classify_object_four.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..a5e74fe3437ad7d594367c82e50708d8472d48d3 --- /dev/null +++ b/videos/Galaxea_R1_Lite_classify_object_four.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:661f899ead76554461b085cbda763a6afd7333f78258410c13ad035a1ab160fc +size 3691025 diff --git a/videos/Galaxea_R1_Lite_classify_object_three.mp4 b/videos/Galaxea_R1_Lite_classify_object_three.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..c878c2943ad77146e6bd6820aece0930b219a661 --- /dev/null +++ b/videos/Galaxea_R1_Lite_classify_object_three.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:537b823b6d8d72efb81a2757665d9a199119e3fee3afdfda773ff0651a0a8502 +size 2967593 diff --git a/videos/Galaxea_R1_Lite_mix_color.mp4 b/videos/Galaxea_R1_Lite_mix_color.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..9aa70944ad343c6508161f5116e201a6438d5fdb --- /dev/null +++ b/videos/Galaxea_R1_Lite_mix_color.mp4 @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:f66201907c4c0d854aedd218ff0e5059bac40c37d7c63a20039921e541e55683 +size 6853595 diff --git a/videos/Galaxea_R1_Lite_mix_color_large_test_tube.mp4 b/videos/Galaxea_R1_Lite_mix_color_large_test_tube.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..aab345b0ce7e2da37842e40031fb637b67152fb2 --- /dev/null +++ b/videos/Galaxea_R1_Lite_mix_color_large_test_tube.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a72ca61f3a0e1dad8dc12ab33cb930ab040b26c33b460ed8b350bb38c6533918 +size 4055937 diff --git a/videos/Galaxea_R1_Lite_storage_object_brown_bowl.mp4 b/videos/Galaxea_R1_Lite_storage_object_brown_bowl.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..88bf4bcffe660ffe177f4c6dc56afe0e17de5610 --- /dev/null +++ b/videos/Galaxea_R1_Lite_storage_object_brown_bowl.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7db830562dc6274f89998dbb188e64cefb1447e7c53b9bc14149835b8eef561c +size 1355038 diff --git a/videos/Galaxea_R1_Lite_storage_object_brown_plate.mp4 b/videos/Galaxea_R1_Lite_storage_object_brown_plate.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..4ae26c6df856da599b700c2d9ae27478e8af986c --- /dev/null +++ b/videos/Galaxea_R1_Lite_storage_object_brown_plate.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1da2fb314fd33072a33e1d9986b4942ca4578f08cf72e9fedd67e70ad198cd5b +size 788157 diff --git a/videos/Realman_RMC-AIDA-L_arrange_flowers.mp4 b/videos/Realman_RMC-AIDA-L_arrange_flowers.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..21b2c51e45a6c35c5d0ca4fa9936dc9acf3cc766 --- /dev/null +++ b/videos/Realman_RMC-AIDA-L_arrange_flowers.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:36e029c0fce4950fe6f5cf132a2a4026511732818bcfcf8cb54f82669501ae2d +size 710335 diff --git 
a/videos/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.mp4 b/videos/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..27eabfe5deac1b2311340140ad2d1b003c7c3140 --- /dev/null +++ b/videos/agilex_cobot_magic_pass_object_left_to_right_black_tablecloth.mp4 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ad64537eed5e7b858f64349ece55ec2e528125cc06a456547e0fa8aa36dbb01a +size 994870