diff --git a/.gitattributes b/.gitattributes
index a6344aac8c09253b3b630fb776ae94478aa0275b..5c45dbce09feef30173b68f5e2c87514ce272faf 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -33,3 +33,56 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+assets/meshes/head_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_ankle_roll_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_elbow_link_merge.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_hand_index_0_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_hand_index_1_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_hand_middle_0_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_hand_middle_1_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_hand_palm_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_hand_thumb_1_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_hand_thumb_2_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_hip_pitch_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_hip_roll_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_hip_yaw_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_knee_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_rubber_hand.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_shoulder_pitch_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_shoulder_roll_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_shoulder_yaw_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_wrist_roll_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_wrist_roll_rubber_hand.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/left_wrist_yaw_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/logo_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/pelvis.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/pelvis_contour_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_ankle_roll_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_elbow_link_merge.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_hand_index_0_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_hand_index_1_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_hand_middle_0_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_hand_middle_1_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_hand_palm_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_hand_thumb_1_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_hand_thumb_2_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_hip_pitch_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_hip_roll_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_hip_yaw_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_knee_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_rubber_hand.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_shoulder_pitch_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_shoulder_roll_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_shoulder_yaw_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_wrist_roll_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_wrist_roll_rubber_hand.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/right_wrist_yaw_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/torso_constraint_L_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/torso_constraint_R_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/torso_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/torso_link_rev_1_0.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/waist_constraint_L.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/waist_constraint_R.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/waist_support_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/waist_yaw_link.STL filter=lfs diff=lfs merge=lfs -text
+assets/meshes/waist_yaw_link_rev_1_0.STL filter=lfs diff=lfs merge=lfs -text
diff --git a/CAMERA_README.md b/CAMERA_README.md
new file mode 100644
index 0000000000000000000000000000000000000000..dd407a876d3cdad6fdc41731616332e741a3843e
--- /dev/null
+++ b/CAMERA_README.md
@@ -0,0 +1,256 @@
+# Camera System for MuJoCo G1 Simulator
+
+## Overview
+
+The simulator has two cameras defined:
+
+### 1. **`head_camera`** - Robot Ego-View
+- **Location**: Attached to `torso_link` body
+- **Position**: `[0.06, 0.0, 0.45]` relative to torso (6cm forward, 45cm up)
+- **Orientation**: `euler="0 -0.8 -1.57"` (facing forward, slightly tilted down)
+- **FOV**: 90 degrees
+- **Purpose**: First-person view from the robot's perspective (like a head-mounted camera)
+
+### 2. **`global_view`** - Third-Person View
+- **Location**: Fixed in world coordinates
+- **Position**: `[2.910, -5.040, 3.860]` (behind and above the robot)
+- **Purpose**: External observer view for visualization
+
+## How Camera Publishing Works
+
+The camera system uses a **zero-copy architecture** with three components:
+
+```
+┌─────────────────────┐
+│ MuJoCo Simulator │
+│ (Main Process) │
+│ │
+│ 1. Render cameras │
+│ 2. Copy to shmem │──┐
+└─────────────────────┘ │
+ │ Shared Memory
+ │ (fast IPC)
+ ┌────▼────────────────┐
+ │ Image Publisher │
+ │ (Subprocess) │
+ │ │
+ │ 3. Encode images │
+ │ 4. ZMQ publish │
+ └─────────┬───────────┘
+ │
+ │ TCP (ZMQ)
+ │ port 5555
+ ▼
+ ┌─────────────────────┐
+ │ Your Policy/Client │
+ │ (Subscribe) │
+ └─────────────────────┘
+```
+
+### Key Technologies:
+- **MuJoCo Renderer**: Captures RGB images from virtual cameras
+- **Shared Memory (`multiprocessing.shared_memory`)**: Zero-copy transfer between processes
+- **ZMQ (ZeroMQ)**: Network socket for publishing images (TCP)
+- **No ROS2 required!** Pure Python multiprocessing
+
+## Usage
+
+### Basic Simulation (No Camera Publishing)
+```bash
+python run_sim.py
+```
+
+### With Camera Publishing
+```bash
+# Publish head camera on default port 5555
+python run_sim.py --publish-images
+
+# Publish multiple cameras
+python run_sim.py --publish-images --cameras head_camera global_view
+
+# Custom port
+python run_sim.py --publish-images --camera-port 6000
+```
+
+### Viewing Camera Streams
+
+In a **separate terminal**, run the camera viewer:
+
+```bash
+# Basic usage (default: localhost:5555)
+python view_cameras.py
+
+# Custom host/port
+python view_cameras.py --host 192.168.1.100 --port 6000
+
+# Save images to directory
+python view_cameras.py --save ./camera_recordings
+
+# Adjust display rate
+python view_cameras.py --fps 60
+```
+
+**Keyboard Controls:**
+- `q`: Quit viewer
+- `s`: Save snapshot of current frame
+
+**Example Workflow:**
+```bash
+# Terminal 1: Start simulator with camera publishing
+python run_sim.py --publish-images
+
+# Terminal 2: View the camera feed
+python view_cameras.py
+```
+
+### Receiving Images in Your Code
+
+```python
+import zmq
+import numpy as np
+from sim.sensor_utils import ImageUtils
+
+# Connect to camera publisher
+context = zmq.Context()
+socket = context.socket(zmq.SUB)
+socket.connect("tcp://localhost:5555")
+socket.setsockopt(zmq.SUBSCRIBE, b"") # Subscribe to all messages
+
+while True:
+ # Receive serialized image data
+ data = socket.recv_pyobj()
+
+ # Decode images
+ if "head_camera" in data:
+ # Decode image (returns numpy array HxWx3 uint8)
+ img = ImageUtils.decode_image(data["head_camera"])
+
+ # Use image for your policy
+ process_observation(img)
+```
+
+## Camera Configuration
+
+Edit `config.yaml` to change camera settings:
+
+```yaml
+IMAGE_DT: 0.033333 # Publishing rate (30 Hz)
+ENABLE_OFFSCREEN: false # Set to true to enable offscreen camera rendering
+MP_START_METHOD: "spawn" # Multiprocessing method
+```
+
+Or programmatically in `run_sim.py`:
+
+```python
+camera_configs = {
+ "head_camera": {
+ "height": 480,
+ "width": 640
+ },
+ "custom_camera": {
+ "height": 224,
+ "width": 224
+ }
+}
+```
+
+## Adding Custom Cameras
+
+Edit `assets/g1_29dof_with_hand.xml` or `assets/scene_43dof.xml`:
+
+```xml
+
+
+
+
+
+
+
+
+
+```
+
+Then publish it:
+```bash
+python run_sim.py --publish-images --cameras my_camera
+```
+
+## Performance Notes
+
+- **Rendering overhead**: ~5-10ms per camera per frame @ 640x480
+- **Publishing overhead**: ~2-3ms for encoding + network
+- Image publishing runs in **separate subprocess** to not block simulation
+- Uses **shared memory** for fast inter-process image transfer
+- Target: 30 FPS camera publishing while maintaining 500 Hz simulation
+
+## Troubleshooting
+
+### No images received?
+1. Check if offscreen rendering is enabled (`--publish-images` flag)
+2. Verify ZMQ port is not blocked
+3. Check camera exists in scene XML
+
+### Images are delayed?
+- Reduce `IMAGE_DT` in config
+- Lower camera resolution
+- Use fewer cameras
+
+### "Camera not found" error?
+- Verify camera name in XML matches config
+- Check XML syntax is valid
+- Ensure MuJoCo model loads successfully
+
+## Quick Reference
+
+### File Structure
+```
+mujoco_sim_g1/
+├── run_sim.py # Simulator launcher
+├── view_cameras.py # Camera viewer
+├── config.yaml # Simulator config
+├── assets/
+│ ├── scene_43dof.xml # Scene with global_view camera
+│ └── g1_29dof_with_hand.xml # Robot model with head_camera
+└── sim/
+ ├── base_sim.py # MuJoCo environment
+ ├── sensor_utils.py # ZMQ camera server/client
+ └── image_publish_utils.py # Multiprocessing image publisher
+```
+
+### Camera Definitions
+
+Edit these files to modify cameras:
+
+**`assets/g1_29dof_with_hand.xml`** - Robot-attached cameras:
+```xml
+
+
+
+```
+
+**`assets/scene_43dof.xml`** - World-frame cameras:
+```xml
+
+
+
+```
+
+### Complete Example
+
+```bash
+# Terminal 1: Start simulator with camera publishing
+cd mujoco_sim_g1
+python run_sim.py --publish-images --cameras head_camera global_view
+
+# Terminal 2: View cameras in real-time
+python view_cameras.py
+
+# Terminal 3: Use in your policy (Python code)
+from sim.sensor_utils import SensorClient, ImageUtils
+client = SensorClient()
+client.start_client("localhost", 5555)
+data = client.receive_message()
+img = ImageUtils.decode_image(data["head_camera"])
+# img is now numpy array (H, W, 3) in BGR format
+```
+
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..3fa6b01e16d8a417ca347077c084da7cf97eae89
--- /dev/null
+++ b/README.md
@@ -0,0 +1,167 @@
+# Standalone MuJoCo Simulator for Unitree G1
+
+This is a standalone MuJoCo physics simulator for the Unitree G1 robot, extracted from the GR00T-WholeBodyControl repository.
+
+## Features
+
+- **Physics Simulation**: Runs G1 robot in MuJoCo at 500Hz (2ms timestep)
+- **DDS Communication**: Uses Unitree SDK2 DDS for robot communication
+- **Compatible**: Works with existing `unitree_g1.py` control code via DDS
+- **Visualization**: Real-time 3D visualization of robot motion
+
+## Directory Structure
+
+```
+mujoco_sim_g1/
+├── requirements.txt # Python dependencies
+├── run_sim.py # Main launcher script
+├── config.yaml # Simulation configuration
+├── sim/ # Simulation modules
+│ ├── base_sim.py
+│ ├── simulator_factory.py
+│ ├── unitree_sdk2py_bridge.py
+│ └── ...
+└── assets/ # Robot models
+ ├── g1_29dof_with_hand.xml
+ └── meshes/*.STL
+```
+
+## Installation
+
+### 1. Create Virtual Environment (Recommended)
+
+```bash
+cd mujoco_sim_g1
+python3 -m venv venv
+source venv/bin/activate # On Windows: venv\Scripts\activate
+```
+
+### 2. Install Dependencies
+
+```bash
+pip install -r requirements.txt
+```
+
+**Note**: If you encounter issues with `rclpy`, you can comment out the ROS2 imports in `sim/base_sim.py` (lines 11, 609-617) if you don't need camera publishing.
+
+## Usage
+
+### Basic Usage
+
+```bash
+# Activate environment
+source venv/bin/activate
+
+# Run the simulator
+python run_sim.py
+```
+
+The simulator will:
+1. Load the G1 robot model
+2. Initialize DDS communication on domain 0
+3. Open a MuJoCo visualization window
+4. Start listening for motor commands via DDS
+
+### Running with Your Robot Control Code
+
+Once the simulator is running, you can control it from another terminal:
+
+```bash
+# In another terminal
+cd /path/to/your/lerobot
+conda activate unitree_lerobot
+
+# Run your existing control code
+python test_locomotion_minimal.py
+```
+
+Your `unitree_g1.py` code will automatically connect to the simulator via DDS!
+
+## Configuration
+
+Edit `config.yaml` to customize:
+
+- **SIMULATE_DT**: Simulation timestep (default: 0.002s = 500Hz)
+- **DOMAIN_ID**: DDS domain ID (default: 0)
+- **ENABLE_ONSCREEN**: Show visualization (default: true)
+- **USE_JOYSTICK**: Enable gamepad control (default: false)
+- **ROBOT_SCENE**: Path to MuJoCo XML scene
+
+### PD Gains
+
+The simulator uses the following PD gains (matching NVIDIA GR00T):
+
+**Legs (indices 0-11):**
+- Hip joints: KP=150, KD=2
+- Knee joints: KP=300, KD=4
+- Ankle joints: KP=40, KD=2
+
+**Waist (indices 12-14):**
+- All waist joints: KP=250, KD=5
+
+**Arms (indices 15-28):**
+- Shoulders: KP=100, KD=2-5
+- Elbows/Wrists: KP=20-40, KD=1-2
+
+## Troubleshooting
+
+### ImportError: cannot import name 'ChannelFactoryInitialize'
+
+```bash
+pip install --upgrade unitree-sdk2py
+```
+
+### ROS2/rclpy errors
+
+If you don't need camera publishing, edit `sim/base_sim.py`:
+- Comment out line 11: `import rclpy`
+- Comment out lines 609-617 (ROS2 initialization)
+
+### Meshes not found
+
+Make sure mesh paths in `assets/g1_29dof_with_hand.xml` are relative:
+```xml
+
+```
+
+### Robot falls immediately
+
+Check that:
+1. PD gains match NVIDIA's values (see config.yaml)
+2. Velocity command scaling is correct (ang_vel_scale=0.25, cmd_scale=[2.0, 2.0, 0.25])
+3. Observations for missing joints are zeroed out (indices 12, 14, 20, 21, 27, 28)
+
+## Technical Details
+
+### Communication
+
+The simulator publishes:
+- **`rt/lowstate`**: Robot state (joint positions, velocities, IMU, etc.)
+- **`rt/wirelesscontroller`**: Wireless remote controller state (if joystick enabled)
+
+The simulator subscribes to:
+- **`rt/lowcmd`**: Motor commands (position, velocity, torque, KP, KD)
+
+### Coordinate Frames
+
+- **World frame**: Z-up
+- **Joint ordering**: 29 DOF (12 legs + 3 waist + 14 arms)
+- **IMU**: Quaternion in [w, x, y, z] format
+
+### Performance
+
+- Simulation runs at ~500Hz (2ms timestep)
+- Viewer updates at ~50Hz (20ms)
+- Typical CPU usage: 20-40% on single core
+
+## Files from GR00T-WholeBodyControl
+
+This standalone simulator was extracted from:
+- `gr00t_wbc/control/envs/g1/sim/` (simulation modules)
+- `gr00t_wbc/control/robot_model/model_data/g1/` (robot model files)
+- `gr00t_wbc/control/main/teleop/configs/g1_29dof_gear_wbc.yaml` (configuration)
+
+## License
+
+Follows the license of the original GR00T-WholeBodyControl repository.
+
diff --git a/assets/g1_29dof_old.xml b/assets/g1_29dof_old.xml
new file mode 100644
index 0000000000000000000000000000000000000000..4fd64d181bcbdddcf6e34174daec6f2bace164ce
--- /dev/null
+++ b/assets/g1_29dof_old.xml
@@ -0,0 +1,568 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/assets/g1_29dof_with_hand.xml b/assets/g1_29dof_with_hand.xml
new file mode 100644
index 0000000000000000000000000000000000000000..6507fbfa9ec4c5d6ebbecdbda33a753a8c5cc33a
--- /dev/null
+++ b/assets/g1_29dof_with_hand.xml
@@ -0,0 +1,749 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/assets/g1_29dof_with_hand_original.xml b/assets/g1_29dof_with_hand_original.xml
new file mode 100644
index 0000000000000000000000000000000000000000..a53cd991faf3a296bf67acbf78cd702ce15c31d0
--- /dev/null
+++ b/assets/g1_29dof_with_hand_original.xml
@@ -0,0 +1,748 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/assets/meshes/head_link.STL b/assets/meshes/head_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..401f822751c33dab53c6cee77eb62890aca5cd8f
--- /dev/null
+++ b/assets/meshes/head_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:005fb67fbd3eff94aa8bf4a6e83238174e9f91b6721f7111594322f223724411
+size 932784
diff --git a/assets/meshes/left_ankle_pitch_link.STL b/assets/meshes/left_ankle_pitch_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..69de8490184afc698f633e34bb74e65351bd4689
Binary files /dev/null and b/assets/meshes/left_ankle_pitch_link.STL differ
diff --git a/assets/meshes/left_ankle_roll_link.STL b/assets/meshes/left_ankle_roll_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..cb69f65c6a8059fc6720cb393540f579b4d0c26e
--- /dev/null
+++ b/assets/meshes/left_ankle_roll_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c4092af943141d4d9f74232f3cfa345afc6565f46a077793b8ae0e68b39dc33f
+size 653384
diff --git a/assets/meshes/left_elbow_link.STL b/assets/meshes/left_elbow_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..1a96d99ba469960173129084ab6dd3bf8a732a71
Binary files /dev/null and b/assets/meshes/left_elbow_link.STL differ
diff --git a/assets/meshes/left_elbow_link_merge.STL b/assets/meshes/left_elbow_link_merge.STL
new file mode 100644
index 0000000000000000000000000000000000000000..f287918e8b94df6efea3a7d117d854faf878c930
--- /dev/null
+++ b/assets/meshes/left_elbow_link_merge.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:73c8adc6346b900eb6a2ae7159e25dfda1be60cd98affe13536e01ab33a6c1de
+size 3136784
diff --git a/assets/meshes/left_hand_index_0_link.STL b/assets/meshes/left_hand_index_0_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..a87568d6835efb4f38efa6935c6ef30a2af633e0
--- /dev/null
+++ b/assets/meshes/left_hand_index_0_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b35f2f77211d5a366f0b9a4e47c4ee35e536731266f1a34e9efa12db579b892
+size 475984
diff --git a/assets/meshes/left_hand_index_1_link.STL b/assets/meshes/left_hand_index_1_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..c6c91dd461805b2f0e89a1202e68f5b095b74a41
--- /dev/null
+++ b/assets/meshes/left_hand_index_1_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e315ebc8a7a0cb98e033985b586b20d81cf8aa761181ae61ce56fcb14077a06
+size 1521784
diff --git a/assets/meshes/left_hand_middle_0_link.STL b/assets/meshes/left_hand_middle_0_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..a87568d6835efb4f38efa6935c6ef30a2af633e0
--- /dev/null
+++ b/assets/meshes/left_hand_middle_0_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b35f2f77211d5a366f0b9a4e47c4ee35e536731266f1a34e9efa12db579b892
+size 475984
diff --git a/assets/meshes/left_hand_middle_1_link.STL b/assets/meshes/left_hand_middle_1_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..c6c91dd461805b2f0e89a1202e68f5b095b74a41
--- /dev/null
+++ b/assets/meshes/left_hand_middle_1_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e315ebc8a7a0cb98e033985b586b20d81cf8aa761181ae61ce56fcb14077a06
+size 1521784
diff --git a/assets/meshes/left_hand_palm_link.STL b/assets/meshes/left_hand_palm_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..c8fcc3e4c3a864842b002eb94f6b692314ec97b8
--- /dev/null
+++ b/assets/meshes/left_hand_palm_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:23a486b75bd78a9bf03cec25d84d87f97f3dae038cf21a743b6d469b337e4004
+size 2140184
diff --git a/assets/meshes/left_hand_thumb_0_link.STL b/assets/meshes/left_hand_thumb_0_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..3028bb4d6e1ae3d30d2504259c08e4106bbacf63
Binary files /dev/null and b/assets/meshes/left_hand_thumb_0_link.STL differ
diff --git a/assets/meshes/left_hand_thumb_1_link.STL b/assets/meshes/left_hand_thumb_1_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..54f77542da4eab84d91035acd25405c09fb9d2e8
--- /dev/null
+++ b/assets/meshes/left_hand_thumb_1_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:445c54a45bc13ce36001556f66bc0f49c83cb40321205ae4d676bb2874325684
+size 475984
diff --git a/assets/meshes/left_hand_thumb_2_link.STL b/assets/meshes/left_hand_thumb_2_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..3e4f124f2311fc11c2dcd7cec417da892ffd14c0
--- /dev/null
+++ b/assets/meshes/left_hand_thumb_2_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3d8dbe5085acfc213d21aa8b0782e89cd79084e9678f3a85fc7b04a86b029db5
+size 1521784
diff --git a/assets/meshes/left_hip_pitch_link.STL b/assets/meshes/left_hip_pitch_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..4cf7475bf6bbaef47d5b3f7644a34c73b3d64037
--- /dev/null
+++ b/assets/meshes/left_hip_pitch_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4725168105ee768ee31638ef22b53f6be2d7641bfd7cfefe803488d884776fa4
+size 181684
diff --git a/assets/meshes/left_hip_roll_link.STL b/assets/meshes/left_hip_roll_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..585f6040c38571177d55519417a0f46ca7b29381
--- /dev/null
+++ b/assets/meshes/left_hip_roll_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:91f25922ee8a7c3152790051bebad17b4d9cd243569c38fe340285ff93a97acf
+size 192184
diff --git a/assets/meshes/left_hip_yaw_link.STL b/assets/meshes/left_hip_yaw_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..b46a74134f90c568cbdfb8484eeb83a0503b4504
--- /dev/null
+++ b/assets/meshes/left_hip_yaw_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a16d88aa6ddac8083aa7ad55ed317bea44b1fa003d314fba88965b7ed0f3b55b
+size 296284
diff --git a/assets/meshes/left_knee_link.STL b/assets/meshes/left_knee_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..2dcf84e4018ab9e4cff48cecdafe433601c48480
--- /dev/null
+++ b/assets/meshes/left_knee_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8d92b9e3d3a636761150bb8025e32514c4602b91c7028d523ee42b3e632de477
+size 854884
diff --git a/assets/meshes/left_rubber_hand.STL b/assets/meshes/left_rubber_hand.STL
new file mode 100644
index 0000000000000000000000000000000000000000..04a2fa22b7f60f2d00c64ec996b33a5219c2b633
--- /dev/null
+++ b/assets/meshes/left_rubber_hand.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cff2221a690fa69303f61fce68f2d155c1517b52efb6ca9262dd56e0bc6e70fe
+size 2287484
diff --git a/assets/meshes/left_shoulder_pitch_link.STL b/assets/meshes/left_shoulder_pitch_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..926d9807f1cbf6ad0d41170904785eb30427c848
--- /dev/null
+++ b/assets/meshes/left_shoulder_pitch_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f0d1cfd02fcf0d42f95e678eeca33da3afbcc366ffba5c052847773ec4f31d52
+size 176784
diff --git a/assets/meshes/left_shoulder_roll_link.STL b/assets/meshes/left_shoulder_roll_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..4c6840b93ea15d1ba7b4dac0f18f94655867cab7
--- /dev/null
+++ b/assets/meshes/left_shoulder_roll_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fb9df21687773522598dc384f1a2945c7519f11cbc8bd372a49170316d6eee88
+size 400284
diff --git a/assets/meshes/left_shoulder_yaw_link.STL b/assets/meshes/left_shoulder_yaw_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..89b0e0661275ed832c674a46893239ad2e5bef85
--- /dev/null
+++ b/assets/meshes/left_shoulder_yaw_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1aa97e9748e924336567992181f78c7cd0652fd52a4afcca3df6b2ef6f9e712e
+size 249184
diff --git a/assets/meshes/left_wrist_pitch_link.STL b/assets/meshes/left_wrist_pitch_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..82cc224a8e41251d879502f9809e31d0988ec7f9
Binary files /dev/null and b/assets/meshes/left_wrist_pitch_link.STL differ
diff --git a/assets/meshes/left_wrist_roll_link.STL b/assets/meshes/left_wrist_roll_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..2097ca3e5bbc0af985d150e5dcda4d8ba9bcf43f
--- /dev/null
+++ b/assets/meshes/left_wrist_roll_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:edc387c9a0ba8c2237e9b296d32531426fabeb6f53e58df45c76106bca74148c
+size 356184
diff --git a/assets/meshes/left_wrist_roll_rubber_hand.STL b/assets/meshes/left_wrist_roll_rubber_hand.STL
new file mode 100644
index 0000000000000000000000000000000000000000..7c58819795c4cdfe406a008bfdbe7ea19d765497
--- /dev/null
+++ b/assets/meshes/left_wrist_roll_rubber_hand.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e81030abd023bd9e4a308ef376d814a2c12d684d8a7670c335bbd5cd7809c909
+size 3484884
diff --git a/assets/meshes/left_wrist_yaw_link.STL b/assets/meshes/left_wrist_yaw_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..692f4b07105fa0ec4ffd989dc60be9b2a33ac9c3
--- /dev/null
+++ b/assets/meshes/left_wrist_yaw_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:83f8fb3a726bf9613d65dd14f0f447cb918c3c95b3938042a0c9c09749267d3b
+size 318684
diff --git a/assets/meshes/logo_link.STL b/assets/meshes/logo_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..6c25961c594c714ada9d32e922250d326c121f4a
--- /dev/null
+++ b/assets/meshes/logo_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8571a0a19bc4916fa55f91449f51d5fdefd751000054865a842449429d5f155b
+size 243384
diff --git a/assets/meshes/pelvis.STL b/assets/meshes/pelvis.STL
new file mode 100644
index 0000000000000000000000000000000000000000..f98a88dbf6c7698791ba694e16cbcc180c40663c
--- /dev/null
+++ b/assets/meshes/pelvis.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5ba6bbc888e630550140d3c26763f10206da8c8bd30ed886b8ede41c61f57a31
+size 1060884
diff --git a/assets/meshes/pelvis_contour_link.STL b/assets/meshes/pelvis_contour_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..8025bc070cd65550f345e9e97bc44a8ff9740dea
--- /dev/null
+++ b/assets/meshes/pelvis_contour_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5cc5c2c7a312329e3feeb2b03d3fc09fc29705bd01864f6767e51be959662420
+size 1805184
diff --git a/assets/meshes/right_ankle_pitch_link.STL b/assets/meshes/right_ankle_pitch_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..e77d8a2fe1e5d56fac049833d254d6ffa4f6b350
Binary files /dev/null and b/assets/meshes/right_ankle_pitch_link.STL differ
diff --git a/assets/meshes/right_ankle_roll_link.STL b/assets/meshes/right_ankle_roll_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..3c38507b9060eda2c40e5e2bb1b174ef5bda0390
--- /dev/null
+++ b/assets/meshes/right_ankle_roll_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4b66222ea56653e627711b56d0a8949b4920da5df091da0ceb343f54e884e3a5
+size 653784
diff --git a/assets/meshes/right_elbow_link.STL b/assets/meshes/right_elbow_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..f259e3812efb9985d6463c04d3e8a4b53793a699
Binary files /dev/null and b/assets/meshes/right_elbow_link.STL differ
diff --git a/assets/meshes/right_elbow_link_merge.STL b/assets/meshes/right_elbow_link_merge.STL
new file mode 100644
index 0000000000000000000000000000000000000000..b042703b9ec2622e73b708f10f86ef149643e60f
--- /dev/null
+++ b/assets/meshes/right_elbow_link_merge.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e6def7100e5bbb276c7498c5b7a8c8812f560ca35d6bd09c376ebab93595be7f
+size 3058284
diff --git a/assets/meshes/right_hand_index_0_link.STL b/assets/meshes/right_hand_index_0_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..5e2ef477e4666a8a26b9f02f89db5641cbbed0ab
--- /dev/null
+++ b/assets/meshes/right_hand_index_0_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12ab4300c95e437e834f9aef772b3b431c671bf34338930b52ad11aef73bbd0d
+size 475984
diff --git a/assets/meshes/right_hand_index_1_link.STL b/assets/meshes/right_hand_index_1_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..8572ae1c90f7c129b2d0a31c1ccfe23e02ed1820
--- /dev/null
+++ b/assets/meshes/right_hand_index_1_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c9c34efce4563cacdcfd29fc838a982976d40f5442c71219811dcbbf3923a33d
+size 1521784
diff --git a/assets/meshes/right_hand_middle_0_link.STL b/assets/meshes/right_hand_middle_0_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..5e2ef477e4666a8a26b9f02f89db5641cbbed0ab
--- /dev/null
+++ b/assets/meshes/right_hand_middle_0_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12ab4300c95e437e834f9aef772b3b431c671bf34338930b52ad11aef73bbd0d
+size 475984
diff --git a/assets/meshes/right_hand_middle_1_link.STL b/assets/meshes/right_hand_middle_1_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..8572ae1c90f7c129b2d0a31c1ccfe23e02ed1820
--- /dev/null
+++ b/assets/meshes/right_hand_middle_1_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c9c34efce4563cacdcfd29fc838a982976d40f5442c71219811dcbbf3923a33d
+size 1521784
diff --git a/assets/meshes/right_hand_palm_link.STL b/assets/meshes/right_hand_palm_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..fd2f7f0a8a9f75db8c45aef9e1a77c6b6224fd2a
--- /dev/null
+++ b/assets/meshes/right_hand_palm_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:86c0b231cc44477d64a6493e5a427ba16617a00738112dd187c652675b086fb9
+size 2140184
diff --git a/assets/meshes/right_hand_thumb_0_link.STL b/assets/meshes/right_hand_thumb_0_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..1cae7f18e16605cb9d6b1d1a0cf6e5c5c360a344
Binary files /dev/null and b/assets/meshes/right_hand_thumb_0_link.STL differ
diff --git a/assets/meshes/right_hand_thumb_1_link.STL b/assets/meshes/right_hand_thumb_1_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..c118de720dd2b5a467c397f9287f0ea01b2ad0a9
--- /dev/null
+++ b/assets/meshes/right_hand_thumb_1_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0a9a820da8dd10f298778b714f1364216e8a5976f4fd3a05689ea26327d44bf6
+size 475984
diff --git a/assets/meshes/right_hand_thumb_2_link.STL b/assets/meshes/right_hand_thumb_2_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..0979fb671e215be58c914ce22f03d47bdd0b6bf3
--- /dev/null
+++ b/assets/meshes/right_hand_thumb_2_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f1bfb37668e8f61801c8d25f171fa1949e08666be86c67acad7e0079937cc45
+size 1521784
diff --git a/assets/meshes/right_hip_pitch_link.STL b/assets/meshes/right_hip_pitch_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..064085fca108bfc2a5a365403ceb6cdac4d5cd5f
--- /dev/null
+++ b/assets/meshes/right_hip_pitch_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e4f3c99d7f4a7d34eadbef9461fc66e3486cb5442db1ec50c86317d459f1a9c6
+size 181284
diff --git a/assets/meshes/right_hip_roll_link.STL b/assets/meshes/right_hip_roll_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..6544025eeb7dc7d09da549746ae28272e5bcf224
--- /dev/null
+++ b/assets/meshes/right_hip_roll_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4c254ef66a356f492947f360dd931965477b631e3fcc841f91ccc46d945d54f6
+size 192684
diff --git a/assets/meshes/right_hip_yaw_link.STL b/assets/meshes/right_hip_yaw_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..0ad7bee36419f85b490debe929957c6932df393f
--- /dev/null
+++ b/assets/meshes/right_hip_yaw_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e479c2936ca2057e9eb2f7dff6c189b7419d7b8484dea0b298cbb36a2a6aa668
+size 296284
diff --git a/assets/meshes/right_knee_link.STL b/assets/meshes/right_knee_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..65e8a708a7e86f1f242cd32e8959a560ea66c2d7
--- /dev/null
+++ b/assets/meshes/right_knee_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:63c4008449c9bbe701a6e2b557b7a252e90cf3a5abcf54cee46862b9a69f8ec8
+size 852284
diff --git a/assets/meshes/right_rubber_hand.STL b/assets/meshes/right_rubber_hand.STL
new file mode 100644
index 0000000000000000000000000000000000000000..58148fb9e7307176d43f71da139781a77b63d156
--- /dev/null
+++ b/assets/meshes/right_rubber_hand.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99533b778bca6246144fa511bb9d4e555e075c641f2a0251e04372869cd99d67
+size 2192684
diff --git a/assets/meshes/right_shoulder_pitch_link.STL b/assets/meshes/right_shoulder_pitch_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..48a1c46009929341f2c312966acf81c3c733d7ec
--- /dev/null
+++ b/assets/meshes/right_shoulder_pitch_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:24cdb387e0128dfe602770a81c56cdce3a0181d34d039a11d1aaf8819b7b8c02
+size 176784
diff --git a/assets/meshes/right_shoulder_roll_link.STL b/assets/meshes/right_shoulder_roll_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..2a5d22f9e17a986ec8c958a6ff7668d65878a907
--- /dev/null
+++ b/assets/meshes/right_shoulder_roll_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:962b97c48f9ce9e8399f45dd9522e0865d19aa9fd299406b2d475a8fc4a53e81
+size 401884
diff --git a/assets/meshes/right_shoulder_yaw_link.STL b/assets/meshes/right_shoulder_yaw_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..0882a567fc1a11f0cf821182154551058b0517f3
--- /dev/null
+++ b/assets/meshes/right_shoulder_yaw_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a0b76489271da0c72461a344c9ffb0f0c6e64f019ea5014c1624886c442a2fe5
+size 249984
diff --git a/assets/meshes/right_wrist_pitch_link.STL b/assets/meshes/right_wrist_pitch_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..da194543c40df9d492abb0553c06cc614a78caae
Binary files /dev/null and b/assets/meshes/right_wrist_pitch_link.STL differ
diff --git a/assets/meshes/right_wrist_roll_link.STL b/assets/meshes/right_wrist_roll_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..77d23a77ad6ab659884db4d540e0bd9fd0675d0f
--- /dev/null
+++ b/assets/meshes/right_wrist_roll_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a7ee9212ff5b94d6cb7f52bb1bbf3f352194d5b598acff74f4c77d340c5b344f
+size 356084
diff --git a/assets/meshes/right_wrist_roll_rubber_hand.STL b/assets/meshes/right_wrist_roll_rubber_hand.STL
new file mode 100644
index 0000000000000000000000000000000000000000..6f122afac362873186f291d520a255c575a4bf02
--- /dev/null
+++ b/assets/meshes/right_wrist_roll_rubber_hand.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0729aff1ac4356f9314de13a46906267642e58bc47f0d8a7f17f6590a6242ccf
+size 3481584
diff --git a/assets/meshes/right_wrist_yaw_link.STL b/assets/meshes/right_wrist_yaw_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..77edbb4135bb5b9b64357a0226431fd665d794b2
--- /dev/null
+++ b/assets/meshes/right_wrist_yaw_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc9dece2d12509707e0057ba2e48df8f3d56db0c79410212963a25e8a50f61a6
+size 341484
diff --git a/assets/meshes/torso_constraint_L_link.STL b/assets/meshes/torso_constraint_L_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..cc2cbbf2ca4c72a20704c70fb61c80b7c4fa52aa
--- /dev/null
+++ b/assets/meshes/torso_constraint_L_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:82be7f93e85b3d303a1d1e1847e2c916939bd61c424ed1ebd28691ec33909dd1
+size 203584
diff --git a/assets/meshes/torso_constraint_L_rod_link.STL b/assets/meshes/torso_constraint_L_rod_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..6747f3f9341bd72b3803385c135c3ce750322e9d
Binary files /dev/null and b/assets/meshes/torso_constraint_L_rod_link.STL differ
diff --git a/assets/meshes/torso_constraint_R_link.STL b/assets/meshes/torso_constraint_R_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..422ffe47ee75c14e5767f96ede5f506118c1054e
--- /dev/null
+++ b/assets/meshes/torso_constraint_R_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:54ded433a3a0c76027365856fdbd55215643de88846f7d436598a4071e682725
+size 203584
diff --git a/assets/meshes/torso_constraint_R_rod_link.STL b/assets/meshes/torso_constraint_R_rod_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..95cf415f72f1679a5867ca21a4af774f0b217cad
Binary files /dev/null and b/assets/meshes/torso_constraint_R_rod_link.STL differ
diff --git a/assets/meshes/torso_link.STL b/assets/meshes/torso_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..e4fb87c4cc736c90970bb5f182981f0cf1544038
--- /dev/null
+++ b/assets/meshes/torso_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e96d023f0368a4e3450b86ca5d4f10227d8141156a373e7da8cb3c93266523e0
+size 2232984
diff --git a/assets/meshes/torso_link_rev_1_0.STL b/assets/meshes/torso_link_rev_1_0.STL
new file mode 100644
index 0000000000000000000000000000000000000000..836b9923b3edac7a1c06ecd2f11bfbbc0dedf7e0
--- /dev/null
+++ b/assets/meshes/torso_link_rev_1_0.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:11ddb46f2098efbbd8816b1d65893632d8e78be936376c7cdcd6771899ccc723
+size 2570584
diff --git a/assets/meshes/waist_constraint_L.STL b/assets/meshes/waist_constraint_L.STL
new file mode 100644
index 0000000000000000000000000000000000000000..6ec689bc8d8ca4a17fa3663f29d546837fef4ee2
--- /dev/null
+++ b/assets/meshes/waist_constraint_L.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8ebafdcb4de6871113f0ca2c356618d6e46b1d50f6c0bf9e37f47b9d8e100d99
+size 114684
diff --git a/assets/meshes/waist_constraint_R.STL b/assets/meshes/waist_constraint_R.STL
new file mode 100644
index 0000000000000000000000000000000000000000..69fd76adbd96709fe00b68da0f0da7ef08b03d79
--- /dev/null
+++ b/assets/meshes/waist_constraint_R.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:791902a291ffbd35ab97383b7b44ea5d975de7c80eef838797c970790b382ca9
+size 114684
diff --git a/assets/meshes/waist_roll_link.STL b/assets/meshes/waist_roll_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..65831abd2a64bc8c36e31016964413a3d2116725
Binary files /dev/null and b/assets/meshes/waist_roll_link.STL differ
diff --git a/assets/meshes/waist_roll_link_rev_1_0.STL b/assets/meshes/waist_roll_link_rev_1_0.STL
new file mode 100644
index 0000000000000000000000000000000000000000..a64f330c592582dc31cdf38f4d08cfff06681c5f
Binary files /dev/null and b/assets/meshes/waist_roll_link_rev_1_0.STL differ
diff --git a/assets/meshes/waist_support_link.STL b/assets/meshes/waist_support_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..4a50f94fe3e60c6c128874488869779be9fef816
--- /dev/null
+++ b/assets/meshes/waist_support_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1fae9e1bb609848a1667d32eed8d6083ae443538a306843056a2a660f1b2926a
+size 150484
diff --git a/assets/meshes/waist_yaw_link.STL b/assets/meshes/waist_yaw_link.STL
new file mode 100644
index 0000000000000000000000000000000000000000..c049debc4e0080a06ea95c2e7e9b74613cd42040
--- /dev/null
+++ b/assets/meshes/waist_yaw_link.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2883f20e03f09b669b5b4ce10677ee6b5191c0934b584d7cbaef2d0662856ffb
+size 336284
diff --git a/assets/meshes/waist_yaw_link_rev_1_0.STL b/assets/meshes/waist_yaw_link_rev_1_0.STL
new file mode 100644
index 0000000000000000000000000000000000000000..dc628fbdda04129fda29140243d148cbd2c81c83
--- /dev/null
+++ b/assets/meshes/waist_yaw_link_rev_1_0.STL
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ec6db442b11f25eed898b5add07940c85d804f300de24dcbd264ccd8be7d554c
+size 619984
diff --git a/assets/scene_29dof.xml b/assets/scene_29dof.xml
new file mode 100644
index 0000000000000000000000000000000000000000..0b60f2f83228277ea83ebea17a0ede1eb7a5383b
--- /dev/null
+++ b/assets/scene_29dof.xml
@@ -0,0 +1,33 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/assets/scene_43dof.xml b/assets/scene_43dof.xml
new file mode 100644
index 0000000000000000000000000000000000000000..42251b5738d81b09949f5a0bd9de19333412f60e
--- /dev/null
+++ b/assets/scene_43dof.xml
@@ -0,0 +1,33 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/config.yaml b/config.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b85a343559bc1d350b4daccce51ee93432d67184
--- /dev/null
+++ b/config.yaml
@@ -0,0 +1,423 @@
+GEAR_WBC_CONFIG: "gr00t_wbc/sim2mujoco/resources/robots/g1/g1_gear_wbc.yaml"
+
+# copy from g1_43dof_hist.yaml
+ROBOT_TYPE: 'g1_29dof' # Robot name, "go2", "b2", "b2w", "h1", "go2w", "g1"
+ROBOT_SCENE: "assets/scene_43dof.xml" # Robot scene with 3-finger dex hands
+
+DOMAIN_ID: 0 # Domain id
+# Network Interface, "lo" for simulation and the one with "192.168.123.222" for real robot
+INTERFACE: "lo"
+SIMULATOR: "mujoco" # "robocasa"
+
+USE_JOYSTICK: 0 # Simulate Unitree WirelessController using a gamepad (0: disable, 1: enable)
+JOYSTICK_TYPE: "xbox" # support "xbox" and "switch" gamepad layout
+JOYSTICK_DEVICE: 0 # Joystick number
+
+FREE_BASE: False
+enable_waist: False # Enable waist joints control (False = band on torso, True = band on pelvis)
+
+PRINT_SCENE_INFORMATION: True # Print link, joint and sensors information of robot
+ENABLE_ELASTIC_BAND: True # Virtual spring band, used for lifting h1
+
+SIMULATE_DT: 0.002 # Simulation timestep (500Hz)
+ENABLE_ONSCREEN: true # Show visualization window
+ENABLE_OFFSCREEN: false # Offscreen rendering for cameras (set to true to enable camera publishing)
+IMAGE_DT: 0.033333 # Image publishing rate ~30Hz
+MP_START_METHOD: "spawn" # Multiprocessing start method for image subprocess
+VIEWER_DT: 0.02 # Viewer update time
+REWARD_DT: 0.02
+USE_SENSOR: False
+USE_HISTORY: True
+USE_HISTORY_LOCO: True
+USE_HISTORY_MIMIC: True
+
+GAIT_PERIOD: 0.9 # 1.25
+
+MOTOR2JOINT: [0, 1, 2, 3, 4, 5,
+ 6, 7, 8, 9, 10, 11,
+ 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 26, 27, 28]
+
+JOINT2MOTOR: [0, 1, 2, 3, 4, 5,
+ 6, 7, 8, 9, 10, 11,
+ 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 26, 27, 28]
+
+
+UNITREE_LEGGED_CONST:
+ HIGHLEVEL: 0xEE
+ LOWLEVEL: 0xFF
+ TRIGERLEVEL: 0xF0
+ PosStopF: 2146000000.0
+ VelStopF: 16000.0
+ MODE_MACHINE: 5
+ MODE_PR: 0
+
+JOINT_KP: [
+ 100, 100, 100, 200, 20, 20,
+ 100, 100, 100, 200, 20, 20,
+ 400, 400, 400,
+ 90, 60, 20, 60, 4, 4, 4,
+ 90, 60, 20, 60, 4, 4, 4
+]
+
+
+JOINT_KD: [
+ 2.5, 2.5, 2.5, 5, 0.2, 0.1,
+ 2.5, 2.5, 2.5, 5, 0.2, 0.1,
+ 5.0, 5.0, 5.0,
+ 2.0, 1.0, 0.4, 1.0, 0.2, 0.2, 0.2,
+ 2.0, 1.0, 0.4, 1.0, 0.2, 0.2, 0.2
+]
+
+# arm kp
+# soft kp, safe, test it first
+# 50, 50, 20, 20, 10, 10, 10
+# hard kp, use only if policy is safe
+# 200, 200, 80, 80, 50, 50, 50,
+
+# MOTOR_KP: [
+# 100, 100, 100, 200, 20, 20,
+# 100, 100, 100, 200, 20, 20,
+# 400, 400, 400,
+# 50, 50, 20, 20, 10, 10, 10,
+# 50, 50, 20, 20, 10, 10, 10
+# ]
+
+MOTOR_KP: [
+ 150, 150, 150, 200, 40, 40,
+ 150, 150, 150, 200, 40, 40,
+ 800, 250, 800,
+ 100, 100, 40, 40, 100, 100, 100,
+ 100, 100, 40, 40, 100, 100, 100
+]
+
+MOTOR_KD: [
+ 2, 2, 2, 4, 2, 2,
+ 2, 2, 2, 4, 2, 2,
+ 10, 5, 10,
+ 5, 5, 2, 2, 5, 5, 5,
+ 5, 5, 2, 2, 5, 5, 5
+]
+
+# MOTOR_KP: [
+# 100, 100, 100, 200, 20, 20,
+# 100, 100, 100, 200, 20, 20,
+# 400, 400, 400,
+# 90, 60, 20, 60, 4, 4, 4,
+# 90, 60, 20, 60, 4, 4, 4
+# ]
+
+
+# MOTOR_KD: [
+# 2.5, 2.5, 2.5, 5, 0.2, 0.1,
+# 2.5, 2.5, 2.5, 5, 0.2, 0.1,
+# 5.0, 5.0, 5.0,
+# 2.0, 1.0, 0.4, 1.0, 0.2, 0.2, 0.2,
+# 2.0, 1.0, 0.4, 1.0, 0.2, 0.2, 0.2
+# ]
+
+
+WeakMotorJointIndex:
+ left_hip_yaw_joint: 0
+ left_hip_roll_joint: 1
+ left_hip_pitch_joint: 2
+ left_knee_joint: 3
+ left_ankle_pitch_joint: 4
+ left_ankle_roll_joint: 5
+ right_hip_yaw_joint: 6
+ right_hip_roll_joint: 7
+ right_hip_pitch_joint: 8
+ right_knee_joint: 9
+ right_ankle_pitch_joint: 10
+ right_ankle_roll_joint: 11
+ waist_yaw_joint : 12
+ waist_roll_joint : 13
+ waist_pitch_joint : 14
+ left_shoulder_pitch_joint: 15
+ left_shoulder_roll_joint: 16
+ left_shoulder_yaw_joint: 17
+ left_elbow_joint: 18
+ left_wrist_roll_joint: 19
+ left_wrist_pitch_joint: 20
+ left_wrist_yaw_joint: 21
+ right_shoulder_pitch_joint: 22
+ right_shoulder_roll_joint: 23
+ right_shoulder_yaw_joint: 24
+ right_elbow_joint: 25
+ right_wrist_roll_joint: 26
+ right_wrist_pitch_joint: 27
+ right_wrist_yaw_joint: 28
+
+NUM_MOTORS: 29
+NUM_JOINTS: 29
+NUM_HAND_MOTORS: 7
+NUM_HAND_JOINTS: 7
+NUM_UPPER_BODY_JOINTS: 17
+
+DEFAULT_DOF_ANGLES: [
+ -0.1, # left_hip_pitch_joint
+ 0.0, # left_hip_roll_joint
+ 0.0, # left_hip_yaw_joint
+ 0.3, # left_knee_joint
+ -0.2, # left_ankle_pitch_joint
+ 0.0, # left_ankle_roll_joint
+ -0.1, # right_hip_pitch_joint
+ 0.0, # right_hip_roll_joint
+ 0.0, # right_hip_yaw_joint
+ 0.3, # right_knee_joint
+ -0.2, # right_ankle_pitch_joint
+ 0.0, # right_ankle_roll_joint
+ 0.0, # waist_yaw_joint
+ 0.0, # waist_roll_joint
+ 0.0, # waist_pitch_joint
+ 0.0, # left_shoulder_pitch_joint
+ 0.0, # left_shoulder_roll_joint
+ 0.0, # left_shoulder_yaw_joint
+ 0.0, # left_elbow_joint
+ 0.0, # left_wrist_roll_joint
+ 0.0, # left_wrist_pitch_joint
+ 0.0, # left_wrist_yaw_joint
+ 0.0, # right_shoulder_pitch_joint
+ 0.0, # right_shoulder_roll_joint
+ 0.0, # right_shoulder_yaw_joint
+ 0.0, # right_elbow_joint
+ 0.0, # right_wrist_roll_joint
+ 0.0, # right_wrist_pitch_joint
+ 0.0 # right_wrist_yaw_joint
+]
+
+DEFAULT_MOTOR_ANGLES: [
+ -0.1, # left_hip_pitch_joint
+ 0.0, # left_hip_roll_joint
+ 0.0, # left_hip_yaw_joint
+ 0.3, # left_knee_joint
+ -0.2, # left_ankle_pitch_joint
+ 0.0, # left_ankle_roll_joint
+ -0.1, # right_hip_pitch_joint
+ 0.0, # right_hip_roll_joint
+ 0.0, # right_hip_yaw_joint
+ 0.3, # right_knee_joint
+ -0.2, # right_ankle_pitch_joint
+ 0.0, # right_ankle_roll_joint
+ 0.0, # waist_yaw_joint
+ 0.0, # waist_roll_joint
+ 0.0, # waist_pitch_joint
+ 0.0, # left_shoulder_pitch_joint
+ 0.0, # left_shoulder_roll_joint
+ 0.0, # left_shoulder_yaw_joint
+ 0.0, # left_elbow_joint
+ 0.0, # left_wrist_roll_joint
+ 0.0, # left_wrist_pitch_joint
+ 0.0, # left_wrist_yaw_joint
+ 0.0, # right_shoulder_pitch_joint
+ 0.0, # right_shoulder_roll_joint
+ 0.0, # right_shoulder_yaw_joint
+ 0.0, # right_elbow_joint
+ 0.0, # right_wrist_roll_joint
+ 0.0, # right_wrist_pitch_joint
+ 0.0 # right_wrist_yaw_joint
+]
+
+motor_pos_lower_limit_list: [-2.5307, -0.5236, -2.7576, -0.087267, -0.87267, -0.2618,
+ -2.5307, -2.9671, -2.7576, -0.087267, -0.87267, -0.2618,
+ -2.618, -0.52, -0.52,
+ -3.0892, -1.5882, -2.618, -1.0472,
+ -1.972222054, -1.61443, -1.61443,
+ -3.0892, -2.2515, -2.618, -1.0472,
+ -1.972222054, -1.61443, -1.61443]
+motor_pos_upper_limit_list: [2.8798, 2.9671, 2.7576, 2.8798, 0.5236, 0.2618,
+ 2.8798, 0.5236, 2.7576, 2.8798, 0.5236, 0.2618,
+ 2.618, 0.52, 0.52,
+ 2.6704, 2.2515, 2.618, 2.0944,
+ 1.972222054, 1.61443, 1.61443,
+ 2.6704, 1.5882, 2.618, 2.0944,
+ 1.972222054, 1.61443, 1.61443]
+motor_vel_limit_list: [32.0, 32.0, 32.0, 20.0, 37.0, 37.0,
+ 32.0, 32.0, 32.0, 20.0, 37.0, 37.0,
+ 32.0, 37.0, 37.0,
+ 37.0, 37.0, 37.0, 37.0,
+ 37.0, 22.0, 22.0,
+ 37.0, 37.0, 37.0, 37.0,
+ 37.0, 22.0, 22.0]
+motor_effort_limit_list: [88.0, 88.0, 88.0, 139.0, 50.0, 50.0,
+ 88.0, 88.0, 88.0, 139.0, 50.0, 50.0,
+ 88.0, 50.0, 50.0,
+ 25.0, 25.0, 25.0, 25.0,
+ 25.0, 5.0, 5.0,
+ 2.45, 0.7, 0.7, 0.7, 0.7, 0.7, 0.7,
+ 25.0, 25.0, 25.0, 25.0,
+ 25.0, 5.0, 5.0,
+ 2.45, 0.7, 0.7, 0.7, 0.7, 0.7, 0.7]
+history_config: {
+ base_ang_vel: 4,
+ projected_gravity: 4,
+ command_lin_vel: 4,
+ command_ang_vel: 4,
+ command_base_height: 4,
+ command_stand: 4,
+ ref_upper_dof_pos: 4,
+ dof_pos: 4,
+ dof_vel: 4,
+ actions: 4,
+ # phase_time: 4,
+ ref_motion_phase: 4,
+ sin_phase: 4,
+ cos_phase: 4
+ }
+history_loco_config: {
+ base_ang_vel: 4,
+ projected_gravity: 4,
+ command_lin_vel: 4,
+ command_ang_vel: 4,
+ # command_base_height: 4,
+ command_stand: 4,
+ ref_upper_dof_pos: 4,
+ dof_pos: 4,
+ dof_vel: 4,
+ actions: 4,
+ # phase_time: 4,
+ sin_phase: 4,
+ cos_phase: 4
+ }
+history_loco_height_config: {
+ base_ang_vel: 4,
+ projected_gravity: 4,
+ command_lin_vel: 4,
+ command_ang_vel: 4,
+ command_base_height: 4,
+ command_stand: 4,
+ ref_upper_dof_pos: 4,
+ dof_pos: 4,
+ dof_vel: 4,
+ actions: 4,
+ # phase_time: 4,
+ sin_phase: 4,
+ cos_phase: 4
+ }
+history_mimic_config: {
+ base_ang_vel: 4,
+ projected_gravity: 4,
+ dof_pos: 4,
+ dof_vel: 4,
+ actions: 4,
+ ref_motion_phase: 4,
+ }
+obs_dims: {
+ base_lin_vel: 3,
+ base_ang_vel: 3,
+ projected_gravity: 3,
+ command_lin_vel: 2,
+ command_ang_vel: 1,
+ command_stand: 1,
+ command_base_height: 1,
+ ref_upper_dof_pos: 17, # upper body actions
+ dof_pos: 29,
+ dof_vel: 29,
+ # actions: 12, # lower body actions
+ actions: 29, # full body actions
+ phase_time: 1,
+ ref_motion_phase: 1, # mimic motion phase
+ sin_phase: 1,
+ cos_phase: 1,
+ }
+obs_loco_dims: {
+ base_lin_vel: 3,
+ base_ang_vel: 3,
+ projected_gravity: 3,
+ command_lin_vel: 2,
+ command_ang_vel: 1,
+ command_stand: 1,
+ command_base_height: 1,
+ ref_upper_dof_pos: 17, # upper body actions
+ dof_pos: 29,
+ dof_vel: 29,
+ actions: 12, # lower body actions
+ phase_time: 1,
+ sin_phase: 1,
+ cos_phase: 1,
+ }
+obs_mimic_dims: {
+ base_lin_vel: 3,
+ base_ang_vel: 3,
+ projected_gravity: 3,
+ dof_pos: 29,
+ dof_vel: 29,
+ actions: 29, # full body actions
+ ref_motion_phase: 1, # mimic motion phase
+ }
+obs_scales: {
+ base_lin_vel: 2.0,
+ base_ang_vel: 0.25,
+ projected_gravity: 1.0,
+ command_lin_vel: 1,
+ command_ang_vel: 1,
+ command_stand: 1,
+ command_base_height: 2, # Yuanhang: it's 2, not 1!
+ ref_upper_dof_pos: 1.0,
+ dof_pos: 1.0,
+ dof_vel: 0.05,
+ history: 1.0,
+ history_loco: 1.0,
+ history_mimic: 1.0,
+ actions: 1.0,
+ phase_time: 1.0,
+ ref_motion_phase: 1.0,
+ sin_phase: 1.0,
+ cos_phase: 1.0
+ }
+
+loco_upper_body_dof_pos: [
+ 0.0, 0.0, 0.0, # waist
+ 0.0, 0.3, 0.0, 1.0, # left shoulder and elbow
+ 0.0, 0.0, 0.0, # left wrist
+ 0.0, -0.3, 0.0, 1.0, # right shoulder and elbow
+ 0.0, 0.0, 0.0 # right wrist
+]
+
+robot_dofs: {
+ "g1_29dof": [1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1,
+ 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1],
+ "g1_29dof_anneal_23dof": [1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1,
+ 1, 1, 1,
+ 1, 1, 1, 1, 0, 0, 0,
+ 1, 1, 1, 1, 0, 0, 0],
+}
+
+mimic_robot_types: {
+
+ "APT_level1": "g1_29dof_anneal_23dof",
+}
+
+
+
+
+# 01281657
+mimic_models: {
+ "APT_level1": "20250116_225127-TairanTestbed_G129dofANNEAL23dof_dm_APT_video_APT_level1_MinimalFriction-0.3_RfiTrue_Far0.325_RESUME_LARGENOISE-motion_tracking-g1_29dof_anneal_23dof/exported/model_176500.onnx",
+
+}
+
+
+
+start_upper_body_dof_pos: {
+
+ "APT_level1":
+ [0.19964170455932617, 0.07710712403059006, -0.2882401943206787,
+ 0.21672365069389343, 0.15629297494888306, -0.5167576670646667, 0.5782126784324646,
+ 0.0, 0.0, 0.0,
+ 0.25740593671798706, -0.2504104673862457, 0.22500675916671753, 0.5127624273300171,
+ 0.0, 0.0, 0.0],
+
+}
+
+motion_length_s: {
+ "APT_level1": 7.66,
+
+}
diff --git a/env.py b/env.py
new file mode 100644
index 0000000000000000000000000000000000000000..459a92716692bb20c5d18743fe6493daed86971e
--- /dev/null
+++ b/env.py
@@ -0,0 +1,78 @@
+# env.py
+from pathlib import Path
+import yaml
+
+from sim.simulator_factory import SimulatorFactory, init_channel
+
+
+class UnitreeG1SimulationEnvironment:
+ """
+ Unitree G1 Simulation Environment for EnvHub.
+ """
+ def __init__(self, config_path=None):
+ if config_path is None:
+ config_path = Path(__file__).parent / "config.yaml"
+
+ with open(config_path, "r") as f:
+ self.config = yaml.safe_load(f)
+
+ # init DDS/communication
+ init_channel(config=self.config)
+
+ # create simulator
+ self.sim = SimulatorFactory.create_simulator(
+ config=self.config,
+ env_name="default",
+ onscreen=self.config.get("ENABLE_ONSCREEN", True),
+ offscreen=self.config.get("ENABLE_OFFSCREEN", False),
+ camera_configs={}, # populated by run() if needed
+ )
+
+ def run(self, publish_images=False, camera_port=5555, cameras=None):
+ """
+ Launches the simulator exactly like run_sim.py
+ """
+ config = self.config
+
+ # configure cameras for offscreen rendering
+ camera_configs = {}
+ enable_offscreen = publish_images or config.get("ENABLE_OFFSCREEN", False)
+
+ if enable_offscreen:
+ camera_list = cameras or ["head_camera"]
+ for cam_name in camera_list:
+ camera_configs[cam_name] = {"height": 480, "width": 640}
+
+ # re-create simulator with correct camera config
+ self.sim = SimulatorFactory.create_simulator(
+ config=config,
+ env_name="default",
+ onscreen=config.get("ENABLE_ONSCREEN", True),
+ offscreen=enable_offscreen,
+ camera_configs=camera_configs,
+ )
+
+ print("Starting Unitree G1 Simulator ...")
+
+ try:
+ SimulatorFactory.start_simulator(
+ self.sim,
+ as_thread=False,
+ enable_image_publish=publish_images,
+ camera_port=camera_port,
+ )
+ except KeyboardInterrupt:
+ print("\nShutting down simulator...")
+ self.sim.close()
+
+ # optional cleanup
+ def close(self):
+ self.sim.close()
+
+
+def make_env(**kwargs):
+ """
+ Required entrypoint for EnvHub.
+ Returns an environment instance.
+ """
+ return UnitreeG1SimulationEnvironment(**kwargs)
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..32e4c434aad957f370bc98f56c16787f1f70aff9
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,13 @@
+mujoco>=3.0.0
+numpy>=1.24.0
+pyyaml>=6.0
+unitree-sdk2py>=1.0.0
+loguru>=0.7.0
+
+# Camera publishing dependencies
+opencv-python>=4.8.0
+pyzmq>=25.0.0
+msgpack>=1.0.0
+msgpack-numpy>=0.4.8
+matplotlib>=3.5.0 # For live camera viewer
+
diff --git a/run_sim.py b/run_sim.py
new file mode 100644
index 0000000000000000000000000000000000000000..7b86b86e9931afa48cebd080198d3c7bb470eb41
--- /dev/null
+++ b/run_sim.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+"""Standalone MuJoCo simulator for Unitree G1"""
+import argparse
+import sys
+from pathlib import Path
+
+# Add sim module to path
+sys.path.insert(0, str(Path(__file__).parent))
+
+import yaml
+from sim.simulator_factory import SimulatorFactory, init_channel
+
+def main():
+ parser = argparse.ArgumentParser(description="Unitree G1 MuJoCo Simulator")
+ parser.add_argument("--publish-images", action="store_true",
+ help="Enable camera image publishing via ZMQ (requires offscreen rendering)")
+ parser.add_argument("--camera-port", type=int, default=5555,
+ help="ZMQ port for camera publishing (default: 5555)")
+ parser.add_argument("--cameras", type=str, nargs="+", default=None,
+ help="Camera names to publish (default: head_camera)")
+ args = parser.parse_args()
+
+ # Load config
+ config_path = Path(__file__).parent / "config.yaml"
+ with open(config_path) as f:
+ config = yaml.safe_load(f)
+
+ # Override config with command line args
+ enable_offscreen = args.publish_images or config.get("ENABLE_OFFSCREEN", False)
+
+ print("="*60)
+ print("🤖 Starting Unitree G1 MuJoCo Simulator")
+ print("="*60)
+ print(f"📁 Scene: {config['ROBOT_SCENE']}")
+ print(f"⏱️ Timestep: {config['SIMULATE_DT']}s ({int(1/config['SIMULATE_DT'])} Hz)")
+ print(f"👁️ Visualization: {'ON' if config.get('ENABLE_ONSCREEN', True) else 'OFF'}")
+
+ # Configure cameras if requested
+ camera_configs = {}
+ if enable_offscreen:
+ camera_list = args.cameras or ["head_camera"]
+ for cam_name in camera_list:
+ camera_configs[cam_name] = {"height": 480, "width": 640}
+ print(f"📷 Cameras: {', '.join(camera_list)} → ZMQ port {args.camera_port}")
+
+ print("="*60)
+
+ # Initialize DDS channel
+ init_channel(config=config)
+
+ # Create simulator
+ sim = SimulatorFactory.create_simulator(
+ config=config,
+ env_name="default",
+ onscreen=config.get("ENABLE_ONSCREEN", True),
+ offscreen=enable_offscreen,
+ camera_configs=camera_configs,
+ )
+
+ # Start simulator (blocking)
+ print("\nSimulator running. Press Ctrl+C to exit.")
+ if enable_offscreen and args.publish_images:
+ print(f"Camera images publishing on tcp://localhost:{args.camera_port}")
+ try:
+ SimulatorFactory.start_simulator(
+ sim,
+ as_thread=False,
+ enable_image_publish=args.publish_images,
+ camera_port=args.camera_port,
+ )
+ except KeyboardInterrupt:
+ print("\nShutting down simulator...")
+ sim.close()
+
+if __name__ == "__main__":
+ main()
+
diff --git a/sim/__init__.py b/sim/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/sim/__pycache__/__init__.cpython-310.pyc b/sim/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d76ce7d89fb1d5b754d176b51b5b3ae725893c60
Binary files /dev/null and b/sim/__pycache__/__init__.cpython-310.pyc differ
diff --git a/sim/__pycache__/base_sim.cpython-310.pyc b/sim/__pycache__/base_sim.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..374bff09e6b158c45f0dab881d255da4429bee3d
Binary files /dev/null and b/sim/__pycache__/base_sim.cpython-310.pyc differ
diff --git a/sim/__pycache__/image_publish_utils.cpython-310.pyc b/sim/__pycache__/image_publish_utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bc4d6728d02de09717693db70fc1c4733cb80c97
Binary files /dev/null and b/sim/__pycache__/image_publish_utils.cpython-310.pyc differ
diff --git a/sim/__pycache__/metric_utils.cpython-310.pyc b/sim/__pycache__/metric_utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..77be66074bd87ccb2c51625223c626d9b614abbd
Binary files /dev/null and b/sim/__pycache__/metric_utils.cpython-310.pyc differ
diff --git a/sim/__pycache__/sensor_utils.cpython-310.pyc b/sim/__pycache__/sensor_utils.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..51b65edf3755903aeef27ff87a99f16b6af25793
Binary files /dev/null and b/sim/__pycache__/sensor_utils.cpython-310.pyc differ
diff --git a/sim/__pycache__/sim_utilts.cpython-310.pyc b/sim/__pycache__/sim_utilts.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..89d4baa7f1db1e1ab4e6d8d31aa85c93117690c6
Binary files /dev/null and b/sim/__pycache__/sim_utilts.cpython-310.pyc differ
diff --git a/sim/__pycache__/simulator_factory.cpython-310.pyc b/sim/__pycache__/simulator_factory.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..861b22b0641195d5cd905f2f1653a1fd16eb0da5
Binary files /dev/null and b/sim/__pycache__/simulator_factory.cpython-310.pyc differ
diff --git a/sim/__pycache__/unitree_sdk2py_bridge.cpython-310.pyc b/sim/__pycache__/unitree_sdk2py_bridge.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fad93111b5d173939a28d972930ffaa849bbf3db
Binary files /dev/null and b/sim/__pycache__/unitree_sdk2py_bridge.cpython-310.pyc differ
diff --git a/sim/base_sim.py b/sim/base_sim.py
new file mode 100644
index 0000000000000000000000000000000000000000..707718bfc73008dad7832a1ff0ebd7a3bf5ac988
--- /dev/null
+++ b/sim/base_sim.py
@@ -0,0 +1,803 @@
+import argparse
+import pathlib
+from pathlib import Path
+import threading
+from threading import Lock, Thread
+from typing import Dict
+
+import mujoco
+import mujoco.viewer
+import numpy as np
+try:
+ import rclpy
+ HAS_RCLPY = True
+except ImportError:
+ HAS_RCLPY = False
+ print("Warning: rclpy not found. Camera image publishing will be disabled.")
+from unitree_sdk2py.core.channel import ChannelFactoryInitialize
+import yaml
+
+from .image_publish_utils import ImagePublishProcess
+from .metric_utils import check_contact, check_height
+from .sim_utilts import get_subtree_body_names
+from .unitree_sdk2py_bridge import ElasticBand, UnitreeSdk2Bridge
+
+GR00T_WBC_ROOT = Path(__file__).resolve().parent.parent # Points to mujoco_sim_g1/
+
+
class DefaultEnv:
    """Base environment that owns the MuJoCo model/data, optional viewer and
    offscreen renderers, and bridges Unitree SDK commands to simulator torques.

    Subclasses override ``update_reward`` / ``get_privileged_obs`` (and swap
    ``config["ROBOT_SCENE"]``) to build task-specific scenes.
    """

    def __init__(
        self,
        config: Dict[str, any],
        env_name: str = "default",
        camera_configs: Dict[str, any] = None,
        onscreen: bool = False,
        offscreen: bool = False,
    ):
        """Set up simulation state, load the scene, and (optionally) renderers.

        Args:
            config: Parsed YAML simulation config (joint counts, dt, scene path,
                torque limits, feature flags).
            env_name: Label for this environment instance.
            camera_configs: Mapping camera name -> {"height": int, "width": int,
                optional "params"}. Copied internally; the caller's dict is not
                mutated.
            onscreen: Launch the interactive MuJoCo viewer.
            offscreen: Create offscreen renderers for the configured cameras.
        """
        # Avoid mutable default argument gotcha, and copy so we never mutate
        # the caller's dict when injecting the global view below.
        camera_configs = {} if camera_configs is None else dict(camera_configs)

        # global_view is only set up for this specific scene for now.
        if config["ROBOT_SCENE"] == "gr00t_wbc/control/robot_model/model_data/g1/scene_29dof.xml":
            camera_configs["global_view"] = {
                "height": 400,
                "width": 400,
            }
        self.config = config
        self.env_name = env_name
        self.num_body_dof = self.config["NUM_JOINTS"]
        self.num_hand_dof = self.config["NUM_HAND_JOINTS"]
        self.sim_dt = self.config["SIMULATE_DT"]
        self.obs = None
        # One torque slot per actuated body joint plus both hands.
        self.torques = np.zeros(self.num_body_dof + self.num_hand_dof * 2)
        self.torque_limit = np.array(self.config["motor_effort_limit_list"])
        self.camera_configs = camera_configs

        # Debug: print camera config
        if len(camera_configs) > 0:
            print(f"✓ DefaultEnv initialized with {len(camera_configs)} camera(s): {list(camera_configs.keys())}")

        # Protects last_reward, which is written by the sim loop and read by callers.
        self.reward_lock = Lock()

        # Unitree bridge will be initialized by the simulator
        self.unitree_bridge = None

        # Store display mode
        self.onscreen = onscreen

        # Initialize scene (may be specialized by subclasses via config overrides)
        self.init_scene()
        self.last_reward = 0

        # Setup offscreen rendering if needed
        self.offscreen = offscreen
        if self.offscreen:
            self.init_renderers()
        self.image_dt = self.config.get("IMAGE_DT", 0.033333)

        # Image publishing subprocess (initialized separately)
        self.image_publish_process = None

    def init_scene(self):
        """Load the robot scene XML, optionally attach the elastic band,
        launch the viewer, and build joint-index bookkeeping arrays."""
        self.mj_model = mujoco.MjModel.from_xml_path(
            str(pathlib.Path(GR00T_WBC_ROOT) / self.config["ROBOT_SCENE"])
        )
        self.mj_data = mujoco.MjData(self.mj_model)
        self.mj_model.opt.timestep = self.sim_dt
        self.torso_index = mujoco.mj_name2id(self.mj_model, mujoco.mjtObj.mjOBJ_BODY, "torso_link")
        self.root_body = "pelvis"

        # Default to None so keyboard handling can safely test for the band.
        # (Previously this attribute did not exist when ENABLE_ELASTIC_BAND was
        # false, and handle_keyboard_button raised AttributeError.)
        self.elastic_band = None
        if self.config["ENABLE_ELASTIC_BAND"]:
            self.elastic_band = ElasticBand()
            # Choose which link the virtual band pulls on, per robot type.
            if "g1" in self.config["ROBOT_TYPE"]:
                attach_name = "pelvis" if self.config["enable_waist"] else "torso_link"
            elif "h1" in self.config["ROBOT_TYPE"]:
                attach_name = "torso_link"
            else:
                attach_name = "base_link"
            self.band_attached_link = self.mj_model.body(attach_name).id

        # Viewer setup is identical with or without the band; the band only
        # contributes its keyboard callback.
        if self.onscreen:
            viewer_kwargs = {"show_left_ui": False, "show_right_ui": False}
            if self.elastic_band is not None:
                viewer_kwargs["key_callback"] = self.elastic_band.MujuocoKeyCallback
            self.viewer = mujoco.viewer.launch_passive(self.mj_model, self.mj_data, **viewer_kwargs)
        else:
            mujoco.mj_forward(self.mj_model, self.mj_data)
            self.viewer = None

        if self.viewer:
            # viewer camera
            self.viewer.cam.azimuth = 120  # Horizontal rotation in degrees
            self.viewer.cam.elevation = -30  # Vertical tilt in degrees
            self.viewer.cam.distance = 2.0  # Distance from camera to target
            self.viewer.cam.lookat = np.array([0, 0, 0.5])  # Point the camera is looking at

        # Note that the actuator order is the same as the joint order in the mujoco model.
        # Classify every joint into body / left hand / right hand by name.
        self.body_joint_index = []
        self.left_hand_index = []
        self.right_hand_index = []
        body_part_keywords = ("hip", "knee", "ankle", "waist", "shoulder", "elbow", "wrist")
        for i in range(self.mj_model.njnt):
            name = self.mj_model.joint(i).name
            if any(part_name in name for part_name in body_part_keywords):
                self.body_joint_index.append(i)
            elif "left_hand" in name:
                self.left_hand_index.append(i)
            elif "right_hand" in name:
                self.right_hand_index.append(i)

        assert len(self.body_joint_index) == self.config["NUM_JOINTS"], \
            f"Expected {self.config['NUM_JOINTS']} body joints, got {len(self.body_joint_index)}"
        # Hand joints are optional (some models don't have hands)
        if self.config.get("NUM_HAND_JOINTS", 0) > 0:
            expected_hands = self.config["NUM_HAND_JOINTS"]
            if len(self.left_hand_index) != expected_hands or len(self.right_hand_index) != expected_hands:
                print(f"Warning: Expected {expected_hands} hand joints, got left={len(self.left_hand_index)}, right={len(self.right_hand_index)}")
                print("Continuing without hands...")

        self.body_joint_index = np.array(self.body_joint_index)
        self.left_hand_index = np.array(self.left_hand_index)
        self.right_hand_index = np.array(self.right_hand_index)

    def init_renderers(self):
        """Create one offscreen ``mujoco.Renderer`` per configured camera."""
        self.renderers = {}
        for camera_name, camera_config in self.camera_configs.items():
            renderer = mujoco.Renderer(
                self.mj_model, height=camera_config["height"], width=camera_config["width"]
            )
            self.renderers[camera_name] = renderer

    def start_image_publish_subprocess(self, start_method: str = "spawn", camera_port: int = 5555):
        """Start the image publishing subprocess (shared memory + ZMQ).

        Args:
            start_method: multiprocessing start method; the config key
                ``MP_START_METHOD`` takes precedence, otherwise this argument
                is honored (previously it was silently ignored).
            camera_port: ZMQ port the subprocess publishes on.
        """
        if len(self.camera_configs) == 0:
            print(
                "Warning: No camera configs provided, image publishing subprocess will not be started"
            )
            return
        # Config overrides; fall back to the caller-supplied method.
        start_method = self.config.get("MP_START_METHOD", start_method)
        self.image_publish_process = ImagePublishProcess(
            camera_configs=self.camera_configs,
            image_dt=self.image_dt,
            zmq_port=camera_port,
            start_method=start_method,
            verbose=self.config.get("verbose", False),
        )
        self.image_publish_process.start_process()
        print(f"✓ Started image publishing subprocess on ZMQ port {camera_port}")

    def compute_body_torques(self) -> np.ndarray:
        """PD + feedforward torques for the body joints from the current low_cmd.

        Feedback state comes either from the MuJoCo sensor array (positions in
        sensordata[:n], velocities in sensordata[n:2n]) or directly from
        qpos/qvel. Index arithmetic: joint i in the MuJoCo joint list maps to
        qpos[i + 7 - 1] / qvel[i + 6 - 1] because joint 0 is the floating base
        (7 qpos, 6 qvel entries).
        """
        body_torques = np.zeros(self.num_body_dof)
        if self.unitree_bridge is not None and self.unitree_bridge.low_cmd:
            for i in range(self.unitree_bridge.num_body_motor):
                cmd = self.unitree_bridge.low_cmd.motor_cmd[i]
                if self.unitree_bridge.use_sensor:
                    q_meas = self.mj_data.sensordata[i]
                    dq_meas = self.mj_data.sensordata[i + self.unitree_bridge.num_body_motor]
                else:
                    q_meas = self.mj_data.qpos[self.body_joint_index[i] + 7 - 1]
                    dq_meas = self.mj_data.qvel[self.body_joint_index[i] + 6 - 1]
                body_torques[i] = (
                    cmd.tau + cmd.kp * (cmd.q - q_meas) + cmd.kd * (cmd.dq - dq_meas)
                )
        return body_torques

    def compute_hand_torques(self) -> np.ndarray:
        """PD + feedforward torques for both hands.

        Returns:
            Concatenated (left, right) torques of length ``2 * num_hand_dof``.
        """
        left_hand_torques = np.zeros(self.num_hand_dof)
        right_hand_torques = np.zeros(self.num_hand_dof)
        if self.unitree_bridge is not None and self.unitree_bridge.low_cmd:
            for i in range(self.unitree_bridge.num_hand_motor):
                lcmd = self.unitree_bridge.left_hand_cmd.motor_cmd[i]
                rcmd = self.unitree_bridge.right_hand_cmd.motor_cmd[i]
                left_hand_torques[i] = (
                    lcmd.tau
                    + lcmd.kp * (lcmd.q - self.mj_data.qpos[self.left_hand_index[i] + 7 - 1])
                    + lcmd.kd * (lcmd.dq - self.mj_data.qvel[self.left_hand_index[i] + 6 - 1])
                )
                right_hand_torques[i] = (
                    rcmd.tau
                    + rcmd.kp * (rcmd.q - self.mj_data.qpos[self.right_hand_index[i] + 7 - 1])
                    + rcmd.kd * (rcmd.dq - self.mj_data.qvel[self.right_hand_index[i] + 6 - 1])
                )
        return np.concatenate((left_hand_torques, right_hand_torques))

    def compute_body_qpos(self) -> np.ndarray:
        """Commanded body joint positions from the current low_cmd (zeros if none)."""
        body_qpos = np.zeros(self.num_body_dof)
        if self.unitree_bridge is not None and self.unitree_bridge.low_cmd:
            for i in range(self.unitree_bridge.num_body_motor):
                body_qpos[i] = self.unitree_bridge.low_cmd.motor_cmd[i].q
        return body_qpos

    def compute_hand_qpos(self) -> np.ndarray:
        """Commanded hand joint positions, left then right, length ``2 * num_hand_dof``."""
        hand_qpos = np.zeros(self.num_hand_dof * 2)
        if self.unitree_bridge is not None and self.unitree_bridge.low_cmd:
            for i in range(self.unitree_bridge.num_hand_motor):
                hand_qpos[i] = self.unitree_bridge.left_hand_cmd.motor_cmd[i].q
                hand_qpos[i + self.num_hand_dof] = self.unitree_bridge.right_hand_cmd.motor_cmd[i].q
        return hand_qpos

    def prepare_obs(self) -> Dict[str, any]:
        """Assemble the low-state observation dict from the current MuJoCo state.

        Index arithmetic: the floating base occupies qpos[0:7] / qvel[0:6], so
        joint i maps to qpos[i + 7 - 1] and qvel[i + 6 - 1]; actuator i drives
        joint i + 1, hence the ``index - 1`` into actuator_force.
        """
        obs = {}
        obs["floating_base_pose"] = self.mj_data.qpos[:7]
        obs["floating_base_vel"] = self.mj_data.qvel[:6]
        obs["floating_base_acc"] = self.mj_data.qacc[:6]
        obs["secondary_imu_quat"] = self.mj_data.xquat[self.torso_index]
        obs["secondary_imu_vel"] = self.mj_data.cvel[self.torso_index]
        obs["body_q"] = self.mj_data.qpos[self.body_joint_index + 7 - 1]
        obs["body_dq"] = self.mj_data.qvel[self.body_joint_index + 6 - 1]
        obs["body_ddq"] = self.mj_data.qacc[self.body_joint_index + 6 - 1]
        obs["body_tau_est"] = self.mj_data.actuator_force[self.body_joint_index - 1]
        if self.num_hand_dof > 0:
            obs["left_hand_q"] = self.mj_data.qpos[self.left_hand_index + 7 - 1]
            obs["left_hand_dq"] = self.mj_data.qvel[self.left_hand_index + 6 - 1]
            obs["left_hand_ddq"] = self.mj_data.qacc[self.left_hand_index + 6 - 1]
            obs["left_hand_tau_est"] = self.mj_data.actuator_force[self.left_hand_index - 1]
            obs["right_hand_q"] = self.mj_data.qpos[self.right_hand_index + 7 - 1]
            obs["right_hand_dq"] = self.mj_data.qvel[self.right_hand_index + 6 - 1]
            obs["right_hand_ddq"] = self.mj_data.qacc[self.right_hand_index + 6 - 1]
            obs["right_hand_tau_est"] = self.mj_data.actuator_force[self.right_hand_index - 1]
        obs["time"] = self.mj_data.time
        return obs

    def _apply_elastic_band_force(self):
        """Apply (or clear) the virtual elastic-band wrench on the attached link.

        Shared by sim_step and kinematics_step (the code was previously
        duplicated verbatim in both). No-op when the band feature is disabled.
        """
        if not self.config["ENABLE_ELASTIC_BAND"]:
            return
        if self.elastic_band.enable:
            # Get Cartesian pose and velocity of the band_attached_link
            pose = np.concatenate(
                [
                    self.mj_data.xpos[self.band_attached_link],  # link position in world
                    self.mj_data.xquat[
                        self.band_attached_link
                    ],  # link quaternion in world [w,x,y,z]
                    np.zeros(6),  # placeholder for velocity
                ]
            )

            # Get velocity in world frame
            mujoco.mj_objectVelocity(
                self.mj_model,
                self.mj_data,
                mujoco.mjtObj.mjOBJ_BODY,
                self.band_attached_link,
                pose[7:13],
                0,  # 0 for world frame
            )

            # Reorder velocity from [ang, lin] to [lin, ang]
            pose[7:10], pose[10:13] = pose[10:13], pose[7:10].copy()
            self.mj_data.xfrc_applied[self.band_attached_link] = self.elastic_band.Advance(pose)
        else:
            # explicitly resetting the force when the band is not enabled
            self.mj_data.xfrc_applied[self.band_attached_link] = np.zeros(6)

    def sim_step(self):
        """One physics step: publish state, apply band force, compute torques,
        clip to limits, and advance the MuJoCo simulation."""
        self.obs = self.prepare_obs()
        self.unitree_bridge.PublishLowState(self.obs)
        if self.unitree_bridge.joystick:
            self.unitree_bridge.PublishWirelessController()
        self._apply_elastic_band_force()

        body_torques = self.compute_body_torques()
        hand_torques = self.compute_hand_torques()
        # Actuator i drives joint i + 1 (joint 0 is the floating base), hence -1.
        self.torques[self.body_joint_index - 1] = body_torques
        if self.num_hand_dof > 0:
            self.torques[self.left_hand_index - 1] = hand_torques[: self.num_hand_dof]
            self.torques[self.right_hand_index - 1] = hand_torques[self.num_hand_dof :]

        self.torques = np.clip(self.torques, -self.torque_limit, self.torque_limit)

        if self.config["FREE_BASE"]:
            # First 6 ctrl entries belong to the free base and stay zero.
            self.mj_data.ctrl = np.concatenate((np.zeros(6), self.torques))
        else:
            self.mj_data.ctrl = self.torques
        mujoco.mj_step(self.mj_model, self.mj_data)
        # self.check_self_collision()

    def kinematics_step(self):
        """
        Run kinematics only: compute the qpos of the robot and directly set the qpos.
        For debugging purposes.
        """
        if self.unitree_bridge is not None:
            self.unitree_bridge.PublishLowState(self.prepare_obs())
            if self.unitree_bridge.joystick:
                self.unitree_bridge.PublishWirelessController()

        self._apply_elastic_band_force()

        body_qpos = self.compute_body_qpos()  # (num_body_dof,)
        hand_qpos = self.compute_hand_qpos()  # (num_hand_dof * 2,)

        self.mj_data.qpos[self.body_joint_index + 7 - 1] = body_qpos
        if self.num_hand_dof > 0:
            self.mj_data.qpos[self.left_hand_index + 7 - 1] = hand_qpos[: self.num_hand_dof]
            self.mj_data.qpos[self.right_hand_index + 7 - 1] = hand_qpos[self.num_hand_dof :]

        mujoco.mj_kinematics(self.mj_model, self.mj_data)
        mujoco.mj_comPos(self.mj_model, self.mj_data)

    def apply_perturbation(self, key):
        """Apply a 1 m/s velocity kick to the base, direction given by arrow key.

        The kick is specified in the body frame and rotated into the world
        frame before being added to the base linear velocity.
        """
        # Add velocity perturbations in body frame
        perturbation_x_body = 0.0  # forward/backward in body frame
        perturbation_y_body = 0.0  # left/right in body frame
        if key == "up":
            perturbation_x_body = 1.0  # forward
        elif key == "down":
            perturbation_x_body = -1.0  # backward
        elif key == "left":
            perturbation_y_body = 1.0  # left
        elif key == "right":
            perturbation_y_body = -1.0  # right

        # Transform body frame velocity to world frame using MuJoCo's rotation
        vel_body = np.array([perturbation_x_body, perturbation_y_body, 0.0])
        vel_world = np.zeros(3)
        base_quat = self.mj_data.qpos[3:7]  # [w, x, y, z] quaternion

        # Use MuJoCo's robust quaternion rotation (handles invalid quaternions automatically)
        mujoco.mju_rotVecQuat(vel_world, vel_body, base_quat)

        # Apply to base linear velocity in world frame
        self.mj_data.qvel[0] += vel_world[0]  # world X velocity
        self.mj_data.qvel[1] += vel_world[1]  # world Y velocity

        # Update dynamics after velocity change
        mujoco.mj_forward(self.mj_model, self.mj_data)

    def update_viewer(self):
        """Sync the interactive viewer with the current sim state, if present."""
        if self.viewer is not None:
            self.viewer.sync()

    def update_viewer_camera(self):
        """Toggle the viewer camera between tracking and free modes."""
        if self.viewer is not None:
            if self.viewer.cam.type == mujoco.mjtCamera.mjCAMERA_TRACKING:
                self.viewer.cam.type = mujoco.mjtCamera.mjCAMERA_FREE
            else:
                self.viewer.cam.type = mujoco.mjtCamera.mjCAMERA_TRACKING

    def update_reward(self):
        """Calculate reward. Should be implemented by subclasses."""
        with self.reward_lock:
            self.last_reward = 0

    def get_reward(self):
        """Thread-safe way to get the last calculated reward."""
        with self.reward_lock:
            return self.last_reward

    def set_unitree_bridge(self, unitree_bridge):
        """Set the unitree bridge from the simulator"""
        self.unitree_bridge = unitree_bridge

    def get_privileged_obs(self):
        """Get privileged observation. Should be implemented by subclasses."""
        return {}

    def update_render_caches(self):
        """Render every configured camera and push the frames to the image
        publishing subprocess's shared memory (if running).

        Returns:
            Dict mapping ``"<camera>_image"`` to the rendered RGB array.
        """
        render_caches = {}
        for camera_name, camera_config in self.camera_configs.items():
            renderer = self.renderers[camera_name]
            # "params" allows an explicit camera spec; otherwise look the
            # camera up by name in the model.
            if "params" in camera_config:
                renderer.update_scene(self.mj_data, camera=camera_config["params"])
            else:
                renderer.update_scene(self.mj_data, camera=camera_name)
            render_caches[camera_name + "_image"] = renderer.render()

        # Update shared memory if image publishing process is available
        if self.image_publish_process is not None:
            self.image_publish_process.update_shared_memory(render_caches)

        return render_caches

    def handle_keyboard_button(self, key):
        """Route a keyboard press: band control, reset, camera toggle, kicks."""
        # elastic_band is None whenever ENABLE_ELASTIC_BAND is false, so this
        # check is now safe (it previously raised AttributeError in that case).
        if self.elastic_band is not None:
            self.elastic_band.handle_keyboard_button(key)

        if key == "backspace":
            self.reset()
        if key == "v":
            self.update_viewer_camera()
        if key in ["up", "down", "left", "right"]:
            self.apply_perturbation(key)

    def check_fall(self):
        """Check if the robot has fallen (base height below 0.2 m) and reset."""
        self.fall = False
        if self.mj_data.qpos[2] < 0.2:
            self.fall = True
            print(f"Warning: Robot has fallen, height: {self.mj_data.qpos[2]:.3f} m")

        if self.fall:
            self.reset()

    def check_self_collision(self):
        """Check for self-collision of the robot"""
        robot_bodies = get_subtree_body_names(self.mj_model, self.mj_model.body(self.root_body).id)
        self_collision, contact_bodies = check_contact(
            self.mj_model, self.mj_data, robot_bodies, robot_bodies, return_all_contact_bodies=True
        )
        if self_collision:
            print(f"Warning: Self-collision detected: {contact_bodies}")
        return self_collision

    def reset(self):
        """Reset the simulation data to the model's initial state."""
        mujoco.mj_resetData(self.mj_model, self.mj_data)
+
+
class CubeEnv(DefaultEnv):
    """Environment with a cube object for pick and place tasks."""

    def __init__(
        self,
        config: Dict[str, any],
        onscreen: bool = False,
        offscreen: bool = False,
    ):
        # Work on a copy so the caller's config stays untouched, then point it
        # at the cube pick-and-place scene.
        cube_config = dict(config)
        cube_config["ROBOT_SCENE"] = "gr00t_wbc/control/robot_model/model_data/g1/pnp_cube_43dof.xml"
        super().__init__(cube_config, "cube", {}, onscreen, offscreen)

    def update_reward(self):
        """Reward is 1 when a right fingertip touches the cube AND the cube
        sits in the target height band (0.85–2.0 m)."""
        fingertips = [
            "right_hand_thumb_2_link",
            "right_hand_middle_1_link",
            "right_hand_index_1_link",
        ]
        touching = check_contact(
            self.mj_model, self.mj_data, fingertips, "cube_body"
        )
        lifted = check_height(self.mj_model, self.mj_data, "cube", 0.85, 2.0)

        with self.reward_lock:
            self.last_reward = touching & lifted
+
+
class BoxEnv(DefaultEnv):
    """Environment with a box object for manipulation tasks"""

    def __init__(
        self,
        config: Dict[str, any],
        onscreen: bool = False,
        offscreen: bool = False,
    ):
        # Override the robot scene
        config = config.copy()  # Create a copy to avoid modifying the original
        config["ROBOT_SCENE"] = "gr00t_wbc/control/robot_model/model_data/g1/lift_box_43dof.xml"
        super().__init__(config, "box", {}, onscreen, offscreen)

    def update_reward(self):
        """Reward is 1 when both hands touch the box AND the box is lifted
        into the 0.92–2.0 m height band.

        BUG FIX: this method was previously named ``reward``, so the
        simulator's periodic ``update_reward()`` call resolved to the base
        class no-op and the box task reward never updated. Renamed to override
        the hook; ``reward()`` is kept below as a backward-compatible alias.

        Returns:
            The freshly computed reward (also stored in ``last_reward``).
        """
        left_hand_body = [
            "left_hand_thumb_2_link",
            "left_hand_middle_1_link",
            "left_hand_index_1_link",
        ]
        right_hand_body = [
            "right_hand_thumb_2_link",
            "right_hand_middle_1_link",
            "right_hand_index_1_link",
        ]
        # Require contact from BOTH hands.
        gripper_box_contact = check_contact(self.mj_model, self.mj_data, left_hand_body, "box_body")
        gripper_box_contact &= check_contact(
            self.mj_model, self.mj_data, right_hand_body, "box_body"
        )
        box_lifted = check_height(self.mj_model, self.mj_data, "box", 0.92, 2.0)

        print("gripper_box_contact: ", gripper_box_contact, "box_lifted: ", box_lifted)

        with self.reward_lock:
            self.last_reward = gripper_box_contact & box_lifted
        return self.last_reward

    def reward(self):
        """Backward-compatible alias for the old method name."""
        return self.update_reward()
+
+
class BottleEnv(DefaultEnv):
    """Environment with a cylinder object for manipulation tasks"""

    def __init__(
        self,
        config: Dict[str, any],
        onscreen: bool = False,
        offscreen: bool = False,
    ):
        # Point a copied config at the bottle scene; never mutate the caller's.
        bottle_config = dict(config)
        bottle_config["ROBOT_SCENE"] = "gr00t_wbc/control/robot_model/model_data/g1/pnp_bottle_43dof.xml"
        egoview_cameras = {
            "egoview": {
                "height": 400,
                "width": 400,
            },
        }
        super().__init__(
            bottle_config, "cylinder", egoview_cameras, onscreen, offscreen
        )

        # Cache bottle handles for privileged observations.
        self.bottle_body = self.mj_model.body("bottle_body")
        self.bottle_geom = self.mj_model.geom("bottle")

        # Lock the interactive viewer onto the robot's egocentric camera.
        if self.viewer is not None:
            self.viewer.cam.type = mujoco.mjtCamera.mjCAMERA_FIXED
            self.viewer.cam.fixedcamid = self.mj_model.camera("egoview").id

    def update_reward(self):
        """No task reward is defined for the bottle scene yet."""
        pass

    def get_privileged_obs(self):
        """Return the ground-truth bottle pose (world position + quaternion)."""
        bottle_id = self.bottle_body.id
        return {
            "bottle_pos": self.mj_data.xpos[bottle_id],
            "bottle_quat": self.mj_data.xquat[bottle_id],
        }
+
+
class BaseSimulator:
    """Base simulator that owns the environment, the optional ROS 2 node, the
    Unitree DDS bridge, and the main stepping loop."""

    def __init__(self, config: Dict[str, any], env_name: str = "default", **kwargs):
        """Build the environment, initialize DDS, and wire the Unitree bridge.

        Args:
            config: Parsed YAML simulation config.
            env_name: One of "default", "pnp_cube", "lift_box", "pnp_bottle".
            **kwargs: Forwarded to the environment (e.g. onscreen/offscreen).

        Raises:
            ValueError: If ``env_name`` is not a recognized environment.
        """
        self.config = config
        self.env_name = env_name

        # Initialize ROS 2 node (optional, only if rclpy is available)
        if HAS_RCLPY:
            if not rclpy.ok():
                rclpy.init()
                self.node = rclpy.create_node("sim_mujoco")
                # Spin in a daemon thread so the node services callbacks.
                self.thread = threading.Thread(target=rclpy.spin, args=(self.node,), daemon=True)
                self.thread.start()
            else:
                self.thread = None
                executor = rclpy.get_global_executor()
                self.node = executor.get_nodes()[0]  # will only take the first node
        else:
            self.node = None
            self.thread = None

        # Set update frequencies (seconds between updates)
        self.sim_dt = self.config["SIMULATE_DT"]
        self.reward_dt = self.config.get("REWARD_DT", 0.02)
        self.image_dt = self.config.get("IMAGE_DT", 0.033333)
        self.viewer_dt = self.config.get("VIEWER_DT", 0.02)

        # Create the appropriate environment based on name
        if env_name == "default":
            self.sim_env = DefaultEnv(config, env_name, **kwargs)
        elif env_name == "pnp_cube":
            self.sim_env = CubeEnv(config, **kwargs)
        elif env_name == "lift_box":
            self.sim_env = BoxEnv(config, **kwargs)
        elif env_name == "pnp_bottle":
            self.sim_env = BottleEnv(config, **kwargs)
        else:
            raise ValueError(f"Invalid environment name: {env_name}")

        # Initialize the DDS communication layer - should be safe to call multiple times
        try:
            if self.config.get("INTERFACE", None):
                ChannelFactoryInitialize(self.config["DOMAIN_ID"], self.config["INTERFACE"])
            else:
                ChannelFactoryInitialize(self.config["DOMAIN_ID"])
        except Exception as e:
            # If it fails because it's already initialized, that's okay
            print(f"Note: Channel factory initialization attempt: {e}")

        # Initialize the unitree bridge and pass it to the environment
        self.init_unitree_bridge()
        self.sim_env.set_unitree_bridge(self.unitree_bridge)

        # Initialize additional components
        self.init_subscriber()
        self.init_publisher()

        self.sim_thread = None

    def start_as_thread(self):
        """Run the main loop in a background thread instead of blocking."""
        self.sim_thread = Thread(target=self.start)
        self.sim_thread.start()

    def start_image_publish_subprocess(self, start_method: str = "spawn", camera_port: int = 5555):
        """Start the image publish subprocess"""
        self.sim_env.start_image_publish_subprocess(start_method, camera_port)

    def init_subscriber(self):
        """Initialize subscribers. Can be overridden by subclasses."""
        pass

    def init_publisher(self):
        """Initialize publishers. Can be overridden by subclasses."""
        pass

    def init_unitree_bridge(self):
        """Initialize the unitree SDK bridge"""
        self.unitree_bridge = UnitreeSdk2Bridge(self.config)
        if self.config["USE_JOYSTICK"]:
            self.unitree_bridge.SetupJoystick(
                device_id=self.config["JOYSTICK_DEVICE"], js_type=self.config["JOYSTICK_TYPE"]
            )

    def start(self):
        """Main simulation loop; blocks until the viewer closes or an error occurs."""
        import time

        # Steps between viewer/reward/image updates, clamped to >= 1 so a dt
        # smaller than sim_dt cannot cause a modulo-by-zero crash.
        viewer_every = max(1, int(self.viewer_dt / self.sim_dt))
        reward_every = max(1, int(self.reward_dt / self.sim_dt))
        image_every = max(1, int(self.image_dt / self.sim_dt))

        sim_cnt = 0
        last_time = time.time()

        print(f"Starting simulation loop. Viewer: {self.sim_env.viewer is not None}")

        try:
            # Run while the viewer is open, or forever when headless.
            while (
                self.sim_env.viewer and self.sim_env.viewer.is_running()
            ) or self.sim_env.viewer is None:
                # Run simulation step
                self.sim_env.sim_step()

                # Update viewer at viewer rate
                if sim_cnt % viewer_every == 0:
                    self.sim_env.update_viewer()

                # Calculate reward at reward rate
                if sim_cnt % reward_every == 0:
                    self.sim_env.update_reward()

                # Update render caches at image rate
                if sim_cnt % image_every == 0:
                    self.sim_env.update_render_caches()

                # Sleep to maintain correct rate (simple timing without ROS)
                elapsed = time.time() - last_time
                sleep_time = max(0, self.sim_dt - elapsed)
                if sleep_time > 0:
                    time.sleep(sleep_time)
                last_time = time.time()

                sim_cnt += 1

            print(f"Loop exited. Viewer running: {self.sim_env.viewer.is_running() if self.sim_env.viewer else 'No viewer'}")
        except KeyboardInterrupt:
            # User pressed Ctrl+C - exit cleanly
            print("Keyboard interrupt received")
        except Exception as e:
            print(f"Exception in simulation loop: {e}")
            import traceback
            traceback.print_exc()
        finally:
            # Always release viewer/ROS/subprocess resources, even on errors.
            self.close()

    def __del__(self):
        """Best-effort cleanup; __init__ may have failed partway through."""
        try:
            self.close()
        except Exception:
            # Never raise during interpreter shutdown.
            pass

    def reset(self):
        """Reset the simulation. Can be overridden by subclasses."""
        self.sim_env.reset()

    def close(self):
        """Close the simulation. Can be overridden by subclasses."""
        try:
            # getattr guards: __init__ may not have reached sim_env creation.
            env = getattr(self, "sim_env", None)
            if env is not None:
                # Stop the image publishing subprocess first - it also unlinks
                # its shared-memory blocks (previously leaked on close).
                if getattr(env, "image_publish_process", None) is not None:
                    env.image_publish_process.stop()

                # Close viewer
                if getattr(env, "viewer", None) is not None:
                    env.viewer.close()

            # Shutdown ROS (if available)
            if HAS_RCLPY and rclpy.ok():
                rclpy.shutdown()
        except Exception as e:
            print(f"Warning during close: {e}")

    def get_privileged_obs(self):
        """Return the environment's privileged observation dict."""
        obs = self.sim_env.get_privileged_obs()
        # TODO: add ros2 topic to get privileged obs
        return obs

    def handle_keyboard_button(self, key):
        # Only handles keyboard buttons for default env.
        if self.env_name == "default":
            self.sim_env.handle_keyboard_button(key)
+
+
if __name__ == "__main__":
    # Parse the single --config argument pointing at the simulation YAML.
    parser = argparse.ArgumentParser(description="Robot")
    parser.add_argument(
        "--config",
        type=str,
        default="./gr00t_wbc/control/main/teleop/configs/g1_29dof_gear_wbc.yaml",
        help="config file",
    )
    cli_args = parser.parse_args()

    # Load the simulation configuration.
    with open(cli_args.config, "r") as cfg_file:
        config = yaml.load(cfg_file, Loader=yaml.FullLoader)

    # Bring up the DDS layer before constructing the simulator (BaseSimulator
    # re-attempts this itself but tolerates an already-initialized factory).
    interface = config.get("INTERFACE", None)
    if interface:
        ChannelFactoryInitialize(config["DOMAIN_ID"], interface)
    else:
        ChannelFactoryInitialize(config["DOMAIN_ID"])

    simulation = BaseSimulator(config)
    simulation.start_as_thread()
diff --git a/sim/image_publish_utils.py b/sim/image_publish_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9dd78953e6f5c0b5b357bd8bd3d2fb71643c29c
--- /dev/null
+++ b/sim/image_publish_utils.py
@@ -0,0 +1,257 @@
+import multiprocessing as mp
+from multiprocessing import shared_memory
+import time
+from typing import Any, Dict
+
+import numpy as np
+
+from .sensor_utils import ImageMessageSchema, ImageUtils, SensorServer
+
+
def get_multiprocessing_info(verbose: bool = True) -> str:
    """Report the currently active multiprocessing start method.

    Args:
        verbose: When True, also print the list of available start methods.

    Returns:
        The name of the current start method (e.g. "spawn" or "fork").
    """
    available = mp.get_all_start_methods()
    if verbose:
        print(f"Available start methods: {available}")
    return mp.get_start_method()
+
+
class ImagePublishProcess:
    """Subprocess for publishing images using shared memory and ZMQ"""

    def __init__(
        self,
        camera_configs: Dict[str, Any],
        image_dt: float,
        zmq_port: int = 5555,
        start_method: str = "spawn",
        verbose: bool = False,
    ):
        """Allocate one shared-memory RGB buffer per camera and the
        multiprocessing primitives used to signal the worker.

        Args:
            camera_configs: mapping camera name -> {"height": int, "width": int, ...}.
            image_dt: target seconds between published frames.
            zmq_port: TCP port the ZMQ sensor server binds in the subprocess.
            start_method: multiprocessing start method ("spawn", "fork", ...).
            verbose: print lifecycle/debug messages.
        """
        self.camera_configs = camera_configs
        self.image_dt = image_dt
        self.zmq_port = zmq_port
        self.verbose = verbose
        self.shared_memory_blocks = {}
        self.shared_memory_info = {}
        self.process = None

        # Use specific context to avoid global state pollution
        self.mp_context = mp.get_context(start_method)
        if self.verbose:
            print(f"Using multiprocessing context: {start_method}")

        # stop_event asks the worker to exit; data_ready_event signals that a
        # fresh set of frames has been written into shared memory.
        self.stop_event = self.mp_context.Event()
        self.data_ready_event = self.mp_context.Event()

        # Ensure events start in correct state
        self.stop_event.clear()
        self.data_ready_event.clear()

        if self.verbose:
            print(f"Initial stop_event state: {self.stop_event.is_set()}")
            print(f"Initial data_ready_event state: {self.data_ready_event.is_set()}")

        # Calculate shared memory requirements for each camera
        for camera_name, camera_config in camera_configs.items():
            height = camera_config["height"]
            width = camera_config["width"]
            # RGB image: height * width * 3 (uint8)
            size = height * width * 3

            # Create shared memory block
            shm = shared_memory.SharedMemory(create=True, size=size)
            self.shared_memory_blocks[camera_name] = shm
            # Metadata passed to the worker so it can re-attach by name.
            self.shared_memory_info[camera_name] = {
                "name": shm.name,
                "size": size,
                "shape": (height, width, 3),
                "dtype": np.uint8,
            }

    def start_process(self):
        """Start the image publishing subprocess"""
        if self.verbose:
            print(f"Starting subprocess with stop_event state: {self.stop_event.is_set()}")
        # The worker receives only picklable metadata (names, dts, events),
        # never the SharedMemory objects themselves.
        self.process = self.mp_context.Process(
            target=self._image_publish_worker,
            args=(
                self.shared_memory_info,
                self.image_dt,
                self.zmq_port,
                self.stop_event,
                self.data_ready_event,
                self.verbose,
            ),
        )
        self.process.start()
        if self.verbose:
            print(f"Subprocess started, PID: {self.process.pid}")

    def update_shared_memory(self, render_caches: Dict[str, np.ndarray]):
        """Update shared memory with new rendered images

        Args:
            render_caches: mapping ``"<camera>_image"`` -> RGB array; cameras
                missing from the dict are skipped.
        """
        images_updated = 0
        for camera_name in self.camera_configs.keys():
            image_key = f"{camera_name}_image"
            if image_key in render_caches:
                image = render_caches[image_key]

                # Ensure image is uint8 and has correct shape
                if image.dtype != np.uint8:
                    # presumably float images are in [0, 1] — TODO confirm
                    image = (image * 255).astype(np.uint8)

                # Get shared memory array
                shm = self.shared_memory_blocks[camera_name]
                shared_array = np.ndarray(
                    self.shared_memory_info[camera_name]["shape"],
                    dtype=self.shared_memory_info[camera_name]["dtype"],
                    buffer=shm.buf,
                )

                # Copy image data to shared memory atomically
                np.copyto(shared_array, image)
                images_updated += 1

        # Signal that new data is ready only after all images are written
        if images_updated > 0:
            if self.verbose:
                print(f"Main process: Updated {images_updated} images, setting data_ready_event")
            self.data_ready_event.set()
        elif self.verbose:
            print(
                "Main process: No images to update. "
                "please check if camera configs are provided and the renderer is properly initialized"
            )

    def stop(self):
        """Stop the image publishing subprocess"""
        if self.verbose:
            print("Stopping image publishing subprocess...")
        self.stop_event.set()

        if self.process and self.process.is_alive():
            # Give the process time to clean up gracefully
            self.process.join(timeout=5)
            if self.process.is_alive():
                if self.verbose:
                    print("Subprocess didn't stop gracefully, terminating...")
                # Escalate: SIGTERM first, then SIGKILL as a last resort.
                self.process.terminate()
                self.process.join(timeout=2)
                if self.process.is_alive():
                    if self.verbose:
                        print("Force killing subprocess...")
                    self.process.kill()
                    self.process.join()

        # Clean up shared memory (unlink only after the worker has exited)
        for camera_name, shm in self.shared_memory_blocks.items():
            try:
                shm.close()
                shm.unlink()
                if self.verbose:
                    print(f"Cleaned up shared memory for {camera_name}")
            except Exception as e:
                if self.verbose:
                    print(f"Warning: Failed to cleanup shared memory for {camera_name}: {e}")

        self.shared_memory_blocks.clear()
        if self.verbose:
            print("Image publishing subprocess stopped and cleaned up")

    @staticmethod
    def _image_publish_worker(
        shared_memory_info, image_dt, zmq_port, stop_event, data_ready_event, verbose
    ):
        """Worker function that runs in the subprocess

        Attaches to the parent's shared-memory buffers by name, then loops:
        wait for data_ready_event, snapshot the frames, serialize, and send
        them over ZMQ — until stop_event is set.
        """
        # Import dependencies within worker (needed for multiprocessing spawn mode)
        from .sensor_utils import ImageMessageSchema, ImageUtils, SensorServer

        if verbose:
            print(f"Worker started! PID: {__import__('os').getpid()}")
            print(f"Worker stop_event state at start: {stop_event.is_set()}")
            print(f"Worker data_ready_event state at start: {data_ready_event.is_set()}")

        try:
            # Initialize ZMQ sensor server
            sensor_server = SensorServer()
            sensor_server.start_server(port=zmq_port)

            # Connect to shared memory blocks
            shared_arrays = {}
            shm_blocks = {}
            for camera_name, info in shared_memory_info.items():
                # Attach (not create) by name; keep the SharedMemory object
                # alive so the ndarray's buffer stays valid.
                shm = shared_memory.SharedMemory(name=info["name"])
                shm_blocks[camera_name] = shm
                shared_arrays[camera_name] = np.ndarray(
                    info["shape"], dtype=info["dtype"], buffer=shm.buf
                )

            print(
                f"Image publishing subprocess started with {len(shared_arrays)} cameras on ZMQ port {zmq_port}"
            )

            loop_count = 0
            last_data_time = time.time()

            while not stop_event.is_set():
                loop_count += 1

                # Wait for new data with shorter timeout for better responsiveness
                timeout = min(image_dt, 0.1)  # Max 100ms timeout
                data_available = data_ready_event.wait(timeout=timeout)

                current_time = time.time()

                if data_available:
                    # Clear immediately so the next frame can be signaled.
                    data_ready_event.clear()
                    if loop_count % 50 == 0:
                        print("Image publish frequency: ", 1 / (current_time - last_data_time))
                    last_data_time = current_time

                    # Collect all camera images and serialize them
                    try:
                        # Copy all images atomically at once
                        image_copies = {name: arr.copy() for name, arr in shared_arrays.items()}

                        # Create message with all camera images
                        message_dict = {
                            "images": image_copies,
                            "timestamps": {name: current_time for name in image_copies.keys()},
                        }

                        # Create ImageMessageSchema and serialize
                        image_msg = ImageMessageSchema(
                            timestamps=message_dict.get("timestamps"),
                            images=message_dict.get("images", None),
                        )

                        # Serialize and send via ZMQ
                        serialized_data = image_msg.serialize()

                        # Add individual camera images to the message
                        for camera_name, image_copy in image_copies.items():
                            serialized_data[f"{camera_name}"] = ImageUtils.encode_image(image_copy)

                        sensor_server.send_message(serialized_data)

                    except Exception as e:
                        print(f"Error publishing images: {e}")

                elif verbose and loop_count % 10 == 0:
                    print(f"Subprocess: Still waiting for data... (iteration {loop_count})")

                # Small sleep to prevent busy waiting when no data
                if not data_available:
                    time.sleep(0.001)

        except KeyboardInterrupt:
            print("Image publisher interrupted by user")
        finally:
            # Clean up
            try:
                # Close (but do not unlink) the attachments; the parent owns
                # the shared-memory lifetime and unlinks in stop().
                for shm in shm_blocks.values():
                    shm.close()
                sensor_server.stop_server()
            except Exception as e:
                print(f"Error during subprocess cleanup: {e}")
            if verbose:
                print("Image publish subprocess stopped")
diff --git a/sim/metric_utils.py b/sim/metric_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..369b8797ca185d8ba4cf87fc770bd034a56a5df3
--- /dev/null
+++ b/sim/metric_utils.py
@@ -0,0 +1,71 @@
+from typing import List, Tuple
+
+import mujoco
+
+from .sim_utilts import get_body_geom_ids
+
+
def check_contact(
    mj_model: mujoco.MjModel,
    mj_data: mujoco.MjData,
    bodies_1: List[str] | str,
    bodies_2: List[str] | str,
    return_all_contact_bodies: bool = False,
) -> Tuple[bool, set] | bool:
    """
    Finds contact between two body groups. Any geom in a body counts as contact.

    Args:
        mj_model: Current simulation model.
        mj_data: Current simulation data.
        bodies_1 (str or list of str): an individual body name or list of body names.
        bodies_2 (str or list of str): another individual body name or list of body names.
        return_all_contact_bodies: when True, scan every active contact instead of
            stopping at the first match.

    Returns:
        bool: True if any body in @bodies_1 is in contact with any body in @bodies_2.
        When @return_all_contact_bodies is True, also returns the set of contacting
        (geom1_body_name, geom2_body_name) pairs.
    """
    if isinstance(bodies_1, str):
        bodies_1 = [bodies_1]
    if isinstance(bodies_2, str):
        bodies_2 = [bodies_2]

    # Flatten each body group into a set of geom ids for O(1) membership tests
    # (the original list membership made the contact scan O(ncon * ngeom)).
    geoms_1 = {
        g for body in bodies_1 for g in get_body_geom_ids(mj_model, mj_model.body(body).id)
    }
    geoms_2 = {
        g for body in bodies_2 for g in get_body_geom_ids(mj_model, mj_model.body(body).id)
    }

    contact_bodies = []
    for i in range(mj_data.ncon):
        contact = mj_data.contact[i]
        # Check the geom pair in both orders: MuJoCo does not guarantee which
        # group ends up as geom1 vs geom2.
        forward = contact.geom1 in geoms_1 and contact.geom2 in geoms_2
        backward = contact.geom2 in geoms_1 and contact.geom1 in geoms_2
        if forward or backward:
            contact_bodies.append(
                (
                    mj_model.body(mj_model.geom(contact.geom1).bodyid).name,
                    mj_model.body(mj_model.geom(contact.geom2).bodyid).name,
                )
            )
            if not return_all_contact_bodies:
                break
    if return_all_contact_bodies:
        return len(contact_bodies) > 0, set(contact_bodies)
    return len(contact_bodies) > 0
+
+
def check_height(
    mj_model: mujoco.MjModel,
    mj_data: mujoco.MjData,
    geom_name: str,
    lower_bound: float = -float("inf"),
    upper_bound: float = float("inf"),
):
    """
    Return True when the world z-position of the named geom lies strictly
    between lower_bound and upper_bound.
    """
    height = mj_data.geom_xpos[mj_model.geom(geom_name).id][2]
    return lower_bound < height < upper_bound
diff --git a/sim/sensor_utils.py b/sim/sensor_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..7b205c2e6c68e22d0468c13aba107a0b6e4cff10
--- /dev/null
+++ b/sim/sensor_utils.py
@@ -0,0 +1,130 @@
+"""Standalone sensor utilities for camera image publishing via ZMQ"""
+import base64
+from dataclasses import dataclass
+from typing import Any, Dict
+
+import cv2
+import msgpack
+import msgpack_numpy as m
+import numpy as np
+import zmq
+
+
@dataclass
class ImageMessageSchema:
    """
    Standardized message schema for image data.
    Used to serialize/deserialize image data for network transmission.
    """

    timestamps: Dict[str, float]
    # Timestamps keyed by image identifier (e.g., {"ego_view": 123.45}).
    images: Dict[str, np.ndarray]
    # Images keyed by image identifier (e.g., {"ego_view": array}).

    def serialize(self) -> Dict[str, Any]:
        """Build a transmission-ready dict with every image base64/JPEG encoded."""
        return {
            "timestamps": self.timestamps,
            "images": {
                name: ImageUtils.encode_image(arr) for name, arr in self.images.items()
            },
        }

    @staticmethod
    def deserialize(data: Dict[str, Any]) -> "ImageMessageSchema":
        """Rebuild a schema instance; base64 string payloads are decoded back to arrays."""
        decoded = {
            name: ImageUtils.decode_image(payload) if isinstance(payload, str) else payload
            for name, payload in data.get("images", {}).items()
        }
        return ImageMessageSchema(timestamps=data.get("timestamps", {}), images=decoded)

    def asdict(self) -> Dict[str, Any]:
        """Return the message as a plain dict (images left as-is)."""
        return {"timestamps": self.timestamps, "images": self.images}
+
+
class SensorServer:
    """ZMQ-based sensor server for publishing camera images.

    Lifecycle: call start_server() once, then send_message() per frame,
    and stop_server() on shutdown.
    """

    def start_server(self, port: int):
        """Bind a PUB socket on all interfaces at *port* and reset the counters."""
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.PUB)
        self.socket.setsockopt(zmq.SNDHWM, 20)  # high water mark: queue at most 20 msgs
        self.socket.setsockopt(zmq.LINGER, 0)  # do not block on close
        self.socket.bind(f"tcp://*:{port}")
        print(f"Sensor server running at tcp://*:{port}")

        self.message_sent = 0
        self.message_dropped = 0

    def stop_server(self):
        """Close the socket and terminate the ZMQ context."""
        self.socket.close()
        self.context.term()

    def send_message(self, data: Dict[str, Any]):
        """Serialize *data* with msgpack and publish it without blocking.

        A full send buffer (no/slow subscriber) drops the frame. BUG FIX:
        previously dropped messages were also counted in ``message_sent``
        because the increment ran after the except branch fell through.
        """
        try:
            packed = msgpack.packb(data, use_bin_type=True)
            self.socket.send(packed, flags=zmq.NOBLOCK)
        except zmq.Again:
            self.message_dropped += 1
            print(f"[Warning] message dropped: {self.message_dropped}")
        else:
            self.message_sent += 1

        # Periodic stats on total traffic (sent + dropped).
        if (self.message_sent + self.message_dropped) % 100 == 0:
            print(
                f"[Sensor server] Message sent: {self.message_sent}, message dropped: {self.message_dropped}"
            )
+
+
class SensorClient:
    """ZMQ-based sensor client for subscribing to camera images."""

    def start_client(self, server_ip: str, port: int):
        """Open a SUB socket to the sensor server, keeping only the latest frame."""
        context = zmq.Context()
        socket = context.socket(zmq.SUB)
        socket.setsockopt_string(zmq.SUBSCRIBE, "")  # subscribe to every topic
        socket.setsockopt(zmq.CONFLATE, True)  # last msg only.
        socket.setsockopt(zmq.RCVHWM, 3)  # queue size 3 for receive buffer
        socket.connect(f"tcp://{server_ip}:{port}")
        self.context = context
        self.socket = socket

    def stop_client(self):
        """Tear down the socket and its context."""
        self.socket.close()
        self.context.term()

    def receive_message(self):
        """Block for the next message and return the msgpack-decoded payload."""
        raw = self.socket.recv()
        return msgpack.unpackb(raw, object_hook=m.decode)
+
+
class ImageUtils:
    """Helpers to encode/decode images as base64 strings for network transport."""

    @staticmethod
    def encode_image(image: np.ndarray) -> str:
        """Compress *image* to JPEG (quality 80) and return it base64-encoded."""
        jpeg = cv2.imencode(".jpg", image, [int(cv2.IMWRITE_JPEG_QUALITY), 80])[1]
        return base64.b64encode(jpeg).decode("utf-8")

    @staticmethod
    def encode_depth_image(image: np.ndarray) -> str:
        """Compress a depth image losslessly to PNG and return it base64-encoded."""
        png_bytes = cv2.imencode(".png", image)[1].tobytes()
        return base64.b64encode(png_bytes).decode("utf-8")

    @staticmethod
    def decode_image(image: str) -> np.ndarray:
        """Inverse of encode_image: base64 JPEG string -> color image array."""
        raw = np.frombuffer(base64.b64decode(image), dtype=np.uint8)
        return cv2.imdecode(raw, cv2.IMREAD_COLOR)

    @staticmethod
    def decode_depth_image(image: str) -> np.ndarray:
        """Inverse of encode_depth_image: base64 PNG string -> depth array."""
        raw = np.frombuffer(base64.b64decode(image), dtype=np.uint8)
        return cv2.imdecode(raw, cv2.IMREAD_UNCHANGED)
+
diff --git a/sim/sim_utilts.py b/sim/sim_utilts.py
new file mode 100644
index 0000000000000000000000000000000000000000..e032eb6d801469cb543dd10fa6cef11a90f643a8
--- /dev/null
+++ b/sim/sim_utilts.py
@@ -0,0 +1,96 @@
+"""
+Utility functions for working with Mujoco models.
+copied from https://github.com/kevinzakka/mink/blob/main/mink/utils.py
+"""
+
+from typing import List
+
+import mujoco
+
+
def get_body_body_ids(model: mujoco.MjModel, body_id: int) -> List[int]:
    """Get immediate children bodies belonging to a given body.

    Args:
        model: Mujoco model.
        body_id: ID of body.

    Returns:
        A List containing all child body ids.
    """
    children: List[int] = []
    for candidate in range(model.nbody):
        if candidate == body_id:
            continue  # the world body is recorded as its own parent; skip self
        if model.body_parentid[candidate] == body_id:
            children.append(candidate)
    return children
+
+
def get_subtree_body_ids(model: mujoco.MjModel, body_id: int) -> List[int]:
    """Get all bodies belonging to subtree starting at a given body.

    Args:
        model: Mujoco model.
        body_id: ID of body where subtree starts.

    Returns:
        A List containing all subtree body ids (depth-first order).
    """
    subtree: List[int] = []
    pending = [body_id]
    while pending:
        current = pending.pop()
        subtree.append(current)
        pending.extend(get_body_body_ids(model, current))
    return subtree
+
+
def get_subtree_body_names(model: mujoco.MjModel, body_id: int) -> List[str]:
    """Get the names of all bodies in the subtree rooted at a given body.

    Args:
        model: Mujoco model.
        body_id: ID of body where subtree starts.

    Returns:
        A List containing all subtree body names.
    """
    subtree_ids = get_subtree_body_ids(model, body_id)
    return [model.body(bid).name for bid in subtree_ids]
+
+
def get_body_geom_ids(model: mujoco.MjModel, body_id: int) -> List[int]:
    """Get immediate geoms belonging to a given body.

    Immediate geoms are those attached directly to the body, not to its
    descendants.

    Args:
        model: Mujoco model.
        body_id: ID of body.

    Returns:
        A list containing all body geom ids.
    """
    first = model.body_geomadr[body_id]
    count = model.body_geomnum[body_id]
    return list(range(first, first + count))
+
+
def get_subtree_geom_ids(model: mujoco.MjModel, body_id: int) -> List[int]:
    """Get all geoms belonging to subtree starting at a given body.

    A subtree is the kinematic tree rooted at the body, including all of its
    descendants.

    Args:
        model: Mujoco model.
        body_id: ID of body where subtree starts.

    Returns:
        A list containing all subtree geom ids.
    """
    collected: List[int] = []
    pending = [body_id]
    while pending:
        current = pending.pop()
        collected.extend(get_body_geom_ids(model, current))
        pending.extend(get_body_body_ids(model, current))
    return collected
diff --git a/sim/simulator_factory.py b/sim/simulator_factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..3dca58dc250a1520c5b2bad0f0bbb375ae3aa01e
--- /dev/null
+++ b/sim/simulator_factory.py
@@ -0,0 +1,148 @@
+import time
+from typing import Any, Dict
+
+from unitree_sdk2py.core.channel import ChannelFactoryInitialize
+
+from .base_sim import BaseSimulator
+
+
def init_channel(config: Dict[str, Any]) -> None:
    """
    Initialize the communication channel for simulator/robot communication.

    Args:
        config: Configuration dictionary containing DOMAIN_ID and optionally INTERFACE
    """
    interface = config.get("INTERFACE")
    # A falsy INTERFACE (missing, None, empty string) falls back to the
    # single-argument initialization.
    if interface:
        ChannelFactoryInitialize(config["DOMAIN_ID"], interface)
    else:
        ChannelFactoryInitialize(config["DOMAIN_ID"])
+
+
class SimulatorFactory:
    """Factory class for creating different types of simulators."""

    @staticmethod
    def create_simulator(config: Dict[str, Any], env_name: str = "default", **kwargs):
        """
        Create a simulator based on the configuration.

        Args:
            config: Configuration dictionary; the SIMULATOR key ("mujoco" or
                "robocasa", default "mujoco") selects the backend.
            env_name: Environment name forwarded to the backend.
            **kwargs: Additional keyword arguments for specific simulators.

        Returns:
            The created simulator instance, or None when SIMULATOR is not a
            recognized type (a warning is printed in that case).
        """
        simulator_type = config.get("SIMULATOR", "mujoco")
        if simulator_type == "mujoco":
            return SimulatorFactory._create_mujoco_simulator(config, env_name, **kwargs)
        elif simulator_type == "robocasa":
            return SimulatorFactory._create_robocasa_simulator(config, env_name, **kwargs)
        else:
            print(
                f"Warning: Invalid simulator type: {simulator_type}. "
                "If you are using run_sim_loop, please ignore this warning."
            )
            return None

    @staticmethod
    def _create_mujoco_simulator(config: Dict[str, Any], env_name: str = "default", **kwargs):
        """Create a MuJoCo simulator (BaseSimulator) instance.

        Consumes the optional kwargs ``camera_configs``, ``onscreen`` (default
        True) and ``offscreen`` (default False); remaining kwargs are dropped.
        """
        camera_configs = kwargs.pop("camera_configs", {})
        if len(camera_configs) > 0:
            print(f"Debug: SimulatorFactory received {len(camera_configs)} camera config(s)")

        env_kwargs = dict(
            onscreen=kwargs.pop("onscreen", True),
            offscreen=kwargs.pop("offscreen", False),
            camera_configs=camera_configs,
        )
        return BaseSimulator(config=config, env_name=env_name, **env_kwargs)

    @staticmethod
    def _create_robocasa_simulator(config: Dict[str, Any], env_name: str = "default", **kwargs):
        """Create a RoboCasa simulator (RoboCasaG1EnvServer) instance.

        NOTE(review): this method mutates the caller's ``config`` — it pops
        "verbose" and "SIMULATE_DT" below — so the same config dict cannot be
        reused for a second call; confirm this is intended.
        """
        from gr00t_wbc.control.envs.g1.sim.robocasa_sim import RoboCasaG1EnvServer
        from gr00t_wbc.control.envs.robocasa.utils.controller_utils import (
            update_robosuite_controller_configs,
        )
        from gr00t_wbc.control.envs.robocasa.utils.sim_utils import change_simulation_timestep

        change_simulation_timestep(config["SIMULATE_DT"])

        # Use default environment if not specified
        if env_name == "default":
            env_name = "GroundOnly"

        # Get or create controller configurations
        # NOTE(review): controller_configs/wbc_version are read with .get (not
        # popped), so they remain in **kwargs and are also forwarded to
        # RoboCasaG1EnvServer below — verify the server accepts them.
        controller_configs = kwargs.get("controller_configs")
        if controller_configs is None:
            wbc_version = kwargs.get("wbc_version", "gear_wbc")
            controller_configs = update_robosuite_controller_configs("G1", wbc_version)

        # Build environment kwargs
        env_kwargs = dict(
            onscreen=kwargs.pop("onscreen", True),
            offscreen=kwargs.pop("offscreen", False),
            camera_names=kwargs.pop("camera_names", None),
            camera_heights=kwargs.pop("camera_heights", None),
            camera_widths=kwargs.pop("camera_widths", None),
            control_freq=kwargs.pop("control_freq", 50),
            controller_configs=controller_configs,
            ik_indicator=kwargs.pop("ik_indicator", False),
            randomize_cameras=kwargs.pop("randomize_cameras", True),
        )

        kwargs.update(
            {
                "verbose": config.pop("verbose", False),
                "sim_freq": 1 / config.pop("SIMULATE_DT"),
            }
        )

        return RoboCasaG1EnvServer(
            env_name=env_name,
            wbc_config=config,
            env_kwargs=env_kwargs,
            **kwargs,
        )

    @staticmethod
    def start_simulator(
        simulator,
        as_thread: bool = True,
        enable_image_publish: bool = False,
        mp_start_method: str = "spawn",
        camera_port: int = 5555,
    ):
        """
        Start the simulator either as a thread or blocking in this process.

        Args:
            simulator: The simulator instance to start.
            as_thread: If True, start in a background thread and return; if
                False, run simulator.start() in this process until it exits.
            enable_image_publish: If True (and not as_thread), start the image
                publishing subprocess before the simulator.
            mp_start_method: Multiprocessing start method for the image
                publishing subprocess (e.g. "spawn").
            camera_port: ZMQ port the image publisher binds to.
        """

        if as_thread:
            simulator.start_as_thread()
        else:
            # Wrap in try-except to make sure simulator is properly closed upon exit.
            try:
                if enable_image_publish:
                    simulator.start_image_publish_subprocess(
                        start_method=mp_start_method,
                        camera_port=camera_port,
                    )
                    time.sleep(1)
                simulator.start()
            except KeyboardInterrupt:
                print("+++++Simulator interrupted by user.")
            except Exception as e:
                print(f"++++error in simulator: {e} ++++")
            finally:
                print("++++closing simulator ++++")
                simulator.close()

        # Allow simulator to initialize
        # NOTE(review): this sleep also runs after the blocking branch above
        # has already closed the simulator, where it has no effect.
        time.sleep(1)
diff --git a/sim/unitree_sdk2py_bridge.py b/sim/unitree_sdk2py_bridge.py
new file mode 100644
index 0000000000000000000000000000000000000000..6cc596611a7e00ce526179da48584c8ee618f368
--- /dev/null
+++ b/sim/unitree_sdk2py_bridge.py
@@ -0,0 +1,497 @@
+import sys
+import threading
+from typing import Dict, Tuple
+
+import glfw
+from loguru import logger
+import mujoco
+import numpy as np
+import pygame
+import scipy.spatial.transform
+from termcolor import colored
+from unitree_sdk2py.core.channel import ChannelPublisher, ChannelSubscriber
+from unitree_sdk2py.idl.default import (
+ unitree_go_msg_dds__WirelessController_,
+ unitree_hg_msg_dds__HandCmd_ as HandCmd_default,
+ unitree_hg_msg_dds__HandState_ as HandState_default,
+)
+from unitree_sdk2py.idl.unitree_go.msg.dds_ import WirelessController_
+try:
+ from unitree_sdk2py.idl.unitree_hg.msg.dds_ import HandCmd_, HandState_, OdoState_
+ HAS_ODOSTATE = True
+except ImportError:
+ from unitree_sdk2py.idl.unitree_hg.msg.dds_ import HandCmd_, HandState_
+ OdoState_ = None
+ HAS_ODOSTATE = False
+ print("Warning: OdoState_ not available in unitree_sdk2py")
+
+
class UnitreeSdk2Bridge:
    """
    This class is responsible for bridging the Unitree SDK2 with the Gr00t environment.
    It is responsible for sending and receiving messages to and from the Unitree SDK2.
    Both the body and hand are supported.
    """

    def __init__(self, config):
        # Note that we do not give the mjdata and mjmodel to the UnitreeSdk2Bridge.
        # It is unsafe and would be unflexible if we use a hand-plugged robot model

        robot_type = config["ROBOT_TYPE"]
        if "g1" in robot_type or "h1-2" in robot_type:
            from unitree_sdk2py.idl.default import (
                unitree_hg_msg_dds__IMUState_ as IMUState_default,
                unitree_hg_msg_dds__LowCmd_,
                unitree_hg_msg_dds__LowState_ as LowState_default,
            )
            try:
                from unitree_sdk2py.idl.default import unitree_hg_msg_dds__OdoState_ as OdoState_default
            except ImportError:
                OdoState_default = None
            from unitree_sdk2py.idl.unitree_hg.msg.dds_ import IMUState_, LowCmd_, LowState_

            self.low_cmd = unitree_hg_msg_dds__LowCmd_()
        elif "h1" == robot_type or "go2" == robot_type:
            from unitree_sdk2py.idl.default import (
                unitree_go_msg_dds__LowCmd_,
                unitree_go_msg_dds__LowState_ as LowState_default,
                unitree_hg_msg_dds__IMUState_ as IMUState_default,
            )
            from unitree_sdk2py.idl.unitree_go.msg.dds_ import IMUState_, LowCmd_, LowState_

            self.low_cmd = unitree_go_msg_dds__LowCmd_()
        else:
            raise ValueError(f"Invalid robot type '{robot_type}'. Expected 'g1', 'h1', or 'go2'.")

        self.num_body_motor = config["NUM_MOTORS"]
        self.num_hand_motor = config.get("NUM_HAND_MOTORS", 0)
        self.use_sensor = config["USE_SENSOR"]

        self.have_imu_ = False
        self.have_frame_sensor_ = False
        # if self.use_sensor:
        #     MOTOR_SENSOR_NUM = 3
        #     self.dim_motor_sensor = MOTOR_SENSOR_NUM * self.num_motor
        #     # Check sensor
        #     for i in range(self.dim_motor_sensor, self.mj_model.nsensor):
        #         name = mujoco.mj_id2name(self.mj_model, mujoco._enums.mjtObj.mjOBJ_SENSOR, i)
        #         if name == "imu_quat":
        #             self.have_imu_ = True
        #         if name == "frame_pos":
        #             self.have_frame_sensor_ = True

        # Unitree sdk2 message
        self.low_state = LowState_default()
        self.low_state_puber = ChannelPublisher("rt/lowstate", LowState_)
        self.low_state_puber.Init()

        # Only create odo_state for supported robot types (if available)
        if ("g1" in robot_type or "h1-2" in robot_type) and HAS_ODOSTATE and OdoState_default:
            self.odo_state = OdoState_default()
            self.odo_state_puber = ChannelPublisher("rt/odostate", OdoState_)
            self.odo_state_puber.Init()
        else:
            self.odo_state = None
            self.odo_state_puber = None
        self.torso_imu_state = IMUState_default()
        self.torso_imu_puber = ChannelPublisher("rt/secondary_imu", IMUState_)
        self.torso_imu_puber.Init()

        self.left_hand_state = HandState_default()
        self.left_hand_state_puber = ChannelPublisher("rt/dex3/left/state", HandState_)
        self.left_hand_state_puber.Init()
        self.right_hand_state = HandState_default()
        self.right_hand_state_puber = ChannelPublisher("rt/dex3/right/state", HandState_)
        self.right_hand_state_puber.Init()

        self.low_cmd_suber = ChannelSubscriber("rt/lowcmd", LowCmd_)
        self.low_cmd_suber.Init(self.LowCmdHandler, 1)

        self.left_hand_cmd = HandCmd_default()
        self.left_hand_cmd_suber = ChannelSubscriber("rt/dex3/left/cmd", HandCmd_)
        self.left_hand_cmd_suber.Init(self.LeftHandCmdHandler, 1)
        self.right_hand_cmd = HandCmd_default()
        self.right_hand_cmd_suber = ChannelSubscriber("rt/dex3/right/cmd", HandCmd_)
        self.right_hand_cmd_suber.Init(self.RightHandCmdHandler, 1)

        # One lock per command stream: handlers run on subscriber threads.
        self.low_cmd_lock = threading.Lock()
        self.left_hand_cmd_lock = threading.Lock()
        self.right_hand_cmd_lock = threading.Lock()

        self.wireless_controller = unitree_go_msg_dds__WirelessController_()
        self.wireless_controller_puber = ChannelPublisher(
            "rt/wirelesscontroller", WirelessController_
        )
        self.wireless_controller_puber.Init()

        # joystick
        self.key_map = {
            "R1": 0,
            "L1": 1,
            "start": 2,
            "select": 3,
            "R2": 4,
            "L2": 5,
            "F1": 6,
            "F2": 7,
            "A": 8,
            "B": 9,
            "X": 10,
            "Y": 11,
            "up": 12,
            "right": 13,
            "down": 14,
            "left": 15,
        }
        self.joystick = None

        # Store config for initialization
        self.config = config

        self.reset()

        # Initialize motors with default KP/KD from config to make robot stiff at startup
        self._initialize_motor_defaults()

    def _initialize_motor_defaults(self):
        """Initialize motor commands with default KP/KD and joint positions"""
        if "MOTOR_KP" in self.config and "MOTOR_KD" in self.config:
            motor_kp = self.config["MOTOR_KP"]
            motor_kd = self.config["MOTOR_KD"]
            default_dof_angles = self.config.get("DEFAULT_DOF_ANGLES", [0.0] * self.num_body_motor)

            for i in range(min(self.num_body_motor, len(motor_kp))):
                self.low_cmd.motor_cmd[i].kp = motor_kp[i]
                self.low_cmd.motor_cmd[i].kd = motor_kd[i]
                self.low_cmd.motor_cmd[i].q = default_dof_angles[i] if i < len(default_dof_angles) else 0.0
                self.low_cmd.motor_cmd[i].dq = 0.0
                self.low_cmd.motor_cmd[i].tau = 0.0

            print(f"✓ Initialized {self.num_body_motor} motors with default KP/KD gains")
        else:
            print("⚠ Warning: MOTOR_KP/MOTOR_KD not found in config, robot will be limp at startup")

    def reset(self):
        """Clear all received/new command flags under their respective locks."""
        with self.low_cmd_lock:
            self.low_cmd_received = False
            self.new_low_cmd = False
        with self.left_hand_cmd_lock:
            self.left_hand_cmd_received = False
            self.new_left_hand_cmd = False
        with self.right_hand_cmd_lock:
            self.right_hand_cmd_received = False
            self.new_right_hand_cmd = False

    def LowCmdHandler(self, msg):
        """Subscriber callback: store the latest body low-level command."""
        with self.low_cmd_lock:
            self.low_cmd = msg
            self.low_cmd_received = True
            self.new_low_cmd = True

    def LeftHandCmdHandler(self, msg):
        """Subscriber callback: store the latest left-hand command."""
        with self.left_hand_cmd_lock:
            self.left_hand_cmd = msg
            self.left_hand_cmd_received = True
            self.new_left_hand_cmd = True

    def RightHandCmdHandler(self, msg):
        """Subscriber callback: store the latest right-hand command."""
        with self.right_hand_cmd_lock:
            self.right_hand_cmd = msg
            self.right_hand_cmd_received = True
            self.new_right_hand_cmd = True

    def cmd_received(self):
        """Return True once any body or hand command has ever been received."""
        with self.low_cmd_lock:
            low_cmd_received = self.low_cmd_received
        with self.left_hand_cmd_lock:
            left_hand_cmd_received = self.left_hand_cmd_received
        with self.right_hand_cmd_lock:
            right_hand_cmd_received = self.right_hand_cmd_received
        return low_cmd_received or left_hand_cmd_received or right_hand_cmd_received

    def PublishLowState(self, obs: Dict[str, any]):
        """Publish body/hand joint state, IMU and (optionally) odometry from *obs*."""
        # publish body state
        if self.use_sensor:
            raise NotImplementedError("Sensor data is not implemented yet.")
        else:
            for i in range(self.num_body_motor):
                self.low_state.motor_state[i].q = obs["body_q"][i]
                self.low_state.motor_state[i].dq = obs["body_dq"][i]
                self.low_state.motor_state[i].ddq = obs["body_ddq"][i]
                self.low_state.motor_state[i].tau_est = obs["body_tau_est"][i]

        if self.use_sensor and self.have_frame_sensor_:
            raise NotImplementedError("Frame sensor data is not implemented yet.")
        else:
            # Get data from ground truth
            # Publish odo state only if available
            if self.odo_state is not None:
                self.odo_state.position[:] = obs["floating_base_pose"][:3]
                self.odo_state.linear_velocity[:] = obs["floating_base_vel"][:3]
                self.odo_state.orientation[:] = obs["floating_base_pose"][3:7]
                self.odo_state.angular_velocity[:] = obs["floating_base_vel"][3:6]
            # quaternion: w, x, y, z
            self.low_state.imu_state.quaternion[:] = obs["floating_base_pose"][3:7]
            # angular velocity
            self.low_state.imu_state.gyroscope[:] = obs["floating_base_vel"][3:6]
            # linear acceleration
            self.low_state.imu_state.accelerometer[:] = obs["floating_base_acc"][:3]

            self.torso_imu_state.quaternion[:] = obs["secondary_imu_quat"]
            self.torso_imu_state.gyroscope[:] = obs["secondary_imu_vel"][3:6]

        # acceleration: x, y, z (only available when frame sensor is enabled)
        if self.have_frame_sensor_:
            raise NotImplementedError("Frame sensor data is not implemented yet.")
        self.low_state.tick = int(obs["time"] * 1e3)
        self.low_state_puber.Write(self.low_state)

        # Publish odo state only if available
        if self.odo_state is not None and self.odo_state_puber is not None:
            self.odo_state.tick = int(obs["time"] * 1e3)
            self.odo_state_puber.Write(self.odo_state)

        self.torso_imu_puber.Write(self.torso_imu_state)

        # publish hand state
        for i in range(self.num_hand_motor):
            self.left_hand_state.motor_state[i].q = obs["left_hand_q"][i]
            self.left_hand_state.motor_state[i].dq = obs["left_hand_dq"][i]
        self.left_hand_state_puber.Write(self.left_hand_state)

        for i in range(self.num_hand_motor):
            self.right_hand_state.motor_state[i].q = obs["right_hand_q"][i]
            self.right_hand_state.motor_state[i].dq = obs["right_hand_dq"][i]
        self.right_hand_state_puber.Write(self.right_hand_state)

    def GetAction(self) -> Tuple[np.ndarray, bool, bool]:
        """Return (joint targets, any-command-received, all-streams-fresh).

        The concatenation interleaves hand joints with the last 7 body joints;
        presumably the final 7 body motors are one arm — TODO confirm ordering.
        """
        with self.low_cmd_lock:
            body_q = [self.low_cmd.motor_cmd[i].q for i in range(self.num_body_motor)]
        with self.left_hand_cmd_lock:
            left_hand_q = [self.left_hand_cmd.motor_cmd[i].q for i in range(self.num_hand_motor)]
        with self.right_hand_cmd_lock:
            right_hand_q = [self.right_hand_cmd.motor_cmd[i].q for i in range(self.num_hand_motor)]
        # BUG FIX: the original used `with a and b and c:`, which evaluates the
        # `and` chain and acquires ONLY the last lock. Acquire all three.
        with self.low_cmd_lock, self.left_hand_cmd_lock, self.right_hand_cmd_lock:
            is_new_action = self.new_low_cmd and self.new_left_hand_cmd and self.new_right_hand_cmd
            if is_new_action:
                self.new_low_cmd = False
                self.new_left_hand_cmd = False
                self.new_right_hand_cmd = False

        return (
            np.concatenate([body_q[:-7], left_hand_q, body_q[-7:], right_hand_q]),
            self.cmd_received(),
            is_new_action,
        )

    def PublishWirelessController(self):
        """Map the pygame joystick state onto the wireless controller message and publish it."""
        if self.joystick is not None:
            pygame.event.get()
            key_state = [0] * 16
            key_state[self.key_map["R1"]] = self.joystick.get_button(self.button_id["RB"])
            key_state[self.key_map["L1"]] = self.joystick.get_button(self.button_id["LB"])
            key_state[self.key_map["start"]] = self.joystick.get_button(self.button_id["START"])
            key_state[self.key_map["select"]] = self.joystick.get_button(self.button_id["SELECT"])
            key_state[self.key_map["R2"]] = self.joystick.get_axis(self.axis_id["RT"]) > 0
            key_state[self.key_map["L2"]] = self.joystick.get_axis(self.axis_id["LT"]) > 0
            key_state[self.key_map["F1"]] = 0
            key_state[self.key_map["F2"]] = 0
            key_state[self.key_map["A"]] = self.joystick.get_button(self.button_id["A"])
            key_state[self.key_map["B"]] = self.joystick.get_button(self.button_id["B"])
            key_state[self.key_map["X"]] = self.joystick.get_button(self.button_id["X"])
            key_state[self.key_map["Y"]] = self.joystick.get_button(self.button_id["Y"])
            key_state[self.key_map["up"]] = self.joystick.get_hat(0)[1] > 0
            key_state[self.key_map["right"]] = self.joystick.get_hat(0)[0] > 0
            key_state[self.key_map["down"]] = self.joystick.get_hat(0)[1] < 0
            key_state[self.key_map["left"]] = self.joystick.get_hat(0)[0] < 0

            # Pack the 16 button bits into a single integer bitmask.
            key_value = 0
            for i in range(16):
                key_value += key_state[i] << i

            self.wireless_controller.keys = key_value
            self.wireless_controller.lx = self.joystick.get_axis(self.axis_id["LX"])
            self.wireless_controller.ly = -self.joystick.get_axis(self.axis_id["LY"])
            self.wireless_controller.rx = self.joystick.get_axis(self.axis_id["RX"])
            self.wireless_controller.ry = -self.joystick.get_axis(self.axis_id["RY"])

            self.wireless_controller_puber.Write(self.wireless_controller)

    def SetupJoystick(self, device_id=0, js_type="xbox"):
        """Open a pygame joystick and select axis/button id maps for the OS/pad type."""
        pygame.init()
        pygame.joystick.init()
        joystick_count = pygame.joystick.get_count()
        if joystick_count > 0:
            self.joystick = pygame.joystick.Joystick(device_id)
            self.joystick.init()
        else:
            print("No gamepad detected.")
            sys.exit()

        if js_type == "xbox":
            if sys.platform.startswith("linux"):
                self.axis_id = {
                    "LX": 0,  # Left stick axis x
                    "LY": 1,  # Left stick axis y
                    "RX": 3,  # Right stick axis x
                    "RY": 4,  # Right stick axis y
                    "LT": 2,  # Left trigger
                    "RT": 5,  # Right trigger
                    "DX": 6,  # Directional pad x
                    "DY": 7,  # Directional pad y
                }
                self.button_id = {
                    "X": 2,
                    "Y": 3,
                    "B": 1,
                    "A": 0,
                    "LB": 4,
                    "RB": 5,
                    "SELECT": 6,
                    "START": 7,
                    "XBOX": 8,
                    "LSB": 9,
                    "RSB": 10,
                }
            elif sys.platform == "darwin":
                self.axis_id = {
                    "LX": 0,  # Left stick axis x
                    "LY": 1,  # Left stick axis y
                    "RX": 2,  # Right stick axis x
                    "RY": 3,  # Right stick axis y
                    "LT": 4,  # Left trigger
                    "RT": 5,  # Right trigger
                }
                self.button_id = {
                    "X": 2,
                    "Y": 3,
                    "B": 1,
                    "A": 0,
                    "LB": 9,
                    "RB": 10,
                    "SELECT": 4,
                    "START": 6,
                    "XBOX": 5,
                    "LSB": 7,
                    "RSB": 8,
                    "DYU": 11,
                    "DYD": 12,
                    "DXL": 13,
                    "DXR": 14,
                }
            else:
                print("Unsupported OS. ")

        elif js_type == "switch":
            # Yuanhang: may differ for different OS, need to be checked
            self.axis_id = {
                "LX": 0,  # Left stick axis x
                "LY": 1,  # Left stick axis y
                "RX": 2,  # Right stick axis x
                "RY": 3,  # Right stick axis y
                "LT": 5,  # Left trigger
                "RT": 4,  # Right trigger
                "DX": 6,  # Directional pad x
                "DY": 7,  # Directional pad y
            }

            self.button_id = {
                "X": 3,
                "Y": 4,
                "B": 1,
                "A": 0,
                "LB": 6,
                "RB": 7,
                "SELECT": 10,
                "START": 11,
            }
        else:
            print("Unsupported gamepad. ")

    def PrintSceneInformation(self):
        """Log named links, joints, actuators and sensors of the MuJoCo model.

        NOTE(review): this reads self.mj_model, which __init__ never assigns
        (the bridge deliberately does not hold the model) — calling this
        without externally setting mj_model raises AttributeError; confirm.
        """
        print(" ")
        logger.info(colored("<<------------- Link ------------->>", "green"))
        for i in range(self.mj_model.nbody):
            name = mujoco.mj_id2name(self.mj_model, mujoco._enums.mjtObj.mjOBJ_BODY, i)
            if name:
                logger.info(f"link_index: {i}, name: {name}")
        print(" ")

        logger.info(colored("<<------------- Joint ------------->>", "green"))
        for i in range(self.mj_model.njnt):
            name = mujoco.mj_id2name(self.mj_model, mujoco._enums.mjtObj.mjOBJ_JOINT, i)
            if name:
                logger.info(f"joint_index: {i}, name: {name}")
        print(" ")

        logger.info(colored("<<------------- Actuator ------------->>", "green"))
        for i in range(self.mj_model.nu):
            name = mujoco.mj_id2name(self.mj_model, mujoco._enums.mjtObj.mjOBJ_ACTUATOR, i)
            if name:
                logger.info(f"actuator_index: {i}, name: {name}")
        print(" ")

        logger.info(colored("<<------------- Sensor ------------->>", "green"))
        index = 0
        for i in range(self.mj_model.nsensor):
            name = mujoco.mj_id2name(self.mj_model, mujoco._enums.mjtObj.mjOBJ_SENSOR, i)
            if name:
                logger.info(
                    f"sensor_index: {index}, name: {name}, dim: {self.mj_model.sensor_dim[i]}"
                )
            index = index + self.mj_model.sensor_dim[i]
        print(" ")
+
+
class ElasticBand:
    """
    Virtual spring-damper that tethers a floating base to a world anchor.
    ref: https://github.com/unitreerobotics/unitree_mujoco
    """

    def __init__(self):
        # Translational PD gains
        self.kp_pos = 10000
        self.kd_pos = 1000
        # Rotational PD gains
        self.kp_ang = 1000
        self.kd_ang = 10
        self.point = np.array([0, 0, 1])  # world-frame anchor of the band
        self.length = 0  # extra rest length added along +z
        self.enable = True

    def Advance(self, pose):
        """
        Args:
            pose: 13D array containing:
                - pose[0:3]: position in world frame
                - pose[3:7]: quaternion [w,x,y,z] in world frame
                - pose[7:10]: linear velocity in world frame
                - pose[10:13]: angular velocity in world frame
        Returns:
            np.ndarray: 6D vector [fx, fy, fz, tx, ty, tz]
        """
        position = pose[0:3]
        quat_wxyz = pose[3:7]
        linear_velocity = pose[7:10]
        angular_velocity = pose[10:13]

        # Translational spring toward the anchor (offset by rest length) plus damping.
        displacement = self.point - position
        force = (
            self.kp_pos * (displacement + np.array([0, 0, self.length]))
            + self.kd_pos * (0 - linear_velocity)
        )

        # Rotational PD: quaternion -> axis-angle error (scipy wants [x,y,z,w]).
        quat_xyzw = np.array([quat_wxyz[1], quat_wxyz[2], quat_wxyz[3], quat_wxyz[0]])
        rotation_error = scipy.spatial.transform.Rotation.from_quat(quat_xyzw).as_rotvec()
        torque = -self.kp_ang * rotation_error - self.kd_ang * angular_velocity

        return np.concatenate([force, torque])

    def MujuocoKeyCallback(self, key):
        """Viewer key handler: 7/8 shorten/lengthen the band, 9 toggles it."""
        if key == glfw.KEY_9:
            self.enable = not self.enable
        elif key == glfw.KEY_7:
            self.length -= 0.1
        elif key == glfw.KEY_8:
            self.length += 0.1

    def handle_keyboard_button(self, key):
        """Plain-keyboard handler: "9" toggles the band."""
        if key == "9":
            self.enable = not self.enable
diff --git a/view_cameras_live.py b/view_cameras_live.py
new file mode 100644
index 0000000000000000000000000000000000000000..bacb637ab2543281052266081c5d6c55db08b6db
--- /dev/null
+++ b/view_cameras_live.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python3
+"""
+Live camera viewer for MuJoCo simulator using matplotlib
+Works without X11/GTK - suitable for SSH sessions with X forwarding
+"""
+import argparse
+import sys
+import time
+from pathlib import Path
+
+# Add sim module to path
+sys.path.insert(0, str(Path(__file__).parent))
+
+import cv2
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib.animation import FuncAnimation
+from sim.sensor_utils import SensorClient, ImageUtils
+
+
class CameraViewer:
    """Displays live camera streams from the simulator in a matplotlib figure."""

    def __init__(self, host, port):
        # Connect to the simulator's sensor stream.
        self.client = SensorClient()
        self.client.start_client(server_ip=host, port=port)

        self.fig = None
        self.axes = {}       # camera name -> matplotlib Axes
        self.images = {}     # camera name -> AxesImage updated each frame
        self.text_objs = {}  # camera name -> FPS text overlay

        # FPS bookkeeping: frames counted since `last_time`.
        self.frame_count = 0
        self.last_time = time.time()
        self.fps = 0

    @staticmethod
    def _camera_names(data):
        """Return the camera names present in a received message.

        Supports both the nested layout (data["images"][name]) and the
        flat layout (data[name]); bookkeeping keys are excluded.
        """
        if "images" in data and isinstance(data["images"], dict):
            return list(data["images"].keys())
        return [k for k in data.keys() if k not in ["timestamps", "images"]]

    @staticmethod
    def _decode_frame(data, cam_name):
        """Extract and decode one camera image from a message.

        Returns a BGR ndarray, or None when the camera is missing from the
        message or the payload cannot be decoded.
        """
        if "images" in data and cam_name in data["images"]:
            img_data = data["images"][cam_name]
        elif cam_name in data:
            img_data = data[cam_name]
        else:
            return None

        if isinstance(img_data, str):
            img = ImageUtils.decode_image(img_data)  # encoded payload
        elif isinstance(img_data, np.ndarray):
            img = img_data
        else:
            return None  # unknown payload type

        if img is None or not isinstance(img, np.ndarray):
            return None
        return img

    def init_plot(self):
        """Create the figure and per-camera axes from the first frame.

        Returns:
            bool: True when at least one camera was detected.
        """
        # Wait for first frame to know how many cameras we have.
        print("Waiting for first frame to detect cameras...")
        data = self.client.receive_message()

        camera_names = self._camera_names(data)
        num_cameras = len(camera_names)
        if num_cameras == 0:
            print("No cameras found in stream!")
            return False

        print(f"Found {num_cameras} camera(s): {', '.join(camera_names)}")

        # One axes per camera: single plot, a 1x2 row, or an Nx2 grid.
        if num_cameras == 1:
            self.fig, ax = plt.subplots(1, 1, figsize=(10, 8))
            axes_list = [ax]
        elif num_cameras == 2:
            self.fig, axes_list = plt.subplots(1, 2, figsize=(16, 6))
        else:
            rows = (num_cameras + 1) // 2
            self.fig, axes_list = plt.subplots(rows, 2, figsize=(16, 6 * rows))
            axes_list = axes_list.flatten()

        for i, cam_name in enumerate(camera_names):
            ax = axes_list[i]
            ax.set_title(f"{cam_name}", fontsize=12, fontweight='bold')
            ax.axis('off')

            img = self._decode_frame(data, cam_name)
            if img is None:
                # Fix: previously an unmatched camera fell back to using the
                # camera *name* string as image data, which could never
                # decode; skip the camera instead.
                print(f"Warning: Invalid image data for {cam_name}")
                continue

            # matplotlib expects RGB; the stream delivers OpenCV BGR.
            img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)

            im = ax.imshow(img_rgb)
            self.images[cam_name] = im
            self.axes[cam_name] = ax

            # FPS overlay in the top-left corner of each camera view.
            text = ax.text(0.02, 0.98, 'FPS: 0.0',
                           transform=ax.transAxes,
                           fontsize=10,
                           verticalalignment='top',
                           bbox=dict(boxstyle='round', facecolor='black', alpha=0.7),
                           color='lime',
                           fontweight='bold')
            self.text_objs[cam_name] = text

        # Hide any grid cells beyond the last camera.
        if num_cameras < len(axes_list):
            for i in range(num_cameras, len(axes_list)):
                axes_list[i].axis('off')

        self.fig.tight_layout()
        return True

    def update_frame(self, frame_num):
        """FuncAnimation callback: pull one message and refresh all images."""
        try:
            data = self.client.receive_message()

            # Refresh the FPS estimate once per second.
            self.frame_count += 1
            current_time = time.time()
            if current_time - self.last_time >= 1.0:
                self.fps = self.frame_count / (current_time - self.last_time)
                self.frame_count = 0
                self.last_time = current_time

            for cam_name in self.images.keys():
                img = self._decode_frame(data, cam_name)
                if img is None:
                    continue  # camera missing from this message or bad payload

                img_rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
                self.images[cam_name].set_data(img_rgb)
                self.text_objs[cam_name].set_text(f'FPS: {self.fps:.1f}')

        except Exception as e:
            # Keep the animation alive across transient network/decode errors.
            print(f"Error updating frame: {e}")

        # Return the artists so blitting can redraw only what changed.
        return list(self.images.values()) + list(self.text_objs.values())

    def start(self, interval=33):
        """Run the viewer until the window is closed or Ctrl+C is pressed.

        Args:
            interval: delay between animation updates in milliseconds.
        """
        if not self.init_plot():
            return

        print(f"\n{'='*60}")
        print("📹 Live camera viewer started!")
        print("Close the window or press Ctrl+C to exit")
        print(f"{'='*60}\n")

        # Keep a reference to the animation; plt.show() blocks while it runs.
        anim = FuncAnimation(
            self.fig,
            self.update_frame,
            interval=interval,  # ms between frames
            blit=True,
            cache_frame_data=False
        )

        try:
            plt.show()
        except KeyboardInterrupt:
            print("\nStopping viewer...")
        finally:
            self.client.stop_client()
            plt.close('all')
+
+
def main():
    """Parse CLI arguments and launch the live camera viewer."""
    parser = argparse.ArgumentParser(description="Live camera viewer for MuJoCo simulator")
    parser.add_argument("--host", type=str, default="localhost",
                        help="Simulator host address (default: localhost)")
    parser.add_argument("--port", type=int, default=5555,
                        help="ZMQ port (default: 5555)")
    parser.add_argument("--interval", type=int, default=33,
                        help="Update interval in ms (default: 33 = ~30fps)")
    args = parser.parse_args()

    # Fix: a zero or negative interval crashed the fps banner below with a
    # ZeroDivisionError and would stall the animation; reject it up front.
    if args.interval <= 0:
        parser.error("--interval must be a positive number of milliseconds")

    print("="*60)
    print("📷 MuJoCo Live Camera Viewer (matplotlib)")
    print("="*60)
    print(f"🌐 Connecting to: tcp://{args.host}:{args.port}")
    print(f"⏱️  Update interval: {args.interval}ms (~{1000/args.interval:.0f} fps)")
    print("="*60)

    viewer = CameraViewer(host=args.host, port=args.port)
    viewer.start(interval=args.interval)
+
+
# Run the viewer only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
+