# TopoSlots-MotionData — scripts/preprocess_bvh.py
"""
Generic BVH → TopoSlots unified format converter.
Works for any BVH source: LAFAN1, Mixamo, Truebones Zoo, etc.
Produces the same Scheme C output as preprocess_humanml3d.py:
- skeleton.npz: skeleton graph
- motions/{id}.npz: per-motion features
- splits/all.txt: all motion IDs
- stats.npz: normalization stats
Usage:
python scripts/preprocess_bvh.py \
--bvh_dir data/raw/LAFAN1/bvh \
--output_dir data/processed/lafan1 \
--dataset_id lafan1 \
--target_fps 20 \
--remove_end_sites
"""
import sys
import argparse
from pathlib import Path
import numpy as np
from tqdm import tqdm
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))
from src.data.bvh_parser import parse_bvh, resample_motion, remove_end_sites
from src.data.skeleton_graph import SkeletonGraph
from src.data.humanml3d_converter import _detect_foot_contact
def euler_to_6d_rotation(euler_angles: np.ndarray, order: str = 'ZYX') -> np.ndarray:
    """
    Convert Euler angles (degrees) to the continuous 6D rotation representation.

    The 6D encoding is the first two columns of the rotation matrix, which is
    continuous and well-suited for network regression. Uses scipy so the BVH
    intrinsic Euler convention is handled correctly.

    Args:
        euler_angles: [..., 3] Euler angles in degrees
        order: rotation order string (e.g., 'ZYX') — intrinsic convention

    Returns:
        [..., 6] continuous 6D rotation, float32
    """
    from scipy.spatial.transform import Rotation

    lead_shape = euler_angles.shape[:-1]
    angles_flat = np.reshape(euler_angles, (-1, 3))
    # Uppercase order string → intrinsic rotations in scipy, matching BVH.
    matrices = Rotation.from_euler(order.upper(), angles_flat, degrees=True).as_matrix()
    # Stack the first two matrix columns side by side → [N, 6].
    six_d = np.concatenate((matrices[..., 0], matrices[..., 1]), axis=-1)
    return six_d.astype(np.float32).reshape(lead_shape + (6,))
def forward_kinematics(
    rotations: np.ndarray,
    root_positions: np.ndarray,
    offsets: np.ndarray,
    parents: list[int],
    rotation_order: str = 'ZYX',
    local_translations: np.ndarray = None,
) -> np.ndarray:
    """
    Compute global joint positions from local rotations + skeleton offsets via FK.

    Uses scipy.spatial.transform.Rotation for correct BVH intrinsic Euler convention.
    Verified against Blender's BVH FK (< 0.01mm error).

    Args:
        rotations: [T, J, 3] Euler angles in degrees (columns match rotation_order)
        root_positions: [T, 3]
        offsets: [J, 3] rest-pose offsets from parent (used when local_translations is None)
        parents: [J] parent indices
        rotation_order: Euler rotation order (e.g., 'ZYX') — intrinsic convention
        local_translations: [T, J, 3] optional per-frame local translations
            (for BVH files where all joints have position channels)

    Returns:
        [T, J, 3] global joint positions, float32
    """
    from scipy.spatial.transform import Rotation

    n_frames, n_joints = rotations.shape[0], rotations.shape[1]
    world_pos = np.zeros((n_frames, n_joints, 3), dtype=np.float64)
    world_rot = np.zeros((n_frames, n_joints, 3, 3), dtype=np.float64)
    order = rotation_order.upper()  # uppercase → intrinsic convention in scipy

    # Joints are assumed topologically ordered (parent before child), as
    # produced by a depth-first BVH parse.
    for joint in range(n_joints):
        local = Rotation.from_euler(order, rotations[:, joint], degrees=True).as_matrix()
        parent = parents[joint]
        if parent < 0:
            # Root: world rotation is its local rotation; position comes
            # straight from the root translation channel.
            world_rot[:, joint] = local
            world_pos[:, joint] = root_positions
            continue
        parent_rot = world_rot[:, parent]
        world_rot[:, joint] = parent_rot @ local  # batched [T,3,3] matmul
        # Per-frame translation channels win over the static rest offset.
        if local_translations is None:
            bone = np.broadcast_to(offsets[joint], (n_frames, 3))
        else:
            bone = local_translations[:, joint, :]
        # Rotate the parent-local bone vector into world space, then translate.
        rotated = np.squeeze(parent_rot @ bone[..., None], axis=-1)
        world_pos[:, joint] = world_pos[:, parent] + rotated

    return world_pos.astype(np.float32)
def process_bvh_file(
    bvh_path: Path,
    target_fps: float,
    max_frames: int,
    min_frames: int,
    do_remove_end_sites: bool,
    manual_scale: float | None = None,
) -> dict | None:
    """Process a single BVH file into Scheme C format.

    Pipeline: parse → optionally strip end sites → strip a static dummy root →
    resample to ``target_fps`` → clamp clip length → forward kinematics →
    unit-scale to meters → center on the first frame (XZ) → derive features
    (local positions, velocities, accelerations, bone lengths, foot contact,
    6D rotations).

    Args:
        bvh_path: path to the .bvh file
        target_fps: output frame rate; resampled when the source differs by > 0.5 fps
        max_frames: clips longer than this are truncated
        min_frames: clips shorter than this (after resampling) are dropped
        do_remove_end_sites: drop BVH End Site joints before feature extraction
        manual_scale: explicit unit scale (e.g. 0.01 for cm→m); auto-detected
            from first-frame body height when None

    Returns:
        dict with 'skeleton' (SkeletonGraph) and 'data' (feature arrays),
        or None when parsing fails or the clip is too short.
    """
    try:
        bvh = parse_bvh(bvh_path)
    except Exception as e:
        # Best-effort batch processing: report and skip unparseable files.
        print(f" Failed to parse {bvh_path.name}: {e}")
        return None
    joint_names = bvh.skeleton.joint_names
    parent_indices = bvh.skeleton.parent_indices
    offsets = bvh.skeleton.rest_offsets
    rotations = bvh.rotations
    root_pos = bvh.root_positions
    local_trans = bvh.local_translations  # [T, J, 3] or None
    # Remove end sites if requested
    if do_remove_end_sites:
        joint_names, parent_indices, offsets, rotations = remove_end_sites(
            joint_names, parent_indices, offsets, rotations
        )
        # Also filter local_translations if present.
        # NOTE(review): assumes remove_end_sites drops exactly the joints whose
        # names end with '_end' — confirm against src/data/bvh_parser.
        if local_trans is not None:
            keep_mask = [not name.endswith('_end') for name in bvh.skeleton.joint_names]
            keep_indices = [i for i, k in enumerate(keep_mask) if k]
            local_trans = local_trans[:, keep_indices, :]
    # Remove dummy root: a static root joint whose only child is the real root (e.g. Hips).
    if len(joint_names) > 1 and parent_indices[0] == -1:
        children_of_root = [j for j in range(len(joint_names)) if parent_indices[j] == 0]
        if len(children_of_root) == 1:
            # Per-channel rotation range over the whole clip, in degrees.
            root_rot_range = rotations[:, 0].max(axis=0) - rotations[:, 0].min(axis=0)
            root_is_static = np.all(root_rot_range < 1.0)  # <1 degree range = static
            if root_is_static:
                old_root_name = joint_names[0]
                child_idx = children_of_root[0]
                # Use per-frame position of child as new root_pos if available
                if local_trans is not None:
                    root_pos = local_trans[:, child_idx, :].copy()
                    local_trans = local_trans[:, 1:, :]
                else:
                    # NOTE(review): the child offset is added unrotated — this is
                    # only correct if the static dummy root's rotation is ~identity
                    # (a <1° range does not guarantee a zero value). Confirm.
                    root_pos = root_pos + offsets[child_idx]
                # Remove joint 0
                joint_names = joint_names[1:]
                offsets = offsets[1:]
                rotations = rotations[:, 1:]
                # Remap parent indices: old root's children become new roots (-1),
                # everything else shifts down by one.
                new_parents = []
                for p in parent_indices[1:]:
                    if p <= 0:
                        new_parents.append(-1)
                    else:
                        new_parents.append(p - 1)
                parent_indices = new_parents
                print(f" Removed dummy root '{old_root_name}' → new root '{joint_names[0]}'")
    J = len(joint_names)
    # Resample to target FPS (skipped when already within 0.5 fps).
    if abs(bvh.fps - target_fps) > 0.5:
        if local_trans is not None:
            rotations, root_pos, local_trans = resample_motion(
                rotations, root_pos, bvh.fps, target_fps, local_trans
            )
        else:
            rotations, root_pos = resample_motion(
                rotations, root_pos, bvh.fps, target_fps
            )
    T = rotations.shape[0]
    if T < min_frames:
        # Too short after resampling — drop the clip entirely.
        return None
    if T > max_frames:
        rotations = rotations[:max_frames]
        root_pos = root_pos[:max_frames]
        if local_trans is not None:
            local_trans = local_trans[:max_frames]
        T = max_frames
    # Build skeleton graph (rebuilt below once offsets are scaled to meters).
    skeleton = SkeletonGraph(
        joint_names=list(joint_names),
        parent_indices=list(parent_indices),
        rest_offsets=np.array(offsets, dtype=np.float32),
    )
    # Forward kinematics → global joint positions
    joint_positions = forward_kinematics(
        rotations, root_pos, offsets, parent_indices, bvh.rotation_order,
        local_translations=local_trans,
    )
    # Scale normalization to meters
    if manual_scale is not None:
        scale = manual_scale
    else:
        # Auto-detect: BVH files commonly use centimeters.
        # First-frame vertical extent (Y axis) is used as the body height proxy.
        height = joint_positions[0, :, 1].max() - joint_positions[0, :, 1].min()
        if height > 5.0:  # almost certainly NOT meters (>5m body height)
            scale = 0.01  # assume centimeters → meters
        else:
            scale = 1.0
    if abs(scale - 1.0) > 0.001:
        joint_positions = joint_positions * scale
        root_pos = root_pos * scale
        offsets = offsets * scale
    # Center root at origin on first frame (XZ plane); Y (height) is preserved.
    root_offset_xz = joint_positions[0, 0, [0, 2]].copy()
    joint_positions[:, :, 0] -= root_offset_xz[0]
    joint_positions[:, :, 2] -= root_offset_xz[1]
    root_pos[:, 0] -= root_offset_xz[0]
    root_pos[:, 2] -= root_offset_xz[1]
    # Rebuild skeleton with scaled offsets
    skeleton = SkeletonGraph(
        joint_names=list(joint_names),
        parent_indices=list(parent_indices),
        rest_offsets=np.array(offsets, dtype=np.float32),
    )
    # Local positions (root-relative)
    local_pos = joint_positions - joint_positions[:, 0:1, :]
    # Velocities: forward finite differences scaled to units/second;
    # frame 0 copies frame 1 so the array keeps length T.
    vel = np.zeros_like(joint_positions)
    vel[1:] = (joint_positions[1:] - joint_positions[:-1]) * target_fps
    vel[0] = vel[1]
    root_vel = vel[:, 0, :]
    # Accelerations: same finite-difference scheme applied to velocities.
    acc = np.zeros_like(vel)
    acc[1:] = (vel[1:] - vel[:-1]) * target_fps
    acc[0] = acc[1]
    # Bone lengths: per-frame parent→child distance (root row stays zero).
    bone_lengths = np.zeros((T, J), dtype=np.float32)
    for j in range(J):
        p = parent_indices[j]
        if p >= 0:
            bone_lengths[:, j] = np.linalg.norm(
                joint_positions[:, j] - joint_positions[:, p], axis=-1
            )
    # Foot contact (reuses the HumanML3D heuristic on positions + velocities)
    foot_contact = _detect_foot_contact(joint_positions, vel, skeleton)
    # 6D rotations (decoder GT) — root rotation is excluded, hence J-1 joints.
    rot_6d = euler_to_6d_rotation(rotations[:, 1:], bvh.rotation_order)  # [T, J-1, 6]
    return {
        'skeleton': skeleton,
        'data': {
            'local_positions': local_pos.astype(np.float32),
            'velocities': vel.astype(np.float32),
            'root_position': root_pos.astype(np.float32),
            'root_velocity': root_vel.astype(np.float32),
            'joint_positions': joint_positions.astype(np.float32),
            'local_rotations_6d': rot_6d.astype(np.float32),
            'accelerations': acc.astype(np.float32),
            'bone_lengths': bone_lengths.astype(np.float32),
            'foot_contact': foot_contact.astype(np.float32),
            'num_frames': T,
            'fps': target_fps,
        },
    }
def preprocess_bvh_directory(
    bvh_dir: str,
    output_dir: str,
    dataset_id: str,
    target_fps: float = 20.0,
    max_frames: int = 196,
    min_frames: int = 24,
    do_remove_end_sites: bool = True,
    scale: float | None = None,
):
    """Convert every .bvh under ``bvh_dir`` into Scheme C format under ``output_dir``.

    Writes:
        motions/{id}.npz                  per-motion feature arrays
        skeleton.npz                      skeleton graph of the first processed motion
        stats.npz                         normalization stats (subsampled, every 3rd motion)
        splits/{train,val,test,all}.txt   80/10/10 split (seed 42)

    Args:
        bvh_dir: directory searched recursively for .bvh files
        output_dir: destination root (created if missing)
        dataset_id: stored as 'skeleton_id' in every motion file
        target_fps: resampling target frame rate
        max_frames: truncation length in frames
        min_frames: minimum length; shorter clips are dropped
        do_remove_end_sites: strip BVH End Site joints
        scale: manual unit scale (e.g. 0.01 for cm→m); auto-detected per file
            when None (or taken from the CLI ``args`` global, see below)
    """
    # Bug fix: this function previously referenced the module-level `args`
    # created under __main__, raising NameError when imported and called
    # directly. The explicit `scale` parameter replaces that; the global is
    # kept only as a safe, backward-compatible fallback for script usage.
    if scale is None:
        scale = getattr(globals().get('args'), 'scale', None)
    bvh_dir = Path(bvh_dir)
    output_dir = Path(output_dir)
    (output_dir / 'motions').mkdir(parents=True, exist_ok=True)
    (output_dir / 'splits').mkdir(parents=True, exist_ok=True)
    bvh_files = sorted(bvh_dir.rglob('*.bvh'))
    print(f"Found {len(bvh_files)} BVH files in {bvh_dir}")
    if not bvh_files:
        print("No BVH files found, exiting.")
        return
    # Process all files
    motion_ids = []
    all_local_pos = []
    all_velocities = []
    first_skeleton = None
    for i, bvh_path in enumerate(tqdm(bvh_files)):
        result = process_bvh_file(
            bvh_path, target_fps, max_frames, min_frames, do_remove_end_sites,
            manual_scale=scale,
        )
        if result is None:
            continue
        motion_id = f"{i:06d}"
        skeleton = result['skeleton']
        data = result['data']
        # The first successfully processed skeleton is saved as the dataset's
        # reference skeleton; all files are assumed to share one rig.
        if first_skeleton is None:
            first_skeleton = skeleton
        data['skeleton_id'] = dataset_id
        data['texts'] = ''  # No text for BVH data
        data['source_file'] = bvh_path.name
        np.savez_compressed(output_dir / 'motions' / f'{motion_id}.npz', **data)
        motion_ids.append(motion_id)
        # Collect stats from every 3rd motion. Sampling on `% 3 == 1` (not
        # `== 0`) guarantees the FIRST motion is always collected, so the
        # np.concatenate below cannot crash when fewer than 3 motions exist.
        if len(motion_ids) % 3 == 1:
            all_local_pos.append(data['local_positions'])
            all_velocities.append(data['velocities'])
    print(f"\nProcessed: {len(motion_ids)} motions")
    if not motion_ids:
        print("No motions processed, exiting.")
        return
    # Save skeleton
    np.savez(output_dir / 'skeleton.npz', **first_skeleton.to_dict())
    print(f"Skeleton: {first_skeleton.num_joints} joints")
    # Save stats (mean/std over frames of the subsampled motions)
    all_local_pos = np.concatenate(all_local_pos, axis=0)
    all_velocities = np.concatenate(all_velocities, axis=0)
    stats = {
        'local_pos_mean': all_local_pos.mean(axis=0),
        'local_pos_std': all_local_pos.std(axis=0) + 1e-8,
        'velocity_mean': all_velocities.mean(axis=0),
        'velocity_std': all_velocities.std(axis=0) + 1e-8,
        'root_vel_mean': np.zeros(3, dtype=np.float32),  # placeholder
        'root_vel_std': np.ones(3, dtype=np.float32),
    }
    np.savez(output_dir / 'stats.npz', **stats)
    # Save splits (80/10/10), deterministic via fixed seed
    np.random.seed(42)
    indices = np.random.permutation(len(motion_ids))
    n_train = int(0.8 * len(indices))
    n_val = int(0.1 * len(indices))
    splits = {
        'train': [motion_ids[i] for i in indices[:n_train]],
        'val': [motion_ids[i] for i in indices[n_train:n_train + n_val]],
        'test': [motion_ids[i] for i in indices[n_train + n_val:]],
        'all': motion_ids,
    }
    for split_name, ids in splits.items():
        with open(output_dir / 'splits' / f'{split_name}.txt', 'w') as f:
            for mid in ids:
                f.write(f'{mid}\n')
        print(f" {split_name}: {len(ids)} motions")
    print(f"\nDone! Output saved to {output_dir}")
if __name__ == '__main__':
    # CLI entry point: parse flags and run the directory preprocessor.
    # NOTE: the parsed namespace must stay bound to the global name `args`
    # because preprocess_bvh_directory reads `args.scale` at module scope.
    cli = argparse.ArgumentParser()
    cli.add_argument('--bvh_dir', type=str, required=True)
    cli.add_argument('--output_dir', type=str, required=True)
    cli.add_argument('--dataset_id', type=str, required=True)
    cli.add_argument('--target_fps', type=float, default=20.0)
    cli.add_argument('--max_frames', type=int, default=196)
    cli.add_argument('--min_frames', type=int, default=24)
    cli.add_argument('--remove_end_sites', action='store_true')
    cli.add_argument('--scale', type=float, default=None,
                     help='Manual scale factor (e.g., 0.01 for cm→m). Auto-detect if not set.')
    args = cli.parse_args()
    preprocess_bvh_directory(
        bvh_dir=args.bvh_dir,
        output_dir=args.output_dir,
        dataset_id=args.dataset_id,
        target_fps=args.target_fps,
        max_frames=args.max_frames,
        min_frames=args.min_frames,
        do_remove_end_sites=args.remove_end_sites,
    )