| """ |
| Preprocess HumanML3D data into TopoSlots unified format. |
| |
| Input: HumanML3D raw data (new_joints/*.npy, texts/*.txt) |
| Output: Processed data in data/processed/humanml3d/ |
| - skeleton.npz: SMPL-22 skeleton graph |
| - motions/{motion_id}.npz: per-motion features |
| - splits/{train,val,test}.txt: data splits |
| - stats.npz: dataset statistics (mean, std) |
| """ |
|
|
| import sys |
| import argparse |
| from pathlib import Path |
| import numpy as np |
| from tqdm import tqdm |
|
|
| |
| project_root = Path(__file__).parent.parent |
| sys.path.insert(0, str(project_root)) |
|
|
| from src.data.skeleton_graph import SkeletonGraph |
| from src.data.humanml3d_converter import ( |
| get_smpl22_skeleton, |
| load_humanml3d_motion, |
| compute_motion_features, |
| extract_rotations_from_263d, |
| load_humanml3d_split, |
| SMPL_22_JOINT_NAMES, |
| ) |
|
|
|
|
def preprocess_humanml3d(
    raw_dir: str,
    output_dir: str,
    target_fps: float = 20.0,
    max_frames: int = 196,
    min_frames: int = 24,
):
    """Convert raw HumanML3D data into the processed TopoSlots layout.

    Writes into ``output_dir``:
      - ``skeleton.npz``        SMPL-22 skeleton graph
      - ``motions/{id}.npz``    per-motion feature arrays + metadata
      - ``splits/{split}.txt``  ids that were successfully processed
      - ``stats.npz``           per-dimension mean/std over a subsample

    Args:
        raw_dir: Root of the raw HumanML3D dataset.
        output_dir: Destination root; created if missing.
        target_fps: Frame rate passed to feature computation and stored
            in each motion file.
        max_frames: Motions longer than this are truncated to this length.
        min_frames: Motions shorter than this are skipped entirely.

    Raises:
        RuntimeError: If no motion could be processed (statistics would
            be undefined).
    """
    raw_dir = Path(raw_dir)
    output_dir = Path(output_dir)

    (output_dir / 'motions').mkdir(parents=True, exist_ok=True)
    (output_dir / 'splits').mkdir(parents=True, exist_ok=True)

    # Skeleton: average the first-frame pose of up to 100 training motions
    # to get a representative rest pose for the graph.
    print("Building SMPL-22 skeleton graph...")
    train_ids = load_humanml3d_split(raw_dir, 'train')
    rest_poses = []
    for mid in train_ids[:100]:
        try:
            motion = load_humanml3d_motion(mid, raw_dir)
            rest_poses.append(motion['joint_positions'][0])
        except Exception:
            # Best-effort sampling: a few unreadable files must not block
            # skeleton construction.
            continue

    avg_rest_pose = np.mean(rest_poses, axis=0) if rest_poses else None
    skeleton = get_smpl22_skeleton(avg_rest_pose)

    np.savez(
        output_dir / 'skeleton.npz',
        **skeleton.to_dict(),
    )
    print(f" Skeleton: {skeleton.num_joints} joints")

    # Splits: tolerate a missing split file (e.g. dataset shipped without
    # a test split) instead of aborting.
    all_splits = {}
    for split in ['train', 'val', 'test']:
        try:
            all_splits[split] = load_humanml3d_split(raw_dir, split)
        except FileNotFoundError:
            print(f" Warning: {split}.txt not found, skipping")
            all_splits[split] = []

    all_ids = set()
    for ids in all_splits.values():
        all_ids.update(ids)

    print(f"\nProcessing {len(all_ids)} motions...")

    # Every 5th processed motion contributes to the mean/std estimate;
    # accumulating the full dataset in memory would be wasteful.
    all_local_pos = []
    all_velocities = []
    all_root_vel = []
    processed_count = 0
    skipped_count = 0

    for motion_id in tqdm(sorted(all_ids)):
        try:
            motion = load_humanml3d_motion(motion_id, raw_dir)
            joint_positions = motion['joint_positions']
            T = joint_positions.shape[0]

            # Length filtering: drop too-short clips, truncate too-long ones.
            if T < min_frames:
                skipped_count += 1
                continue
            if T > max_frames:
                joint_positions = joint_positions[:max_frames]

            features = compute_motion_features(
                joint_positions, skeleton, fps=target_fps
            )

            # Optional 263-d rotation features. BUGFIX: truncate joint_vecs
            # in lockstep with joint_positions — previously any motion longer
            # than max_frames failed the length check below and silently
            # lost its rotation features.
            joint_vecs = motion['joint_vecs']
            rot_features = None
            if joint_vecs is not None:
                joint_vecs = joint_vecs[:max_frames]
                if joint_vecs.shape[0] == joint_positions.shape[0]:
                    rot_features = extract_rotations_from_263d(joint_vecs)

            save_dict = {
                'local_positions': features['local_positions'].astype(np.float32),
                'velocities': features['velocities'].astype(np.float32),
                'root_position': features['root_position'].astype(np.float32),
                'root_velocity': features['root_velocity'].astype(np.float32),
                'joint_positions': joint_positions.astype(np.float32),
                'accelerations': features['accelerations'].astype(np.float32),
                'bone_lengths': features['bone_lengths'].astype(np.float32),
                'foot_contact': features['foot_contact'].astype(np.float32),
                'num_frames': joint_positions.shape[0],
                'fps': target_fps,
                'skeleton_id': 'smpl_22',
            }

            # Rotation-derived features override the position-derived
            # foot-contact channel when available.
            if rot_features is not None:
                save_dict['local_rotations_6d'] = rot_features['local_rotations_6d'].astype(np.float32)
                save_dict['foot_contact'] = rot_features['foot_contact_4ch'].astype(np.float32)

            # Captions are joined with '|||' so they survive the npz
            # round-trip as a single string.
            texts = motion['texts']
            save_dict['texts'] = '|||'.join(texts) if texts else ''

            np.savez_compressed(
                output_dir / 'motions' / f'{motion_id}.npz',
                **save_dict,
            )

            # Subsample every 5th motion for the dataset statistics.
            if processed_count % 5 == 0:
                all_local_pos.append(features['local_positions'])
                all_velocities.append(features['velocities'])
                all_root_vel.append(features['root_velocity'])

            processed_count += 1

        except Exception as e:
            # Batch job: log and keep going — one bad motion must not
            # abort the whole preprocessing run.
            print(f" Error processing {motion_id}: {e}")
            skipped_count += 1

    print(f"\nProcessed: {processed_count}, Skipped: {skipped_count}")

    print("Computing dataset statistics...")
    # BUGFIX: np.concatenate raises an opaque ValueError on an empty list;
    # fail with an actionable message instead.
    if not all_local_pos:
        raise RuntimeError(
            "No motions were processed successfully; cannot compute stats. "
            "Check that raw_dir points at a valid HumanML3D layout."
        )
    all_local_pos = np.concatenate(all_local_pos, axis=0)
    all_velocities = np.concatenate(all_velocities, axis=0)
    all_root_vel = np.concatenate(all_root_vel, axis=0)

    # Per-dimension mean/std; epsilon keeps later normalization divisions safe.
    stats = {
        'local_pos_mean': all_local_pos.mean(axis=0),
        'local_pos_std': all_local_pos.std(axis=0) + 1e-8,
        'velocity_mean': all_velocities.mean(axis=0),
        'velocity_std': all_velocities.std(axis=0) + 1e-8,
        'root_vel_mean': all_root_vel.mean(axis=0),
        'root_vel_std': all_root_vel.std(axis=0) + 1e-8,
    }

    np.savez(output_dir / 'stats.npz', **stats)

    # Split files list only the ids that actually produced a motion file,
    # so downstream loaders never hit a missing .npz.
    for split, ids in all_splits.items():
        valid_ids = [
            mid for mid in ids
            if (output_dir / 'motions' / f'{mid}.npz').exists()
        ]
        with open(output_dir / 'splits' / f'{split}.txt', 'w') as f:
            for mid in valid_ids:
                f.write(f'{mid}\n')
        print(f" {split}: {len(valid_ids)} motions")

    print(f"\nDone! Output saved to {output_dir}")
|
|
|
|
if __name__ == '__main__':
    # CLI entry point: expose preprocess_humanml3d's parameters as flags.
    cli = argparse.ArgumentParser()
    cli.add_argument(
        '--raw_dir', type=str, default='data/raw/HumanML3D',
        help='Path to raw HumanML3D data',
    )
    cli.add_argument(
        '--output_dir', type=str, default='data/processed/humanml3d',
        help='Output directory',
    )
    cli.add_argument('--target_fps', type=float, default=20.0)
    cli.add_argument('--max_frames', type=int, default=196)
    cli.add_argument('--min_frames', type=int, default=24)
    opts = cli.parse_args()

    preprocess_humanml3d(
        opts.raw_dir,
        opts.output_dir,
        target_fps=opts.target_fps,
        max_frames=opts.max_frames,
        min_frames=opts.min_frames,
    )
|
|