File size: 8,180 Bytes
c3b2920
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
"""
Preprocess HumanML3D data into TopoSlots unified format.

Input: HumanML3D raw data (new_joints/*.npy, texts/*.txt)
Output: Processed data in data/processed/humanml3d/
  - skeleton.npz: SMPL-22 skeleton graph
  - motions/{motion_id}.npz: per-motion features
  - splits/{train,val,test}.txt: data splits
  - stats.npz: dataset statistics (mean, std)
"""

import sys
import argparse
from pathlib import Path
import numpy as np
from tqdm import tqdm

# Add project root to path
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))

from src.data.skeleton_graph import SkeletonGraph
from src.data.humanml3d_converter import (
    get_smpl22_skeleton,
    load_humanml3d_motion,
    compute_motion_features,
    extract_rotations_from_263d,
    load_humanml3d_split,
    SMPL_22_JOINT_NAMES,
)


def _average_rest_pose(raw_dir: Path, motion_ids, n_samples: int = 100):
    """Return the mean first-frame pose over up to *n_samples* motions.

    Best-effort sampling: motions that fail to load are skipped. Returns
    ``None`` when nothing could be loaded, letting the skeleton builder
    fall back to its built-in default rest pose.
    """
    rest_poses = []
    for mid in motion_ids[:n_samples]:
        try:
            motion = load_humanml3d_motion(mid, raw_dir)
            rest_poses.append(motion['joint_positions'][0])  # first frame
        except Exception:
            # A few unreadable files must not abort skeleton construction.
            continue
    return np.mean(rest_poses, axis=0) if rest_poses else None


def preprocess_humanml3d(
    raw_dir: str,
    output_dir: str,
    target_fps: float = 20.0,
    max_frames: int = 196,  # ~10s at 20fps
    min_frames: int = 24,   # ~1.2s at 20fps
):
    """Preprocess raw HumanML3D data into the TopoSlots unified format.

    Writes under *output_dir*:
      - ``skeleton.npz``        SMPL-22 skeleton graph
      - ``motions/{id}.npz``    per-motion features (Scheme C layout)
      - ``splits/{split}.txt``  ids that were actually processed
      - ``stats.npz``           mean/std statistics for normalization

    Args:
        raw_dir: Path to raw HumanML3D data (new_joints/*.npy, texts/*.txt,
            split files).
        output_dir: Destination directory for the processed dataset.
        target_fps: Frame rate stored in per-motion metadata and used when
            computing velocities.
        max_frames: Motions longer than this are truncated.
        min_frames: Motions shorter than this are skipped entirely.
    """
    raw_dir = Path(raw_dir)
    output_dir = Path(output_dir)

    # Create output directories
    (output_dir / 'motions').mkdir(parents=True, exist_ok=True)
    (output_dir / 'splits').mkdir(parents=True, exist_ok=True)

    # 1. Skeleton graph, with the rest pose averaged over (up to) the first
    #    100 train motions.
    print("Building SMPL-22 skeleton graph...")
    train_ids = load_humanml3d_split(raw_dir, 'train')
    avg_rest_pose = _average_rest_pose(raw_dir, train_ids)
    skeleton = get_smpl22_skeleton(avg_rest_pose)

    # Save skeleton
    np.savez(
        output_dir / 'skeleton.npz',
        **skeleton.to_dict(),
    )
    print(f"  Skeleton: {skeleton.num_joints} joints")

    # 2. Gather split files; a missing split file is tolerated (left empty)
    #    so partial dataset downloads still preprocess.
    all_splits = {}
    for split in ['train', 'val', 'test']:
        try:
            all_splits[split] = load_humanml3d_split(raw_dir, split)
        except FileNotFoundError:
            print(f"  Warning: {split}.txt not found, skipping")
            all_splits[split] = []

    all_ids = set()
    for ids in all_splits.values():
        all_ids.update(ids)

    print(f"\nProcessing {len(all_ids)} motions...")

    # Running statistics, subsampled (1 in 5 motions) to bound memory.
    all_local_pos = []
    all_velocities = []
    all_root_vel = []
    processed_count = 0
    skipped_count = 0

    for motion_id in tqdm(sorted(all_ids)):
        try:
            # Load raw motion
            motion = load_humanml3d_motion(motion_id, raw_dir)
            joint_positions = motion['joint_positions']
            joint_vecs = motion['joint_vecs']
            T = joint_positions.shape[0]

            # Filter by length
            if T < min_frames:
                skipped_count += 1
                continue
            if T > max_frames:
                joint_positions = joint_positions[:max_frames]
                # BUG FIX: truncate the 263D vectors in lockstep. Previously
                # only the positions were cut, so the frame-count check below
                # failed for every long motion and the rotation GT was
                # silently dropped for all truncated clips.
                if joint_vecs is not None:
                    joint_vecs = joint_vecs[:max_frames]

            # Compute position-based features (Scheme C: slot token input)
            features = compute_motion_features(
                joint_positions, skeleton, fps=target_fps
            )

            # Extract rotation-based features from 263D (Scheme C: decoder
            # GT) — only usable when the frame counts line up.
            rot_features = None
            if joint_vecs is not None and joint_vecs.shape[0] == joint_positions.shape[0]:
                rot_features = extract_rotations_from_263d(joint_vecs)

            # Build save dict
            # --- Scheme C layout ---
            # Slot token input: local_positions [T,J,3] + velocities [T,J,3] = 6D per joint
            # Decoder GT: local_rotations_6d [T,J-1,6] (for FK supervision)
            # Root track: root_position [T,3] + root_velocity [T,3]
            # Auxiliary: foot_contact [T,4], bone_lengths [T,J], accelerations [T,J,3]
            save_dict = {
                # Slot token features (cross-skeleton compatible)
                'local_positions': features['local_positions'].astype(np.float32),   # [T, 22, 3]
                'velocities': features['velocities'].astype(np.float32),             # [T, 22, 3]
                # Root trajectory (separate track)
                'root_position': features['root_position'].astype(np.float32),       # [T, 3]
                'root_velocity': features['root_velocity'].astype(np.float32),       # [T, 3]
                # Decoder GT (skeleton-specific, for FK supervision)
                'joint_positions': joint_positions.astype(np.float32),               # [T, 22, 3]
                'accelerations': features['accelerations'].astype(np.float32),       # [T, 22, 3]
                'bone_lengths': features['bone_lengths'].astype(np.float32),         # [T, 22]
                # Auxiliary
                'foot_contact': features['foot_contact'].astype(np.float32),         # [T, 4]
                # Metadata
                'num_frames': joint_positions.shape[0],
                'fps': target_fps,
                'skeleton_id': 'smpl_22',
            }

            # Add rotation data if available (from 263D vector)
            if rot_features is not None:
                save_dict['local_rotations_6d'] = rot_features['local_rotations_6d'].astype(np.float32)  # [T, 21, 6]
                save_dict['foot_contact'] = rot_features['foot_contact_4ch'].astype(np.float32)          # [T, 4] (override with GT)

            # Save texts
            texts = motion['texts']
            save_dict['texts'] = '|||'.join(texts) if texts else ''

            np.savez_compressed(
                output_dir / 'motions' / f'{motion_id}.npz',
                **save_dict,
            )

            # Collect stats (subsample for memory)
            if processed_count % 5 == 0:
                all_local_pos.append(features['local_positions'])
                all_velocities.append(features['velocities'])
                all_root_vel.append(features['root_velocity'])

            processed_count += 1

        except Exception as e:
            # Best-effort: keep going on a bad motion, count it as skipped.
            print(f"  Error processing {motion_id}: {e}")
            skipped_count += 1

    print(f"\nProcessed: {processed_count}, Skipped: {skipped_count}")

    # 3. Compute and save statistics.
    # BUG FIX: guard the empty case — np.concatenate([]) raises ValueError,
    # which would crash the script after the whole processing loop finished.
    if all_local_pos:
        print("Computing dataset statistics...")
        all_local_pos = np.concatenate(all_local_pos, axis=0)  # [N, J, 3]
        all_velocities = np.concatenate(all_velocities, axis=0)
        all_root_vel = np.concatenate(all_root_vel, axis=0)

        stats = {
            'local_pos_mean': all_local_pos.mean(axis=0),
            'local_pos_std': all_local_pos.std(axis=0) + 1e-8,  # eps avoids div-by-zero on constant dims
            'velocity_mean': all_velocities.mean(axis=0),
            'velocity_std': all_velocities.std(axis=0) + 1e-8,
            'root_vel_mean': all_root_vel.mean(axis=0),
            'root_vel_std': all_root_vel.std(axis=0) + 1e-8,
        }

        np.savez(output_dir / 'stats.npz', **stats)
    else:
        print("  Warning: no motions processed; stats.npz not written")

    # 4. Save splits, keeping only ids whose .npz actually got written.
    for split, ids in all_splits.items():
        valid_ids = [
            mid for mid in ids
            if (output_dir / 'motions' / f'{mid}.npz').exists()
        ]
        with open(output_dir / 'splits' / f'{split}.txt', 'w') as f:
            for mid in valid_ids:
                f.write(f'{mid}\n')
        print(f"  {split}: {len(valid_ids)} motions")

    print(f"\nDone! Output saved to {output_dir}")


if __name__ == '__main__':
    # CLI entry point; defaults match the repository's standard data layout.
    cli = argparse.ArgumentParser()
    cli.add_argument(
        '--raw_dir', type=str, default='data/raw/HumanML3D',
        help='Path to raw HumanML3D data',
    )
    cli.add_argument(
        '--output_dir', type=str, default='data/processed/humanml3d',
        help='Output directory',
    )
    cli.add_argument('--target_fps', type=float, default=20.0)
    cli.add_argument('--max_frames', type=int, default=196)
    cli.add_argument('--min_frames', type=int, default=24)
    opts = cli.parse_args()

    preprocess_humanml3d(
        opts.raw_dir,
        opts.output_dir,
        target_fps=opts.target_fps,
        max_frames=opts.max_frames,
        min_frames=opts.min_frames,
    )