Tevior committed on
Commit
c3b2920
·
verified ·
1 Parent(s): 1399608

Upload scripts/preprocess_humanml3d.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. scripts/preprocess_humanml3d.py +227 -0
scripts/preprocess_humanml3d.py ADDED
@@ -0,0 +1,227 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Preprocess HumanML3D data into TopoSlots unified format.
3
+
4
+ Input: HumanML3D raw data (new_joints/*.npy, texts/*.txt)
5
+ Output: Processed data in data/processed/humanml3d/
6
+ - skeleton.npz: SMPL-22 skeleton graph
7
+ - motions/{motion_id}.npz: per-motion features
8
+ - splits/{train,val,test}.txt: data splits
9
+ - stats.npz: dataset statistics (mean, std)
10
+ """
11
+
12
+ import sys
13
+ import argparse
14
+ from pathlib import Path
15
+ import numpy as np
16
+ from tqdm import tqdm
17
+
18
+ # Add project root to path
19
+ project_root = Path(__file__).parent.parent
20
+ sys.path.insert(0, str(project_root))
21
+
22
+ from src.data.skeleton_graph import SkeletonGraph
23
+ from src.data.humanml3d_converter import (
24
+ get_smpl22_skeleton,
25
+ load_humanml3d_motion,
26
+ compute_motion_features,
27
+ extract_rotations_from_263d,
28
+ load_humanml3d_split,
29
+ SMPL_22_JOINT_NAMES,
30
+ )
31
+
32
+
33
def preprocess_humanml3d(
    raw_dir: str,
    output_dir: str,
    target_fps: float = 20.0,
    max_frames: int = 196,  # ~10s at 20fps
    min_frames: int = 24,  # ~1.2s at 20fps
):
    """Preprocess raw HumanML3D data into the TopoSlots unified format.

    Reads raw motions and captions via the project converter helpers and
    writes into ``output_dir``:

    - ``skeleton.npz``: SMPL-22 skeleton graph
    - ``motions/{motion_id}.npz``: per-motion feature dictionaries
    - ``splits/{split}.txt``: train/val/test id lists (processed motions only)
    - ``stats.npz``: dataset mean/std for normalization

    Args:
        raw_dir: Path to the raw HumanML3D directory.
        output_dir: Destination directory for processed data.
        target_fps: Frame rate recorded in each motion's metadata and passed
            to feature computation. (Assumes the raw data is already at this
            rate; no resampling is performed here — TODO confirm upstream.)
        max_frames: Clips longer than this are truncated.
        min_frames: Clips shorter than this are skipped.

    Raises:
        RuntimeError: If no motion could be processed, since dataset
            statistics would be undefined.
    """
    raw_dir = Path(raw_dir)
    output_dir = Path(output_dir)

    # Create output directories.
    (output_dir / 'motions').mkdir(parents=True, exist_ok=True)
    (output_dir / 'splits').mkdir(parents=True, exist_ok=True)

    # 1. Build and save the skeleton graph.
    print("Building SMPL-22 skeleton graph...")

    # Average the first frame of up to 100 training motions as a rest pose.
    train_ids = load_humanml3d_split(raw_dir, 'train')
    rest_poses = []
    for mid in train_ids[:100]:
        try:
            motion = load_humanml3d_motion(mid, raw_dir)
            rest_poses.append(motion['joint_positions'][0])  # first frame
        except Exception:
            # Best-effort sampling: a few unreadable motions are tolerable.
            continue

    avg_rest_pose = np.mean(rest_poses, axis=0) if rest_poses else None
    skeleton = get_smpl22_skeleton(avg_rest_pose)

    # Save skeleton graph as a flat npz.
    np.savez(
        output_dir / 'skeleton.npz',
        **skeleton.to_dict(),
    )
    print(f" Skeleton: {skeleton.num_joints} joints")

    # 2. Collect the union of all motion ids across available splits.
    all_splits = {}
    for split in ['train', 'val', 'test']:
        try:
            ids = load_humanml3d_split(raw_dir, split)
            all_splits[split] = ids
        except FileNotFoundError:
            print(f" Warning: {split}.txt not found, skipping")
            all_splits[split] = []

    all_ids = set()
    for ids in all_splits.values():
        all_ids.update(ids)

    print(f"\nProcessing {len(all_ids)} motions...")

    # Statistics accumulators (subsampled below to bound memory).
    all_local_pos = []
    all_velocities = []
    all_root_vel = []
    processed_count = 0
    skipped_count = 0

    for motion_id in tqdm(sorted(all_ids)):
        try:
            # Load raw motion.
            motion = load_humanml3d_motion(motion_id, raw_dir)
            joint_positions = motion['joint_positions']
            joint_vecs = motion['joint_vecs']
            T = joint_positions.shape[0]

            # Filter by length.
            if T < min_frames:
                skipped_count += 1
                continue
            if T > max_frames:
                joint_positions = joint_positions[:max_frames]
                # BUGFIX: truncate the 263-D vectors in lockstep. Previously
                # joint_vecs kept its full length, so the shape-equality
                # check below failed and rotation GT was silently dropped
                # for every clip longer than max_frames.
                if joint_vecs is not None:
                    joint_vecs = joint_vecs[:max_frames]

            # Compute position-based features (Scheme C: slot token input).
            features = compute_motion_features(
                joint_positions, skeleton, fps=target_fps
            )

            # Extract rotation-based features from 263D (Scheme C: decoder GT).
            rot_features = None
            if joint_vecs is not None and joint_vecs.shape[0] == joint_positions.shape[0]:
                rot_features = extract_rotations_from_263d(joint_vecs)

            # Build save dict.
            # --- Scheme C layout ---
            # Slot token input: local_positions [T,J,3] + velocities [T,J,3] = 6D per joint
            # Decoder GT: local_rotations_6d [T,J-1,6] (for FK supervision)
            # Root track: root_position [T,3] + root_velocity [T,3]
            # Auxiliary: foot_contact [T,4], bone_lengths [T,J], accelerations [T,J,3]
            save_dict = {
                # Slot token features (cross-skeleton compatible)
                'local_positions': features['local_positions'].astype(np.float32),  # [T, 22, 3]
                'velocities': features['velocities'].astype(np.float32),  # [T, 22, 3]
                # Root trajectory (separate track)
                'root_position': features['root_position'].astype(np.float32),  # [T, 3]
                'root_velocity': features['root_velocity'].astype(np.float32),  # [T, 3]
                # Decoder GT (skeleton-specific, for FK supervision)
                'joint_positions': joint_positions.astype(np.float32),  # [T, 22, 3]
                'accelerations': features['accelerations'].astype(np.float32),  # [T, 22, 3]
                'bone_lengths': features['bone_lengths'].astype(np.float32),  # [T, 22]
                # Auxiliary
                'foot_contact': features['foot_contact'].astype(np.float32),  # [T, 4]
                # Metadata
                'num_frames': joint_positions.shape[0],
                'fps': target_fps,
                'skeleton_id': 'smpl_22',
            }

            # Add rotation data if available (from 263D vector).
            if rot_features is not None:
                save_dict['local_rotations_6d'] = rot_features['local_rotations_6d'].astype(np.float32)  # [T, 21, 6]
                save_dict['foot_contact'] = rot_features['foot_contact_4ch'].astype(np.float32)  # [T, 4] (override with GT)

            # Store captions joined with a sentinel separator.
            texts = motion['texts']
            save_dict['texts'] = '|||'.join(texts) if texts else ''

            np.savez_compressed(
                output_dir / 'motions' / f'{motion_id}.npz',
                **save_dict,
            )

            # Collect stats on every 5th processed motion (memory bound).
            if processed_count % 5 == 0:
                all_local_pos.append(features['local_positions'])
                all_velocities.append(features['velocities'])
                all_root_vel.append(features['root_velocity'])

            processed_count += 1

        except Exception as e:
            # Best-effort pipeline: log and continue with the next motion.
            print(f" Error processing {motion_id}: {e}")
            skipped_count += 1

    print(f"\nProcessed: {processed_count}, Skipped: {skipped_count}")

    # 3. Compute and save statistics.
    print("Computing dataset statistics...")
    if not all_local_pos:
        # np.concatenate on an empty list raises an opaque ValueError;
        # fail with an actionable message instead.
        raise RuntimeError("No motions were processed; cannot compute statistics.")
    all_local_pos = np.concatenate(all_local_pos, axis=0)  # [N, J, 3]
    all_velocities = np.concatenate(all_velocities, axis=0)
    all_root_vel = np.concatenate(all_root_vel, axis=0)

    # Epsilon keeps later normalization safe against zero-variance channels.
    stats = {
        'local_pos_mean': all_local_pos.mean(axis=0),
        'local_pos_std': all_local_pos.std(axis=0) + 1e-8,
        'velocity_mean': all_velocities.mean(axis=0),
        'velocity_std': all_velocities.std(axis=0) + 1e-8,
        'root_vel_mean': all_root_vel.mean(axis=0),
        'root_vel_std': all_root_vel.std(axis=0) + 1e-8,
    }

    np.savez(output_dir / 'stats.npz', **stats)

    # 4. Save splits, keeping only ids that were actually written to disk.
    for split, ids in all_splits.items():
        valid_ids = [
            mid for mid in ids
            if (output_dir / 'motions' / f'{mid}.npz').exists()
        ]
        with open(output_dir / 'splits' / f'{split}.txt', 'w') as f:
            for mid in valid_ids:
                f.write(f'{mid}\n')
        print(f" {split}: {len(valid_ids)} motions")

    print(f"\nDone! Output saved to {output_dir}")
203
if __name__ == '__main__':
    # CLI entry point: defaults mirror the standard HumanML3D layout.
    cli = argparse.ArgumentParser()
    cli.add_argument(
        '--raw_dir',
        type=str,
        default='data/raw/HumanML3D',
        help='Path to raw HumanML3D data',
    )
    cli.add_argument(
        '--output_dir',
        type=str,
        default='data/processed/humanml3d',
        help='Output directory',
    )
    # Processing knobs forwarded straight to preprocess_humanml3d().
    cli.add_argument('--target_fps', type=float, default=20.0)
    cli.add_argument('--max_frames', type=int, default=196)
    cli.add_argument('--min_frames', type=int, default=24)

    opts = cli.parse_args()
    preprocess_humanml3d(
        opts.raw_dir,
        opts.output_dir,
        target_fps=opts.target_fps,
        max_frames=opts.max_frames,
        min_frames=opts.min_frames,
    )