Tevior committed on
Commit
571a68d
·
verified ·
1 Parent(s): cdd65f2

Upload scripts/full_skeleton_audit.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. scripts/full_skeleton_audit.py +330 -0
scripts/full_skeleton_audit.py ADDED
@@ -0,0 +1,330 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Full skeleton audit: naming + structural features for all 79 skeletons.
3
+
4
+ Checks:
5
+ 1. Canonical name quality (empty, duplicate, semantic sense)
6
+ 2. Side tag correctness (L/R balance, missed sides)
7
+ 3. Symmetry pair validity (matched pairs have similar bone lengths/depths)
8
+ 4. Topology structure (tree depth, branching factor, connected)
9
+ 5. Geodesic distance matrix sanity
10
+ 6. Rest pose geometry (bone lengths, height, spread)
11
+ 7. Cross-dataset consistency for shared canonical names
12
+ """
13
+
14
+ import sys
15
+ import os
16
+ from pathlib import Path
17
+ import numpy as np
18
+ from collections import Counter, defaultdict
19
+
20
+ project_root = Path(__file__).parent.parent
21
+ sys.path.insert(0, str(project_root))
22
+
23
+ from src.data.skeleton_graph import SkeletonGraph
24
+
25
# Output directory for the audit report.
# NOTE(review): main() opens a file directly under this path — assumes the
# directory already exists; confirm, or create it before writing.
RESULT_DIR = project_root / 'logs' / 'data_fix_20260318'

# Severity levels — module-level collectors filled by the log_* helpers
# below and flushed into the report/console by main().
ERRORS = []  # Must fix
WARNINGS = []  # Should review
INFO = []  # FYI
31
+
32
+
33
def log_error(dataset, msg):
    """Record a must-fix finding for *dataset* in the global ERRORS list."""
    entry = f'[ERROR] {dataset}: {msg}'
    ERRORS.append(entry)
35
+
36
def log_warn(dataset, msg):
    """Record a should-review finding for *dataset* in the global WARNINGS list."""
    entry = f'[WARN] {dataset}: {msg}'
    WARNINGS.append(entry)
38
+
39
def log_info(dataset, msg):
    """Record an informational finding for *dataset* in the global INFO list."""
    entry = f'[INFO] {dataset}: {msg}'
    INFO.append(entry)
41
+
42
+
43
def audit_skeleton(dataset_id: str, skel_data: dict, motion_sample: "dict | None" = None) -> dict:
    """Full audit of one skeleton.

    Runs seven groups of checks (canonical names, side tags, symmetry pairs,
    topology, geodesic distances, rest-pose geometry, and an optional motion
    spot check) and records findings via the module-level log_* helpers.

    Args:
        dataset_id: Label used as the prefix in logged findings.
        skel_data: Loaded skeleton.npz contents; must contain 'joint_names',
            may contain 'canonical_names'.
        motion_sample: Optional loaded motion .npz dict with 'num_frames',
            'joint_positions', 'local_positions', 'velocities'. Skipped if None.

    Returns:
        Summary dict with per-skeleton counts (joints, side tags, symmetry
        pairs, depth/degree/leaf stats, canonical-name stats) and an 'issues'
        list (currently only flags 'no_canonical').
    """
    sg = SkeletonGraph.from_dict(skel_data)
    J = sg.num_joints
    # Coerce to str: npz string arrays may come back as numpy str_/bytes.
    canon = [str(n) for n in skel_data.get('canonical_names', [])]
    raw = [str(n) for n in skel_data['joint_names']]

    results = {'dataset': dataset_id, 'num_joints': J, 'issues': []}

    # ===== 1. Canonical Name Quality =====
    if not canon:
        log_error(dataset_id, 'No canonical_names field in skeleton.npz')
        results['issues'].append('no_canonical')
    else:
        # Empty (whitespace-only) names.
        empty = [i for i, c in enumerate(canon) if not c.strip()]
        if empty:
            log_error(dataset_id, f'{len(empty)} empty canonical names at indices {empty[:5]}')

        # Duplicate canonical names within the same skeleton (first 3 reported).
        dupes = {c: [i for i, x in enumerate(canon) if x == c] for c in set(canon)}
        dupes = {c: idxs for c, idxs in dupes.items() if len(idxs) > 1}
        if dupes:
            for c, idxs in list(dupes.items())[:3]:
                raw_names = [raw[i] for i in idxs]
                log_warn(dataset_id, f'Duplicate canonical "{c}" for raw: {raw_names}')

        # Names that are just numbers or single chars (<=2 chars, non-alphabetic).
        bad_names = [c for c in canon if len(c) <= 2 and not c.isalpha()]
        if bad_names:
            log_warn(dataset_id, f'Very short canonical names: {bad_names[:5]}')

        # Rig-prefix residue that canonicalization should have stripped
        # (bip01, bn_, jt_, mixamorig, npc_ still present anywhere in the name).
        prefix_residue = [c for c in canon if any(p in c.lower() for p in ['bip01', 'bn_', 'jt_', 'mixamorig', 'npc_'])]
        if prefix_residue:
            log_warn(dataset_id, f'Prefix residue in canonical: {prefix_residue[:5]}')

    # ===== 2. Side Tag Correctness =====
    n_left = sum(1 for t in sg.side_tags if t == 'left')
    n_right = sum(1 for t in sg.side_tags if t == 'right')
    n_center = sum(1 for t in sg.side_tags if t == 'center')

    if n_left != n_right and J > 5:  # small skeletons may be asymmetric
        log_warn(dataset_id, f'L/R imbalance: L={n_left} R={n_right} C={n_center}')

    # Check if canonical names agree with side tags.
    # NOTE(review): substring match — a canonical name merely containing
    # "left"/"right" (e.g. "upright") would also trigger; verify vocabulary.
    if canon:
        for i, (c, t) in enumerate(zip(canon, sg.side_tags)):
            if 'left' in c and t != 'left':
                log_warn(dataset_id, f'Joint {i} "{raw[i]}" canonical="{c}" but side_tag="{t}"')
            elif 'right' in c and t != 'right':
                log_warn(dataset_id, f'Joint {i} "{raw[i]}" canonical="{c}" but side_tag="{t}"')

    # ===== 3. Symmetry Pair Validation =====
    for i, j in sg.symmetry_pairs:
        # Bone lengths should be ~equal for mirrored joints; only compare
        # when both exceed 1cm to avoid noisy ratios on tiny bones.
        bl_i = sg.bone_lengths[i]
        bl_j = sg.bone_lengths[j]
        if bl_i > 0.01 and bl_j > 0.01:
            ratio = max(bl_i, bl_j) / min(bl_i, bl_j)
            if ratio > 1.5:
                log_warn(dataset_id, f'Sym pair ({raw[i]}, {raw[j]}) bone length mismatch: {bl_i:.3f} vs {bl_j:.3f}')

        # Mirrored joints should sit at the same tree depth.
        if sg.depths[i] != sg.depths[j]:
            log_warn(dataset_id, f'Sym pair ({raw[i]}, {raw[j]}) depth mismatch: {sg.depths[i]} vs {sg.depths[j]}')

    # ===== 4. Topology Structure =====
    max_depth = sg.depths.max()
    max_degree = sg.degrees.max()
    # NOTE(review): counts joints with degree == 0 as leaves — assumes
    # sg.degrees is child count, not graph degree; confirm in SkeletonGraph.
    leaf_count = sum(1 for d in sg.degrees if d == 0)
    root_count = sum(1 for p in sg.parent_indices if p == -1)

    if root_count != 1:
        log_error(dataset_id, f'Expected 1 root, found {root_count}')

    if max_depth > 20:
        log_warn(dataset_id, f'Very deep tree: max_depth={max_depth}')

    # Check connectivity (all joints reachable from root) via BFS over the
    # parent array. O(J^2) scan per dequeue — fine for skeleton-sized J.
    reachable = set()
    queue = [i for i, p in enumerate(sg.parent_indices) if p == -1]
    while queue:
        curr = queue.pop(0)
        reachable.add(curr)
        for j in range(J):
            if sg.parent_indices[j] == curr and j not in reachable:
                queue.append(j)
    if len(reachable) != J:
        log_error(dataset_id, f'Disconnected: only {len(reachable)}/{J} joints reachable from root')

    # ===== 5. Geodesic Distance Sanity =====
    geo = sg.geodesic_dist
    if geo.shape != (J, J):
        log_error(dataset_id, f'Geodesic distance shape mismatch: {geo.shape} vs ({J},{J})')
    else:
        # Must be symmetric (undirected tree distance).
        asym = np.abs(geo - geo.T).max()
        if asym > 0.01:
            log_error(dataset_id, f'Geodesic distance not symmetric: max asymmetry={asym}')

        # Diagonal should be 0 (distance to self).
        diag_max = np.diag(geo).max()
        if diag_max > 0.01:
            log_error(dataset_id, f'Geodesic distance diagonal non-zero: max={diag_max}')

        # In a tree of J nodes the hop distance can be at most J-1.
        max_geo = geo.max()
        if max_geo > J:
            log_warn(dataset_id, f'Geodesic max={max_geo} exceeds num_joints={J}')

        # Values >= J+1 presumably encode "unreachable" — TODO confirm the
        # sentinel convention used by SkeletonGraph.
        unreachable = (geo >= J + 1).sum()
        if unreachable > 0:
            log_error(dataset_id, f'{unreachable} unreachable joint pairs in geodesic matrix')

    # ===== 6. Rest Pose Geometry =====
    offsets = sg.rest_offsets  # NOTE(review): unused below — kept as-is
    bone_lengths = sg.bone_lengths

    # Zero-length bones (excluding the root, which has no parent bone).
    zero_bones = sum(1 for i, bl in enumerate(bone_lengths) if bl < 1e-6 and sg.parent_indices[i] >= 0)
    if zero_bones > 0:
        log_info(dataset_id, f'{zero_bones} zero-length bones')

    # Very long bones (> 10x the median non-zero bone length).
    nonzero_bl = bone_lengths[bone_lengths > 1e-6]
    if len(nonzero_bl) > 0:
        median_bl = np.median(nonzero_bl)
        long_bones = [(raw[i], bl) for i, bl in enumerate(bone_lengths)
                      if bl > 10 * median_bl and sg.parent_indices[i] >= 0]
        if long_bones:
            log_warn(dataset_id, f'Unusually long bones (>10x median={median_bl:.4f}): {long_bones[:3]}')

    # ===== 7. Motion Data Spot Check =====
    if motion_sample is not None:
        # Arrays may be padded past num_frames; slice to the valid range.
        T = int(motion_sample['num_frames'])
        jp = motion_sample['joint_positions'][:T]
        lp = motion_sample['local_positions'][:T]  # NOTE(review): unused below
        vel = motion_sample['velocities'][:T]

        # Joint count must match the skeleton.
        if jp.shape[1] != J:
            log_error(dataset_id, f'Motion joints={jp.shape[1]} != skeleton joints={J}')

        # NaN/Inf scan over every motion array.
        for key in ['joint_positions', 'local_positions', 'velocities']:
            arr = motion_sample[key][:T]
            if np.any(np.isnan(arr)):
                log_error(dataset_id, f'NaN in motion {key}')
            if np.any(np.isinf(arr)):
                log_error(dataset_id, f'Inf in motion {key}')

        # Height sanity on the first frame — assumes Y-up and meter units;
        # TODO confirm against the dataset preprocessing conventions.
        height = jp[0, :, 1].max() - jp[0, :, 1].min()
        if height < 0.05:
            log_warn(dataset_id, f'Very small body height: {height:.4f}m')
        elif height > 10:
            log_warn(dataset_id, f'Very large body height: {height:.2f}m (scale issue?)')

        # Velocity sanity (max joint speed across all frames).
        vel_max = np.linalg.norm(vel, axis=-1).max()
        if vel_max > 50:
            log_warn(dataset_id, f'Very high velocity: max={vel_max:.1f} m/s')

    # Collect summary counters for the report table.
    results['n_left'] = n_left
    results['n_right'] = n_right
    results['n_center'] = n_center
    results['n_sym'] = len(sg.symmetry_pairs)
    results['max_depth'] = int(max_depth)
    results['max_degree'] = int(max_degree)
    results['leaf_count'] = leaf_count
    results['n_canonical'] = len(canon)
    # 'dupes' only exists when canon is non-empty; short-circuit keeps this safe.
    results['n_dupe_canonical'] = sum(len(v) - 1 for v in dupes.values()) if canon and dupes else 0

    return results
220
+
221
+
222
def main():
    """Audit every processed skeleton and write a markdown report.

    Walks the six human datasets plus every Truebones Zoo species, runs
    audit_skeleton() on each (with one sample motion file when available),
    checks cross-dataset canonical-name depth consistency, then writes a
    summary report to RESULT_DIR and echoes errors/warnings to the console.
    """
    all_results = []

    # ----- Audit human datasets -----
    human_datasets = ['humanml3d', 'lafan1', '100style', 'bandai_namco', 'cmu_mocap', 'mixamo']
    # Cache each dataset's skeleton.npz so the cross-dataset pass below
    # doesn't re-read the same files from disk.
    human_skeletons = {}
    for ds in human_datasets:
        ds_path = project_root / 'data' / 'processed' / ds
        skel_data = dict(np.load(ds_path / 'skeleton.npz', allow_pickle=True))
        human_skeletons[ds] = skel_data

        # Load one motion sample (first file; sorted() keeps it deterministic).
        motions_dir = ds_path / 'motions'
        files = sorted(os.listdir(motions_dir))
        motion_sample = dict(np.load(motions_dir / files[0], allow_pickle=True)) if files else None

        all_results.append(audit_skeleton(ds, skel_data, motion_sample))

    # ----- Audit Zoo species -----
    zoo_path = project_root / 'data' / 'processed' / 'truebones_zoo'
    skel_dir = zoo_path / 'skeletons'
    motions_dir = zoo_path / 'motions'

    # Build species -> first motion sample mapping. Only the first 200 motion
    # files are scanned to bound I/O; assumes every species appears early in
    # sorted order — TODO confirm this covers all species.
    species_motions = {}
    for fname in sorted(os.listdir(motions_dir))[:200]:
        d = dict(np.load(motions_dir / fname, allow_pickle=True))
        sp = str(d.get('species', ''))
        if sp and sp not in species_motions:
            species_motions[sp] = d

    # Hoisted: this listing is also used for the report header count below
    # (the original globbed the directory a second time there).
    skel_files = sorted(skel_dir.glob('*.npz'))
    for skel_file in skel_files:
        species = skel_file.stem
        skel_data = dict(np.load(skel_file, allow_pickle=True))
        motion_sample = species_motions.get(species)
        all_results.append(audit_skeleton(f'zoo/{species}', skel_data, motion_sample))

    # ----- Cross-dataset canonical consistency -----
    # The same canonical name should sit at a similar tree depth in every
    # human dataset; flag names whose depth spread exceeds 2.
    canonical_depths = defaultdict(list)
    for ds in human_datasets:
        skel_data = human_skeletons[ds]  # reuse cached load
        sg = SkeletonGraph.from_dict(skel_data)
        canon = [str(n) for n in skel_data.get('canonical_names', [])]
        for c, d in zip(canon, sg.depths):
            canonical_depths[c].append((ds, int(d)))

    for c, entries in canonical_depths.items():
        depths = [d for _, d in entries]
        if len(set(depths)) > 1 and max(depths) - min(depths) > 2:
            log_warn('cross-dataset', f'Canonical "{c}" has depth variance: {entries}')

    # ----- Write Report -----
    # FIX: create the output directory first — previously open() crashed
    # whenever logs/data_fix_20260318 did not already exist.
    RESULT_DIR.mkdir(parents=True, exist_ok=True)
    report_path = RESULT_DIR / 'skeleton_audit_report.md'
    with open(report_path, 'w') as f:
        f.write('# Skeleton Audit Report\n\n')
        f.write('**Date**: 2026-03-18\n')
        f.write(f'**Datasets**: {len(human_datasets)} human + {len(skel_files)} zoo species\n\n')

        # Summary table, one row per audited skeleton.
        f.write('## Summary\n\n')
        f.write('| Dataset | J | L | R | C | Sym | Depth | Leaves | Canon | Dupes |\n')
        f.write('|---------|:-:|:-:|:-:|:-:|:---:|:-----:|:------:|:-----:|:-----:|\n')
        for r in all_results:
            f.write(f'| {r["dataset"]:20s} | {r["num_joints"]:3d} | {r["n_left"]:2d} | {r["n_right"]:2d} | '
                    f'{r["n_center"]:2d} | {r["n_sym"]:2d} | {r["max_depth"]:2d} | {r["leaf_count"]:2d} | '
                    f'{r["n_canonical"]:3d} | {r["n_dupe_canonical"]:2d} |\n')

        # One section per severity bucket (deduplicates the three identical
        # blocks from the original).
        for title, bucket in (('Errors', ERRORS), ('Warnings', WARNINGS), ('Info', INFO)):
            f.write(f'\n## {title} ({len(bucket)})\n\n')
            if bucket:
                for line in bucket:
                    f.write(f'- {line}\n')
            else:
                f.write('None.\n')

    print(f'Audit complete: {len(ERRORS)} errors, {len(WARNINGS)} warnings, {len(INFO)} info')
    print(f'Report: {report_path}')

    # Echo to console too (warnings truncated to the first 30).
    if ERRORS:
        print(f'\n=== ERRORS ({len(ERRORS)}) ===')
        for e in ERRORS:
            print(f'  {e}')
    if WARNINGS:
        print(f'\n=== WARNINGS ({len(WARNINGS)}) ===')
        for w in WARNINGS[:30]:
            print(f'  {w}')
        if len(WARNINGS) > 30:
            print(f'  ... and {len(WARNINGS) - 30} more')
327
+
328
+
329
if __name__ == '__main__':
    # Entry point: run the full audit when executed as a script.
    main()