| """ |
| Unified multi-skeleton motion dataset for TopoSlots (Scheme C). |
| |
| Motion representation: |
| Slot token input : per-joint [local_pos(3) + velocity(3)] = 6D (cross-skeleton) |
| Decoder GT : per-joint local_rotations_6d (skeleton-specific, FK supervision) |
| Root track : root_position(3) + root_velocity(3) (separate) |
| Auxiliary : foot_contact(4), bone_lengths, accelerations (losses) |
| """ |
|
|
| import numpy as np |
| import torch |
| from torch.utils.data import Dataset |
| from pathlib import Path |
| from typing import Optional |
|
|
|
|
| class UnifiedMotionDataset(Dataset): |
| """ |
| Multi-skeleton motion dataset with unified format. |
| |
| Each sample returns: |
| - motion_features: [T, J, D] padded to max_joints |
| - skeleton_features: [J, D_skel] padded to max_joints |
| - joint_mask: [J] boolean mask (True = valid joint) |
| - adjacency: [J, J] padded adjacency matrix |
| - geodesic_dist: [J, J] padded geodesic distances |
| - text: str (empty if unavailable) |
| - metadata: dict |
| """ |
|
|
| def __init__( |
| self, |
| data_dirs: list[str | Path], |
| split: str = 'train', |
| max_joints: int = 128, |
| max_frames: int = 196, |
| target_fps: float = 20.0, |
| motion_dim: int = 6, |
| ): |
| self.data_dirs = [Path(d) for d in data_dirs] |
| self.split = split |
| self.max_joints = max_joints |
| self.max_frames = max_frames |
| self.target_fps = target_fps |
| self.motion_dim = motion_dim |
|
|
| |
| self.samples = [] |
| self.skeletons = {} |
| self.stats = {} |
|
|
| for data_dir in self.data_dirs: |
| self._load_data_source(data_dir) |
|
|
| print(f"UnifiedMotionDataset [{split}]: {len(self.samples)} motions, " |
| f"{len(self.skeletons)} skeleton types") |
|
|
| def _load_data_source(self, data_dir: Path): |
| """Load one data source (e.g., processed/humanml3d).""" |
| if not data_dir.exists(): |
| print(f" Warning: {data_dir} not found, skipping") |
| return |
|
|
| |
| skel_path = data_dir / 'skeleton.npz' |
| if skel_path.exists(): |
| skel_data = dict(np.load(skel_path, allow_pickle=True)) |
| skeleton_id = data_dir.name |
| self.skeletons[skeleton_id] = skel_data |
|
|
| |
| stats_path = data_dir / 'stats.npz' |
| if stats_path.exists(): |
| self.stats[data_dir.name] = dict(np.load(stats_path)) |
|
|
| |
| split_path = data_dir / 'splits' / f'{self.split}.txt' |
| if not split_path.exists(): |
| |
| split_path = data_dir / 'splits' / 'all.txt' |
| if not split_path.exists(): |
| print(f" Warning: no split file for {data_dir.name}, skipping") |
| return |
|
|
| motion_ids = [] |
| with open(split_path, 'r') as f: |
| for line in f: |
| line = line.strip() |
| if line: |
| motion_ids.append(line) |
|
|
| for mid in motion_ids: |
| motion_path = data_dir / 'motions' / f'{mid}.npz' |
| if motion_path.exists(): |
| self.samples.append({ |
| 'motion_path': str(motion_path), |
| 'motion_id': mid, |
| 'data_source': data_dir.name, |
| 'skeleton_id': data_dir.name, |
| }) |
|
|
| def __len__(self) -> int: |
| return len(self.samples) |
|
|
| def __getitem__(self, idx: int) -> dict: |
| sample_info = self.samples[idx] |
|
|
| |
| data = dict(np.load(sample_info['motion_path'], allow_pickle=True)) |
|
|
| |
| skeleton_id = sample_info['skeleton_id'] |
| skel_data = self.skeletons.get(skeleton_id, {}) |
|
|
| |
| local_pos = data['local_positions'] |
| velocities = data['velocities'] |
| T, J, _ = local_pos.shape |
|
|
| |
| if skeleton_id in self.stats: |
| stats = self.stats[skeleton_id] |
| local_pos = (local_pos - stats['local_pos_mean']) / stats['local_pos_std'] |
| velocities = (velocities - stats['velocity_mean']) / stats['velocity_std'] |
|
|
| |
| motion_features = np.concatenate([local_pos, velocities], axis=-1) |
|
|
| |
| if T > self.max_frames: |
| |
| if self.split == 'train': |
| start = np.random.randint(0, T - self.max_frames) |
| else: |
| start = 0 |
| motion_features = motion_features[start:start + self.max_frames] |
| actual_frames = self.max_frames |
| else: |
| actual_frames = T |
| |
| pad = np.zeros( |
| (self.max_frames - T, J, self.motion_dim), |
| dtype=np.float32, |
| ) |
| motion_features = np.concatenate([motion_features, pad], axis=0) |
|
|
| |
| padded_motion = np.zeros( |
| (self.max_frames, self.max_joints, self.motion_dim), |
| dtype=np.float32, |
| ) |
| padded_motion[:, :J, :] = motion_features |
|
|
| |
| joint_mask = np.zeros(self.max_joints, dtype=np.bool_) |
| joint_mask[:J] = True |
|
|
| |
| frame_mask = np.zeros(self.max_frames, dtype=np.bool_) |
| frame_mask[:actual_frames] = True |
|
|
| |
| skeleton_features = np.zeros( |
| (self.max_joints, 9), dtype=np.float32 |
| ) |
| if 'joint_names' in skel_data: |
| from .skeleton_graph import SkeletonGraph |
| sg = SkeletonGraph.from_dict(skel_data) |
| skel_feats = sg.get_joint_features() |
| skeleton_features[:J] = skel_feats |
|
|
| |
| adjacency = np.zeros( |
| (self.max_joints, self.max_joints), dtype=np.float32 |
| ) |
| geodesic_dist = np.zeros( |
| (self.max_joints, self.max_joints), dtype=np.float32 |
| ) |
| if 'adjacency' in skel_data: |
| adj = skel_data['adjacency'] |
| adjacency[:J, :J] = adj |
| if 'geodesic_dist' in skel_data: |
| gdist = skel_data['geodesic_dist'] |
| geodesic_dist[:J, :J] = gdist |
|
|
| |
| text = '' |
| if 'texts' in data: |
| texts_str = str(data['texts']) |
| if texts_str: |
| text_list = texts_str.split('|||') |
| if text_list and text_list[0]: |
| |
| if self.split == 'train': |
| text = text_list[np.random.randint(len(text_list))] |
| else: |
| text = text_list[0] |
|
|
| |
| root_pos = data.get('root_position', np.zeros((T, 3), dtype=np.float32)) |
| root_vel = data.get('root_velocity', np.zeros((T, 3), dtype=np.float32)) |
| padded_root_pos = np.zeros((self.max_frames, 3), dtype=np.float32) |
| padded_root_vel = np.zeros((self.max_frames, 3), dtype=np.float32) |
| padded_root_pos[:actual_frames] = root_pos[:actual_frames] |
| padded_root_vel[:actual_frames] = root_vel[:actual_frames] |
|
|
| |
| fc_raw = data.get('foot_contact', np.zeros((T, 4), dtype=np.float32)) |
| if fc_raw.shape[-1] == 2: |
| |
| fc_4ch = np.zeros((fc_raw.shape[0], 4), dtype=np.float32) |
| fc_4ch[:, 0] = fc_4ch[:, 1] = fc_raw[:, 0] |
| fc_4ch[:, 2] = fc_4ch[:, 3] = fc_raw[:, 1] |
| fc_raw = fc_4ch |
| padded_contact = np.zeros((self.max_frames, 4), dtype=np.float32) |
| padded_contact[:actual_frames] = fc_raw[:actual_frames] |
|
|
| |
| rot_6d = data.get('local_rotations_6d', None) |
| if rot_6d is not None: |
| |
| Jr = rot_6d.shape[1] |
| padded_rot = np.zeros((self.max_frames, self.max_joints, 6), dtype=np.float32) |
| T_rot = min(rot_6d.shape[0], actual_frames) |
| padded_rot[:T_rot, :Jr, :] = rot_6d[:T_rot] |
| has_rotations = True |
| else: |
| padded_rot = np.zeros((self.max_frames, self.max_joints, 6), dtype=np.float32) |
| has_rotations = False |
|
|
| |
| bone_raw = data.get('bone_lengths', np.zeros((T, J), dtype=np.float32)) |
| padded_bones = np.zeros((self.max_frames, self.max_joints), dtype=np.float32) |
| padded_bones[:actual_frames, :J] = bone_raw[:actual_frames] |
|
|
| return { |
| |
| 'motion_features': torch.from_numpy(padded_motion), |
| |
| 'skeleton_features': torch.from_numpy(skeleton_features), |
| 'joint_mask': torch.from_numpy(joint_mask), |
| 'frame_mask': torch.from_numpy(frame_mask), |
| 'adjacency': torch.from_numpy(adjacency), |
| 'geodesic_dist': torch.from_numpy(geodesic_dist), |
| |
| 'root_position': torch.from_numpy(padded_root_pos), |
| 'root_velocity': torch.from_numpy(padded_root_vel), |
| |
| 'local_rotations_6d': torch.from_numpy(padded_rot), |
| 'has_rotations': has_rotations, |
| |
| 'foot_contact': torch.from_numpy(padded_contact), |
| 'bone_lengths': torch.from_numpy(padded_bones), |
| |
| 'text': text, |
| 'num_joints': J, |
| 'num_frames': actual_frames, |
| 'skeleton_id': skeleton_id, |
| 'motion_id': sample_info['motion_id'], |
| } |
|
|
|
|
def collate_fn(batch: list[dict]) -> dict:
    """Collate dataset samples into a batch dict.

    Tensors are stacked along a new batch dimension, numeric scalars become
    1-D tensors, and 'text' (along with any other non-numeric field, e.g.
    string metadata) is kept as a plain Python list.
    """
    first = batch[0]
    collated = {}
    for key in first:
        values = [sample[key] for sample in batch]
        probe = first[key]
        if key == 'text':
            collated[key] = values
        elif isinstance(probe, torch.Tensor):
            collated[key] = torch.stack(values)
        elif isinstance(probe, (int, float)):
            collated[key] = torch.tensor(values)
        else:
            collated[key] = values
    return collated
|
|