"""Dataset analysis utility for the Unitree G1 AMASS trajectories.

Run:

    python analyze_dataset.py --root /home/ubuntu/MoCapDataset/AMASSDataset/UnitreeG1

The script outputs two files in the same folder:

├─ episode_stats.json   – per-trajectory statistics
└─ aggregate_stats.json – averages / extrema over the whole dataset

Useful for diagnosing AMP training issues (range mismatches, outliers, etc.).
"""

from __future__ import annotations

import argparse
import json
import os
from collections import defaultdict
from pathlib import Path
from typing import Any, Dict, List

import numpy as np
import torch

import isaaclab.utils.math as math_utils

_ALLOWED_EXT = {".pt", ".pth", ".pkl", ".npz"}

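# Assumed per-file layout (inferred from the slicing in analyse_dataset below;
# T = frames, J = joints; names and shapes are illustrative, not a spec):
#
#   {
#       "qpos": Tensor[T, 7 + J],   # base pos (3) + base quat (4; w-first is assumed, matching IsaacLab) + joint angles
#       "qvel": Tensor[T, 6 + J],   # base lin vel (3) + base ang vel (3) + joint velocities
#       "joint_names": [...],       # optional; length J, or J + 1 including the root
#   }
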
def _load_file(path: Path) -> Dict[str, torch.Tensor]:
    """Load a torch / numpy trajectory file into a dict of torch tensors."""
    if path.suffix in {".npz", ".pkl"}:
        # np.load also handles pickled files when allow_pickle=True.
        data = dict(np.load(path, allow_pickle=True))
        for k, v in data.items():
            # Convert numeric arrays to torch; keep object/string arrays as-is.
            if isinstance(v, np.ndarray) and v.dtype.kind in {"f", "c", "i", "u", "b"}:
                data[k] = torch.from_numpy(v)
            else:
                data[k] = v
        return data

    return torch.load(path, map_location="cpu")

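# Usage sketch (hypothetical file name):
#   data = _load_file(Path("walk_001.npz"))
#   qpos = data["qpos"]  # numeric arrays arrive as torch tensors
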
def _tensor_range(t: torch.Tensor):
    """Return (min, max) of a tensor as Python floats."""
    return t.min().item(), t.max().item()


def _analyze_field(value: Any) -> Dict[str, Any]:
    """Analyze a field value and return metadata about it."""
    if isinstance(value, torch.Tensor):
        return {
            "type": "tensor",
            "dtype": str(value.dtype),
            "shape": list(value.shape),
            "ndim": value.ndim,
            "size": value.numel(),
        }
    elif isinstance(value, np.ndarray):
        return {
            "type": "numpy_array",
            "dtype": str(value.dtype),
            "shape": list(value.shape),
            "ndim": value.ndim,
            "size": value.size,
        }
    elif isinstance(value, list):
        return {
            "type": "list",
            "length": len(value),
            "element_type": type(value[0]).__name__ if value else "unknown",
        }
    elif isinstance(value, (str, int, float, bool)):
        return {
            "type": type(value).__name__,
            "value": value if isinstance(value, (int, float, bool)) else f"<string of length {len(value)}>",
        }
    else:
        return {
            "type": type(value).__name__,
            "repr": str(value)[:100] + ("..." if len(str(value)) > 100 else ""),
        }


def _determine_angular_unit(max_vel: float) -> str:
    """Guess whether an angular velocity is in deg/s or rad/s from its magnitude.

    Magnitudes above 20 are implausible as rad/s for humanoid motion, so they
    are assumed to be deg/s.
    """
    return "deg/s" if max_vel > 20.0 else "rad/s"

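# Worked example (illustrative):
#   _determine_angular_unit(250.0) -> "deg/s"   (250 rad/s ≈ 40 rev/s is implausible)
#   _determine_angular_unit(8.0)   -> "rad/s"
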
def _analyze_angular_velocity(velocities: torch.Tensor, name: str = "") -> Dict[str, Any]:
    """Analyze angular velocity data to determine whether it is in deg/s or rad/s.

    Uses multiple heuristics:
    1. Physical limits - robots rarely exceed 1000 deg/s or ~17 rad/s.
    2. Distribution of values - deg/s values tend to be larger.
    3. Common ranges for motion-capture data.

    Args:
        velocities: Tensor of angular velocities.
        name: Name of the joint/axis for reporting (currently unused).

    Returns:
        Dict with analysis results.
    """
    abs_max = float(torch.abs(velocities).max())
    abs_mean = float(torch.abs(velocities).mean())

    # Express the extrema in both candidate units; raw magnitudes above 20 are
    # initially assumed to be deg/s, otherwise rad/s.
    if abs_max > 20.0:
        rad_max = abs_max * np.pi / 180
        rad_mean = abs_mean * np.pi / 180
        deg_max = abs_max
        deg_mean = abs_mean
        original_unit = "deg/s"
    else:
        rad_max = abs_max
        rad_mean = abs_mean
        deg_max = abs_max * 180 / np.pi
        deg_mean = abs_mean * 180 / np.pi
        original_unit = "rad/s"

    deg_score = 0
    rad_score = 0

    # Heuristic 1: physical limits.
    if deg_max > 1000:
        rad_score += 2
    if rad_max > 17:
        deg_score += 2

    # Heuristic 2: typical peak ranges for each unit.
    if 20 <= deg_max <= 720:
        deg_score += 1
    if 0.3 <= rad_max <= 12:
        rad_score += 1

    # Heuristic 3: typical mean ranges for motion-capture data.
    if 5 <= deg_mean <= 180:
        deg_score += 1
    if 0.1 <= rad_mean <= 3:
        rad_score += 1

    # Ties (including the zero-score case) fall back to rad/s.
    likely_unit = "deg/s" if deg_score > rad_score else "rad/s"
    confidence = abs(deg_score - rad_score) / (deg_score + rad_score) if (deg_score + rad_score) > 0 else 0

    return {
        "likely_unit": likely_unit,
        "confidence": confidence,
        "max_value": abs_max,
        "mean_value": abs_mean,
        "deg_score": deg_score,
        "rad_score": rad_score,
        "analysis": {
            "deg/s": {"max": deg_max, "mean": deg_mean},
            "rad/s": {"max": rad_max, "mean": rad_mean},
        },
        "original_unit": original_unit,
    }

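# Illustrative trace of the scoring above: a signal peaking at 300 with mean
# magnitude 60 is first assumed to be deg/s (max > 20), then scores 2 : 2
# (300 deg/s and its 5.24 rad/s conversion both land in the "common" ranges),
# so the tie-break reports "rad/s" with confidence 0.0. Low-confidence results
# should be read as inconclusive rather than as a definitive unit.
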
def analyse_dataset(root: Path) -> None:
    """Analyse every trajectory under root and write the two JSON reports."""
    # Collect trajectory files, skipping the shape-calibration artefact.
    files: List[Path] = []
    for p, _, names in os.walk(root):
        for n in names:
            if n == "shape_optimized.pkl":
                continue
            if Path(n).suffix in _ALLOWED_EXT:
                files.append(Path(p) / n)
    files.sort()

    if not files:
        raise RuntimeError(f"No trajectory files found under {root}")

    print(f"Found {len(files)} trajectory files. Analysing…")

    # Running aggregates over the whole dataset. Per-axis extrema start at
    # ±inf so the first episode always overwrites them.
    agg = {
        "num_episodes": len(files),
        "total_frames": 0,
        "lengths": [],
        "base_lin_vel_b": {"min": torch.full((3,), torch.inf), "max": torch.full((3,), -torch.inf)},
        "base_ang_vel_b": {"min": torch.full((3,), torch.inf), "max": torch.full((3,), -torch.inf)},
        "base_height": {"min": torch.tensor(torch.inf), "max": torch.tensor(-torch.inf)},
        "base_quat": {"min": torch.full((4,), torch.inf), "max": torch.full((4,), -torch.inf)},
        "joint_pos": {},
        "joint_vel": {},
        "base_pos": {"min": torch.full((3,), torch.inf), "max": torch.full((3,), -torch.inf)},
        "start_pos_list": [],
    }

    # Schema bookkeeping: which fields appear in which files, and whether
    # their shapes are consistent across the dataset.
    field_analysis = defaultdict(lambda: {
        "count": 0,
        "files_with_field": [],
        "metadata": None,
        "consistent_shape": True,
        "shapes_seen": set(),
    })

    episode_stats: List[Dict[str, Any]] = []

    for f_idx, path in enumerate(files):
        data = _load_file(path)

        # Record which fields this file provides and whether their shapes
        # agree with the rest of the dataset.
        for field_name, field_value in data.items():
            field_info = field_analysis[field_name]
            field_info["count"] += 1
            field_info["files_with_field"].append(str(path.relative_to(root)))

            metadata = _analyze_field(field_value)
            if field_info["metadata"] is None:
                field_info["metadata"] = metadata

            if metadata["type"] in ["tensor", "numpy_array"]:
                shape_tuple = tuple(metadata["shape"])
                field_info["shapes_seen"].add(shape_tuple)
                if len(field_info["shapes_seen"]) > 1:
                    field_info["consistent_shape"] = False

        if "qpos" not in data or "qvel" not in data:
            print(f"[WARN] {path.name}: Missing qpos or qvel, skipping detailed analysis")
            continue

        qpos, qvel = data["qpos"].float(), data["qvel"].float()
        n = qpos.shape[0]
        agg["total_frames"] += n
        agg["lengths"].append(n)

        # Split into base and joint components:
        # qpos = [pos (3), quat (4), joint angles], qvel = [lin (3), ang (3), joint vels].
        base_pos = qpos[:, :3]
        base_quat = math_utils.quat_unique(qpos[:, 3:7])
        base_lin_vel = qvel[:, :3]
        base_ang_vel = qvel[:, 3:6]
        # Express base velocities in the base frame.
        base_lin_vel_b = math_utils.quat_rotate_inverse(base_quat, base_lin_vel)
        base_ang_vel_b = math_utils.quat_rotate_inverse(base_quat, base_ang_vel)

        height = base_pos[:, 2]

        # Update dataset-wide extrema.
        agg["base_height"]["min"] = torch.minimum(agg["base_height"]["min"], height.min())
        agg["base_height"]["max"] = torch.maximum(agg["base_height"]["max"], height.max())
        for k, tensor in zip(["base_lin_vel_b", "base_ang_vel_b"], [base_lin_vel_b, base_ang_vel_b]):
            agg[k]["min"] = torch.minimum(agg[k]["min"], tensor.min(dim=0).values)
            agg[k]["max"] = torch.maximum(agg[k]["max"], tensor.max(dim=0).values)
        agg["base_quat"]["min"] = torch.minimum(agg["base_quat"]["min"], base_quat.min(dim=0).values)
        agg["base_quat"]["max"] = torch.maximum(agg["base_quat"]["max"], base_quat.max(dim=0).values)

        agg["base_pos"]["min"] = torch.minimum(agg["base_pos"]["min"], base_pos.min(dim=0).values)
        agg["base_pos"]["max"] = torch.maximum(agg["base_pos"]["max"], base_pos.max(dim=0).values)

        agg["start_pos_list"].append(base_pos[0].tolist())

        joint_pos = qpos[:, 7:]
        joint_vel = qvel[:, 6:]
        num_joints = joint_pos.shape[1]
        joint_names = data.get("joint_names", None)
        if joint_names is not None and len(joint_names) == num_joints + 1:
            # One extra name, presumably the floating-base (root) joint: drop it.
            joint_names = joint_names[1:]

        if joint_names is None or len(joint_names) != num_joints:
            if joint_names is not None:
                print(f"[WARN] {path.name}: joint_names length {len(joint_names)} != joint dim {num_joints}. Using generic names.")
            joint_names = [f"joint_{i}" for i in range(num_joints)]

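        # Illustrative: 30 names for 29 joint columns drops the leading (root)
        # name above; any other mismatch falls back to joint_0, joint_1, ...
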
        # Per-episode joint ranges, plus running dataset-wide extrema.
        ep_joint_range = {}
        ep_joint_vel_range = {}
        for j in range(num_joints):
            name = joint_names[j]

            j_min, j_max = _tensor_range(joint_pos[:, j])
            ep_joint_range[name] = {"min": j_min, "max": j_max}

            v_min, v_max = _tensor_range(joint_vel[:, j])
            ep_joint_vel_range[name] = {"min": v_min, "max": v_max}

            if name not in agg["joint_pos"]:
                agg["joint_pos"][name] = {"min": j_min, "max": j_max}
            else:
                agg["joint_pos"][name]["min"] = min(agg["joint_pos"][name]["min"], j_min)
                agg["joint_pos"][name]["max"] = max(agg["joint_pos"][name]["max"], j_max)

            if name not in agg["joint_vel"]:
                agg["joint_vel"][name] = {"min": v_min, "max": v_max}
            else:
                agg["joint_vel"][name]["min"] = min(agg["joint_vel"][name]["min"], v_min)
                agg["joint_vel"][name]["max"] = max(agg["joint_vel"][name]["max"], v_max)

        # Unit sanity checks on this episode's angular velocities.
        base_ang_vel_analysis = {}
        for i, axis in enumerate(['x', 'y', 'z']):
            base_ang_vel_axis = base_ang_vel_b[:, i]
            base_ang_vel_analysis[axis] = _analyze_angular_velocity(base_ang_vel_axis, f"base_ang_vel_{axis}")

        joint_vel_analysis = {}
        for j in range(num_joints):
            name = joint_names[j]
            joint_vel_analysis[name] = _analyze_angular_velocity(joint_vel[:, j], name)

        episode_stats.append({
            "file": str(path.relative_to(root)),
            "length": n,
            "base_height": _tensor_range(height),
            "base_pos_start": base_pos[0].tolist(),
            "base_pos_range": {
                "min": base_pos.min(dim=0).values.tolist(),
                "max": base_pos.max(dim=0).values.tolist(),
            },
            "base_lin_vel_b": {
                "min": base_lin_vel_b.min(dim=0).values.tolist(),
                "max": base_lin_vel_b.max(dim=0).values.tolist(),
            },
            "base_ang_vel_b": {
                "min": base_ang_vel_b.min(dim=0).values.tolist(),
                "max": base_ang_vel_b.max(dim=0).values.tolist(),
            },
            # Per-episode range (not the running dataset-wide aggregate).
            "base_quat": {
                "min": base_quat.min(dim=0).values.tolist(),
                "max": base_quat.max(dim=0).values.tolist(),
            },
            "joint_pos_range": ep_joint_range,
            "joint_vel_range": ep_joint_vel_range,
            "base_ang_vel_analysis": base_ang_vel_analysis,
            "joint_vel_analysis": joint_vel_analysis,
        })

        if (f_idx + 1) % 50 == 0:
            print(f"Processed {f_idx+1}/{len(files)} files…")

    # Summarise schema consistency across all files.
    field_summary = {}
    for field_name, info in field_analysis.items():
        field_summary[field_name] = {
            "present_in_files": info["count"],
            "present_in_percentage": round(info["count"] / len(files) * 100, 1),
            "metadata": info["metadata"],
            "consistent_shape": info["consistent_shape"],
        }
        if not info["consistent_shape"]:
            field_summary[field_name]["shapes_seen"] = sorted([list(s) for s in info["shapes_seen"]])

    # Aggregate report. Note the angular-velocity analysis blocks come from
    # the last processed episode, not from a dataset-wide aggregate.
    agg_stats = {
        "num_episodes": agg["num_episodes"],
        "average_length": float(np.mean(agg["lengths"])),
        "min_length": int(min(agg["lengths"])),
        "max_length": int(max(agg["lengths"])),
        "base_height": {k: v.item() if torch.is_tensor(v) else v.tolist() for k, v in agg["base_height"].items()},
        "base_lin_vel_b": {k: v.tolist() for k, v in agg["base_lin_vel_b"].items()},
        "base_ang_vel_b": {k: v.tolist() for k, v in agg["base_ang_vel_b"].items()},
        "base_quat": {k: v.tolist() for k, v in agg["base_quat"].items()},
        "joint_pos_global_range": agg["joint_pos"],
        "joint_vel_global_range": agg["joint_vel"],
        "base_pos": {k: v.tolist() for k, v in agg["base_pos"].items()},
        "avg_start_pos": np.mean(agg["start_pos_list"], axis=0).tolist(),
        "field_analysis": field_summary,
        "angular_velocity_analysis": {
            "base": base_ang_vel_analysis,
            "joints": joint_vel_analysis,
        },
    }

    # Coarse unit guesses from the global extrema (both signs considered).
    max_ang_vel = max(
        max(abs(x) for x in agg_stats["base_ang_vel_b"]["max"]),
        max(abs(x) for x in agg_stats["base_ang_vel_b"]["min"]),
    )
    base_ang_unit = _determine_angular_unit(max_ang_vel)
    agg_stats["base_ang_vel_unit"] = base_ang_unit

    joint_vel_units = {}
    max_joint_vels = {}
    for joint_name, vel_range in agg["joint_vel"].items():
        max_vel = max(abs(vel_range["min"]), abs(vel_range["max"]))
        max_joint_vels[joint_name] = max_vel
        joint_vel_units[joint_name] = _determine_angular_unit(max_vel)

    unique_units = set(joint_vel_units.values())
    if len(unique_units) == 1:
        joint_vel_unit = next(iter(unique_units))
        print(f"\nAll joint velocities appear to be in {joint_vel_unit}")
    else:
        print("\nWARNING: Inconsistent joint velocity units detected:")
        for unit in unique_units:
            joints = [name for name, u in joint_vel_units.items() if u == unit]
            print(f"  {unit}: {', '.join(joints)}")

    agg_stats["joint_vel_units"] = joint_vel_units
    agg_stats["joint_vel_max_magnitude"] = max_joint_vels

    print(f"\nEstimated base angular velocity unit: {base_ang_unit} (max ω ≈ {max_ang_vel:.2f})")
    print("Joint velocity analysis added to aggregate_stats.json")

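    # Illustrative console output (values hypothetical):
    #   All joint velocities appear to be in rad/s
    #   Estimated base angular velocity unit: rad/s (max ω ≈ 5.12)
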
    # Detailed unit report for the last processed episode.
    print("\nAngular Velocity Analysis:")
    print("\nBase Angular Velocity:")
    for axis, analysis in base_ang_vel_analysis.items():
        print(f"  {axis}-axis: Likely {analysis['likely_unit']} (confidence: {analysis['confidence']:.2f})")
        print(f"    Max: {analysis['max_value']:.2f} {analysis['original_unit']}")
        print(f"    Mean: {analysis['mean_value']:.2f} {analysis['original_unit']}")

    print("\nJoint Velocities:")
    for joint, analysis in joint_vel_analysis.items():
        print(f"  {joint}: Likely {analysis['likely_unit']} (confidence: {analysis['confidence']:.2f})")
        print(f"    Max: {analysis['max_value']:.2f} {analysis['original_unit']}")
        print(f"    Mean: {analysis['mean_value']:.2f} {analysis['original_unit']}")

    (root / "episode_stats.json").write_text(json.dumps(episode_stats, indent=2))
    (root / "aggregate_stats.json").write_text(json.dumps(agg_stats, indent=2))

    print("\nAnalysis complete. Results saved to episode_stats.json and aggregate_stats.json")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Analyse Unitree G1 AMASS dataset")
    parser.add_argument("--root", type=str, required=True, help="Root folder of trajectories")
    args = parser.parse_args()

    analyse_dataset(Path(args.root).expanduser())