# multi_mode_data / sync_image_low_dim.py
# Uploaded by rollingoat via huggingface_hub (commit 5c44e75, verified).
"""Synchronize image observations with low-dimensional robot data.
Uses image timestamps as the master timeline and aligns low-dimensional
datapoints (e.g., joint states) by nearest timestamp.
"""
from __future__ import annotations
import argparse
import os
from typing import Dict, Iterable, List, Optional, Tuple
import h5py
import numpy as np
def parse_args(argv: Optional[List[str]] = None) -> argparse.Namespace:
    """Parse command-line arguments for the synchronization script.

    Args:
        argv: Optional explicit argument list (useful for testing or calling
            programmatically). When ``None``, ``sys.argv[1:]`` is used, which
            preserves the original command-line behavior.

    Returns:
        The populated :class:`argparse.Namespace`.
    """
    parser = argparse.ArgumentParser(
        description="Synchronize an image HDF5 file with a low-dimensional HDF5 file"
    )
    parser.add_argument("--image-h5", required=True, help="Path to the image HDF5 file")
    parser.add_argument("--lowdim-h5", required=True, help="Path to the low-dimensional HDF5 file")
    parser.add_argument("--output-h5", required=True, help="Destination path for the synchronized HDF5")
    parser.add_argument(
        "--image-timestamp-key",
        default="timestamp",
        help="Dataset key holding timestamps inside the image obs group",
    )
    parser.add_argument(
        "--lowdim-timestamp-key",
        default="timestamp",
        help="Dataset key holding timestamps inside the low-dimensional obs group",
    )
    parser.add_argument(
        "--image-keys",
        nargs="*",
        help="Optional list of image observation keys to copy (defaults to all datasets except the timestamp)",
    )
    parser.add_argument(
        "--lowdim-keys",
        nargs="*",
        help="Optional list of low-dimensional observation keys to sync (defaults to all datasets except the timestamp)",
    )
    parser.add_argument(
        "--allow-missing",
        action="store_true",
        help="Skip demos that miss required keys instead of raising an error",
    )
    parser.add_argument(
        "--exclude-demo",
        nargs="*",
        default=None,
        help="Demo names to exclude, e.g. demo_4 demo_5 demo_42",
    )
    parser.add_argument(
        "--skip-n",
        type=int,
        default=0,
        dest="skip_n",
        help=(
            "Keep every (skip_n + 1)-th frame and discard the rest. "
            "E.g. --skip-n 2 keeps frames 0, 3, 6, … (default: 0 = keep all frames)."
        ),
    )
    return parser.parse_args(argv)
def validate_files(*paths: str) -> None:
    """Verify that every given path exists on disk.

    Raises:
        FileNotFoundError: Lists all paths that could not be found.
    """
    absent: List[str] = []
    for candidate in paths:
        if not os.path.exists(candidate):
            absent.append(candidate)
    if absent:
        raise FileNotFoundError(f"Missing required file(s): {', '.join(absent)}")
def resolve_dataset_keys(
    group: h5py.Group, timestamp_key: str, explicit: Iterable[str] | None
) -> List[str]:
    """Select which dataset keys of ``group`` should be processed.

    Any key containing "timestamp" (case-insensitive) is always excluded.
    When ``explicit`` is provided and non-empty, those keys are validated
    against the group; otherwise every non-timestamp dataset in the group
    is used.

    Raises:
        KeyError: If requested keys are absent, or no usable keys remain.
    """
    if explicit:
        requested = list(explicit)
        absent = [name for name in requested if name not in group]
        if absent:
            raise KeyError(f"Group {group.name} missing requested keys: {absent}")
        usable = [name for name in requested if "timestamp" not in name.lower()]
        if not usable:
            raise KeyError("No valid keys remain after removing timestamp datasets")
        return usable
    discovered = [
        name
        for name, item in group.items()
        if name != timestamp_key
        and "timestamp" not in name.lower()
        and isinstance(item, h5py.Dataset)
    ]
    if not discovered:
        raise KeyError(f"Group {group.name} has no datasets besides timestamp '{timestamp_key}'")
    return discovered
def find_nearest_idx(array: np.ndarray, value: float) -> int:
    """Return the index of the element of sorted ``array`` closest to ``value``.

    ``array`` must be in ascending order. Ties are resolved toward the
    earlier (smaller) index.
    """
    pos = int(np.searchsorted(array, value, side="left"))
    if pos <= 0:
        return 0
    last = len(array) - 1
    if pos > last:
        return last
    left_gap = abs(value - array[pos - 1])
    right_gap = abs(array[pos] - value)
    if left_gap <= right_gap:
        return pos - 1
    return pos
def resample_sequence(
    sequence: np.ndarray, follower_ts: np.ndarray, master_ts: np.ndarray
) -> np.ndarray:
    """Re-index ``sequence`` onto the master timeline by nearest timestamp.

    For each master timestamp the nearest follower timestamp is located and
    the corresponding row of ``sequence`` is selected.

    Raises:
        ValueError: If ``sequence`` and ``follower_ts`` lengths disagree.
    """
    if sequence.shape[0] != follower_ts.shape[0]:
        raise ValueError(
            "Sequence length does not match low-dimensional timestamp count for resampling"
        )
    nearest = []
    for t in master_ts:
        nearest.append(find_nearest_idx(follower_ts, t))
    return sequence[nearest]
def detect_timestamp_jump(timestamps: np.ndarray, threshold: float = 1.0) -> int:
    """Return the index of the start of the valid segment after the last sudden jump.

    A "jump" is a consecutive difference greater than ``threshold``. If no
    jump exists (or fewer than two samples), 0 is returned.
    """
    if len(timestamps) < 2:
        return 0
    gaps = np.diff(timestamps)
    jumps = np.flatnonzero(gaps > threshold)
    return int(jumps[-1]) + 1 if jumps.size else 0
def sync_demo(
    demo: str,
    image_obs: h5py.Group,
    lowdim_obs: h5py.Group,
    image_ts_key: str,
    lowdim_ts_key: str,
    image_keys: List[str],
    lowdim_keys: List[str],
) -> Tuple[Optional[Dict[str, Dict[str, np.ndarray]]], np.ndarray]:
    """Align one demo's low-dimensional samples to its image timeline.

    Image timestamps act as the master clock; for every image frame inside
    the overlapping time window, the nearest low-dimensional sample (within
    0.1 s) is selected. Low-dim timestamps are cleaned first: zero-valued
    entries are dropped and everything before the last sudden time jump is
    discarded.

    Args:
        demo: Demo name, used only in log messages.
        image_obs: HDF5 "obs" group of the image file for this demo.
        lowdim_obs: HDF5 "obs" group of the low-dimensional file for this demo.
        image_ts_key: Dataset key of image timestamps inside ``image_obs``.
        lowdim_ts_key: Dataset key of low-dim timestamps inside ``lowdim_obs``.
        image_keys: Image dataset keys to copy.
        lowdim_keys: Low-dimensional dataset keys to resample.

    Returns:
        ``(synced, follower_timestamps)``: ``synced`` is ``None`` when the
        demo must be skipped (no overlap, or nothing left after cleaning),
        otherwise a dict with "timestamps", "image_obs", and "lowdim_obs".
        ``follower_timestamps`` is the cleaned low-dim timeline, which the
        caller reuses to resample actions.

    Raises:
        KeyError: If a timestamp dataset is missing from either group.
        ValueError: If a timestamp array is empty, or a matched pair of
            timestamps differs by more than 0.1 s.
    """
    if image_ts_key not in image_obs:
        raise KeyError(f"Image timestamps '{image_ts_key}' missing in {image_obs.name}")
    if lowdim_ts_key not in lowdim_obs:
        raise KeyError(f"Low-dim timestamps '{lowdim_ts_key}' missing in {lowdim_obs.name}")
    # float64 keeps sub-millisecond precision for Unix-epoch-scale values.
    master_timestamps = np.asarray(image_obs[image_ts_key][:], dtype=np.float64)
    follower_timestamps = np.asarray(lowdim_obs[lowdim_ts_key][:], dtype=np.float64)
    if master_timestamps.size == 0:
        raise ValueError(f"Demo {demo} has no image timestamps to drive synchronization")
    if follower_timestamps.size == 0:
        raise ValueError(f"Demo {demo} has no low-dimensional timestamps")
    # Read everything into memory once so indexing below is cheap.
    master_cache = {key: image_obs[key][:] for key in image_keys}
    follower_cache = {key: lowdim_obs[key][:] for key in lowdim_keys}
    if master_cache:
        # Guard against more timestamps than image frames; trim to the
        # shortest image dataset so indices stay valid.
        min_cache_len = min(v.shape[0] for v in master_cache.values())
        if master_timestamps.size > min_cache_len:
            print(f"Warning: master_timestamps has {master_timestamps.size} entries but image cache has {min_cache_len}; truncating timestamps for demo {demo}.")
            master_timestamps = master_timestamps[:min_cache_len]
    # Timestamps at (numerically) zero are treated as invalid samples —
    # presumably unfilled/uninitialized entries; TODO confirm with recorder.
    non_zero_mask = follower_timestamps > 1e-6
    if not np.all(non_zero_mask):
        print(f"Warning: Discarding {np.sum(~non_zero_mask)} zero-valued timestamps from low-dim data for demo {demo}")
        follower_timestamps = follower_timestamps[non_zero_mask]
        for k in follower_cache:
            # Keep data rows in lockstep with the filtered timestamps.
            follower_cache[k] = follower_cache[k][non_zero_mask]
        if follower_timestamps.size == 0:
            raise ValueError(f"Demo {demo} has only zero-valued low-dimensional timestamps")
    # Drop everything before the last discontinuity (> 0.5 s gap) in the
    # low-dim timeline; only the segment after the jump is trusted.
    jump_idx = detect_timestamp_jump(follower_timestamps, threshold=0.5)
    if jump_idx > 0:
        print(f"Warning: Sudden jump detected in low-dim timestamps for demo {demo} at index {jump_idx}. Discarding {jump_idx} samples before the jump.")
        follower_timestamps = follower_timestamps[jump_idx:]
        for k in follower_cache:
            follower_cache[k] = follower_cache[k][jump_idx:]
        if follower_timestamps.size == 0:
            print(f"Warning: Discarding all low-dim timestamps due to jump for demo {demo}; skipping demo")
            return None, follower_timestamps
    # Compute the time window covered by BOTH modalities.
    low_start, low_end = np.min(follower_timestamps), np.max(follower_timestamps)
    img_start, img_end = np.min(master_timestamps), np.max(master_timestamps)
    overlap_start = max(img_start, low_start)
    overlap_end = min(img_end, low_end)
    print(f"Demo {demo} timestamp overlap: [{overlap_start:.3f}, {overlap_end:.3f}]")
    if overlap_start > overlap_end:
        print(f"Warning: No timestamp overlap between image and low-dim for demo {demo}; skipping demo")
        return None, follower_timestamps
    # Restrict the master timeline to frames inside the overlap window.
    candidates_mask = (master_timestamps >= overlap_start) & (master_timestamps <= overlap_end)
    candidate_indices = np.where(candidates_mask)[0]
    if candidate_indices.size == 0:
        print(f"Warning: No image timestamps fall within the overlap interval for demo {demo}; skipping demo")
        return None, follower_timestamps
    start_idx = candidate_indices[0]
    end_idx = candidate_indices[-1]
    print(f"Demo {demo} master start idx: {start_idx}, timestamp: {master_timestamps[start_idx]:.3f}")
    # NOTE(review): a contiguous range is used, so any interior frames that
    # fell outside the mask (non-monotonic timestamps) would be kept.
    master_indices = np.arange(start_idx, end_idx + 1)
    master_cache_sliced = {k: v[master_indices] for k, v in master_cache.items()}
    synced_images: Dict[str, List[np.ndarray]] = {key: [] for key in image_keys}
    synced_lowdim: Dict[str, List[np.ndarray]] = {key: [] for key in lowdim_keys}
    master_in_ts = master_timestamps[master_indices]
    for local_idx, timestamp in enumerate(master_in_ts):
        timestamp = float(timestamp)
        follower_idx = find_nearest_idx(follower_timestamps, timestamp)
        time_diff = abs(follower_timestamps[follower_idx] - timestamp)
        # Hard 0.1 s tolerance: a larger gap means the streams drifted apart.
        if time_diff > 0.1:
            raise ValueError(
                f"Timestamp mismatch at master idx {master_indices[local_idx]} (master ts: {timestamp}, nearest follower ts: {follower_timestamps[follower_idx]}, diff: {time_diff})"
            )
        for key in image_keys:
            synced_images[key].append(master_cache_sliced[key][local_idx])
        for key in lowdim_keys:
            synced_lowdim[key].append(follower_cache[key][follower_idx])
    # Stack per-frame samples back into (T, ...) arrays.
    image_arrays = {key: np.stack(values, axis=0) for key, values in synced_images.items()}
    lowdim_arrays = {key: np.stack(values, axis=0) for key, values in synced_lowdim.items()}
    return (
        {
            "timestamps": master_in_ts,
            "image_obs": image_arrays,
            "lowdim_obs": lowdim_arrays,
        },
        follower_timestamps,
    )
def write_demo(
demo: str,
out_root: h5py.Group,
synced: Dict[str, Dict[str, np.ndarray]],
image_ts_key: str,
actions: Optional[np.ndarray],
) -> None:
g_demo = out_root.create_group(demo)
g_obs = g_demo.create_group("obs")
if actions is not None:
g_demo.create_dataset("actions", data=actions)
g_obs.create_dataset(image_ts_key, data=synced["timestamps"])
for key, arr in synced["image_obs"].items():
g_obs.create_dataset(key, data=arr)
for key, arr in synced["lowdim_obs"].items():
g_obs.create_dataset(key, data=arr)
g_demo.attrs["num_samples"] = synced["timestamps"].shape[0]
def main() -> None:
    """Drive the full synchronization: parse args, iterate demos, write output.

    Reads the image and low-dimensional HDF5 files, intersects their demo
    names, synchronizes each demo on the image timeline, optionally
    subsamples frames (``--skip-n``), resamples actions to the final
    timeline, and writes the result (demos renumbered ``demo_0..N-1``) to
    the output file.

    Raises:
        FileNotFoundError: If an input file is missing.
        ValueError: If the output path equals an input path or no demos
            remain to process.
        KeyError: If an input file lacks a top-level "data" group.
    """
    args = parse_args()
    validate_files(args.image_h5, args.lowdim_h5)
    # Refuse to overwrite either input in place.
    if os.path.abspath(args.image_h5) == os.path.abspath(args.output_h5):
        raise ValueError("Output file must differ from the image input file")
    if os.path.abspath(args.lowdim_h5) == os.path.abspath(args.output_h5):
        raise ValueError("Output file must differ from the low-dimensional input file")
    with h5py.File(args.image_h5, "r") as f_image, h5py.File(args.lowdim_h5, "r") as f_lowdim:
        if "data" not in f_image or "data" not in f_lowdim:
            raise KeyError("Both HDF5 files must contain a top-level 'data' group")
        # Only demos present in BOTH files can be synchronized.
        demos = sorted(set(f_image["data"].keys()) & set(f_lowdim["data"].keys()))
        if getattr(args, "exclude_demo", None):
            exclude_names = set(args.exclude_demo)
            unknown = exclude_names - set(demos)
            if unknown:
                print(f"Warning: --exclude-demo names not found and ignored: {sorted(unknown)}")
            demos = [d for d in demos if d not in exclude_names]
            if not demos:
                raise ValueError("No demos left after applying --exclude-demo filter")
        if not demos:
            raise ValueError("No overlapping demos found between the provided files")
        # "or '.'" handles bare filenames whose dirname is empty.
        os.makedirs(os.path.dirname(os.path.abspath(args.output_h5)) or ".", exist_ok=True)
        with h5py.File(args.output_h5, "w") as f_out:
            g_out = f_out.create_group("data")
            processed = 0
            for demo in demos:
                print(f"Processing demo {demo}...")
                try:
                    image_obs = f_image["data"][demo]["obs"]
                    lowdim_demo = f_lowdim["data"][demo]
                    lowdim_obs = lowdim_demo["obs"]
                    image_keys = resolve_dataset_keys(
                        image_obs, args.image_timestamp_key, args.image_keys
                    )
                    lowdim_keys = resolve_dataset_keys(
                        lowdim_obs, args.lowdim_timestamp_key, args.lowdim_keys
                    )
                    result = sync_demo(
                        demo,
                        image_obs,
                        lowdim_obs,
                        args.image_timestamp_key,
                        args.lowdim_timestamp_key,
                        image_keys,
                        lowdim_keys,
                    )
                    # sync_demo signals a skippable demo with a None payload.
                    if result[0] is None:
                        continue
                    synced, follower_ts = result
                except Exception as exc:
                    # With --allow-missing, any per-demo failure skips the
                    # demo instead of aborting the whole run.
                    if args.allow_missing:
                        print(f"Skipping {demo}: {exc}")
                        continue
                    raise
                if args.skip_n > 0:
                    # Keep every (skip_n + 1)-th frame across all arrays.
                    step = args.skip_n + 1
                    indices = np.arange(0, synced["timestamps"].shape[0], step)
                    if len(indices) < 2:
                        print(f" Skipping {demo}: too few frames after --skip-n {args.skip_n} subsampling.")
                        continue
                    synced["timestamps"] = synced["timestamps"][indices]
                    for k in synced["image_obs"]:
                        synced["image_obs"][k] = synced["image_obs"][k][indices]
                    for k in synced["lowdim_obs"]:
                        synced["lowdim_obs"][k] = synced["lowdim_obs"][k][indices]
                    print(f" [skip_n={args.skip_n}] {len(indices)} frames kept (step={step})")
                actions_data = None
                if "actions" in lowdim_demo:
                    try:
                        # Resample actions from the (cleaned) low-dim timeline
                        # onto the final, possibly subsampled, image timeline.
                        actions_source = lowdim_demo["actions"][:]
                        actions_data = resample_sequence(
                            actions_source, follower_ts, synced["timestamps"]
                        )
                    except ValueError as exc:
                        print(f"Skipping actions for {demo}: {exc}")
                # Renumber output demos densely so skipped ones leave no gaps.
                out_name = f"demo_{processed}"
                write_demo(out_name, g_out, synced, args.image_timestamp_key, actions_data)
                processed += 1
                suffix = f" (renamed from {demo})" if out_name != demo else ""
                print(f"Synchronized {out_name}{suffix}: {synced['timestamps'].shape[0]} frames")
            # Record provenance on the output file.
            f_out.attrs["source_image_h5"] = os.path.abspath(args.image_h5)
            f_out.attrs["source_lowdim_h5"] = os.path.abspath(args.lowdim_h5)
            f_out.attrs["num_synced_demos"] = processed
    print(f"Finished syncing {processed} demo(s) to {args.output_h5}")
# Entry point when executed as a script (module import has no side effects).
if __name__ == "__main__":
    main()