| """Minimal usage example for DroidRGBDataset. |
| |
| Run from the `droid_share/` directory: |
| python example_usage.py --root /path/to/lerobot/droid_1.0.1 |
| """ |
|
|
| import argparse |
| from pathlib import Path |
|
|
| import torch |
| from torch.utils.data import DataLoader |
|
|
| from droid_rgb_dataset import DroidRGBDataset |
|
|
|
|
def main() -> None:
    """Build a DroidRGBDataset, pull one batch, print its shapes, and save a preview.

    Command-line flags select the dataset root, normalization-stats directory,
    clip length/stride, and DataLoader batching; see each ``add_argument`` help.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--root", required=True, help="Path to lerobot/droid_1.0.1")
    parser.add_argument("--stats-dir", default=str(Path(__file__).parent / "_stats"))
    parser.add_argument("--n-frames", type=int, default=8)
    parser.add_argument("--stride", type=int, default=3, help="3 = 5Hz, 1 = 15Hz")
    parser.add_argument("--batch-size", type=int, default=2)
    parser.add_argument("--num-workers", type=int, default=2)
    parser.add_argument("--save-preview", default="preview.png")
    args = parser.parse_args()

    dataset = DroidRGBDataset(
        root=args.root,
        stats_dir=args.stats_dir,
        camera_keys=(
            "observation.images.exterior_2_left",
            "observation.images.wrist_left",
        ),
        n_frames=args.n_frames,
        stride=args.stride,
        image_size=(224, 224),
    )
    print(f"usable episodes: {len(dataset)}")

    loader = DataLoader(
        dataset,
        batch_size=args.batch_size,
        num_workers=args.num_workers,
        shuffle=True,
        collate_fn=_collate,
    )

    # One batch is enough for a smoke test of shapes/dtypes and metadata.
    batch = next(iter(loader))
    ext = batch["observation.images.exterior_2_left"]
    wrist = batch["observation.images.wrist_left"]
    print(f"exterior: shape={tuple(ext.shape)} dtype={ext.dtype}")
    print(f"wrist: shape={tuple(wrist.shape)} dtype={wrist.dtype}")
    print(f"episode_index: {batch['episode_index'].tolist()}")
    print(f"start_frame: {batch['start_frame'].tolist()}")

    _save_preview(ext, wrist, args.save_preview)


def _save_preview(ext: torch.Tensor, wrist: torch.Tensor, path: str) -> None:
    """Save a side-by-side frame strip of the first sample's two camera views.

    Skipped (with a console note, instead of silently) when Pillow or NumPy
    is not installed.
    """
    try:
        from PIL import Image
        import numpy as np
    except ImportError:
        # Previously this returned silently; tell the user why no file appeared.
        print("Pillow/NumPy not available; skipping preview image.")
        return
    # Join the two camera views side by side, then lay all frames out in a row.
    # NOTE(review): assumes each view is (T, H, W, C) uint8 so that dim=2 is
    # width and Image.fromarray accepts the result — confirm against
    # DroidRGBDataset's output layout.
    grid = torch.cat([ext[0], wrist[0]], dim=2).numpy()
    row = np.concatenate(list(grid), axis=1)
    Image.fromarray(row).save(path)
    print(f"saved preview: {path}")
|
|
|
|
| def _collate(samples): |
| out = {} |
| for k in samples[0]: |
| if isinstance(samples[0][k], torch.Tensor): |
| out[k] = torch.stack([s[k] for s in samples], dim=0) |
| else: |
| out[k] = torch.tensor([s[k] for s in samples]) |
| return out |
|
|
|
|
# Script entry point: run the example only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
|