"""Minimal usage example for DroidRGBDataset.

Run from the `droid_share/` directory:

    python example_usage.py --root /path/to/lerobot/droid_1.0.1
"""
import argparse
from pathlib import Path
import torch
from torch.utils.data import DataLoader
from droid_rgb_dataset import DroidRGBDataset
def main() -> None:
    """Build a DroidRGBDataset, pull one batch, and (optionally) save a preview PNG.

    The preview step is best-effort: it is skipped silently when PIL or
    numpy is not installed.
    """
    # --- CLI -------------------------------------------------------------
    parser = argparse.ArgumentParser()
    parser.add_argument("--root", required=True, help="Path to lerobot/droid_1.0.1")
    parser.add_argument("--stats-dir", default=str(Path(__file__).parent / "_stats"))
    parser.add_argument("--n-frames", type=int, default=8)
    parser.add_argument("--stride", type=int, default=3, help="3 = 5Hz, 1 = 15Hz")
    parser.add_argument("--batch-size", type=int, default=2)
    parser.add_argument("--num-workers", type=int, default=2)
    parser.add_argument("--save-preview", default="preview.png")
    args = parser.parse_args()

    # --- Dataset + loader ------------------------------------------------
    camera_keys = (
        "observation.images.exterior_2_left",
        "observation.images.wrist_left",
    )
    dataset = DroidRGBDataset(
        root=args.root,
        stats_dir=args.stats_dir,
        camera_keys=camera_keys,
        n_frames=args.n_frames,
        stride=args.stride,
        image_size=(224, 224),
    )
    print(f"usable episodes: {len(dataset)}")

    loader = DataLoader(
        dataset,
        batch_size=args.batch_size,
        num_workers=args.num_workers,
        shuffle=True,
        collate_fn=_collate,  # custom collate: stacks tensors, tensorizes scalars
    )

    # --- Inspect one batch -----------------------------------------------
    batch = next(iter(loader))
    ext = batch["observation.images.exterior_2_left"]
    wrist = batch["observation.images.wrist_left"]
    print(f"exterior: shape={tuple(ext.shape)} dtype={ext.dtype}")
    print(f"wrist: shape={tuple(wrist.shape)} dtype={wrist.dtype}")
    print(f"episode_index: {batch['episode_index'].tolist()}")
    print(f"start_frame: {batch['start_frame'].tolist()}")

    # --- Optional preview image ------------------------------------------
    try:
        import numpy as np
        from PIL import Image
    except ImportError:
        # Preview dependencies missing; nothing more to do.
        return

    # Put exterior and wrist views side by side, then lay the frames of the
    # first sample out as one horizontal strip.
    # NOTE(review): assumes frames are channels-last uint8 — confirm against
    # DroidRGBDataset's output format.
    grid = torch.cat([ext[0], wrist[0]], dim=2).numpy()  # (T, H, 2W, 3)
    row = np.concatenate(list(grid), axis=1)  # (H, T*2W, 3)
    Image.fromarray(row).save(args.save_preview)
    print(f"saved preview: {args.save_preview}")
def _collate(samples):
out = {}
for k in samples[0]:
if isinstance(samples[0][k], torch.Tensor):
out[k] = torch.stack([s[k] for s in samples], dim=0)
else:
out[k] = torch.tensor([s[k] for s in samples])
return out
# Script entry point.
if __name__ == "__main__":
    main()
|