| | import argparse |
| | import io |
| | import json |
| | from pathlib import Path |
| |
|
| | from PIL import Image |
| | import webdataset as wds |
| |
|
| |
|
def decode_sample(sample: dict, num_frames: int = 8) -> dict:
    """Decode one raw WebDataset sample into PIL frames plus metadata.

    Args:
        sample: Raw WebDataset sample mapping. Must contain JPEG byte
            entries ``jpg-00`` .. ``jpg-NN`` (zero-padded, ``num_frames``
            of them), a ``json`` metadata entry with ``id`` and ``label``
            fields, and the WebDataset ``__key__``.
        num_frames: Number of ``jpg-NN`` frame entries to decode.
            Defaults to 8, the shard layout this script was written for.

    Returns:
        dict with keys ``id``, ``label``, ``frames`` (list of RGB PIL
        images), and ``__key__``.

    Raises:
        KeyError: If an expected ``jpg-NN`` or metadata key is missing.
    """
    # Each frame is stored as raw JPEG bytes under a zero-padded key.
    frames = [
        Image.open(io.BytesIO(sample[f"jpg-{i:02d}"])).convert("RGB")
        for i in range(num_frames)
    ]
    metadata = json.loads(sample["json"])
    return {
        "id": metadata["id"],
        "label": metadata["label"],
        "frames": frames,
        "__key__": sample["__key__"],
    }
| |
|
| |
|
def parse_args() -> argparse.Namespace:
    """Parse the command-line options for the shard-preview CLI."""
    cli = argparse.ArgumentParser(description="Load Camera Motion WebDataset shards.")
    cli.add_argument(
        "--dataset-root",
        type=Path,
        required=True,
        help="Folder containing the staged HF dataset files.",
    )
    cli.add_argument(
        "--split",
        choices=["train", "val", "test"],
        default="train",
    )
    cli.add_argument(
        "--limit",
        type=int,
        default=3,
        help="Number of samples to print.",
    )
    return cli.parse_args()
| |
|
| |
|
def main() -> None:
    """Locate shards for the requested split and print a short sample preview."""
    args = parse_args()

    # Shards follow the "<split>-*.tar" naming scheme inside the dataset root.
    shard_paths = sorted(str(p) for p in args.dataset_root.glob(f"{args.split}-*.tar"))
    if not shard_paths:
        raise SystemExit(f"No shards found for split {args.split} in {args.dataset_root}")

    # shardshuffle=False keeps shard order deterministic for previewing.
    pipeline = wds.WebDataset(shard_paths, shardshuffle=False).map(decode_sample)
    for index, sample in enumerate(pipeline):
        summary = {
            "index": index,
            "id": sample["id"],
            "label": sample["label"],
            "num_frames": len(sample["frames"]),
            "frame_size": sample["frames"][0].size,
            "webdataset_key": sample["__key__"],
        }
        print(json.dumps(summary, indent=2))
        # Stop once the requested number of samples has been printed.
        if index + 1 >= args.limit:
            break
| |
|
| |
|
| | if __name__ == "__main__": |
| | main() |
| |
|