# multiview-datasets / download_all.py
# Uploaded by dxgl via huggingface_hub (commit 60fe6e9, verified)
#!/usr/bin/env python3
"""
Download DX.GL multi-view datasets from the manifest.
Usage:
python download_all.py # download all datasets
python download_all.py --object apple # specific object
python download_all.py --output ./datasets # custom output directory
python download_all.py --splats # also download pre-trained .ply splats
Requires: pip install requests
"""
import argparse
import json
import os
import sys

# requests is the only third-party dependency; fail fast with an
# actionable install hint instead of a bare ImportError traceback.
try:
    import requests
except ImportError:
    print("Please install requests: pip install requests")
    sys.exit(1)

# Remote manifest listing every dataset in the repo.
MANIFEST_URL = "https://huggingface.co/datasets/dxgl/multiview-datasets/resolve/main/manifest.json"
# A manifest.json sitting next to this script, if present.
MANIFEST_LOCAL = os.path.join(os.path.dirname(os.path.abspath(__file__)), "manifest.json")
def load_manifest():
    """Load the dataset manifest.

    A manifest.json next to this script takes precedence; otherwise the
    manifest is fetched from the HuggingFace dataset repo.

    Returns:
        dict: the parsed manifest.

    Raises:
        requests.HTTPError: if the remote fetch returns a non-2xx status.
    """
    if os.path.exists(MANIFEST_LOCAL):
        # Manifest is JSON, which is UTF-8 by spec — don't rely on the
        # platform default encoding.
        with open(MANIFEST_LOCAL, encoding="utf-8") as f:
            return json.load(f)
    print(f"Downloading manifest from {MANIFEST_URL} ...")
    # A timeout keeps the script from hanging forever on a stalled connection.
    resp = requests.get(MANIFEST_URL, timeout=30)
    resp.raise_for_status()
    return resp.json()
def download_file(url, dest_path):
    """Download *url* to *dest_path*, streaming with a progress display.

    Writes to a temporary ``dest_path + ".part"`` file and atomically
    renames it into place on success, so an interrupted download never
    leaves a truncated file that a later run would mistake for a
    complete one (the caller skips paths that already exist).

    Raises:
        requests.HTTPError: on a non-2xx response.
    """
    tmp_path = dest_path + ".part"
    # stream=True avoids buffering the whole file in memory; the timeout
    # applies to connect and to each read, guarding against stalls.
    with requests.get(url, stream=True, timeout=60) as resp:
        resp.raise_for_status()
        total = int(resp.headers.get("content-length", 0))
        downloaded = 0
        try:
            with open(tmp_path, "wb") as f:
                for chunk in resp.iter_content(chunk_size=1 << 20):
                    if not chunk:
                        # Skip keep-alive chunks from chunked transfers.
                        continue
                    f.write(chunk)
                    downloaded += len(chunk)
                    if total > 0:
                        pct = downloaded / total * 100
                        mb = downloaded / 1e6
                        print(f"\r {mb:.1f} MB ({pct:.0f}%)", end="", flush=True)
        except BaseException:
            # Remove the partial file on any failure, including Ctrl-C.
            try:
                os.remove(tmp_path)
            except OSError:
                pass
            raise
    os.replace(tmp_path, dest_path)
    print()
def main():
    """Parse CLI arguments, then download (and optionally extract) datasets."""
    parser = argparse.ArgumentParser(description="Download DX.GL multi-view datasets")
    parser.add_argument("--object", default=None,
                        help="Download only a specific object (by name, case-insensitive)")
    parser.add_argument("--output", default="./dxgl-datasets",
                        help="Output directory (default: ./dxgl-datasets)")
    parser.add_argument("--splats", action="store_true",
                        help="Also download pre-trained 3DGS .ply files")
    parser.add_argument("--extract", action="store_true", default=True,
                        help="Extract ZIPs after download (default: true)")
    parser.add_argument("--no-extract", action="store_true",
                        help="Keep ZIPs without extracting")
    args = parser.parse_args()

    manifest = load_manifest()
    objects = manifest["objects"]

    # Filter by object name (case-insensitive substring match).
    if args.object:
        objects = [o for o in objects if args.object.lower() in o["name"].lower()]
        if not objects:
            print(f"No object matching '{args.object}' found in manifest.")
            sys.exit(1)

    os.makedirs(args.output, exist_ok=True)
    total_downloads = 0
    for obj in objects:
        name_slug = obj["name"].lower().replace(" ", "_")
        filename = f"{name_slug}.zip"
        dest_path = os.path.join(args.output, filename)
        if os.path.exists(dest_path):
            # BUG FIX: the original printed the literal "(unknown)" here
            # instead of identifying which file was skipped.
            print(f" Skipping {filename} (already exists)")
            continue
        print(f"Downloading {obj['name']} ...")
        download_file(obj["download_url"], dest_path)
        total_downloads += 1
        # --extract defaults to True with store_true, so it is always
        # truthy; --no-extract is the effective switch. Kept as-is for
        # CLI backward compatibility.
        if args.extract and not args.no_extract:
            import zipfile  # local import: only needed when extracting
            extract_dir = os.path.join(args.output, name_slug)
            os.makedirs(extract_dir, exist_ok=True)
            # NOTE(review): extractall trusts archive member paths — fine
            # for first-party archives, unsafe for untrusted ZIPs (zip-slip).
            with zipfile.ZipFile(dest_path) as zf:
                zf.extractall(extract_dir)
            print(f" Extracted to {extract_dir}")

    # Optionally fetch pre-trained gaussian-splat .ply files.
    if args.splats:
        splats_dir = os.path.join(args.output, "splats")
        os.makedirs(splats_dir, exist_ok=True)
        for obj in objects:
            # Not every object ships a pre-trained splat.
            if not obj.get("splat_url"):
                continue
            name_slug = obj["name"].lower().replace(" ", "_")
            dest_path = os.path.join(splats_dir, f"{name_slug}.ply")
            if os.path.exists(dest_path):
                print(f" Skipping {name_slug}.ply (already exists)")
                continue
            print(f"Downloading splat: {obj['name']} ...")
            download_file(obj["splat_url"], dest_path)
            total_downloads += 1

    print(f"\nDone. Downloaded {total_downloads} files to {args.output}")
    if total_downloads == 0:
        print("(All files already existed — delete them to re-download)")


if __name__ == "__main__":
    main()