# objaverse_vida/unpack.py — uploaded by ellisbrown via huggingface_hub (commit 5e0f151, verified)
#!/usr/bin/env python3
"""
Download and unpack the objaverse_vida dataset from HuggingFace.
This script downloads the dataset and extracts all tar archives to restore
the original directory structure expected by downstream consumers.
Usage:
    python unpack.py [target_directory]

Examples:
    python unpack.py ./objaverse_vida
    python unpack.py /data/datasets/objaverse_vida
"""
import os
import sys
import tarfile
from pathlib import Path
def _extract_tar(archive: Path, dest: Path) -> None:
    """Extract *archive* into *dest*, then delete the archive.

    Uses the "data" extraction filter when available (Python 3.12+, and
    point releases that backported PEP 706) to reject absolute paths and
    ``..`` traversal inside the tar; silently falls back to an unfiltered
    extract on older interpreters.
    """
    with tarfile.open(archive) as tar:
        if hasattr(tarfile, "data_filter"):
            # Guard against path-traversal entries in the archive.
            tar.extractall(dest, filter="data")
        else:
            tar.extractall(dest)
    archive.unlink()  # Remove tar after extraction


def unpack_dataset(target_dir: str = "./objaverse_vida") -> None:
    """Download and unpack the dataset to the target directory.

    Downloads the ``spatial-training/objaverse_vida`` dataset snapshot
    from HuggingFace into *target_dir*, then extracts the processed-object
    shard archives and the per-split house archives, removing each tar
    file after successful extraction.

    Args:
        target_dir: Destination directory; created by ``snapshot_download``
            if it does not already exist.

    Raises:
        SystemExit: If ``huggingface_hub`` is not installed (exit code 1).
    """
    # Import here to give helpful error if not installed
    try:
        from huggingface_hub import snapshot_download
    except ImportError:
        print("Error: huggingface_hub not installed.")
        print("Install with: pip install huggingface_hub[hf_transfer]")
        sys.exit(1)

    target = Path(target_dir).resolve()
    print(f"Target directory: {target}")

    # Enable hf_transfer for faster downloads (if available)
    os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"

    # Download dataset
    print("\n[1/3] Downloading dataset from HuggingFace...")
    print(" (This may take a while for ~30GB)")
    snapshot_download(
        repo_id="spatial-training/objaverse_vida",
        repo_type="dataset",
        local_dir=str(target),
        # Deprecated and ignored by recent huggingface_hub versions;
        # kept so older versions copy real files instead of symlinks.
        local_dir_use_symlinks=False,
    )
    print(" Done!")

    # Unpack processed shards
    processed_dir = target / "processed_2023_07_28"
    if processed_dir.exists():
        print("\n[2/3] Unpacking processed objects...")
        shards = sorted(processed_dir.glob("shard_*.tar"))
        total_shards = len(shards)
        for i, shard in enumerate(shards, 1):
            print(f" [{i}/{total_shards}] Extracting {shard.name}...")
            _extract_tar(shard, processed_dir)
        # Remove manifest (no longer needed)
        manifest = processed_dir / "manifest.json"
        if manifest.exists():
            manifest.unlink()
        print(" Done!")
    else:
        print("\n[2/3] Skipping processed objects (directory not found)")

    # Unpack houses individual files
    houses_dir = target / "houses_2023_07_28"
    if houses_dir.exists():
        print("\n[3/3] Unpacking house files...")
        for split in ["train", "test", "val"]:
            tar_file = houses_dir / f"{split}_individual.tar"
            if tar_file.exists():
                print(f" Extracting {tar_file.name}...")
                split_dir = houses_dir / split
                split_dir.mkdir(exist_ok=True)
                _extract_tar(tar_file, split_dir)
        print(" Done!")
    else:
        print("\n[3/3] Skipping house files (directory not found)")

    # Summary
    print("\n" + "=" * 60)
    print("Dataset unpacked successfully!")
    print("=" * 60)
    print(f"\nLocation: {target}")
    print("\nStructure:")
    print(" processed_2023_07_28/ - ~40K 3D object directories")
    print(" houses_2023_07_28/ - train/test/val house layouts")
    print(" procthor_databases_2023_07_28/ - asset databases")
    print(" 0.json - sample house")
def main() -> None:
    """CLI entry point: take an optional target directory, then unpack."""
    args = sys.argv[1:]
    # Guard clause: show the module docstring and exit cleanly on help flags.
    if args and args[0] in ("-h", "--help"):
        print(__doc__)
        sys.exit(0)
    destination = args[0] if args else "./objaverse_vida"
    unpack_dataset(destination)


if __name__ == "__main__":
    main()