"""
This script restores the original directory structure of an image dataset
from a locally saved Hugging Face Dataset.
It reads a dataset created by the `datasets` library's `save_to_disk` method,
iterates through each record, and uses the 'relative_path' column to
recreate the folder hierarchy and save each image file.
Usage:
Run this script from the project's root directory.
Syntax:
python <path_to_script> <path_to_saved_dataset> <output_directory>
## Example:
### For stage 1 data decompression:
python scripts/restore.py images/description_style_new data/selected/description_style_new
### For stage 2 data decompression:
python scripts/restore.py images/stage_2_identity_matching data/selected/stage_2_identity_matching
python scripts/restore.py images/stage_2_view_synthesis data/selected/stage_2_view_synthesis
python scripts/restore.py images/stage_2_point_matching data/selected/stage_2_point_matching
python scripts/restore.py images/stage_2_depth_estimation data/selected/stage_2_depth_estimation
python scripts/restore.py images/stage_2_camera_pose data/selected/stage_2_camera_pose
"""
import os
import argparse
from datasets import load_from_disk
from tqdm import tqdm
from PIL import Image
def restore_images_from_dataset(dataset_path: str, output_path: str) -> None:
    """
    Load a Hugging Face dataset from disk and restore the original image
    directory structure in a specified output folder.

    Each record must provide an 'image' column (a PIL image) and a
    'relative_path' column giving the file's path inside the restored tree.

    Args:
        dataset_path (str): The path to the saved Hugging Face dataset directory.
        output_path (str): The root directory where the images will be restored.

    Raises:
        ValueError: If a record's 'relative_path' would escape `output_path`
            (absolute path or '..' traversal).
    """
    # 1. --- Load the dataset from disk ---
    print(f"Loading dataset from '{dataset_path}'...")
    try:
        dataset = load_from_disk(dataset_path)
    except FileNotFoundError:
        print(f"Error: No saved dataset found at '{dataset_path}'.")
        print("Please check the path and try again.")
        return
    print(f"Dataset loaded successfully. Found {len(dataset)} records.")
    # 2. --- Create the main output directory if it doesn't exist ---
    # `exist_ok=True` avoids the check-then-create race of testing
    # os.path.exists() first.
    os.makedirs(output_path, exist_ok=True)
    # 3. --- Iterate, reconstruct paths, and save images ---
    print(f"Restoring images to '{output_path}'...")
    output_root = os.path.abspath(output_path)
    for record in tqdm(dataset, desc="Restoring images"):
        # The `record['image']` will be a PIL.Image.Image object
        image: Image.Image = record['image']
        relative_path: str = record['relative_path']
        # Create the full destination path for the image file.
        # os.path.join handles path separators correctly for any OS.
        destination_path = os.path.abspath(os.path.join(output_root, relative_path))
        # Security: 'relative_path' comes from the dataset; refuse any record
        # whose resolved path would land outside the output root (e.g. an
        # absolute path or one containing '..').
        if os.path.commonpath([output_root, destination_path]) != output_root:
            raise ValueError(
                f"Unsafe relative_path {relative_path!r} escapes output directory."
            )
        # Create the subdirectories if they don't exist.
        # `exist_ok=True` prevents an error if the directory already exists.
        os.makedirs(os.path.dirname(destination_path), exist_ok=True)
        # Save the image to its restored path.
        # The format is inferred from the file extension, but can be specified.
        image.save(destination_path)
    print("\nImage restoration complete!")
    print(f"All images have been saved in '{os.path.abspath(output_path)}'.")
def main() -> None:
    """Parse command-line arguments and run the restoration."""
    arg_parser = argparse.ArgumentParser(
        description=(
            "Restore an image folder structure from a saved "
            "Hugging Face dataset."
        )
    )
    arg_parser.add_argument(
        'dataset_path',
        type=str,
        help="Path to the saved dataset directory (e.g., 'my-local-co3d-dataset')."
    )
    arg_parser.add_argument(
        'output_path',
        type=str,
        help="Path to the root folder where images will be restored "
             "(e.g., 'data/restored')."
    )
    cli_args = arg_parser.parse_args()
    restore_images_from_dataset(cli_args.dataset_path, cli_args.output_path)


if __name__ == "__main__":
    main()