"""Hugging Face `datasets` loading script for the small NORB dataset (small-norb/small-norb.py)."""
import datasets
import numpy as np
import os
from PIL import Image
# URLs of the raw small NORB binary files hosted on the Hugging Face Hub
TRAIN_URLS = {
"dat": "https://huggingface.co/datasets/randall-lab/small-norb/resolve/main/smallnorb-5x46789x9x18x6x2x96x96-training-dat.mat",
"cat": "https://huggingface.co/datasets/randall-lab/small-norb/resolve/main/smallnorb-5x46789x9x18x6x2x96x96-training-cat.mat",
"info": "https://huggingface.co/datasets/randall-lab/small-norb/resolve/main/smallnorb-5x46789x9x18x6x2x96x96-training-info.mat",
}
TEST_URLS = {
"dat": "https://huggingface.co/datasets/randall-lab/small-norb/resolve/main/smallnorb-5x01235x9x18x6x2x96x96-testing-dat.mat",
"cat": "https://huggingface.co/datasets/randall-lab/small-norb/resolve/main/smallnorb-5x01235x9x18x6x2x96x96-testing-cat.mat",
"info": "https://huggingface.co/datasets/randall-lab/small-norb/resolve/main/smallnorb-5x01235x9x18x6x2x96x96-testing-info.mat",
}
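# Factor layout of small NORB (per split: 5 x 5 x 9 x 18 x 6 = 24,300 stereo pairs):
#   category:  5 values (four-legged animal, human figure, airplane, truck, car)
#   instance:  10 toy instances per category; 5 appear in train (4, 6, 7, 8, 9) and 5 in test (0, 1, 2, 3, 5)
#   elevation: 9 values (30 to 70 degrees in 5-degree steps)
#   azimuth:   18 values (0 to 340 degrees in 20-degree steps; stored on disk as 0, 2, ..., 34)
#   lighting:  6 conditions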
class SmallNORB(datasets.GeneratorBasedBuilder):
"""SmallNORB dataset: 96x96 stereo images with 5 known factors."""
VERSION = datasets.Version("1.0.0")
def _info(self):
return datasets.DatasetInfo(
description=(
"SmallNORB dataset: stereo pair images of 3D toy objects, used for learning object recognition "
"robust to pose and lighting. Each image pair corresponds to a combination of 5 factors: "
"category, instance, elevation, azimuth, lighting. "
"Unlike dSprites or MPI3D, SmallNORB does NOT follow a full cartesian product over factors. "
"Instances are sampled per category."
),
features=datasets.Features(
{
"left_image": datasets.Image(), # (96, 96), grayscale
"right_image": datasets.Image(), # (96, 96), grayscale
"index": datasets.Value("int32"),
"label": datasets.Sequence(datasets.Value("int32")), # 5 factor indices
"category": datasets.Value("int32"), # [0-4]
"instance": datasets.Value("int32"), # [0-9]
"elevation": datasets.Value("int32"), # [0-8]
"azimuth": datasets.Value("int32"), # [0-17], after /2 correction
"lighting": datasets.Value("int32"), # [0-5]
}
),
supervised_keys=("left_image", "label"),
homepage="https://cs.nyu.edu/~ylclab/data/norb-v1.0-small/",
license="apache-2.0",
citation="""@inproceedings{lecun2004learning,
title={Learning methods for generic object recognition with invariance to pose and lighting},
author={LeCun, Yann and Huang, Fu Jie and Bottou, Leon},
booktitle={Proceedings of the 2004 IEEE Computer Society Conference on Computer Vision and Pattern Recognition, 2004. CVPR 2004.},
volume={2},
pages={II--104},
year={2004},
organization={IEEE}
}""",
)
def _split_generators(self, dl_manager):
        # Download the raw binary files (they are not archives, so no extraction is needed)
train_files = dl_manager.download(TRAIN_URLS)
test_files = dl_manager.download(TEST_URLS)
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN,
gen_kwargs={
"dat_file": train_files["dat"],
"cat_file": train_files["cat"],
"info_file": train_files["info"],
},
),
datasets.SplitGenerator(
name=datasets.Split.TEST,
gen_kwargs={
"dat_file": test_files["dat"],
"cat_file": test_files["cat"],
"info_file": test_files["info"],
},
),
]
    def _generate_examples(self, dat_file, cat_file, info_file):
        # Load stereo image pairs and the 5 factor values for every example.
        # dl_manager.download() returns cache paths with hashed basenames, so the
        # three downloaded files are passed to the loader directly rather than
        # being located by their original "*-dat.mat"/"*-cat.mat"/"*-info.mat" names.
        images_left, images_right, features = _load_small_norb_chunks(
            [(dat_file, cat_file, info_file)]
        )
for idx in range(len(images_left)):
left_img = Image.fromarray(images_left[idx].astype(np.uint8), mode="L")
right_img = Image.fromarray(images_right[idx].astype(np.uint8), mode="L")
factors = features[idx].tolist() # [category, instance, elevation, azimuth, lighting]
yield idx, {
"left_image": left_img,
"right_image": right_img,
"index": idx,
"label": factors,
"category": factors[0],
"instance": factors[1],
"elevation": factors[2],
"azimuth": factors[3],
"lighting": factors[4],
}
# -------------------------------------------------
# Main function: _load_small_norb_chunks
# -------------------------------------------------
def _load_small_norb_chunks(chunk_files):
    """Loads several chunks of the small NORB dataset for final use.

    `chunk_files` is a list of (dat_file, cat_file, info_file) path tuples, one per chunk.
    """
    list_of_images_left, list_of_images_right, list_of_features = _load_chunks(chunk_files)
    features = np.concatenate(list_of_features, axis=0)
    features[:, 3] = features[:, 3] // 2  # azimuth is stored on disk as 0, 2, 4, ..., 34
    return (
        np.concatenate(list_of_images_left, axis=0),
        np.concatenate(list_of_images_right, axis=0),
        features,
    )
# -------------------------------------------------
# Helper function: _load_chunks
# -------------------------------------------------
def _load_chunks(chunk_files):
    """Loads several chunks of the small NORB dataset into lists.

    `chunk_files` is a list of (dat_file, cat_file, info_file) path tuples.
    """
    list_of_images_left = []
    list_of_images_right = []
    list_of_features = []
    for dat_file, cat_file, info_file in chunk_files:
        # Read .dat: (N, 2, 96, 96) array of stereo image pairs
        norb = _read_binary_matrix(dat_file)
        list_of_images_left.append(norb[:, 0])  # left camera view
        list_of_images_right.append(norb[:, 1])  # right camera view
        # Read .cat: (N,) category labels
        norb_class = _read_binary_matrix(cat_file)
        # Read .info: (N, 4) matrix of instance, elevation, azimuth, lighting
        norb_info = _read_binary_matrix(info_file)
        # Combine into an (N, 5) factor matrix: [category, instance, elevation, azimuth, lighting]
        list_of_features.append(np.column_stack((norb_class, norb_info)))
    return list_of_images_left, list_of_images_right, list_of_features
# -------------------------------------------------
# Helper function: _read_binary_matrix
# -------------------------------------------------
def _read_binary_matrix(filename):
    """Reads and returns a matrix stored in the small NORB binary file format.

    The header holds a 4-byte magic number encoding the element type, a 4-byte
    number of dimensions, and the dimension sizes (at least 3 dimension fields
    are always stored, even for 1-D or 2-D matrices).
    """
    with open(filename, "rb") as f:
        s = f.read()
    magic = int(np.frombuffer(s, "int32", 1))
    ndim = int(np.frombuffer(s, "int32", 1, 4))
    eff_dim = max(3, ndim)  # the header always stores at least 3 dimension fields
    raw_dims = np.frombuffer(s, "int32", eff_dim, 8)
    dims = [int(d) for d in raw_dims[:ndim]]
    dtype_map = {
        507333717: "uint8",    # 0x1E3D4C55: byte matrix
        507333716: "int32",    # 0x1E3D4C54: integer matrix
        507333713: "float32",  # 0x1E3D4C51: single-precision matrix
        507333715: "float64",  # 0x1E3D4C53: double-precision matrix
    }
    dtype = dtype_map[magic]
    data = np.frombuffer(s, dtype, offset=8 + eff_dim * 4)
    data = data.reshape(tuple(dims))
    return data
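# -------------------------------------------------
# Example usage (illustrative sketch only): loads the dataset through this
# script; assumes a `datasets` version that still supports script-based
# loading with trust_remote_code=True.
# -------------------------------------------------
if __name__ == "__main__":
    ds = datasets.load_dataset("randall-lab/small-norb", split="train", trust_remote_code=True)
    example = ds[0]
    print(example["label"])            # [category, instance, elevation, azimuth, lighting]
    print(example["left_image"].size)  # (96, 96) grayscale PIL image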