Datasets:

Languages:
English
Size:
n<1K
License:
ZoomLDM-demo-dataset-NAIP / ZoomLDM-demo-dataset-NAIP.py
srikarym's picture
Upload folder using huggingface_hub
c1eb678 verified
import datasets
import numpy as np
from pathlib import Path
import torch
import torch.nn.functional as F
from einops import rearrange
# Version reported to the `datasets` library for every builder config.
_DATASET_VERSION = datasets.Version("1.0.0")
# Number of SSL feature embeddings per image at each magnification level.
# Each count is a perfect square (1, 4, 16, 64) so the flat embedding axis
# can be reshaped into a square (h, h) spatial grid.
_N_EMBED = {
"1x": 1,
"2x": 4,
"3x": 16,
"4x": 64,
}
# Integer id emitted in the "mag" field for each magnification level.
_MAG_DICT = {
"1x": 0,
"2x": 1,
"3x": 2,
"4x": 3,
}
# Channel dimension of the SSL feature vectors (per-embedding feature size).
_FIXED_SSL_FEATURE_DIM_1 = 1024
def get_ssl_feat_shape(mag_level):
    """Return the (channels, height, width) shape of the SSL feature map.

    The embedding count for ``mag_level`` is a perfect square, so its
    square root gives the side length of the spatial grid.
    """
    side = int(np.sqrt(_N_EMBED[mag_level]))
    return (_FIXED_SSL_FEATURE_DIM_1, side, side)
def preprocess_features(feat_array):
    """Standardize features column-wise: zero mean, unit variance.

    A 1-D input is promoted to a single-column 2-D array first. A small
    epsilon guards against division by zero for constant columns.
    """
    if feat_array.ndim == 1:
        feat_array = feat_array[:, None]
    mu = feat_array.mean(axis=0, keepdims=True)
    sigma = feat_array.std(axis=0, keepdims=True)
    return (feat_array - mu) / (sigma + 1e-8)
class MagnificationConfig(datasets.BuilderConfig):
    """BuilderConfig carrying magnification-specific settings.

    Args:
        mag_level: magnification level name (e.g. "2x").
        ssl_feat_shape: (channels, height, width) of the SSL feature map.
        data_dir: folder name under data/ holding this level's files.
        **kwargs: forwarded to ``datasets.BuilderConfig``.
    """

    def __init__(self, mag_level=None, ssl_feat_shape=None, data_dir=None, **kwargs):
        super().__init__(**kwargs)
        self.data_dir = data_dir
        self.ssl_feat_shape = ssl_feat_shape
        self.mag_level = mag_level
class NAIPDataset(datasets.GeneratorBasedBuilder):
    """Demo NAIP imagery dataset with paired SSL features.

    One configuration per magnification level ("1x".."4x"). Each example
    pairs a JPEG tile with a channels-first (C, h, h) SSL feature map and
    an integer magnification id from ``_MAG_DICT``.
    """

    VERSION = _DATASET_VERSION

    # One config per magnification level; data for level "Nx" lives under
    # data/Nx relative to this script. A comprehension avoids the loop
    # variables leaking into the class namespace as stray attributes.
    BUILDER_CONFIGS = [
        MagnificationConfig(
            name=mag,
            version=_DATASET_VERSION,
            description=f"Dataset at {mag} mag",
            data_dir=mag,
            mag_level=mag,
            ssl_feat_shape=get_ssl_feat_shape(mag),
        )
        for mag in _MAG_DICT
    ]
    DEFAULT_CONFIG_NAME = "1x"

    def _info(self):
        """Declare the example schema: image, float32 Array3D features, int32 mag id."""
        return datasets.DatasetInfo(
            description=f"Dataset with images and SSL features. Configuration: {self.config.name}",
            features=datasets.Features(
                {
                    "image": datasets.Image(),
                    "ssl_feat": datasets.Array3D(shape=self.config.ssl_feat_shape, dtype="float32"),
                    "mag": datasets.Value("int32"),
                }
            ),
            homepage="https://github.com/cvlab-stonybrook/ZoomLDM",
        )

    def _split_generators(self, dl_manager):
        """Return a single train split rooted at data/<mag_level> next to this script."""
        script_dir = Path(self.base_path)
        mag_data_abs_path = script_dir / "data" / self.config.data_dir
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "mag_folder_abs_path": mag_data_abs_path,
                    "mag_level": self.config.mag_level,
                },
            ),
        ]

    def _generate_examples(self, mag_folder_abs_path: Path, mag_level: str):
        """Yield (key, example) pairs for every tile in the magnification folder.

        Tile i is stored as ``i.jpg`` with features ``i_ssl_feat.npy`` of
        shape (n_embed, feat_dim); the flat embedding axis is reshaped into
        a square (h, h) grid, channels-first, then standardized.
        """
        # Count the JPEG tiles instead of hard-coding the demo size (16);
        # tiles are named 0.jpg .. (n-1).jpg with matching .npy files.
        n_tiles = len(list(mag_folder_abs_path.glob("*.jpg")))
        for i in range(n_tiles):
            img_path = mag_folder_abs_path / f"{i}.jpg"
            feat_path = mag_folder_abs_path / f"{i}_ssl_feat.npy"
            ssl_feat = np.load(feat_path)
            # n_embed is a perfect square (see _N_EMBED); h is the grid side.
            h = int(np.sqrt(ssl_feat.shape[0]))
            ssl_feat = torch.tensor(rearrange(ssl_feat, "(h1 h2) dim -> dim h1 h2", h1=h))
            # NOTE(review): preprocess_features on a torch tensor uses
            # torch's unbiased std (correction=1), unlike a numpy input
            # (ddof=0) — confirm this normalization is intended.
            yield i, {
                "image": str(img_path),
                "ssl_feat": preprocess_features(ssl_feat),
                "mag": _MAG_DICT[mag_level],
            }