# check_dataset.py — from the Hugging Face dataset repo anfera236/HHDC
# (uploaded by anfera236, commit df0e176)
from datasets import load_dataset
from tqdm import tqdm
import torch
from torch.utils.data import DataLoader
from scripts.forward_model import LidarForwardImagingModel
# Single shared forward-model instance used for every batch below.
# NOTE(review): the asserts in check_split imply it maps (B, 128, 48, 48)
# cubes to (B, 128, 32, 16) outputs — confirm against the model's docs.
forward_model = LidarForwardImagingModel()
# Samples per DataLoader batch for the shape checks.
BATCH_SIZE = 64
def make_loader(split: str):
    """Load one HHDC split and wrap it in a sequential DataLoader.

    Returns ``(dataset, loader)``: the dataset formatted to yield torch
    tensors for its "cube" column, and a batched loader over it.
    """
    dataset = load_dataset("anfera236/HHDC", split=split)
    # Only the "cube" column is consumed downstream; emit it as torch tensors.
    dataset.set_format(type="torch", columns=["cube"])
    # Deterministic order is fine here — we are only validating shapes.
    batch_iter = DataLoader(dataset, batch_size=BATCH_SIZE, shuffle=False)
    return dataset, batch_iter
def check_split(split_name: str):
    """Run the forward model over every batch of *split_name* and verify shapes.

    Expects each input batch ``cube`` tensor to be (B, 128, 48, 48) and the
    model output to be (B, 128, 32, 16) with a matching batch dimension.

    Raises:
        AssertionError: on the first shape mismatch. Raised explicitly
            (not via ``assert``) so the checks survive ``python -O``,
            which strips assert statements and would otherwise turn this
            script into a silent no-op.
    """
    print(f"Checking {split_name} dataset batches (batch_size={BATCH_SIZE})...")
    ds, loader = make_loader(split_name)
    for batch in tqdm(loader):
        cubes = batch["cube"]  # expected shape: (B, 128, 48, 48)
        # sanity check on input shape
        if cubes.ndim != 4:
            raise AssertionError(f"Expected 4D input (B, 128, 48, 48), got {cubes.shape}")
        if cubes.shape[1:] != (128, 48, 48):
            raise AssertionError(f"Bad input sample shape: {cubes.shape}")
        # forward pass (expects model to support batched input);
        # no gradients are needed for a shape check, so skip autograd
        # bookkeeping to save memory and time.
        with torch.no_grad():
            output = forward_model(cubes)  # expected shape: (B, 128, 32, 16)
        # sanity checks on output shape
        if output.ndim != 4:
            raise AssertionError(f"Expected 4D output (B, 128, 32, 16), got {output.shape}")
        if output.shape[0] != cubes.shape[0]:
            raise AssertionError(
                f"Batch size mismatch: input B={cubes.shape[0]}, output B={output.shape[0]}"
            )
        if output.shape[1:] != (128, 32, 16):
            raise AssertionError(f"Bad output sample shape: {output.shape}")
if __name__ == "__main__":
    # Validate every split in order; any shape mismatch raises before
    # the success message is printed.
    for split in ("train", "validation", "test"):
        check_split(split)
    print("All splits passed shape checks ✅")