|
|
from datasets import load_dataset |
|
|
from tqdm import tqdm |
|
|
import torch |
|
|
from torch.utils.data import DataLoader |
|
|
from scripts.forward_model import LidarForwardImagingModel |
|
|
|
|
|
# Single shared forward-model instance, reused for every split below.
# NOTE(review): assumes LidarForwardImagingModel() takes no required
# constructor arguments — confirm against scripts/forward_model.py.
forward_model = LidarForwardImagingModel()




# Number of samples per DataLoader batch for all splits.
BATCH_SIZE = 64
|
|
|
|
|
def make_loader(split: str):
    """Load the requested HHDC split and wrap it in a DataLoader.

    The dataset is restricted to the "cube" column, formatted as torch
    tensors, and batched deterministically (no shuffling) so that shape
    checks are reproducible.

    Returns the (dataset, loader) pair so callers can inspect both.
    """
    dataset = load_dataset("anfera236/HHDC", split=split)
    # Only the "cube" column is needed; deliver it as torch tensors.
    dataset.set_format(type="torch", columns=["cube"])
    return dataset, DataLoader(dataset, batch_size=BATCH_SIZE, shuffle=False)
|
|
|
|
|
|
|
|
def check_split(split_name: str):
    """Run every batch of *split_name* through the forward model and
    verify the expected tensor shapes.

    Input batches must be (B, 128, 48, 48) cubes; the model output must
    be (B, 128, 32, 16) with a matching batch dimension.

    Raises:
        AssertionError: on the first batch whose input or output shape
            deviates from the expected layout.
    """
    print(f"Checking {split_name} dataset batches (batch_size={BATCH_SIZE})...")
    ds, loader = make_loader(split_name)

    for batch in tqdm(loader):
        cubes = batch["cube"]

        assert cubes.ndim == 4, f"Expected 4D input (B, 128, 48, 48), got {cubes.shape}"
        assert cubes.shape[1:] == (128, 48, 48), f"Bad input sample shape: {cubes.shape}"

        # Shape checking only — disable autograd so no computation graph
        # is built per batch (keeps memory flat across the whole split).
        with torch.no_grad():
            output = forward_model(cubes)

        assert output.ndim == 4, f"Expected 4D output (B, 128, 32, 16), got {output.shape}"
        assert output.shape[0] == cubes.shape[0], (
            f"Batch size mismatch: input B={cubes.shape[0]}, output B={output.shape[0]}"
        )
        assert output.shape[1:] == (128, 32, 16), f"Bad output sample shape: {output.shape}"
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Validate every split in order; any failure raises before the
    # success message is printed.
    for split in ("train", "validation", "test"):
        check_split(split)
    print("All splits passed shape checks ✅")
|
|
|