|
|
"""Standalone inference script for visualization (does not modify original code).""" |
|
|
import argparse |
|
|
import os |
|
|
import sys |
|
|
from types import SimpleNamespace |
|
|
from typing import Any |
|
|
|
|
|
import yaml |
|
|
import numpy as np |
|
|
import torch |
|
|
import nibabel as nib |
|
|
from torch.utils.data import DataLoader |
|
|
|
|
|
os.environ.setdefault("MONAI_SKIP_SUBMODULES", "1") |
|
|
from monai.inferers import sliding_window_inference |
|
|
|
|
|
ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) |
|
|
SRC_DIR = os.path.join(ROOT_DIR, "src") |
|
|
if SRC_DIR not in sys.path: |
|
|
sys.path.insert(0, SRC_DIR) |
|
|
|
|
|
from gliomasam3_moe.models.gliomasam3_moe import GliomaSAM3_MoE |
|
|
from gliomasam3_moe.data.brats_dataset import SegMambaNPZDataset |
|
|
from gliomasam3_moe.utils.brats_regions import regions_to_label |
|
|
from gliomasam3_moe.utils.postprocess import remove_small_components |
|
|
|
|
|
|
|
|
def _to_namespace(obj: Any): |
|
|
if isinstance(obj, dict): |
|
|
return SimpleNamespace(**{k: _to_namespace(v) for k, v in obj.items()}) |
|
|
return obj |
|
|
|
|
|
|
|
|
def load_config(path: str) -> SimpleNamespace:
    """Parse the YAML file at *path* and return it as nested SimpleNamespace.

    Uses ``yaml.safe_load``, so only plain YAML types are accepted.
    """
    with open(path, "r") as handle:
        raw = yaml.safe_load(handle)
    return _to_namespace(raw)
|
|
|
|
|
|
|
|
def save_nifti(path: str, arr: np.ndarray, affine: np.ndarray):
    """Write *arr* to *path* as a NIfTI-1 image with the given affine."""
    nib.save(nib.Nifti1Image(arr, affine), path)
|
|
|
|
|
|
|
|
def save_segmamba_3c(path: str, arr_3c: np.ndarray, affine: np.ndarray | None = None): |
|
|
if affine is None: |
|
|
affine = np.eye(4) |
|
|
if arr_3c.ndim != 4 or arr_3c.shape[0] != 3: |
|
|
raise ValueError(f"expected (3,D,H,W), got {arr_3c.shape}") |
|
|
arr = arr_3c.transpose(1, 2, 3, 0) |
|
|
save_nifti(path, arr.astype(np.uint8), affine) |
|
|
|
|
|
|
|
|
def main():
    """Run sliding-window inference over a directory of *.npz cases.

    For each case, saves region probabilities, thresholded binary regions, a
    BraTS label map, and a 3-channel SegMamba-layout segmentation as NIfTI
    files under ``--output``.

    Fix: ``unexpected`` keys returned by ``load_state_dict`` were previously
    assigned but silently discarded; they are now printed so that
    checkpoint/architecture mismatches are visible.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--config", type=str, default=os.path.join(ROOT_DIR, "configs/train.yaml"))
    parser.add_argument("--input", type=str, required=True)
    parser.add_argument("--output", type=str, required=True)
    parser.add_argument("--checkpoint", type=str, required=True)
    parser.add_argument("--cases", type=str, default="", help="Comma-separated case IDs (optional)")
    args = parser.parse_args()

    cfg = load_config(args.config)
    device = torch.device(cfg.device if torch.cuda.is_available() else "cpu")

    model = GliomaSAM3_MoE(**cfg.model.__dict__).to(device)
    ckpt = torch.load(args.checkpoint, map_location="cpu")

    # "freqs_cis" buffers are stripped from the checkpoint and excluded from
    # missing-key reporting — presumably they are rebuilt at model-construction
    # time (TODO confirm against GliomaSAM3_MoE).
    state_dict = {k: v for k, v in ckpt["model"].items() if "freqs_cis" not in k}
    missing, unexpected = model.load_state_dict(state_dict, strict=False)
    non_freqs_missing = [k for k in missing if "freqs_cis" not in k]
    if non_freqs_missing:
        print(f"Missing keys (non-freqs_cis): {non_freqs_missing}")
    if unexpected:
        # Previously dropped silently; surfacing these catches loading a
        # checkpoint trained with a different architecture/config.
        print(f"Unexpected keys: {unexpected}")
    model.eval()

    input_path = args.input
    if not os.path.isdir(input_path):
        raise ValueError("Input must be a directory containing *.npz files.")

    if args.cases:
        # Restrict to the requested case IDs, skipping any that have no .npz file.
        case_ids = [c.strip() for c in args.cases.split(",")]
        npz_paths = [os.path.join(input_path, f"{c}.npz") for c in case_ids]
        npz_paths = [p for p in npz_paths if os.path.isfile(p)]
    else:
        npz_paths = None  # let the dataset discover every *.npz in the directory

    dataset = SegMambaNPZDataset(
        data_dir=input_path,
        npz_paths=npz_paths,
        test=True,
        ensure_npy=True,
        map_et_to_4=True,
    )
    loader = DataLoader(dataset, batch_size=1, shuffle=False, num_workers=0)

    os.makedirs(args.output, exist_ok=True)
    with torch.no_grad():
        for batch in loader:
            image = batch["image"].to(device)
            # DataLoader collation may wrap the string case_id in a list/tuple.
            case_id = batch["case_id"][0] if isinstance(batch["case_id"], (list, tuple)) else batch["case_id"]
            print(f"Processing {case_id}...")

            # Tiled logits over the full volume; the predictor keeps only the
            # primary model output and drops the auxiliary dict.
            logits = sliding_window_inference(
                inputs=image,
                roi_size=tuple(cfg.infer.roi_size),
                sw_batch_size=cfg.infer.sw_batch_size,
                predictor=lambda x: model(x)[0],
                overlap=cfg.infer.overlap,
            )
            # Second, full-volume forward pass solely to obtain case-level aux
            # outputs (pi_et). NOTE(review): this doubles compute and may OOM
            # on large volumes — confirm it is intentional vs. per-window aux.
            _, aux = model(image)
            probs = torch.sigmoid(logits)
            # Gate channel 2 (ET) by the case-level ET-presence score.
            pi_et = aux["pi_et"].view(probs.shape[0], 1, 1, 1, 1)
            probs[:, 2:3] = probs[:, 2:3] * pi_et
            regions_bin = (probs > cfg.infer.threshold).float()

            # Remove small connected components from the ET channel.
            et_pp = remove_small_components(regions_bin[:, 2], cfg.infer.et_cc_min_size)
            regions_bin[:, 2] = et_pp

            label_map = regions_to_label(regions_bin)

            # NPZ inputs carry no spatial metadata; an identity affine is assumed.
            affine = np.eye(4)
            prob_np = probs[0].detach().cpu().numpy().transpose(1, 2, 3, 0)
            bin_np = regions_bin[0].detach().cpu().numpy().transpose(1, 2, 3, 0)
            lbl_np = label_map[0, 0].detach().cpu().numpy().astype(np.int16)

            save_nifti(os.path.join(args.output, f"{case_id}_regions_prob.nii.gz"), prob_np, affine)
            save_nifti(os.path.join(args.output, f"{case_id}_regions_bin.nii.gz"), bin_np, affine)
            save_nifti(os.path.join(args.output, f"{case_id}_label.nii.gz"), lbl_np, affine)

            seg_arr = regions_bin[0].detach().cpu().numpy().astype(np.uint8)
            save_segmamba_3c(os.path.join(args.output, f"{case_id}.nii.gz"), seg_arr, affine)

            print(f" Saved: {case_id}")
|
|
|
|
|
|
|
|
if __name__ == "__main__": |
|
|
main() |
|
|
|