|
|
| """
|
| migrate.py – One-time reorganisation of the visualization directory.
|
|
|
| Run from D:\\visualization:
|
| python migrate.py # full migration + cleanup
|
| python migrate.py --dry-run # preview only, no files touched
|
|
|
| What the script does
|
| --------------------
|
| 1. Creates:
|
| data/Sen2_MTC_{New,Old}/GT/ one shared ground-truth copy
|
| data/Sen2_MTC_{New,Old}/inputs/ cloudy inputs (_A1 / _A2 / _A3)
|
| results/Sen2_MTC_{New,Old}/{method}/ per-method predictions
|
|
|
| 2. Copies images with a unified naming scheme:
|
| {id}_real_B.png → GT/{id}.png
|
| {id}_fake_B.png → {method}/{id}.png
|
| Out_{id}.png → {method}/{id}.png (diffcr convention)
|
| {id}_real_A1.png → inputs/{id}_A1.png
|
|
|
| 3. After verifying every expected directory is non-empty, deletes the
|
| original Sen2_MTC_New and Sen2_MTC_Old trees.
|
|
|
| Special cases handled
|
| ---------------------
|
| - pmaa / Sen2_MTC_New has no Out/ folder: outputs are extracted from the
|
| per-sample test/{psnr_ssim}/ sub-directories.
|
| - diffcr / Sen2_MTC_New has no test/ folder: Out/ already contains flat
|
| Out_{id}.png files.
|
| - diffcr / Sen2_MTC_Old same as above.
|
| - ctgan / Sen2_MTC_New has an extra save/ directory (ignored).
|
| """
|
|
|
| from __future__ import annotations
|
|
|
| import argparse
|
| import os
|
| import shutil
|
| import sys
|
| from glob import glob
|
|
|
| from tqdm import tqdm
|
|
|
|
|
|
|
|
|
# Directory containing this script; every source and destination path
# below is resolved relative to it (the script is meant to be run from
# the visualization root, see the module docstring).
ROOT = os.path.dirname(os.path.abspath(__file__))

# Cloud-removal methods whose predictions are migrated. Each entry is
# expected to be a subdirectory of Sen2_MTC_New / Sen2_MTC_Old.
METHODS: list[str] = [
    "ae",
    "crtsnet",
    "ctgan",
    "ddpmcr",
    "diffcr",
    "dsen2cr",
    "mcgan",
    "pix2pix",
    "pmaa",
    "stgan",
    "stnet",
    "uncrtaints",
]
|
|
|
|
|
|
|
|
|
|
|
|
|
def strip_id(path: str) -> str:
    """Extract the clean sample ID from any of the naming conventions used.

    Examples
    --------
    T12TUR_R027_0_real_B.png -> T12TUR_R027_0
    T12TUR_R027_0_fake_B.png -> T12TUR_R027_0
    Out_T12TUR_R027_0.png -> T12TUR_R027_0
    GT_T12TUR_R027_0.png -> T12TUR_R027_0
    01WFN_60009000_real_B.png -> 01WFN_60009000
    """
    stem = os.path.splitext(os.path.basename(path))[0]

    # Prefix conventions (GT_/Out_) are checked before suffix conventions,
    # and only one marker is ever stripped.
    prefix = next((p for p in ("GT_", "Out_") if stem.startswith(p)), None)
    if prefix is not None:
        return stem[len(prefix):]

    suffix = next((s for s in ("_real_B", "_fake_B") if stem.endswith(s)), None)
    if suffix is not None:
        return stem[: -len(suffix)]

    return stem
|
|
|
|
|
def _copy(src: str, dst: str) -> None:
    """Copy *src* to *dst* (metadata-preserving), creating parent directories.

    Fix: ``os.path.dirname(dst)`` is ``""`` when *dst* is a bare filename
    (relative path in the current directory), and ``os.makedirs("")``
    raises ``FileNotFoundError`` — only create the parent when there is one.
    """
    parent = os.path.dirname(dst)
    if parent:
        os.makedirs(parent, exist_ok=True)
    shutil.copy2(src, dst)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def migrate_gt(
    src_base: str,
    dst_data: str,
    dry_run: bool,
) -> int:
    """Copy GT images from ae/GT/ → data/{dataset}/GT/ with clean names.

    Source name: {id}_real_B.png
    Dest name: {id}.png

    Returns the number of GT images found (copied unless *dry_run*).
    """
    src_dir = os.path.join(src_base, "ae", "GT")
    dst_dir = os.path.join(dst_data, "GT")

    pngs = sorted(glob(os.path.join(src_dir, "*.png")))
    if not pngs:
        print(f" [WARN] No GT images found in {src_dir}", file=sys.stderr)
        return 0

    # Dry run: report the count without touching the filesystem.
    if dry_run:
        return len(pngs)

    os.makedirs(dst_dir, exist_ok=True)
    for png in tqdm(pngs, desc=" GT", leave=False):
        _copy(png, os.path.join(dst_dir, strip_id(png) + ".png"))

    return len(pngs)
|
|
|
|
|
def migrate_inputs(
    src_base: str,
    dst_data: str,
    dry_run: bool,
) -> int:
    """Extract real_A1 / real_A2 / real_A3 from ae/test/**/
    → data/{dataset}/inputs/{id}_A{1,2,3}.png

    Source name: {id}_real_A1.png
    Dest name: {id}_A1.png

    Returns the number of input images found (copied unless *dry_run*).
    """
    src_test = os.path.join(src_base, "ae", "test")
    dst_dir = os.path.join(dst_data, "inputs")

    inputs = sorted(glob(os.path.join(src_test, "*", "*_real_A?.png")))
    if not inputs:
        print(f" [WARN] No input images found under {src_test}", file=sys.stderr)
        return 0

    # Dry run: report the count without touching the filesystem.
    if dry_run:
        return len(inputs)

    os.makedirs(dst_dir, exist_ok=True)
    for img in tqdm(inputs, desc=" inputs", leave=False):
        # {id}_real_A1.png -> {id}_A1.png
        renamed = os.path.basename(img).replace("_real_A", "_A")
        _copy(img, os.path.join(dst_dir, renamed))

    return len(inputs)
|
|
|
|
|
def migrate_outputs(
    src_base: str,
    dst_results: str,
    dataset_label: str,
    dry_run: bool,
) -> dict[str, int]:
    """Copy each method's predictions into results/{dataset}/{method}/{id}.png

    Handles the three different source layouts:
    a) Standard: method/Out/{id}_fake_B.png
    b) diffcr: method/Out/Out_{id}.png
    c) pmaa (New): method/test/{psnr_ssim}/{id}_fake_B.png (no Out/ folder)

    Returns {method: number of prediction images found}; methods whose
    source directory is missing are skipped and get no entry at all,
    while methods with an empty source get an explicit 0.
    """
    counts: dict[str, int] = {}

    for method in METHODS:
        src_method = os.path.join(src_base, method)

        # A method may simply not have been run on this dataset.
        if not os.path.isdir(src_method):
            print(f" SKIP {dataset_label}/{method} (directory not found)")
            continue

        dst_dir = os.path.join(dst_results, method)
        # (source path, destination file name) pairs to copy.
        files: list[tuple[str, str]] = []

        # Layout (c): pmaa on Sen2_MTC_New stores outputs inside
        # per-sample test/ sub-directories instead of a flat Out/ folder.
        if method == "pmaa" and "New" in dataset_label:
            for subdir in sorted(glob(os.path.join(src_method, "test", "*/"))):
                for f in sorted(glob(os.path.join(subdir, "*_fake_B.png"))):
                    files.append((f, strip_id(f) + ".png"))

        # Layouts (a) and (b): flat Out/ folder; strip_id() normalises
        # both the {id}_fake_B.png and the Out_{id}.png conventions.
        else:
            src_out = os.path.join(src_method, "Out")
            if not os.path.isdir(src_out):
                print(
                    f" SKIP {dataset_label}/{method} (Out/ folder not found)",
                    file=sys.stderr,
                )
                continue
            for f in sorted(glob(os.path.join(src_out, "*.png"))):
                files.append((f, strip_id(f) + ".png"))

        if not files:
            print(
                f" [WARN] {dataset_label}/{method}: no output images found",
                file=sys.stderr,
            )
            counts[method] = 0
            continue

        # Dry run only reports counts; nothing is written.
        if not dry_run:
            os.makedirs(dst_dir, exist_ok=True)
            for src_f, dst_name in tqdm(files, desc=f" {method}", leave=False):
                _copy(src_f, os.path.join(dst_dir, dst_name))

        counts[method] = len(files)

    return counts
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def verify(datasets: list[str]) -> bool:
    """Check that every expected output directory is non-empty.

    Prints one report line per directory and returns True only when the
    GT, inputs and every existing per-method results directory all hold
    at least one PNG.
    """
    print("\nVerification")
    print("-" * 60)

    def _n_png(directory: str) -> int:
        # PNG count directly inside *directory* (0 when it does not exist).
        return len(glob(os.path.join(directory, "*.png")))

    ok = True

    for ds in datasets:
        n_gt = _n_png(os.path.join(ROOT, "data", ds, "GT"))
        status = "OK" if n_gt > 0 else "EMPTY"
        print(f" data/{ds}/GT → {n_gt:4d} files [{status}]")
        ok = ok and n_gt > 0

        n_inp = _n_png(os.path.join(ROOT, "data", ds, "inputs"))
        status = "OK" if n_inp > 0 else "EMPTY"
        print(f" data/{ds}/inputs → {n_inp:4d} files [{status}]")
        ok = ok and n_inp > 0

        for m in METHODS:
            method_dir = os.path.join(ROOT, "results", ds, m)
            if not os.path.isdir(method_dir):
                # A method that was never migrated is reported but does
                # not fail verification (mirrors the SKIP during migration).
                print(f" results/{ds}/{m:<14} → MISSING")
                continue
            n = _n_png(method_dir)
            status = "OK" if n > 0 else "EMPTY"
            print(f" results/{ds}/{m:<14} → {n:4d} files [{status}]")
            ok = ok and n > 0

    return ok
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def cleanup(datasets: list[str]) -> None:
    """Delete the original Sen2_MTC_* directories."""
    for ds in datasets:
        victim = os.path.join(ROOT, ds)
        # Tolerate a previous (possibly partial) cleanup.
        if not os.path.isdir(victim):
            print(f" Already gone: {victim}")
            continue
        print(f" Removing {victim} …")
        shutil.rmtree(victim)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def main() -> None:
    """Parse CLI options, run the three migration steps per dataset,
    verify the result, and (unless suppressed) delete the originals."""
    ap = argparse.ArgumentParser(
        description="Migrate visualization directory to the cleaned-up layout."
    )
    ap.add_argument(
        "--dry-run",
        action="store_true",
        help="Print a summary of what would happen without touching the filesystem.",
    )
    ap.add_argument(
        "--skip-cleanup",
        action="store_true",
        help="Do not delete the old Sen2_MTC_* directories after migration.",
    )
    ap.add_argument(
        "--dataset",
        type=str,
        default=None,
        choices=["Sen2_MTC_New", "Sen2_MTC_Old"],
        help="Migrate only this dataset (default: both).",
    )
    args = ap.parse_args()

    # Default to both datasets unless --dataset narrows the run.
    datasets = [args.dataset] if args.dataset else ["Sen2_MTC_New", "Sen2_MTC_Old"]

    if args.dry_run:
        print("=" * 60)
        print("DRY RUN – no files will be copied or deleted")
        print("=" * 60)

    # Running totals for the final summary.
    total_gt = 0
    total_inputs = 0
    # Per-dataset {method: image count} as returned by migrate_outputs().
    total_results: dict[str, dict[str, int]] = {}

    for ds in datasets:
        src_base = os.path.join(ROOT, ds)
        dst_data = os.path.join(ROOT, "data", ds)
        dst_results = os.path.join(ROOT, "results", ds)

        # Abort outright: nothing can be migrated without the source tree.
        if not os.path.isdir(src_base):
            print(f"\n[ERROR] Source directory not found: {src_base}", file=sys.stderr)
            sys.exit(1)

        print(f"\n{'=' * 60}")
        print(f" Dataset: {ds}")
        print(f"{'=' * 60}")

        print(" Step 1/3 – GT images")
        n_gt = migrate_gt(src_base, dst_data, dry_run=args.dry_run)
        total_gt += n_gt
        print(f" → {n_gt} GT images {'(would copy)' if args.dry_run else 'copied'}")

        print(" Step 2/3 – Cloudy inputs")
        n_inp = migrate_inputs(src_base, dst_data, dry_run=args.dry_run)
        total_inputs += n_inp
        # Inputs come in triplets (_A1/_A2/_A3), hence the ÷3 sample count.
        print(
            f" → {n_inp} input images ({n_inp // 3 if n_inp else 0} samples × 3) "
            f"{'(would copy)' if args.dry_run else 'copied'}"
        )

        print(" Step 3/3 – Method outputs")
        counts = migrate_outputs(src_base, dst_results, ds, dry_run=args.dry_run)
        total_results[ds] = counts
        for method, n in counts.items():
            print(
                f" {method:<14} → {n:4d} images "
                f"{'(would copy)' if args.dry_run else 'copied'}"
            )

    print(f"\n{'=' * 60}")
    print("Summary")
    print(f"{'=' * 60}")
    print(f" GT images : {total_gt}")
    print(f" Input images: {total_inputs}")
    for ds, counts in total_results.items():
        total_preds = sum(counts.values())
        print(f" Results ({ds}): {total_preds}")

    if args.dry_run:
        print("\n[DRY RUN] Nothing was written. Re-run without --dry-run to proceed.")
        return

    # Gate the destructive cleanup on a successful verification pass.
    ok = verify(datasets)

    if not ok:
        print(
            "\n[ERROR] Verification found empty directories. "
            "Old directories were NOT deleted.\n"
            "Please inspect the output above and re-run.",
            file=sys.stderr,
        )
        sys.exit(1)

    if args.skip_cleanup:
        print("\n[INFO] --skip-cleanup set: original directories kept.")
        print(" Delete them manually when you are satisfied with the result.")
    else:
        print("\nAll checks passed. Deleting original directories …")
        cleanup(datasets)
    print("Done.")
|
|
|
|
|
# Script entry point: run the migration when invoked directly.
if __name__ == "__main__":
    main()
|
|
|