# UIPress / scripts / batch_uipress_clip_epochs.py
# (uploaded via upload-large-folder tool; commit fbc94ef, verified)
"""
For each checkpoints/optical_mix_d2c/epoch{N}.pt (and optionally latest.pt), run
eval_all (UIPress) + step_clip_batch, write under results/clip_per_epoch/<name>/.
Usage:
# After GPU 1 is free (or use --wait_gpu to block until VRAM drops):
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=. python scripts/batch_uipress_clip_epochs.py \\
--checkpoint_dir checkpoints/optical_mix_d2c \\
--tag optical_mix_d2c
# Skip epochs that already have clip_scores.json:
... --skip_existing
"""
from __future__ import annotations
import argparse
import json
import os
import re
import subprocess
import sys
import time
from pathlib import Path
# Repository root (this file is expected to live in <root>/scripts/).
PROJECT_ROOT = Path(__file__).resolve().parent.parent
def _wait_for_gpu(gpu_index: int, free_below_mib: int, poll_s: int) -> None:
    """Block until ``nvidia-smi`` reports the GPU's used memory below a threshold.

    Args:
        gpu_index: GPU index passed to ``nvidia-smi -i``.
        free_below_mib: return once memory.used (MiB) drops below this value.
        poll_s: seconds to sleep between polls.

    Raises:
        subprocess.CalledProcessError / FileNotFoundError if nvidia-smi is
        unavailable or fails.
    """
    # Fix: use the module-level `subprocess` import instead of re-importing
    # it locally as `sp` (the alias shadowed an already-imported module).
    print(f"Waiting until GPU {gpu_index} memory.used < {free_below_mib} MiB ...", flush=True)
    while True:
        out = subprocess.check_output(
            [
                "nvidia-smi",
                "-i",
                str(gpu_index),
                "--query-gpu=memory.used",
                "--format=csv,noheader,nounits",
            ],
            text=True,
        )
        # `-i` selects a single GPU, so only the first output line matters.
        used = int(out.strip().split("\n")[0].strip())
        print(f"  GPU{gpu_index} used={used} MiB", flush=True)
        if used < free_below_mib:
            break
        time.sleep(poll_s)
def _manifest_row(folder_name: str, ckpt_path: Path, clip_path: Path) -> dict:
    """Build one summary row from an on-disk clip_scores.json file."""
    data = json.loads(clip_path.read_text(encoding="utf-8"))
    return {
        "folder": folder_name,
        # NOTE(review): relative_to() raises ValueError if --checkpoint_dir is
        # outside the repository; paths are assumed to live under PROJECT_ROOT.
        "checkpoint": str(ckpt_path.relative_to(PROJECT_ROOT)),
        "avg_clip": data.get("avg_clip"),
        "n": data.get("n"),
        "clip_path": str(clip_path.relative_to(PROJECT_ROOT)),
    }


def _sort_key(row: dict) -> tuple:
    """Order manifest rows: epoch_N ascending, with 'latest' always last."""
    f = row["folder"]
    if f == "latest":
        return (2, 10**9)
    m = re.match(r"epoch_(\d+)", f)
    return (1, int(m.group(1))) if m else (0, 0)


def main() -> int:
    """Run eval_all (UIPress) + step_clip_batch for every epoch checkpoint.

    For each checkpoints/<dir>/epoch{N}.pt (and optionally latest.pt) this
    writes evaluation output under results/clip_per_epoch/<tag>/epoch_N/ and
    then aggregates the per-checkpoint CLIP scores into summary.json and
    CLIP_TABLE.md.

    Returns:
        Process exit code: 0 on success, 1 if --checkpoint_dir does not exist.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("--checkpoint_dir", type=Path, default=PROJECT_ROOT / "checkpoints" / "optical_mix_d2c")
    ap.add_argument(
        "--output_root",
        type=Path,
        default=None,
        help="Defaults to results/clip_per_epoch/<tag>",
    )
    ap.add_argument("--tag", default="optical_mix_d2c", help="Subfolder under results/clip_per_epoch/")
    ap.add_argument("--max_samples", type=int, default=50)
    ap.add_argument("--target_tokens", type=int, default=256)
    ap.add_argument("--data_dir", default="data")
    ap.add_argument("--ref_dir", default="data/ref_screenshots")
    ap.add_argument("--skip_existing", action="store_true")
    ap.add_argument("--include_latest", action="store_true", help="Also eval latest.pt into folder latest/")
    ap.add_argument("--clip_device", default="cuda", choices=["cuda", "cpu"])
    ap.add_argument("--force_cpu_eval", action="store_true", help="Pass --force_cpu to eval_all (8B on CPU; very slow).")
    ap.add_argument("--wait_gpu", type=int, default=None, help="Poll this GPU index until memory drops.")
    ap.add_argument("--wait_free_mib", type=int, default=12000, help="Start when memory.used < this (MiB).")
    ap.add_argument("--wait_poll_s", type=int, default=60)
    args = ap.parse_args()

    # Route HuggingFace downloads through the mirror unless the caller already
    # set HF_ENDPOINT. (Fix: setdefault alone suffices — the original wrapped
    # it in a redundant os.environ.get with the same default.)
    os.environ.setdefault("HF_ENDPOINT", "https://hf-mirror.com")

    ckpt_dir = args.checkpoint_dir.resolve()
    if not ckpt_dir.is_dir():
        print(f"checkpoint_dir not found: {ckpt_dir}", file=sys.stderr)
        return 1

    out_root = (args.output_root or (PROJECT_ROOT / "results" / "clip_per_epoch" / args.tag)).resolve()
    out_root.mkdir(parents=True, exist_ok=True)

    if args.wait_gpu is not None:
        _wait_for_gpu(args.wait_gpu, args.wait_free_mib, args.wait_poll_s)

    # Collect (folder_name, checkpoint_path) jobs sorted by epoch number.
    # Fix: compile the regex once and skip glob hits without a number (e.g.
    # "epoch_final.pt") instead of crashing on .group() of a None match.
    epoch_re = re.compile(r"epoch(\d+)")
    numbered: list[tuple[int, Path]] = []
    for p in ckpt_dir.glob("epoch*.pt"):
        m = epoch_re.search(p.name)
        if m is None:
            continue
        numbered.append((int(m.group(1)), p))
    jobs: list[tuple[str, Path]] = [
        (f"epoch_{n}", p) for n, p in sorted(numbered, key=lambda t: t[0])
    ]
    if args.include_latest and (ckpt_dir / "latest.pt").exists():
        jobs.append(("latest", ckpt_dir / "latest.pt"))

    manifest: list[dict] = []
    run_name = f"uipress_{args.target_tokens}"
    for folder_name, ckpt_path in jobs:
        eval_out = out_root / folder_name
        method_dir = eval_out / run_name
        clip_path = method_dir / "clip_scores.json"

        # Reuse existing results when requested.
        if args.skip_existing and clip_path.is_file():
            print(f"Skip (exists): {clip_path}", flush=True)
            manifest.append(_manifest_row(folder_name, ckpt_path, clip_path))
            continue

        eval_out.mkdir(parents=True, exist_ok=True)

        # Stage 1: run the UIPress evaluation for this checkpoint.
        cmd_eval = [
            sys.executable,
            str(PROJECT_ROOT / "scripts" / "eval_all.py"),
            "--method",
            "uipress",
            "--checkpoint",
            str(ckpt_path),
            "--max_samples",
            str(args.max_samples),
            "--data_dir",
            args.data_dir,
            "--output_dir",
            str(eval_out),
            "--target_tokens",
            str(args.target_tokens),
        ]
        if args.force_cpu_eval:
            cmd_eval.append("--force_cpu")
        print(f"\n=== eval_all: {folder_name} <- {ckpt_path.name} ===", flush=True)
        r1 = subprocess.run(cmd_eval, cwd=str(PROJECT_ROOT))
        if r1.returncode != 0:
            # Best-effort per checkpoint: log and move on to the next one.
            print(f"[error] eval_all failed rc={r1.returncode} for {folder_name}", flush=True)
            continue

        # Stage 2: compute CLIP scores over the evaluation output.
        cmd_clip = [
            sys.executable,
            str(PROJECT_ROOT / "scripts" / "step_clip_batch.py"),
            "--method_dir",
            str(method_dir),
            "--ref_dir",
            str(PROJECT_ROOT / args.ref_dir),
            "--clip_device",
            args.clip_device,
        ]
        print(f"=== CLIP: {method_dir} ===", flush=True)
        r2 = subprocess.run(cmd_clip, cwd=str(PROJECT_ROOT))
        if r2.returncode != 0:
            print(f"[error] step_clip_batch failed rc={r2.returncode} for {folder_name}", flush=True)
            continue

        if clip_path.is_file():
            manifest.append(_manifest_row(folder_name, ckpt_path, clip_path))

    manifest.sort(key=_sort_key)

    # Aggregate: machine-readable summary plus a markdown table.
    summary_path = out_root / "summary.json"
    summary_path.write_text(json.dumps(manifest, indent=2, ensure_ascii=False), encoding="utf-8")
    print(f"\nWrote {summary_path}", flush=True)

    lines = ["| 文件夹 | checkpoint | avg CLIP | n |", "|---|---|---|---|"]
    for row in manifest:
        lines.append(
            f"| {row['folder']} | `{row['checkpoint']}` | {row.get('avg_clip')} | {row.get('n')} |"
        )
    (out_root / "CLIP_TABLE.md").write_text("\n".join(lines) + "\n", encoding="utf-8")
    print(f"Wrote {out_root / 'CLIP_TABLE.md'}", flush=True)
    return 0
# Script entry point: propagate main()'s return value as the exit code.
if __name__ == "__main__":
    sys.exit(main())