| #!/usr/bin/env python3 | |
| """CLI: image path to mili score in [0, 1]. --backbone: local directory or Hugging Face model id.""" | |
| from __future__ import annotations | |
| import argparse | |
| from pathlib import Path | |
| import torch | |
| from PIL import Image | |
| from mili_score_inference.predict import load_model, predict_pil | |
def main() -> int:
    """Score one image with a mili-score model and print the result.

    Parses CLI flags, resolves the torch device, loads the model, and
    prints ``score=<value>`` (six decimal places) to stdout.

    Returns:
        0 on success. argparse exits the process with status 2 on bad
        arguments before this function returns.
    """
    p = argparse.ArgumentParser(
        description="Map an image to a mili score in [0, 1]."
    )
    p.add_argument(
        "--weights", type=Path, required=True, help="Path to trained model weights"
    )
    p.add_argument(
        "--backbone",
        type=str,
        required=True,
        help="Local checkpoint directory or Hugging Face model id",
    )
    p.add_argument("--image", type=Path, required=True, help="Image file to score")
    p.add_argument(
        "--device",
        default="auto",
        help='torch device string (e.g. "cpu", "cuda:0"); "auto" prefers CUDA',
    )
    args = p.parse_args()

    # "auto" picks CUDA when available, otherwise CPU.
    if args.device == "auto":
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    else:
        device = torch.device(args.device)

    # An existing directory is treated as a local checkpoint; any other
    # string is passed through unchanged as a Hugging Face model id.
    bb = Path(args.backbone)
    if bb.is_dir():
        backbone_arg: Path | str = bb
        local_bb = True
    else:
        backbone_arg = args.backbone
        local_bb = False

    model = load_model(
        args.weights,
        backbone_arg,
        device=device,
        local_backbone=local_bb,
    )

    # Image.open is lazy and keeps the underlying file open; the context
    # manager closes the handle once prediction is done (fixes a leak).
    with Image.open(args.image) as img:
        score = predict_pil(model, img, device=device)
    print(f"score={score:.6f}")
    return 0
| if __name__ == "__main__": | |
| raise SystemExit(main()) | |