samwell committed on
Commit
bf9db6a
·
verified ·
1 Parent(s): 99bd982

Upload export_medsiglip.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. export_medsiglip.py +68 -30
export_medsiglip.py CHANGED
@@ -10,10 +10,10 @@ Export trained MedSigLIP model to ONNX/CoreML/TFLite for mobile deployment
10
  # "onnx>=1.14.0",
11
  # "onnxscript>=0.1.0",
12
  # "onnxruntime>=1.16.0",
13
- # "onnxruntime-gpu>=1.16.0",
14
  # "huggingface_hub>=0.20.0",
15
  # "numpy>=1.24.0",
16
  # "pillow>=10.0.0",
 
17
  # ]
18
  # ///
19
 
@@ -281,9 +281,10 @@ def export_to_onnx(model, config: Config, quantize: bool = True):
281
 
282
 
283
  def export_to_coreml(model, config: Config):
284
- """Export model to CoreML format for iOS"""
285
  try:
286
  import coremltools as ct
 
287
  except ImportError:
288
  print("coremltools not installed. Skipping CoreML export.")
289
  print("Install with: pip install coremltools")
@@ -292,46 +293,83 @@ def export_to_coreml(model, config: Config):
292
  config.output_dir.mkdir(parents=True, exist_ok=True)
293
 
294
  model.eval()
 
295
  wrapper = LaborViewExportWrapper(model)
296
  wrapper.eval()
297
 
298
  # Trace the model
 
299
  dummy_input = torch.randn(1, 3, config.image_size, config.image_size)
300
- traced_model = torch.jit.trace(wrapper, dummy_input)
 
 
301
 
302
  coreml_path = config.output_dir / "laborview_medsiglip.mlpackage"
 
303
 
304
- print(f"Exporting to CoreML: {coreml_path}")
305
 
306
- # Convert to CoreML
307
- mlmodel = ct.convert(
308
- traced_model,
309
- inputs=[
310
- ct.TensorType(
311
- name="pixel_values",
312
- shape=(1, 3, config.image_size, config.image_size),
313
- dtype=np.float32
314
- )
315
- ],
316
- outputs=[
317
- ct.TensorType(name="seg_probs"),
318
- ct.TensorType(name="plane_pred")
319
- ],
320
- minimum_deployment_target=ct.target.iOS16,
321
- compute_precision=ct.precision.FLOAT16 # Use FP16 for mobile
322
- )
 
323
 
324
- # Add metadata
325
- mlmodel.author = "LaborView AI"
326
- mlmodel.short_description = "Ultrasound segmentation for labor monitoring"
327
- mlmodel.version = "1.0"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
328
 
329
- # Save
330
- mlmodel.save(str(coreml_path))
 
 
 
331
 
332
- print(f"CoreML model saved!")
333
 
334
- return coreml_path
 
 
 
 
335
 
336
 
337
  def verify_onnx_model(onnx_path: Path, config: Config):
@@ -378,7 +416,7 @@ def main():
378
  parser = argparse.ArgumentParser(description="Export MedSigLIP for edge deployment")
379
  parser.add_argument("--output-dir", type=str, default="./exports", help="Output directory")
380
  parser.add_argument("--quantize", action="store_true", default=True, help="Quantize to INT8")
381
- parser.add_argument("--coreml", action="store_true", help="Export to CoreML")
382
  parser.add_argument("--verify", action="store_true", default=True, help="Verify exported model")
383
  args = parser.parse_args()
384
 
 
10
  # "onnx>=1.14.0",
11
  # "onnxscript>=0.1.0",
12
  # "onnxruntime>=1.16.0",
 
13
  # "huggingface_hub>=0.20.0",
14
  # "numpy>=1.24.0",
15
  # "pillow>=10.0.0",
16
+ # "coremltools>=7.0",
17
  # ]
18
  # ///
19
 
 
281
 
282
 
283
  def export_to_coreml(model, config: Config):
284
+ """Export model to CoreML format for iOS with Neural Engine optimization"""
285
  try:
286
  import coremltools as ct
287
+ from coremltools.models.neural_network import quantization_utils
288
  except ImportError:
289
  print("coremltools not installed. Skipping CoreML export.")
290
  print("Install with: pip install coremltools")
 
293
  config.output_dir.mkdir(parents=True, exist_ok=True)
294
 
295
  model.eval()
296
+ model = model.cpu() # Move to CPU for export
297
  wrapper = LaborViewExportWrapper(model)
298
  wrapper.eval()
299
 
300
  # Trace the model
301
+ print("Tracing model for CoreML...")
302
  dummy_input = torch.randn(1, 3, config.image_size, config.image_size)
303
+
304
+ with torch.no_grad():
305
+ traced_model = torch.jit.trace(wrapper, dummy_input)
306
 
307
  coreml_path = config.output_dir / "laborview_medsiglip.mlpackage"
308
+ coreml_fp16_path = config.output_dir / "laborview_medsiglip_fp16.mlpackage"
309
 
310
+ print(f"Converting to CoreML: {coreml_path}")
311
 
312
+ try:
313
+ # Convert to CoreML with FP32 first
314
+ mlmodel = ct.convert(
315
+ traced_model,
316
+ inputs=[
317
+ ct.TensorType(
318
+ name="pixel_values",
319
+ shape=(1, 3, config.image_size, config.image_size),
320
+ dtype=np.float32
321
+ )
322
+ ],
323
+ outputs=[
324
+ ct.TensorType(name="seg_probs"),
325
+ ct.TensorType(name="plane_pred")
326
+ ],
327
+ minimum_deployment_target=ct.target.iOS16,
328
+ compute_units=ct.ComputeUnit.ALL # Use Neural Engine + GPU + CPU
329
+ )
330
 
331
+ # Add metadata
332
+ mlmodel.author = "LaborView AI"
333
+ mlmodel.short_description = "MedSigLIP ultrasound segmentation for labor monitoring"
334
+ mlmodel.version = "1.0"
335
+
336
+ # Save FP32 version
337
+ mlmodel.save(str(coreml_path))
338
+ print(f"CoreML FP32 model saved: {coreml_path}")
339
+
340
+ # Create FP16 version for smaller size and faster inference
341
+ print("Creating FP16 quantized version...")
342
+ mlmodel_fp16 = ct.convert(
343
+ traced_model,
344
+ inputs=[
345
+ ct.TensorType(
346
+ name="pixel_values",
347
+ shape=(1, 3, config.image_size, config.image_size),
348
+ dtype=np.float32
349
+ )
350
+ ],
351
+ outputs=[
352
+ ct.TensorType(name="seg_probs"),
353
+ ct.TensorType(name="plane_pred")
354
+ ],
355
+ minimum_deployment_target=ct.target.iOS16,
356
+ compute_units=ct.ComputeUnit.ALL,
357
+ compute_precision=ct.precision.FLOAT16
358
+ )
359
 
360
+ mlmodel_fp16.author = "LaborView AI"
361
+ mlmodel_fp16.short_description = "MedSigLIP ultrasound segmentation (FP16)"
362
+ mlmodel_fp16.version = "1.0"
363
+ mlmodel_fp16.save(str(coreml_fp16_path))
364
+ print(f"CoreML FP16 model saved: {coreml_fp16_path}")
365
 
366
+ return coreml_path
367
 
368
+ except Exception as e:
369
+ print(f"CoreML export failed: {e}")
370
+ import traceback
371
+ traceback.print_exc()
372
+ return None
373
 
374
 
375
  def verify_onnx_model(onnx_path: Path, config: Config):
 
416
  parser = argparse.ArgumentParser(description="Export MedSigLIP for edge deployment")
417
  parser.add_argument("--output-dir", type=str, default="./exports", help="Output directory")
418
  parser.add_argument("--quantize", action="store_true", default=True, help="Quantize to INT8")
419
+ parser.add_argument("--coreml", action="store_true", default=True, help="Export to CoreML")
420
  parser.add_argument("--verify", action="store_true", default=True, help="Verify exported model")
421
  args = parser.parse_args()
422