EnginDev committed on
Commit
2b89597
·
verified ·
1 Parent(s): e0c236f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -15
app.py CHANGED
@@ -1,47 +1,46 @@
1
import traceback

import gradio as gr
import numpy as np
import torch
from PIL import Image
from transformers import AutoProcessor, SamModel

# CPU-friendly SAM checkpoint; AutoProcessor resolves to the matching
# processor class for this model id.
model_id = "facebook/sam-vit-base"
processor = AutoProcessor.from_pretrained(model_id)
model = SamModel.from_pretrained(model_id)
12
 
13
def segment_image(image):
    """Run SAM over the uploaded image and return its first predicted mask.

    Args:
        image: PIL.Image supplied by the Gradio image component.

    Returns:
        PIL.Image: the first predicted mask scaled to 0/255 uint8, or a
        plain string (shown in the text output) when no mask is produced
        or an exception occurs.
    """
    try:
        device = torch.device("cpu")
        model.to(device)

        inputs = processor(images=image, return_tensors="pt").to(device)

        with torch.no_grad():
            outputs = model(**inputs)

        # BUG FIX: post_process_masks has no `outputs=` keyword — it takes
        # the predicted mask tensor plus the size tensors positionally, and
        # returns a list of mask tensors (one per image), not dicts.
        masks = processor.post_process_masks(
            outputs.pred_masks.cpu(),
            inputs["original_sizes"].cpu(),
            inputs["reshaped_input_sizes"].cpu(),
        )

        if not masks:
            return "Keine Maske erkannt."

        # First image, first prediction, first mask channel.
        mask_array = masks[0][0][0].numpy()
        mask_image = Image.fromarray((mask_array * 255).astype(np.uint8))
        return mask_image

    except Exception:
        # Debug build: surface the full traceback in the text output.
        return f"Fehler:\n{traceback.format_exc()}"
38
 
39
# Debug UI: the handler's return value (mask image or traceback text) is
# rendered through a plain-text output so error messages stay readable.
image_input = gr.Image(type="pil", label="Upload your fish image")

demo = gr.Interface(
    fn=segment_image,
    inputs=image_input,
    outputs="text",
    title="FishBoost Segment Anything (Meta SAM CPU Debug)",
    description="Debug-Version: Zeigt genaue Fehlermeldung bei Problemen mit SAM.",
)

demo.launch()
 
import traceback

import gradio as gr
import numpy as np
import torch
from PIL import Image
from transformers import SamProcessor, SamModel

# Load the SAM model (CPU-compatible); SamProcessor handles both image
# pre-processing and mask post-processing.
model_id = "facebook/sam-vit-base"
processor = SamProcessor.from_pretrained(model_id)
model = SamModel.from_pretrained(model_id)
 
13
def segment_image(image):
    """Segment the uploaded image with SAM and return the first mask.

    Args:
        image: PIL.Image supplied by the Gradio image component.

    Returns:
        PIL.Image: the first predicted mask scaled to 0/255 uint8.

    Raises:
        gradio.Error: carrying the full traceback if inference fails, so
            Gradio displays the message in the UI. (The output component is
            a gr.Image; returning a plain error string would itself fail.)
    """
    try:
        device = torch.device("cpu")
        model.to(device)

        # Prepare the image; the processor also records the original and
        # resized dimensions needed for post-processing.
        inputs = processor(images=image, return_tensors="pt").to(device)

        with torch.no_grad():
            outputs = model(**inputs)

        # post_process_masks takes positional tensors (not an `outputs=`
        # kwarg) and returns one mask tensor per input image.
        masks = processor.post_process_masks(
            outputs.pred_masks.cpu(),
            inputs["original_sizes"].cpu(),
            inputs["reshaped_input_sizes"].cpu(),
        )

        # First image, first prediction, first mask channel.
        mask = masks[0][0][0].numpy()
        mask_image = Image.fromarray((mask * 255).astype(np.uint8))

        return mask_image

    except Exception:
        # BUG FIX: the previous code returned the traceback as a string,
        # but a gr.Image output cannot render text — raise gr.Error so
        # Gradio shows the message instead of crashing the UI.
        raise gr.Error(f"Fehler:\n{traceback.format_exc()}")
37
 
38
# Stable UI: both input and output are image components, so the handler's
# returned PIL mask is rendered directly.
image_input = gr.Image(type="pil", label="Upload your fish image")
mask_output = gr.Image(type="pil", label="Segmented Output")

demo = gr.Interface(
    fn=segment_image,
    inputs=image_input,
    outputs=mask_output,
    title="FishBoost Segment Anything (Meta SAM - CPU Safe)",
    description="Stable version for Hugging Face CPU runtime. Uses Meta's SAM model.",
)

demo.launch()