PrarthanaTS committed on
Commit
b386bfd
·
1 Parent(s): c1d0f6f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -11
app.py CHANGED
@@ -63,7 +63,7 @@ scaled_anchors = (
63
  * torch.tensor(config.S).unsqueeze(1).unsqueeze(1).repeat(1, 3, 2)
64
  )
65
 
66
- def process_image_and_plot(image,iou_threshold=0.5, threshold=0.4,show_cam="No",target_layer=-2):
67
 
68
  transformed_image = transforms(image=image)["image"].unsqueeze(0)
69
  output = model(transformed_image)
@@ -87,15 +87,13 @@ def process_image_and_plot(image,iou_threshold=0.5, threshold=0.4,show_cam="No",
87
  layer = [model.model.layers[-1]]
88
 
89
  cam = YoloCAM(model=model, target_layers=layer, use_cuda=False)
90
- if show_cam == "No":
91
- return fig
92
- else:
93
- grayscale_cam = cam(transformed_image, scaled_anchors)[0, :, :]
94
- img = cv2.resize(image, (416, 416))
95
- img = np.float32(img) / 255
96
- cam_image = show_cam_on_image(img, grayscale_cam, use_rgb=True)
97
-
98
- return fig,cam_image
99
 
100
 
101
  examples = [
@@ -132,7 +130,6 @@ demo = gr.Interface(process_image_and_plot,
132
  inputs=[gr.Image(label="Input Image"),
133
  gr.Slider(0, 1, value=0.5, label="Intersection over Union (IOU) Threshold",info="Determines how much overlap between two boxes, Set it low to filter out weaker predicts"),
134
  gr.Slider(0, 1, value=0.4, label="Threshold"),
135
- gr.Radio(["Yes", "No"], value="No" , label="Show GradCAM outputs"),
136
  gr.Slider(-2, -1, value=-1, step=1, label="Which Layer?"),],
137
  outputs=[
138
  gr.Plot(label="Output with Classes",),
 
63
  * torch.tensor(config.S).unsqueeze(1).unsqueeze(1).repeat(1, 3, 2)
64
  )
65
 
66
+ def process_image_and_plot(image,iou_threshold=0.5, threshold=0.4,target_layer=-2):
67
 
68
  transformed_image = transforms(image=image)["image"].unsqueeze(0)
69
  output = model(transformed_image)
 
87
  layer = [model.model.layers[-1]]
88
 
89
  cam = YoloCAM(model=model, target_layers=layer, use_cuda=False)
90
+
91
+ grayscale_cam = cam(transformed_image, scaled_anchors)[0, :, :]
92
+ img = cv2.resize(image, (416, 416))
93
+ img = np.float32(img) / 255
94
+ cam_image = show_cam_on_image(img, grayscale_cam, use_rgb=True)
95
+
96
+ return fig,cam_image
 
 
97
 
98
 
99
  examples = [
 
130
  inputs=[gr.Image(label="Input Image"),
131
  gr.Slider(0, 1, value=0.5, label="Intersection over Union (IOU) Threshold",info="Determines how much overlap between two boxes, Set it low to filter out weaker predicts"),
132
  gr.Slider(0, 1, value=0.4, label="Threshold"),
 
133
  gr.Slider(-2, -1, value=-1, step=1, label="Which Layer?"),],
134
  outputs=[
135
  gr.Plot(label="Output with Classes",),