DatSplit committed on
Commit
c589de5
·
verified ·
1 Parent(s): cbda6dd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -109,7 +109,7 @@ def draw_predictions(boxes, labels, scores, img, score_threshold=0.5):
109
 
110
  img_bbox = draw_bounding_boxes(
111
  img,
112
- boxes=torch.from_numpy(boxes_filtered),
113
  labels=[f"{name}: {score:.2f}" for name, score in zip(label_names, scores_filtered)],
114
  colors=colors,
115
  width=4
@@ -154,7 +154,7 @@ def inference(image_path, model_name, bbox_threshold):
154
  mask = max_scores > bbox_threshold
155
 
156
  pred_boxes = torch.from_numpy(pred_boxes[0])
157
- image_w, image_h = img.size
158
 
159
  pred_boxes_abs = pred_boxes.clone()
160
  pred_boxes_abs[:, 0] *= image_w
@@ -170,7 +170,7 @@ def inference(image_path, model_name, bbox_threshold):
170
  filtered_scores = max_scores.squeeze(0)[mask]
171
  filtered_labels = pred_labels.squeeze(0)[mask]
172
 
173
- img_tensor = torch.from_numpy(np.array(img)).permute(2, 0, 1)
174
 
175
  return draw_predictions(filtered_boxes, filtered_labels, filtered_scores, img_tensor, score_threshold=bbox_threshold)
176
 
 
109
 
110
  img_bbox = draw_bounding_boxes(
111
  img,
112
+ boxes=boxes_filtered,
113
  labels=[f"{name}: {score:.2f}" for name, score in zip(label_names, scores_filtered)],
114
  colors=colors,
115
  width=4
 
154
  mask = max_scores > bbox_threshold
155
 
156
  pred_boxes = torch.from_numpy(pred_boxes[0])
157
+ image_w, image_h = image.size
158
 
159
  pred_boxes_abs = pred_boxes.clone()
160
  pred_boxes_abs[:, 0] *= image_w
 
170
  filtered_scores = max_scores.squeeze(0)[mask]
171
  filtered_labels = pred_labels.squeeze(0)[mask]
172
 
173
+ img_tensor = torch.from_numpy(np.array(image)).permute(2, 0, 1)
174
 
175
  return draw_predictions(filtered_boxes, filtered_labels, filtered_scores, img_tensor, score_threshold=bbox_threshold)
176