Spaces:
Running
Running
fcakyon
committed on
Commit
·
9dc104e
1
Parent(s):
75ec64c
handle time consuming requests better
Browse files
app.py
CHANGED
|
@@ -1,8 +1,8 @@
|
|
| 1 |
import gradio as gr
|
| 2 |
-
import yolov5
|
| 3 |
import sahi.utils
|
| 4 |
import sahi.model
|
| 5 |
import sahi.predict
|
|
|
|
| 6 |
from PIL import Image
|
| 7 |
import numpy
|
| 8 |
|
|
@@ -46,6 +46,20 @@ def sahi_yolo_inference(
|
|
| 46 |
postprocess_class_agnostic=False,
|
| 47 |
):
|
| 48 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 49 |
# standard inference
|
| 50 |
prediction_result_1 = sahi.predict.get_prediction(
|
| 51 |
image=image, detection_model=model, image_size=image_size
|
|
@@ -125,4 +139,5 @@ gr.Interface(
|
|
| 125 |
article=article,
|
| 126 |
examples=examples,
|
| 127 |
theme="default",
|
|
|
|
| 128 |
).launch(debug=True)
|
|
|
|
| 1 |
import gradio as gr
|
|
|
|
| 2 |
import sahi.utils
|
| 3 |
import sahi.model
|
| 4 |
import sahi.predict
|
| 5 |
+
import sahi.slicing
|
| 6 |
from PIL import Image
|
| 7 |
import numpy
|
| 8 |
|
|
|
|
| 46 |
postprocess_class_agnostic=False,
|
| 47 |
):
|
| 48 |
|
| 49 |
+
image_width, image_height = image.size
|
| 50 |
+
sliced_bboxes = sahi.slicing.get_slice_bboxes(
|
| 51 |
+
image_height,
|
| 52 |
+
image_width,
|
| 53 |
+
slice_height,
|
| 54 |
+
slice_width,
|
| 55 |
+
overlap_height_ratio,
|
| 56 |
+
overlap_width_ratio,
|
| 57 |
+
)
|
| 58 |
+
if len(sliced_bboxes) > 60:
|
| 59 |
+
raise ValueError(
|
| 60 |
+
f"{len(sliced_bboxes)} slices are too much for huggingface spaces, try smaller slice size."
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
# standard inference
|
| 64 |
prediction_result_1 = sahi.predict.get_prediction(
|
| 65 |
image=image, detection_model=model, image_size=image_size
|
|
|
|
| 139 |
article=article,
|
| 140 |
examples=examples,
|
| 141 |
theme="default",
|
| 142 |
+
enable_queue=True,
|
| 143 |
).launch(debug=True)
|