Spaces:
Sleeping
Sleeping
Commit
·
788697a
1
Parent(s):
53e7451
rectified output
Browse files
app.py
CHANGED
|
@@ -6,7 +6,7 @@ import numpy as np
|
|
| 6 |
from PIL import Image
|
| 7 |
import torch
|
| 8 |
import cv2
|
| 9 |
-
from transformers import CLIPSegProcessor, CLIPSegForImageSegmentation,AutoProcessor
|
| 10 |
from skimage.measure import label, regionprops
|
| 11 |
|
| 12 |
processor = CLIPSegProcessor.from_pretrained("CIDAS/clipseg-rd64-refined")
|
|
@@ -94,7 +94,7 @@ with gr.Row():
|
|
| 94 |
labels = gr.Textbox(placeholder="Enter Label/ labels ex. cat,car,door,window,",scale=4)
|
| 95 |
button = gr.Button(value="Locate objects")
|
| 96 |
with gr.Column():
|
| 97 |
-
outputs = gr.
|
| 98 |
button.click(shot,inputt,labels)
|
| 99 |
|
| 100 |
|
|
|
|
| 6 |
from PIL import Image
|
| 7 |
import torch
|
| 8 |
import cv2
|
| 9 |
+
from transformers import CLIPSegProcessor, CLIPSegForImageSegmentation,AutoProcessor,AutoConfig
|
| 10 |
from skimage.measure import label, regionprops
|
| 11 |
|
| 12 |
processor = CLIPSegProcessor.from_pretrained("CIDAS/clipseg-rd64-refined")
|
|
|
|
| 94 |
labels = gr.Textbox(placeholder="Enter Label/ labels ex. cat,car,door,window,",scale=4)
|
| 95 |
button = gr.Button(value="Locate objects")
|
| 96 |
with gr.Column():
|
| 97 |
+
outputs = gr.Image(type="numpy", label="Detected Objects with Selected Category")
|
| 98 |
button.click(shot,inputt,labels)
|
| 99 |
|
| 100 |
|