Spaces:
Running
on
Zero
Running
on
Zero
optimize gpu allocation
Browse files
app.py
CHANGED
|
@@ -85,7 +85,6 @@ downscaled_outputs = ['./images/ncut_0_small.jpg', './images/ncut_1_small.jpg',
|
|
| 85 |
|
| 86 |
example_items = downscaled_images[:3] + downscaled_outputs[:3]
|
| 87 |
|
| 88 |
-
@spaces.GPU(duration=30)
|
| 89 |
def main_fn(
|
| 90 |
images,
|
| 91 |
model_name="SAM(sam_vit_b)",
|
|
@@ -102,8 +101,6 @@ def main_fn(
|
|
| 102 |
n_neighbors=500,
|
| 103 |
min_dist=0.1,
|
| 104 |
):
|
| 105 |
-
if len(images) is None:
|
| 106 |
-
return [], example_items
|
| 107 |
|
| 108 |
if perplexity >= num_sample_tsne or n_neighbors >= num_sample_tsne:
|
| 109 |
# raise gr.Error("Perplexity must be less than the number of samples for t-SNE.")
|
|
@@ -138,7 +135,66 @@ def main_fn(
|
|
| 138 |
rgb = dont_use_too_much_green(rgb)
|
| 139 |
return to_pil_images(rgb), []
|
| 140 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 141 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 142 |
with gr.Blocks() as demo:
|
| 143 |
|
| 144 |
with gr.Row():
|
|
|
|
| 85 |
|
| 86 |
example_items = downscaled_images[:3] + downscaled_outputs[:3]
|
| 87 |
|
|
|
|
| 88 |
def main_fn(
|
| 89 |
images,
|
| 90 |
model_name="SAM(sam_vit_b)",
|
|
|
|
| 101 |
n_neighbors=500,
|
| 102 |
min_dist=0.1,
|
| 103 |
):
|
|
|
|
|
|
|
| 104 |
|
| 105 |
if perplexity >= num_sample_tsne or n_neighbors >= num_sample_tsne:
|
| 106 |
# raise gr.Error("Perplexity must be less than the number of samples for t-SNE.")
|
|
|
|
| 135 |
rgb = dont_use_too_much_green(rgb)
|
| 136 |
return to_pil_images(rgb), []
|
| 137 |
|
| 138 |
+
@spaces.GPU(duration=10)
def quick_run(*args, **kwargs):
    # Thin wrapper: reserves only a 10-second ZeroGPU slot for small jobs,
    # then delegates unchanged to main_fn.
    return main_fn(*args, **kwargs)
|
| 141 |
+
|
| 142 |
+
@spaces.GPU(duration=30)
def long_run(*args, **kwargs):
    # Thin wrapper: reserves a 30-second ZeroGPU slot for medium jobs,
    # then delegates unchanged to main_fn.
    return main_fn(*args, **kwargs)
|
| 145 |
+
|
| 146 |
+
@spaces.GPU(duration=120)
def super_duper_long_run(*args, **kwargs):
    # Thin wrapper: reserves a 120-second ZeroGPU slot for the largest jobs,
    # then delegates unchanged to main_fn.
    return main_fn(*args, **kwargs)
|
| 149 |
|
| 150 |
+
def run_fn(
    images,
    model_name="SAM(sam_vit_b)",
    layer=-1,
    num_eig=100,
    node_type="block",
    affinity_focal_gamma=0.3,
    num_sample_ncut=10000,
    knn_ncut=10,
    embedding_method="UMAP",
    num_sample_tsne=1000,
    knn_tsne=10,
    perplexity=500,
    n_neighbors=500,
    min_dist=0.1,
):
    """Dispatch a NCut job to the GPU wrapper whose ZeroGPU duration fits it.

    Estimates how long the job will take from the number of images and the
    embedding settings, then forwards all arguments to quick_run (10 s),
    long_run (30 s), or super_duper_long_run (120 s).

    Returns whatever main_fn returns: (list of PIL images, gallery items).
    With no input images, returns ([], example_items) without touching the GPU.
    """
    # Guard both None and an empty gallery — len()/dispatch below would
    # fail or waste a GPU slot on nothing.
    if not images:
        return [], example_items

    kwargs = {
        "images": images,
        "model_name": model_name,
        "layer": layer,
        "num_eig": num_eig,
        "node_type": node_type,
        "affinity_focal_gamma": affinity_focal_gamma,
        "num_sample_ncut": num_sample_ncut,
        "knn_ncut": knn_ncut,
        "embedding_method": embedding_method,
        "num_sample_tsne": num_sample_tsne,
        "knn_tsne": knn_tsne,
        "perplexity": perplexity,
        "n_neighbors": n_neighbors,
        "min_dist": min_dist,
    }

    # BUG FIX: the wrappers were called as wrapper(images, **kwargs), but
    # kwargs already contains "images", so main_fn(*args, **kwargs) got the
    # argument twice -> TypeError: got multiple values for argument 'images'.
    # Forward kwargs only.
    num_images = len(images)
    if num_images > 100:
        return super_duper_long_run(**kwargs)
    # Medium-length jobs: many images, UMAP (slower than t-SNE here), or
    # expensive t-SNE settings. Thresholds unchanged from the original.
    if (
        num_images > 20
        or embedding_method == "UMAP"
        or perplexity >= 250
        or num_sample_tsne >= 500
    ):
        return long_run(**kwargs)
    return quick_run(**kwargs)
|
| 197 |
+
|
| 198 |
with gr.Blocks() as demo:
|
| 199 |
|
| 200 |
with gr.Row():
|