Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -122,7 +122,7 @@ pipe = TryonPipeline.from_pretrained(
|
|
| 122 |
pipe.unet_encoder = UNet_Encoder
|
| 123 |
|
| 124 |
@spaces.GPU
|
| 125 |
-
def start_tryon(dict,garm_img,garment_des,is_checked,is_checked_crop,denoise_steps,seed):
|
| 126 |
device = "cuda"
|
| 127 |
|
| 128 |
openpose_model.preprocessor.body_estimation.model.to(device)
|
|
@@ -150,7 +150,7 @@ def start_tryon(dict,garm_img,garment_des,is_checked,is_checked_crop,denoise_ste
|
|
| 150 |
if is_checked:
|
| 151 |
keypoints = openpose_model(human_img.resize((384,512)))
|
| 152 |
model_parse, _ = parsing_model(human_img.resize((384,512)))
|
| 153 |
-
mask, mask_gray = get_mask_location('hd', "upper_body", model_parse, keypoints)
|
| 154 |
mask = mask.resize((768,1024))
|
| 155 |
else:
|
| 156 |
mask = pil_to_binary_mask(dict['layers'][0].convert("RGB").resize((768, 1024)))
|
|
@@ -275,6 +275,9 @@ with image_blocks as demo:
|
|
| 275 |
examples_per_page=10,
|
| 276 |
examples=human_ex_list
|
| 277 |
)
|
|
|
|
|
|
|
|
|
|
| 278 |
|
| 279 |
with gr.Column():
|
| 280 |
garm_img = gr.Image(label="Garment", sources='upload', type="pil")
|
|
@@ -304,7 +307,7 @@ with image_blocks as demo:
|
|
| 304 |
|
| 305 |
|
| 306 |
|
| 307 |
-
try_button.click(fn=start_tryon, inputs=[imgs, garm_img, prompt, is_checked,is_checked_crop, denoise_steps, seed], outputs=[image_out,masked_img], api_name='tryon')
|
| 308 |
|
| 309 |
|
| 310 |
|
|
|
|
| 122 |
pipe.unet_encoder = UNet_Encoder
|
| 123 |
|
| 124 |
@spaces.GPU
|
| 125 |
+
def start_tryon(dict,garm_img,garment_des,is_checked,is_checked_crop,denoise_steps,seed, category):
|
| 126 |
device = "cuda"
|
| 127 |
|
| 128 |
openpose_model.preprocessor.body_estimation.model.to(device)
|
|
|
|
| 150 |
if is_checked:
|
| 151 |
keypoints = openpose_model(human_img.resize((384,512)))
|
| 152 |
model_parse, _ = parsing_model(human_img.resize((384,512)))
|
| 153 |
+
mask, mask_gray = get_mask_location('hd', category, model_parse, keypoints)
|
| 154 |
mask = mask.resize((768,1024))
|
| 155 |
else:
|
| 156 |
mask = pil_to_binary_mask(dict['layers'][0].convert("RGB").resize((768, 1024)))
|
|
|
|
| 275 |
examples_per_page=10,
|
| 276 |
examples=human_ex_list
|
| 277 |
)
|
| 278 |
+
|
| 279 |
+
with gr.Column():
|
| 280 |
+
category = gr.Textbox(placeholder="0 = upper body, 1 = lower body, 2 = full body", show_label=False, elem_id="prompt")
|
| 281 |
|
| 282 |
with gr.Column():
|
| 283 |
garm_img = gr.Image(label="Garment", sources='upload', type="pil")
|
|
|
|
| 307 |
|
| 308 |
|
| 309 |
|
| 310 |
+
try_button.click(fn=start_tryon, inputs=[imgs, garm_img, prompt, is_checked,is_checked_crop, denoise_steps, seed, category], outputs=[image_out,masked_img], api_name='tryon')
|
| 311 |
|
| 312 |
|
| 313 |
|