ShubhankarMUS committed on
Commit
dfe5cd0
·
verified ·
1 Parent(s): 806a3a7

Update run/gradio_ootd.py

Browse files
Files changed (1) hide show
  1. run/gradio_ootd.py +105 -90
run/gradio_ootd.py CHANGED
@@ -8,6 +8,20 @@ import torch
8
  from PIL import Image, ImageOps
9
 
10
  from utils_ootd import get_mask_location
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
  PROJECT_ROOT = Path(__file__).absolute().parents[1].absolute()
13
  sys.path.insert(0, str(PROJECT_ROOT))
@@ -72,8 +86,8 @@ def process_hd(vton_img, garm_img, n_samples, n_steps, image_scale, seed):
72
  image_scale=image_scale,
73
  seed=seed,
74
  )
75
-
76
- return images
77
 
78
  @spaces.GPU
79
  def process_dc(vton_img, garm_img, category, n_samples, n_steps, image_scale, seed):
@@ -170,7 +184,8 @@ with block:
170
  os.path.join(example_path, 'garment/04825_00.jpg'),
171
  ])
172
  with gr.Column():
173
- result_gallery = gr.Gallery(label='Output', show_label=False, elem_id="gallery", preview=True, scale=1)
 
174
  with gr.Column():
175
  run_button = gr.Button(value="Run")
176
  n_samples = gr.Slider(label="Images", minimum=1, maximum=4, value=1, step=1)
@@ -180,95 +195,95 @@ with block:
180
  seed = gr.Slider(label="Seed", minimum=-1, maximum=2147483647, step=1, value=-1)
181
 
182
  ips = [vton_img, garm_img, n_samples, n_steps, image_scale, seed]
183
- run_button.click(fn=process_hd, inputs=ips, outputs=[result_gallery])
184
 
185
 
186
- with gr.Row():
187
- gr.Markdown("## Full-body")
188
- with gr.Row():
189
- gr.Markdown("***Support upper-body/lower-body/dresses; garment category must be paired!!!***")
190
- with gr.Row():
191
- with gr.Column():
192
- vton_img_dc = gr.Image(label="Model", sources='upload', type="filepath", height=384, value=model_dc)
193
- example = gr.Examples(
194
- label="Examples (upper-body/lower-body)",
195
- inputs=vton_img_dc,
196
- examples_per_page=7,
197
- examples=[
198
- os.path.join(example_path, 'model/model_8.png'),
199
- os.path.join(example_path, 'model/049447_0.jpg'),
200
- os.path.join(example_path, 'model/049713_0.jpg'),
201
- os.path.join(example_path, 'model/051482_0.jpg'),
202
- os.path.join(example_path, 'model/051918_0.jpg'),
203
- os.path.join(example_path, 'model/051962_0.jpg'),
204
- os.path.join(example_path, 'model/049205_0.jpg'),
205
- ])
206
- example = gr.Examples(
207
- label="Examples (dress)",
208
- inputs=vton_img_dc,
209
- examples_per_page=7,
210
- examples=[
211
- os.path.join(example_path, 'model/model_9.png'),
212
- os.path.join(example_path, 'model/052767_0.jpg'),
213
- os.path.join(example_path, 'model/052472_0.jpg'),
214
- os.path.join(example_path, 'model/053514_0.jpg'),
215
- os.path.join(example_path, 'model/053228_0.jpg'),
216
- os.path.join(example_path, 'model/052964_0.jpg'),
217
- os.path.join(example_path, 'model/053700_0.jpg'),
218
- ])
219
- with gr.Column():
220
- garm_img_dc = gr.Image(label="Garment", sources='upload', type="filepath", height=384, value=garment_dc)
221
- category_dc = gr.Dropdown(label="Garment category (important option!!!)", choices=["Upper-body", "Lower-body", "Dress"], value="Upper-body")
222
- example = gr.Examples(
223
- label="Examples (upper-body)",
224
- inputs=garm_img_dc,
225
- examples_per_page=7,
226
- examples=[
227
- os.path.join(example_path, 'garment/048554_1.jpg'),
228
- os.path.join(example_path, 'garment/049920_1.jpg'),
229
- os.path.join(example_path, 'garment/049965_1.jpg'),
230
- os.path.join(example_path, 'garment/049949_1.jpg'),
231
- os.path.join(example_path, 'garment/050181_1.jpg'),
232
- os.path.join(example_path, 'garment/049805_1.jpg'),
233
- os.path.join(example_path, 'garment/050105_1.jpg'),
234
- ])
235
- example = gr.Examples(
236
- label="Examples (lower-body)",
237
- inputs=garm_img_dc,
238
- examples_per_page=7,
239
- examples=[
240
- os.path.join(example_path, 'garment/051827_1.jpg'),
241
- os.path.join(example_path, 'garment/051946_1.jpg'),
242
- os.path.join(example_path, 'garment/051473_1.jpg'),
243
- os.path.join(example_path, 'garment/051515_1.jpg'),
244
- os.path.join(example_path, 'garment/051517_1.jpg'),
245
- os.path.join(example_path, 'garment/051988_1.jpg'),
246
- os.path.join(example_path, 'garment/051412_1.jpg'),
247
- ])
248
- example = gr.Examples(
249
- label="Examples (dress)",
250
- inputs=garm_img_dc,
251
- examples_per_page=7,
252
- examples=[
253
- os.path.join(example_path, 'garment/053290_1.jpg'),
254
- os.path.join(example_path, 'garment/053744_1.jpg'),
255
- os.path.join(example_path, 'garment/053742_1.jpg'),
256
- os.path.join(example_path, 'garment/053786_1.jpg'),
257
- os.path.join(example_path, 'garment/053790_1.jpg'),
258
- os.path.join(example_path, 'garment/053319_1.jpg'),
259
- os.path.join(example_path, 'garment/052234_1.jpg'),
260
- ])
261
- with gr.Column():
262
- result_gallery_dc = gr.Gallery(label='Output', show_label=False, elem_id="gallery", preview=True, scale=1)
263
- with gr.Column():
264
- run_button_dc = gr.Button(value="Run")
265
- n_samples_dc = gr.Slider(label="Images", minimum=1, maximum=4, value=1, step=1)
266
- n_steps_dc = gr.Slider(label="Steps", minimum=20, maximum=40, value=20, step=1)
267
- # scale_dc = gr.Slider(label="Scale", minimum=1.0, maximum=12.0, value=5.0, step=0.1)
268
- image_scale_dc = gr.Slider(label="Guidance scale", minimum=1.0, maximum=5.0, value=2.0, step=0.1)
269
- seed_dc = gr.Slider(label="Seed", minimum=-1, maximum=2147483647, step=1, value=-1)
270
 
271
- ips_dc = [vton_img_dc, garm_img_dc, category_dc, n_samples_dc, n_steps_dc, image_scale_dc, seed_dc]
272
- run_button_dc.click(fn=process_dc, inputs=ips_dc, outputs=[result_gallery_dc])
273
 
274
  block.launch()
 
8
  from PIL import Image, ImageOps
9
 
10
  from utils_ootd import get_mask_location
11
+ # Import base64 and BytesIO for encoding and decoding images
12
+ import base64
13
+ from io import BytesIO
14
+
15
# Convert a PIL image to a base64-encoded string
def pil_to_base64(image, image_format="PNG"):
    """Serialize *image* and return it as a base64 text string.

    Args:
        image: A ``PIL.Image.Image`` (anything exposing ``save(buffer, format=...)``).
        image_format: Serialization format handed to ``Image.save`` before
            encoding. Defaults to ``"PNG"`` (lossless), matching the previous
            hard-coded behavior.

    Returns:
        str: ASCII base64 text of the serialized image bytes ("" for empty output).
    """
    buffered = BytesIO()
    # Serialize into an in-memory buffer, then base64-encode the raw bytes.
    image.save(buffered, format=image_format)
    return base64.b64encode(buffered.getvalue()).decode('utf-8')
20
+
21
# Decode a base64 string back into a PIL image
def base64_to_pil(base64_str):
    """Decode base64 image text (e.g. output of ``pil_to_base64``) into a PIL image.

    Args:
        base64_str: Base64-encoded bytes of an image file.

    Returns:
        ``PIL.Image.Image`` opened from the decoded bytes.
    """
    # Decode to raw bytes and let Pillow sniff the format from the buffer.
    return Image.open(BytesIO(base64.b64decode(base64_str)))
25
 
26
  PROJECT_ROOT = Path(__file__).absolute().parents[1].absolute()
27
  sys.path.insert(0, str(PROJECT_ROOT))
 
86
  image_scale=image_scale,
87
  seed=seed,
88
  )
89
+ output_image_base64 = pil_to_base64(images)
90
+ return output_image_base64
91
 
92
  @spaces.GPU
93
  def process_dc(vton_img, garm_img, category, n_samples, n_steps, image_scale, seed):
 
184
  os.path.join(example_path, 'garment/04825_00.jpg'),
185
  ])
186
  with gr.Column():
187
+ #result_gallery = gr.Gallery(label='Output', show_label=False, elem_id="gallery", preview=True, scale=1)
188
+ result_gallery = gr.Textbox(label="Output",elem_id="output-img")
189
  with gr.Column():
190
  run_button = gr.Button(value="Run")
191
  n_samples = gr.Slider(label="Images", minimum=1, maximum=4, value=1, step=1)
 
195
  seed = gr.Slider(label="Seed", minimum=-1, maximum=2147483647, step=1, value=-1)
196
 
197
  ips = [vton_img, garm_img, n_samples, n_steps, image_scale, seed]
198
+ run_button.click(fn=process_hd, inputs=ips, outputs=[result_gallery],api_name='process_hd')
199
 
200
 
201
+ # with gr.Row():
202
+ # gr.Markdown("## Full-body")
203
+ # with gr.Row():
204
+ # gr.Markdown("***Support upper-body/lower-body/dresses; garment category must be paired!!!***")
205
+ # with gr.Row():
206
+ # with gr.Column():
207
+ # vton_img_dc = gr.Image(label="Model", sources='upload', type="filepath", height=384, value=model_dc)
208
+ # example = gr.Examples(
209
+ # label="Examples (upper-body/lower-body)",
210
+ # inputs=vton_img_dc,
211
+ # examples_per_page=7,
212
+ # examples=[
213
+ # os.path.join(example_path, 'model/model_8.png'),
214
+ # os.path.join(example_path, 'model/049447_0.jpg'),
215
+ # os.path.join(example_path, 'model/049713_0.jpg'),
216
+ # os.path.join(example_path, 'model/051482_0.jpg'),
217
+ # os.path.join(example_path, 'model/051918_0.jpg'),
218
+ # os.path.join(example_path, 'model/051962_0.jpg'),
219
+ # os.path.join(example_path, 'model/049205_0.jpg'),
220
+ # ])
221
+ # example = gr.Examples(
222
+ # label="Examples (dress)",
223
+ # inputs=vton_img_dc,
224
+ # examples_per_page=7,
225
+ # examples=[
226
+ # os.path.join(example_path, 'model/model_9.png'),
227
+ # os.path.join(example_path, 'model/052767_0.jpg'),
228
+ # os.path.join(example_path, 'model/052472_0.jpg'),
229
+ # os.path.join(example_path, 'model/053514_0.jpg'),
230
+ # os.path.join(example_path, 'model/053228_0.jpg'),
231
+ # os.path.join(example_path, 'model/052964_0.jpg'),
232
+ # os.path.join(example_path, 'model/053700_0.jpg'),
233
+ # ])
234
+ # with gr.Column():
235
+ # garm_img_dc = gr.Image(label="Garment", sources='upload', type="filepath", height=384, value=garment_dc)
236
+ # category_dc = gr.Dropdown(label="Garment category (important option!!!)", choices=["Upper-body", "Lower-body", "Dress"], value="Upper-body")
237
+ # example = gr.Examples(
238
+ # label="Examples (upper-body)",
239
+ # inputs=garm_img_dc,
240
+ # examples_per_page=7,
241
+ # examples=[
242
+ # os.path.join(example_path, 'garment/048554_1.jpg'),
243
+ # os.path.join(example_path, 'garment/049920_1.jpg'),
244
+ # os.path.join(example_path, 'garment/049965_1.jpg'),
245
+ # os.path.join(example_path, 'garment/049949_1.jpg'),
246
+ # os.path.join(example_path, 'garment/050181_1.jpg'),
247
+ # os.path.join(example_path, 'garment/049805_1.jpg'),
248
+ # os.path.join(example_path, 'garment/050105_1.jpg'),
249
+ # ])
250
+ # example = gr.Examples(
251
+ # label="Examples (lower-body)",
252
+ # inputs=garm_img_dc,
253
+ # examples_per_page=7,
254
+ # examples=[
255
+ # os.path.join(example_path, 'garment/051827_1.jpg'),
256
+ # os.path.join(example_path, 'garment/051946_1.jpg'),
257
+ # os.path.join(example_path, 'garment/051473_1.jpg'),
258
+ # os.path.join(example_path, 'garment/051515_1.jpg'),
259
+ # os.path.join(example_path, 'garment/051517_1.jpg'),
260
+ # os.path.join(example_path, 'garment/051988_1.jpg'),
261
+ # os.path.join(example_path, 'garment/051412_1.jpg'),
262
+ # ])
263
+ # example = gr.Examples(
264
+ # label="Examples (dress)",
265
+ # inputs=garm_img_dc,
266
+ # examples_per_page=7,
267
+ # examples=[
268
+ # os.path.join(example_path, 'garment/053290_1.jpg'),
269
+ # os.path.join(example_path, 'garment/053744_1.jpg'),
270
+ # os.path.join(example_path, 'garment/053742_1.jpg'),
271
+ # os.path.join(example_path, 'garment/053786_1.jpg'),
272
+ # os.path.join(example_path, 'garment/053790_1.jpg'),
273
+ # os.path.join(example_path, 'garment/053319_1.jpg'),
274
+ # os.path.join(example_path, 'garment/052234_1.jpg'),
275
+ # ])
276
+ # with gr.Column():
277
+ # result_gallery_dc = gr.Gallery(label='Output', show_label=False, elem_id="gallery", preview=True, scale=1)
278
+ # with gr.Column():
279
+ # run_button_dc = gr.Button(value="Run")
280
+ # n_samples_dc = gr.Slider(label="Images", minimum=1, maximum=4, value=1, step=1)
281
+ # n_steps_dc = gr.Slider(label="Steps", minimum=20, maximum=40, value=20, step=1)
282
+ # # scale_dc = gr.Slider(label="Scale", minimum=1.0, maximum=12.0, value=5.0, step=0.1)
283
+ # image_scale_dc = gr.Slider(label="Guidance scale", minimum=1.0, maximum=5.0, value=2.0, step=0.1)
284
+ # seed_dc = gr.Slider(label="Seed", minimum=-1, maximum=2147483647, step=1, value=-1)
285
 
286
+ # ips_dc = [vton_img_dc, garm_img_dc, category_dc, n_samples_dc, n_steps_dc, image_scale_dc, seed_dc]
287
+ # run_button_dc.click(fn=process_dc, inputs=ips_dc, outputs=[result_gallery_dc])
288
 
289
  block.launch()