Darius Morawiec committed on
Commit
02e527d
·
1 Parent(s): 94ca913

Improve memory management and error handling in image processing

Browse files
Files changed (1) hide show
  1. app.py +19 -11
app.py CHANGED
@@ -1,6 +1,7 @@
1
  import shutil
2
  import time
3
  from pathlib import Path
 
4
 
5
  import cv2
6
  import gradio as gr
@@ -246,8 +247,6 @@ with gr.Blocks() as demo:
246
  with gr.Row():
247
  run_button = gr.Button("Run")
248
 
249
- model = Model()
250
-
251
  def preprocess_image(
252
  image: PIL.Image.Image,
253
  image_size: int = IMAGE_SIZE,
@@ -276,6 +275,9 @@ with gr.Blocks() as demo:
276
  num_blocks_on_gpu,
277
  seed,
278
  ):
 
 
 
279
  pil_images = []
280
  for contents in images:
281
  for content in contents:
@@ -285,15 +287,21 @@ with gr.Blocks() as demo:
285
  break
286
 
287
  try:
288
- output_image = model.compute(
289
- pil_images,
290
- prompt,
291
- negative_prompt,
292
- true_cfg_scale,
293
- num_inference_steps,
294
- num_blocks_on_gpu,
295
- seed,
296
- )
 
 
 
 
 
 
297
 
298
  # Save the output image for download
299
  timestamp = int(time.time())
 
1
  import shutil
2
  import time
3
  from pathlib import Path
4
+ from xml.parsers.expat import model
5
 
6
  import cv2
7
  import gradio as gr
 
247
  with gr.Row():
248
  run_button = gr.Button("Run")
249
 
 
 
250
  def preprocess_image(
251
  image: PIL.Image.Image,
252
  image_size: int = IMAGE_SIZE,
 
275
  num_blocks_on_gpu,
276
  seed,
277
  ):
278
+ if DEVICE == "cuda":
279
+ torch.cuda.empty_cache()
280
+
281
  pil_images = []
282
  for contents in images:
283
  for content in contents:
 
287
  break
288
 
289
  try:
290
+ model = Model()
291
+
292
+ try:
293
+ output_image = model.compute(
294
+ pil_images,
295
+ prompt,
296
+ negative_prompt,
297
+ true_cfg_scale,
298
+ num_inference_steps,
299
+ num_blocks_on_gpu,
300
+ seed,
301
+ )
302
+ except Exception:
303
+ if DEVICE == "cuda":
304
+ torch.cuda.empty_cache()
305
 
306
  # Save the output image for download
307
  timestamp = int(time.time())