Ffftdtd5dtft commited on
Commit
bc9b4da
verified
1 Parent(s): 67a0d1e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -3
app.py CHANGED
@@ -27,12 +27,11 @@ def truncate_prompt(prompt, max_length=77):
27
  truncated_prompt = tokenizer.decode(input_ids, skip_special_tokens=True)
28
  return truncated_prompt
29
 
30
- @spaces.GPU()
31
  def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, init_image=None, img2img_strength=0.75, progress=gr.Progress(track_tqdm=True)):
32
  if randomize_seed:
33
  seed = random.randint(0, MAX_SEED)
34
  generator = torch.Generator(device=device).manual_seed(seed)
35
-
36
  # Truncar el prompt si es demasiado largo
37
  prompt = truncate_prompt(prompt)
38
 
@@ -40,8 +39,10 @@ def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_in
40
  if init_image and not isinstance(init_image, Image.Image):
41
  init_image = Image.fromarray(np.array(init_image))
42
 
 
 
 
43
  try:
44
- # Generación de la imagen
45
  if init_image:
46
  init_image = init_image.convert("RGB")
47
  generated_image = pipe_img2img(
 
27
  truncated_prompt = tokenizer.decode(input_ids, skip_special_tokens=True)
28
  return truncated_prompt
29
 
 
30
  def infer(prompt, seed=42, randomize_seed=False, width=1024, height=1024, num_inference_steps=4, init_image=None, img2img_strength=0.75, progress=gr.Progress(track_tqdm=True)):
31
  if randomize_seed:
32
  seed = random.randint(0, MAX_SEED)
33
  generator = torch.Generator(device=device).manual_seed(seed)
34
+
35
  # Truncar el prompt si es demasiado largo
36
  prompt = truncate_prompt(prompt)
37
 
 
39
  if init_image and not isinstance(init_image, Image.Image):
40
  init_image = Image.fromarray(np.array(init_image))
41
 
42
+ # Liberar memoria CUDA antes de la generación
43
+ torch.cuda.empty_cache()
44
+
45
  try:
 
46
  if init_image:
47
  init_image = init_image.convert("RGB")
48
  generated_image = pipe_img2img(