rahul7star committed on
Commit
f15c73f
·
verified ·
1 Parent(s): f755829

Update app_quant_latent1.py

Browse files
Files changed (1) hide show
  1. app_quant_latent1.py +24 -26
app_quant_latent1.py CHANGED
@@ -270,32 +270,30 @@ def safe_generate_with_latents(
270
  max_sequence_length,
271
  ):
272
 
273
- try:
274
-
275
- latents_or_images = generate(
276
- transformer=transformer,
277
- vae=vae,
278
- text_encoder=text_encoder,
279
- tokenizer=tokenizer,
280
- scheduler=scheduler,
281
- prompt=prompt,
282
- height=height,
283
- width=width,
284
- num_inference_steps=steps,
285
- guidance_scale=guidance_scale,
286
- negative_prompt=negative_prompt,
287
- num_images_per_prompt=num_images_per_prompt,
288
- generator=generator,
289
- cfg_normalization=cfg_normalization,
290
- cfg_truncation=cfg_truncation,
291
- max_sequence_length=max_sequence_length,
292
- output_type="latent", # IMPORTANT
293
- )
294
-
295
-
296
- return latents_or_images, None
297
-
298
- except Exception as e:
299
  return None, e
300
 
301
 
 
270
  max_sequence_length,
271
  ):
272
 
273
+ try:
274
+
275
+ latents_or_images = generate(
276
+ transformer=transformer,
277
+ vae=vae,
278
+ text_encoder=text_encoder,
279
+ tokenizer=tokenizer,
280
+ scheduler=scheduler,
281
+ prompt=prompt,
282
+ height=height,
283
+ width=width,
284
+ num_inference_steps=steps,
285
+ guidance_scale=guidance_scale,
286
+ negative_prompt=negative_prompt,
287
+ num_images_per_prompt=num_images_per_prompt,
288
+ generator=generator,
289
+ cfg_normalization=cfg_normalization,
290
+ cfg_truncation=cfg_truncation,
291
+ max_sequence_length=max_sequence_length,
292
+ output_type="latent", # IMPORTANT
293
+ )
294
+ return latents_or_images, None
295
+
296
+ except Exception as e:
 
 
297
  return None, e
298
 
299