Spaces:
Configuration error
Configuration error
Anonymous
committed on
Commit
·
b39a2ed
1
Parent(s):
c4f1e79
load in advance
Browse files
app.py
CHANGED
|
@@ -18,7 +18,7 @@ pipe_turbo = StableDiffusionXLPipeline_Turbo.from_pretrained(model_ckpt_turbo, t
|
|
| 18 |
torch.cuda.empty_cache()
|
| 19 |
|
| 20 |
@spaces.GPU(duration=120)
|
| 21 |
-
def infer_gpu_part(
|
| 22 |
generator = torch.Generator(device='cuda')
|
| 23 |
generator = generator.manual_seed(seed)
|
| 24 |
if not disable_freeu:
|
|
@@ -32,13 +32,13 @@ def infer_gpu_part(pipe, seed, prompt, negative_prompt, ddim_steps, guidance_sca
|
|
| 32 |
return result
|
| 33 |
|
| 34 |
@spaces.GPU(duration=40)
|
| 35 |
-
def infer_gpu_part_turbo(
|
| 36 |
generator = torch.Generator(device='cuda')
|
| 37 |
generator = generator.manual_seed(seed)
|
| 38 |
if not disable_freeu:
|
| 39 |
-
register_free_upblock2d(
|
| 40 |
-
register_free_crossattn_upblock2d(
|
| 41 |
-
result =
|
| 42 |
num_inference_steps=ddim_steps, guidance_scale=guidance_scale,
|
| 43 |
resolutions_list=resolutions_list, fast_mode=fast_mode, cosine_scale=cosine_scale,
|
| 44 |
restart_steps=restart_steps,
|
|
@@ -67,7 +67,7 @@ def infer(prompt, output_size, ddim_steps, guidance_scale, cosine_scale, seed, o
|
|
| 67 |
restart_steps = [int(ddim_steps * 0.3)]
|
| 68 |
|
| 69 |
# print('GPU starts')
|
| 70 |
-
result = infer_gpu_part(
|
| 71 |
# print('GPU ends')
|
| 72 |
|
| 73 |
else:
|
|
@@ -87,7 +87,7 @@ def infer(prompt, output_size, ddim_steps, guidance_scale, cosine_scale, seed, o
|
|
| 87 |
restart_steps = [int(ddim_steps * 0.5)] * 2
|
| 88 |
|
| 89 |
# print('GPU starts')
|
| 90 |
-
result = infer_gpu_part_turbo(
|
| 91 |
# print('GPU ends')
|
| 92 |
|
| 93 |
return result
|
|
|
|
| 18 |
torch.cuda.empty_cache()
|
| 19 |
|
| 20 |
@spaces.GPU(duration=120)
|
| 21 |
+
def infer_gpu_part(seed, prompt, negative_prompt, ddim_steps, guidance_scale, resolutions_list, fast_mode, cosine_scale, disable_freeu, restart_steps):
|
| 22 |
generator = torch.Generator(device='cuda')
|
| 23 |
generator = generator.manual_seed(seed)
|
| 24 |
if not disable_freeu:
|
|
|
|
| 32 |
return result
|
| 33 |
|
| 34 |
@spaces.GPU(duration=40)
|
| 35 |
+
def infer_gpu_part_turbo(seed, prompt, negative_prompt, ddim_steps, guidance_scale, resolutions_list, fast_mode, cosine_scale, disable_freeu, restart_steps):
|
| 36 |
generator = torch.Generator(device='cuda')
|
| 37 |
generator = generator.manual_seed(seed)
|
| 38 |
if not disable_freeu:
|
| 39 |
+
register_free_upblock2d(pipe_turbo, b1=1.1, b2=1.2, s1=0.6, s2=0.4)
|
| 40 |
+
register_free_crossattn_upblock2d(pipe_turbo, b1=1.1, b2=1.2, s1=0.6, s2=0.4)
|
| 41 |
+
result = pipe_turbo(prompt, negative_prompt=negative_prompt, generator=generator,
|
| 42 |
num_inference_steps=ddim_steps, guidance_scale=guidance_scale,
|
| 43 |
resolutions_list=resolutions_list, fast_mode=fast_mode, cosine_scale=cosine_scale,
|
| 44 |
restart_steps=restart_steps,
|
|
|
|
| 67 |
restart_steps = [int(ddim_steps * 0.3)]
|
| 68 |
|
| 69 |
# print('GPU starts')
|
| 70 |
+
result = infer_gpu_part(seed, prompt, negative_prompt, ddim_steps, guidance_scale, resolutions_list, fast_mode, cosine_scale, disable_freeu, restart_steps)
|
| 71 |
# print('GPU ends')
|
| 72 |
|
| 73 |
else:
|
|
|
|
| 87 |
restart_steps = [int(ddim_steps * 0.5)] * 2
|
| 88 |
|
| 89 |
# print('GPU starts')
|
| 90 |
+
result = infer_gpu_part_turbo(seed, prompt, negative_prompt, ddim_steps, guidance_scale, resolutions_list, fast_mode, cosine_scale, disable_freeu, restart_steps)
|
| 91 |
# print('GPU ends')
|
| 92 |
|
| 93 |
return result
|