Spaces:
Running
on
Zero
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
|
@@ -57,11 +57,19 @@ pipe_hyper.to("cuda")
|
|
| 57 |
del unet
|
| 58 |
|
| 59 |
@spaces.GPU
|
| 60 |
-
def
|
| 61 |
image_turbo=pipe_turbo(prompt=prompt, num_inference_steps=1, guidance_scale=0).images[0]
|
|
|
|
|
|
|
|
|
|
|
|
|
| 62 |
image_lightning=pipe_lightning(prompt=prompt, num_inference_steps=1, guidance_scale=0).images[0]
|
|
|
|
|
|
|
|
|
|
|
|
|
| 63 |
image_hyper=pipe_hyper(prompt=prompt, num_inference_steps=1, guidance_scale=0, timesteps=[800]).images[0]
|
| 64 |
-
return
|
| 65 |
|
| 66 |
examples = ["A dignified beaver wearing glasses, a vest, and colorful neck tie.",
|
| 67 |
"The spirit of a tamagotchi wandering in the city of Barcelona",
|
|
@@ -90,16 +98,23 @@ with gr.Blocks() as demo:
|
|
| 90 |
image_outputs = [image_turbo, image_lightning, image_hyper]
|
| 91 |
gr.on(
|
| 92 |
triggers=[prompt.submit, run.click],
|
| 93 |
-
fn=
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 94 |
inputs=prompt,
|
| 95 |
-
outputs=
|
| 96 |
)
|
| 97 |
gr.Examples(
|
| 98 |
examples=examples,
|
| 99 |
fn=run_comparison,
|
| 100 |
inputs=prompt,
|
| 101 |
outputs=image_outputs,
|
| 102 |
-
cache_examples=False
|
| 103 |
-
run_on_click=True
|
| 104 |
)
|
| 105 |
demo.launch()
|
|
|
|
| 57 |
del unet
|
| 58 |
|
| 59 |
@spaces.GPU
def run_comparison_turbo(prompt, progress=gr.Progress(track_tqdm=True)):
    """Generate one image from the turbo pipeline (single step, CFG disabled)."""
    result = pipe_turbo(prompt=prompt, num_inference_steps=1, guidance_scale=0)
    return result.images[0]
|
| 63 |
+
|
| 64 |
+
@spaces.GPU
def run_comparison_lightning(prompt, progress=gr.Progress(track_tqdm=True)):
    """Generate one image from the lightning pipeline (single step, CFG disabled)."""
    result = pipe_lightning(prompt=prompt, num_inference_steps=1, guidance_scale=0)
    return result.images[0]
|
| 68 |
+
|
| 69 |
+
@spaces.GPU
def run_comparison_hyper(prompt, progress=gr.Progress(track_tqdm=True)):
    """Generate one image from the hyper pipeline (single step, CFG disabled, fixed timestep)."""
    result = pipe_hyper(
        prompt=prompt, num_inference_steps=1, guidance_scale=0, timesteps=[800]
    )
    return result.images[0]
|
| 73 |
|
| 74 |
examples = ["A dignified beaver wearing glasses, a vest, and colorful neck tie.",
|
| 75 |
"The spirit of a tamagotchi wandering in the city of Barcelona",
|
|
|
|
    image_outputs = [image_turbo, image_lightning, image_hyper]
    # On prompt submit or Run click, generate the three images sequentially:
    # each pipeline runs as its own GPU task, chained with .then() so the
    # results appear one after another instead of waiting for all three.
    gr.on(
        triggers=[prompt.submit, run.click],
        fn=run_comparison_turbo,
        inputs=prompt,
        outputs=image_turbo
    ).then(
        fn=run_comparison_lightning,
        inputs=prompt,
        outputs=image_lightning
    ).then(
        fn=run_comparison_hyper,
        inputs=prompt,
        outputs=image_hyper
    )
    # NOTE(review): this change removed `run_comparison` in favor of the three
    # per-pipeline functions, yet `fn=run_comparison` is still referenced here.
    # With cache_examples=False (and run_on_click removed) the fn is not
    # executed, but the name is still evaluated when Blocks is built — confirm
    # `run_comparison` remains defined elsewhere in the file.
    gr.Examples(
        examples=examples,
        fn=run_comparison,
        inputs=prompt,
        outputs=image_outputs,
        cache_examples=False
    )
|
# Start the Gradio app server.
demo.launch()