Spaces:
Running
on
Zero
Running
on
Zero
WeichenFan
committed on
Commit
·
cae415f
1
Parent(s):
7f96c09
Add application file
Browse files
- app.py +2 -0
- requirements.txt +1 -1
app.py
CHANGED
|
@@ -12,6 +12,7 @@ from wan_pipeline import WanPipeline
|
|
| 12 |
from diffusers.schedulers.scheduling_unipc_multistep import UniPCMultistepScheduler
|
| 13 |
from PIL import Image
|
| 14 |
from diffusers.utils import export_to_video
|
|
|
|
| 15 |
|
| 16 |
def set_seed(seed):
|
| 17 |
random.seed(seed)
|
|
@@ -52,6 +53,7 @@ def load_model(model_name):
|
|
| 52 |
return current_model.to('cuda')
|
| 53 |
|
| 54 |
|
|
|
|
| 55 |
def generate_content(prompt, model_name, guidance_scale=7.5, num_inference_steps=50, use_cfg_zero_star=True, use_zero_init=True, zero_steps=0, seed=None, compare_mode=False):
|
| 56 |
model = load_model(model_name)
|
| 57 |
if seed is None:
|
|
|
|
| 12 |
from diffusers.schedulers.scheduling_unipc_multistep import UniPCMultistepScheduler
|
| 13 |
from PIL import Image
|
| 14 |
from diffusers.utils import export_to_video
|
| 15 |
+
import spaces
|
| 16 |
|
| 17 |
def set_seed(seed):
|
| 18 |
random.seed(seed)
|
|
|
|
| 53 |
return current_model.to('cuda')
|
| 54 |
|
| 55 |
|
| 56 |
+
@spaces.GPU(duration=120)
|
| 57 |
def generate_content(prompt, model_name, guidance_scale=7.5, num_inference_steps=50, use_cfg_zero_star=True, use_zero_init=True, zero_steps=0, seed=None, compare_mode=False):
|
| 58 |
model = load_model(model_name)
|
| 59 |
if seed is None:
|
requirements.txt
CHANGED
|
@@ -11,4 +11,4 @@ moviepy
|
|
| 11 |
sentencepiece
|
| 12 |
Pillow==9.5.0
|
| 13 |
ftfy
|
| 14 |
-
|
|
|
|
| 11 |
sentencepiece
|
| 12 |
Pillow==9.5.0
|
| 13 |
ftfy
|
| 14 |
+
spaces
|