Spaces:
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
|
@@ -243,7 +243,7 @@ load_diffusers_format_model = [
|
|
| 243 |
]
|
| 244 |
|
| 245 |
CIVITAI_API_KEY = os.environ.get("CIVITAI_API_KEY")
|
| 246 |
-
hf_token = os.environ.get("
|
| 247 |
|
| 248 |
# Download stuffs
|
| 249 |
for url in [url.strip() for url in download_model.split(',')]:
|
|
@@ -442,7 +442,7 @@ class GuiSD:
|
|
| 442 |
)
|
| 443 |
yield f"Model loaded: {model_name}"
|
| 444 |
|
| 445 |
-
@spaces.GPU(duration=
|
| 446 |
@torch.inference_mode()
|
| 447 |
def generate_pipeline(
|
| 448 |
self,
|
|
|
|
| 243 |
]
|
| 244 |
|
| 245 |
CIVITAI_API_KEY = os.environ.get("CIVITAI_API_KEY")
|
| 246 |
+
hf_token = os.environ.get("HF_READ_TOKEN")
|
| 247 |
|
| 248 |
# Download stuffs
|
| 249 |
for url in [url.strip() for url in download_model.split(',')]:
|
|
|
|
| 442 |
)
|
| 443 |
yield f"Model loaded: {model_name}"
|
| 444 |
|
| 445 |
+
@spaces.GPU(duration=59)
|
| 446 |
@torch.inference_mode()
|
| 447 |
def generate_pipeline(
|
| 448 |
self,
|