Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -1,14 +1,13 @@
|
|
| 1 |
import os
|
| 2 |
-
if os.environ.get("SPACES_ZERO_GPU") is not None:
|
| 3 |
-
|
| 4 |
-
else:
|
| 5 |
-
class spaces:
|
| 6 |
-
@staticmethod
|
| 7 |
-
def GPU(func):
|
| 8 |
-
def wrapper(*args, **kwargs):
|
| 9 |
-
return func(*args, **kwargs)
|
| 10 |
-
return wrapper
|
| 11 |
-
|
| 12 |
import gradio as gr
|
| 13 |
import json
|
| 14 |
import logging
|
|
@@ -43,11 +42,13 @@ from diffusers.models.autoencoders import AutoencoderKL
|
|
| 43 |
from transformers import CLIPModel, CLIPProcessor, CLIPTextModel, CLIPTokenizer, CLIPConfig, T5EncoderModel, T5Tokenizer
|
| 44 |
import gc
|
| 45 |
import warnings
|
| 46 |
-
model_path = snapshot_download(repo_id="Kijai/OpenFLUX-comfy")
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
os.environ["…"] = …  [line truncated in extraction — presumably one of the cache-path assignments (TRANSFORMERS_CACHE / HF_HUB_CACHE / HF_HOME) that appear commented out in the new revision; verify against the original commit]
|
|
|
|
|
|
|
| 51 |
|
| 52 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 53 |
|
|
@@ -690,7 +691,7 @@ class EmptyInitWrapper(torch.overrides.TorchFunctionMode):
|
|
| 690 |
with EmptyInitWrapper():
|
| 691 |
model = Flux().to(dtype=torch.bfloat16, device="cuda")
|
| 692 |
|
| 693 |
-
sd = load_file(f"{model_path}/….safetensors")  [line truncated in extraction — the old revision loaded a checkpoint from the Kijai/OpenFLUX-comfy snapshot; the filename is not recoverable from this page. The new revision loads consolidated_s6700.safetensors instead]
|
| 694 |
sd = {k.replace("model.", ""): v for k, v in sd.items()}
|
| 695 |
result = model.load_state_dict(sd)
|
| 696 |
|
|
|
|
| 1 |
import os
|
| 2 |
+
#if os.environ.get("SPACES_ZERO_GPU") is not None:
|
| 3 |
+
import spaces
|
| 4 |
+
#else:
|
| 5 |
+
# class spaces:
|
| 6 |
+
# @staticmethod
|
| 7 |
+
# def GPU(func):
|
| 8 |
+
# def wrapper(*args, **kwargs):
|
| 9 |
+
# return func(*args, **kwargs)
|
| 10 |
+
# return wrapper
|
|
|
|
| 11 |
import gradio as gr
|
| 12 |
import json
|
| 13 |
import logging
|
|
|
|
| 42 |
from transformers import CLIPModel, CLIPProcessor, CLIPTextModel, CLIPTokenizer, CLIPConfig, T5EncoderModel, T5Tokenizer
|
| 43 |
import gc
|
| 44 |
import warnings
|
| 45 |
+
#model_path = snapshot_download(repo_id="Kijai/OpenFLUX-comfy")
|
| 46 |
+
model_path = snapshot_download(repo_id="nyanko7/flux-dev-de-distill")
|
| 47 |
+
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 48 |
+
#cache_path = path.join(path.dirname(path.abspath(__file__)), "models")
|
| 49 |
+
#os.environ["TRANSFORMERS_CACHE"] = cache_path
|
| 50 |
+
#os.environ["HF_HUB_CACHE"] = cache_path
|
| 51 |
+
#os.environ["HF_HOME"] = cache_path
|
| 52 |
|
| 53 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 54 |
|
|
|
|
| 691 |
with EmptyInitWrapper():
|
| 692 |
model = Flux().to(dtype=torch.bfloat16, device="cuda")
|
| 693 |
|
| 694 |
+
sd = load_file(f"{model_path}/consolidated_s6700.safetensors")
|
| 695 |
sd = {k.replace("model.", ""): v for k, v in sd.items()}
|
| 696 |
result = model.load_state_dict(sd)
|
| 697 |
|