app.py CHANGED
@@ -14,20 +14,26 @@ if not torch.cuda.is_available():
     DESCRIPTION += "\n<p>Running on CPU 🥶 This demo may not work on CPU.</p>"
 
 MAX_SEED = np.iinfo(np.int32).max
-CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "0") == "1"
+CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "1") == "1"
 MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "4096"))
 USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE", "0") == "1"
 ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD", "0") == "1"
 
 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
 
-
-
-
-
-
-
-
+if torch.cuda.is_available():
+    pipe = StableDiffusionXLPipeline.from_pretrained(
+        "sd-community/sdxl-flash",
+        torch_dtype=torch.float16,
+        use_safetensors=True,
+        add_watermarker=False
+    )
+    pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
+    pipe.load_lora_weights("ehristoforu/dalle-3-xl-v2", weight_name="dalle-3-xl-lora-v2.safetensors", adapter_name="dalle")
+    pipe.set_adapters("dalle")
+
+    pipe.to("cuda")
+
 
 def save_image(img):
     unique_name = str(uuid.uuid4()) + ".png"
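The added block replaces per-request LoRA setup with a one-time initialization at import (the contents of the seven deleted lines were not rendered in the source view). A minimal standalone sketch of the new startup path, assuming a CUDA machine and the diffusers/torch imports that app.py already has:

import torch
from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler

# Half-precision SDXL-Flash, with the invisible watermarker disabled.
pipe = StableDiffusionXLPipeline.from_pretrained(
    "sd-community/sdxl-flash",
    torch_dtype=torch.float16,
    use_safetensors=True,
    add_watermarker=False,
)
# Swap in the Euler Ancestral scheduler, as the diff does.
pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
# Load one LoRA adapter once, instead of inside every generate() call.
pipe.load_lora_weights(
    "ehristoforu/dalle-3-xl-v2",
    weight_name="dalle-3-xl-lora-v2.safetensors",
    adapter_name="dalle",
)
pipe.set_adapters("dalle")
pipe.to("cuda")

Doing this at module import means the weights are fetched and attached exactly once per process.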
@@ -39,10 +45,9 @@ def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
         seed = random.randint(0, MAX_SEED)
     return seed
 
-@spaces.GPU(duration=
+@spaces.GPU(duration=30, queue=False)
 def generate(
     prompt: str,
-    style: str = "BEST",
     negative_prompt: str = "",
     use_negative_prompt: bool = False,
     seed: int = 0,
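For context, spaces.GPU is the ZeroGPU decorator from Hugging Face's spaces package: it attaches a GPU to the process only for the duration of each decorated call, and duration=30 caps that slice at roughly 30 seconds (the queue=False flag is reproduced from the diff as-is). A minimal usage sketch with the body elided:

import spaces

@spaces.GPU(duration=30)  # hold a ZeroGPU device for at most ~30 s per call
def generate(prompt: str):
    ...  # pipeline inference runs here while the GPU is attached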
@@ -54,31 +59,12 @@ def generate(
     use_resolution_binning: bool = True,
     progress=gr.Progress(track_tqdm=True),
 ):
-    if style=="BEST":
-        pipe.load_lora_weights("ehristoforu/dalle-3-xl-v2", weight_name="dalle-3-xl-lora-v2.safetensors", adapter_name="dalle2")
-        pipe.load_lora_weights("ehristoforu/dalle-3-xl", weight_name="dalle-3-xl-lora-v1.safetensors", adapter_name="dalle1")
-        pipe.set_adapters(["dalle2","dalle1"], adapter_weights=[0.7, 0.3])
-    elif style=="Origami":
-        pipe.load_lora_weights("RalFinger/origami-style-sdxl-lora", weight_name="ral-orgmi-sdxl.safetensors", adapter_name="origami")
-        pipe.set_adapters(["origami"], adapter_weights=[2])
-    elif style=="3D":
-        pipe.load_lora_weights("artificialguybr/3DRedmond-V1", weight_name="3DRedmond-3DRenderStyle-3DRenderAF.safetensors", adapter_name="3d")
-        pipe.set_adapters(["3d"])
-    elif style=="PixelART":
-        pipe.load_lora_weights("artificialguybr/PixelArtRedmond", weight_name="PixelArtRedmond-Lite64.safetensors", adapter_name="lora")
-        pipe.load_lora_weights("nerijs/pixel-art-xl", weight_name="pixel-art-xl.safetensors", adapter_name="pixel")
-        pipe.set_adapters(["lora", "pixel"], adapter_weights=[1.0, 1.2])
-    elif style=="Logo":
-        pipe.load_lora_weights("artificialguybr/LogoRedmond-LogoLoraForSDXL", weight_name="LogoRedmond_LogoRedAF.safetensors", adapter_name="logo")
-        pipe.set_adapters(["logo"])
-
-    pipe.to("cuda")
+    pipe.to(device)
     seed = int(randomize_seed_fn(seed, randomize_seed))
     generator = torch.Generator().manual_seed(seed)
 
     options = {
         "prompt":prompt,
-        "style":style,
         "negative_prompt":negative_prompt,
         "width":width,
         "height":height,
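For contrast, the branch removed above re-downloaded and re-activated LoRA weights inside every request, including a two-adapter blend for the PixelART style. A sketch of that blending pattern, with repo and adapter names taken from the deleted lines (assumes a pipe like the one above):

# Two pixel-art LoRAs, mixed with per-adapter weights.
pipe.load_lora_weights(
    "artificialguybr/PixelArtRedmond",
    weight_name="PixelArtRedmond-Lite64.safetensors",
    adapter_name="lora",
)
pipe.load_lora_weights(
    "nerijs/pixel-art-xl",
    weight_name="pixel-art-xl.safetensors",
    adapter_name="pixel",
)
pipe.set_adapters(["lora", "pixel"], adapter_weights=[1.0, 1.2])

The rewrite also drops the "style" key from the options dict, presumably because the pipeline call itself does not accept it as a keyword.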
@@ -127,8 +113,6 @@ with gr.Blocks(css=css) as demo:
         )
         run_button = gr.Button("Run", scale=0)
     result = gr.Gallery(label="Result", columns=1)
-    with gr.Row():
-        style = gr.Radio(choices=["Default","BEST","3D", "PixelART","Logo","Origami"],label="Style", value="Default", interactive=True)
     with gr.Accordion("Advanced options", open=False):
         with gr.Row():
             use_negative_prompt = gr.Checkbox(label="Use negative prompt", value=True)
@@ -203,7 +187,6 @@ with gr.Blocks(css=css) as demo:
         fn=generate,
         inputs=[
             prompt,
-            style,
             negative_prompt,
             use_negative_prompt,
             seed,
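The last two hunks are one change seen from both sides: the Style radio leaves the layout, and its value leaves the inputs list, keeping the Gradio wiring in step with generate()'s slimmed signature. A hedged sketch of that wiring (the click handler and the abridged inputs list are illustrative, not copied from app.py):

run_button.click(
    fn=generate,
    inputs=[prompt, negative_prompt, use_negative_prompt, seed],  # style removed; list abridged
    outputs=[result],
)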