Spaces:
Running
on
Zero
Running
on
Zero
add second version in a new tab
Browse files
app.py
CHANGED
|
@@ -2,6 +2,7 @@ import os
|
|
| 2 |
import random
|
| 3 |
import sys
|
| 4 |
from typing import Sequence, Mapping, Any, Union
|
|
|
|
| 5 |
import torch
|
| 6 |
import gradio as gr
|
| 7 |
from PIL import Image
|
|
@@ -14,10 +15,12 @@ from huggingface_hub import hf_hub_download
|
|
| 14 |
|
| 15 |
hf_hub_download(repo_id="stable-diffusion-v1-5/stable-diffusion-v1-5", filename="v1-5-pruned-emaonly.ckpt", local_dir="models/checkpoints")
|
| 16 |
hf_hub_download(repo_id="Lykon/DreamShaper", filename="DreamShaper_3.32_baked_vae_clip_fix_half.safetensors", local_dir="models/checkpoints")
|
|
|
|
| 17 |
hf_hub_download(repo_id="latentcat/latentcat-controlnet", filename="models/control_v1p_sd15_brightness.safetensors", local_dir="models/controlnet")
|
| 18 |
hf_hub_download(repo_id="comfyanonymous/ControlNet-v1-1_fp16_safetensors", filename="control_v11f1e_sd15_tile_fp16.safetensors", local_dir="models/controlnet")
|
| 19 |
hf_hub_download(repo_id="Lykon/dreamshaper-7", filename="vae/diffusion_pytorch_model.fp16.safetensors", local_dir="models")
|
| 20 |
hf_hub_download(repo_id="stabilityai/sd-vae-ft-mse-original", filename="vae-ft-mse-840000-ema-pruned.safetensors", local_dir="models/vae")
|
|
|
|
| 21 |
|
| 22 |
def get_value_at_index(obj: Union[Sequence, Mapping], index: int) -> Any:
|
| 23 |
"""Returns the value at the given index of a sequence or mapping.
|
|
@@ -128,16 +131,19 @@ def import_custom_nodes() -> None:
|
|
| 128 |
|
| 129 |
from nodes import NODE_CLASS_MAPPINGS
|
| 130 |
|
|
|
|
| 131 |
checkpointloadersimple = NODE_CLASS_MAPPINGS["CheckpointLoaderSimple"]()
|
| 132 |
checkpointloadersimple_4 = checkpointloadersimple.load_checkpoint(
|
| 133 |
ckpt_name="DreamShaper_3.32_baked_vae_clip_fix_half.safetensors"
|
| 134 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 135 |
emptylatentimage = NODE_CLASS_MAPPINGS["EmptyLatentImage"]()
|
| 136 |
cliptextencode = NODE_CLASS_MAPPINGS["CLIPTextEncode"]()
|
| 137 |
controlnetloader = NODE_CLASS_MAPPINGS["ControlNetLoader"]()
|
| 138 |
-
|
| 139 |
controlnetapplyadvanced = NODE_CLASS_MAPPINGS["ControlNetApplyAdvanced"]()
|
| 140 |
-
|
| 141 |
ksampler = NODE_CLASS_MAPPINGS["KSampler"]()
|
| 142 |
vaedecode = NODE_CLASS_MAPPINGS["VAEDecode"]()
|
| 143 |
|
|
@@ -145,22 +151,31 @@ import_custom_nodes()
|
|
| 145 |
comfy_qr_by_module_size = NODE_CLASS_MAPPINGS["comfy-qr-by-module-size"]()
|
| 146 |
tilepreprocessor = NODE_CLASS_MAPPINGS["TilePreprocessor"]()
|
| 147 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 148 |
from comfy import model_management
|
| 149 |
|
| 150 |
-
#Add all the models that load a safetensors file
|
| 151 |
-
model_loaders = [checkpointloadersimple_4]
|
| 152 |
|
| 153 |
# Check which models are valid and how to best load them
|
| 154 |
valid_models = [
|
| 155 |
-
getattr(loader[0], 'patcher', loader[0])
|
| 156 |
for loader in model_loaders
|
| 157 |
if not isinstance(loader[0], dict) and not isinstance(getattr(loader[0], 'patcher', None), dict)
|
| 158 |
]
|
| 159 |
|
| 160 |
model_management.load_models_gpu(valid_models)
|
| 161 |
|
| 162 |
-
@spaces.GPU(duration=
|
| 163 |
-
def
|
| 164 |
# Only manipulate the text if it's a URL input type
|
| 165 |
qr_text = text_input
|
| 166 |
if input_type == "URL":
|
|
@@ -169,165 +184,434 @@ def generate_qr_code(prompt: str, text_input: str, input_type: str = "URL", imag
|
|
| 169 |
if "http://" in qr_text:
|
| 170 |
qr_text = qr_text.replace("http://", "")
|
| 171 |
|
|
|
|
|
|
|
|
|
|
| 172 |
with torch.inference_mode():
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 173 |
|
| 174 |
-
|
| 175 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 176 |
)
|
| 177 |
|
| 178 |
-
|
| 179 |
-
|
| 180 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 181 |
)
|
| 182 |
|
| 183 |
-
|
| 184 |
-
|
| 185 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 186 |
)
|
| 187 |
|
| 188 |
-
|
| 189 |
-
|
|
|
|
| 190 |
)
|
| 191 |
|
| 192 |
-
|
| 193 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 194 |
)
|
| 195 |
|
| 196 |
-
|
| 197 |
-
|
| 198 |
-
|
| 199 |
-
|
| 200 |
-
|
| 201 |
-
|
| 202 |
-
|
| 203 |
-
|
| 204 |
-
|
| 205 |
-
|
| 206 |
-
|
| 207 |
-
error_correction=error_correction,
|
| 208 |
-
border=border_size,
|
| 209 |
-
module_drawer=module_drawer,
|
| 210 |
-
)
|
| 211 |
-
except RuntimeError as e:
|
| 212 |
-
error_msg = (
|
| 213 |
-
f"Error generating QR code: {str(e)}\n"
|
| 214 |
-
"Try with a shorter text, increase the image size, or decrease the border size, module size, and error correction level under Advanced Settings."
|
| 215 |
-
)
|
| 216 |
-
# Stream a single error message
|
| 217 |
-
yield None, error_msg
|
| 218 |
-
return
|
| 219 |
-
|
| 220 |
-
# 1) Yield the base QR image as the first intermediate result
|
| 221 |
-
base_qr_tensor = get_value_at_index(comfy_qr_by_module_size_15, 0)
|
| 222 |
-
base_qr_np = (base_qr_tensor.cpu().numpy() * 255).astype(np.uint8)
|
| 223 |
-
base_qr_np = base_qr_np[0]
|
| 224 |
-
base_qr_pil = Image.fromarray(base_qr_np)
|
| 225 |
-
yield base_qr_pil, "Generated base QR pattern… enhancing with AI (step 1/3)"
|
| 226 |
-
|
| 227 |
-
emptylatentimage_17 = emptylatentimage.generate(
|
| 228 |
-
width=image_size*2, height=image_size*2, batch_size=1
|
| 229 |
)
|
| 230 |
|
| 231 |
-
|
| 232 |
-
|
|
|
|
| 233 |
)
|
| 234 |
|
| 235 |
-
#
|
| 236 |
-
|
| 237 |
-
|
| 238 |
-
|
| 239 |
-
|
| 240 |
-
|
| 241 |
-
|
| 242 |
-
|
| 243 |
-
|
| 244 |
-
|
| 245 |
-
|
| 246 |
-
|
| 247 |
-
|
| 248 |
-
|
| 249 |
-
|
| 250 |
-
|
| 251 |
-
|
| 252 |
-
|
| 253 |
-
|
| 254 |
-
|
| 255 |
-
|
| 256 |
-
|
| 257 |
-
|
| 258 |
-
|
| 259 |
-
|
| 260 |
-
|
| 261 |
-
|
| 262 |
-
|
| 263 |
-
|
| 264 |
-
|
| 265 |
-
|
| 266 |
-
|
| 267 |
-
|
| 268 |
-
|
| 269 |
-
|
| 270 |
-
|
| 271 |
-
|
| 272 |
-
|
| 273 |
-
|
| 274 |
-
|
| 275 |
-
|
| 276 |
-
|
| 277 |
-
|
| 278 |
-
|
| 279 |
-
|
| 280 |
-
|
| 281 |
-
|
| 282 |
-
|
| 283 |
-
|
| 284 |
-
|
| 285 |
-
|
| 286 |
-
|
| 287 |
-
|
| 288 |
-
|
| 289 |
-
|
| 290 |
-
|
| 291 |
-
|
| 292 |
-
|
| 293 |
-
|
| 294 |
-
|
| 295 |
-
|
| 296 |
-
|
| 297 |
-
|
| 298 |
-
|
| 299 |
-
|
| 300 |
-
|
| 301 |
-
|
| 302 |
-
|
| 303 |
-
|
| 304 |
-
|
| 305 |
-
|
| 306 |
-
|
| 307 |
-
|
| 308 |
-
|
| 309 |
-
|
| 310 |
-
|
| 311 |
-
|
| 312 |
-
|
| 313 |
-
|
| 314 |
-
|
| 315 |
-
|
| 316 |
-
|
| 317 |
-
|
| 318 |
-
|
| 319 |
-
|
| 320 |
-
|
| 321 |
-
|
| 322 |
-
|
| 323 |
-
|
| 324 |
-
|
| 325 |
-
|
| 326 |
-
|
| 327 |
-
|
| 328 |
-
|
| 329 |
-
|
| 330 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 331 |
|
| 332 |
|
| 333 |
if __name__ == "__main__":
|
|
@@ -337,7 +621,7 @@ if __name__ == "__main__":
|
|
| 337 |
# Add a title and description
|
| 338 |
gr.Markdown("# QR Code Art Generator")
|
| 339 |
gr.Markdown("""
|
| 340 |
-
This is an AI-powered QR code generator that creates artistic QR codes using Stable Diffusion 1.5 and ControlNet models.
|
| 341 |
The application uses a custom ComfyUI workflow to generate QR codes.
|
| 342 |
|
| 343 |
### Tips:
|
|
@@ -346,248 +630,524 @@ if __name__ == "__main__":
|
|
| 346 |
- Choose **URL** mode for web links or **Plain Text** mode for VCARD, WiFi credentials, calendar events, etc.
|
| 347 |
- Try the examples below for inspiration
|
| 348 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 349 |
### Note:
|
| 350 |
Feel free to share your suggestions or feedback on how to improve the app! Thanks!
|
| 351 |
-
|
| 352 |
-
|
| 353 |
-
|
| 354 |
-
with gr.
|
| 355 |
-
|
| 356 |
-
|
| 357 |
-
|
| 358 |
-
|
| 359 |
-
|
| 360 |
-
|
| 361 |
-
|
| 362 |
-
|
| 363 |
-
|
| 364 |
-
|
| 365 |
-
|
| 366 |
-
|
| 367 |
-
|
| 368 |
-
|
| 369 |
-
|
| 370 |
-
|
| 371 |
-
|
| 372 |
-
|
| 373 |
-
|
| 374 |
-
|
| 375 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 376 |
)
|
| 377 |
|
| 378 |
-
|
| 379 |
-
|
| 380 |
-
|
| 381 |
-
|
| 382 |
-
|
| 383 |
-
|
| 384 |
-
|
| 385 |
-
|
| 386 |
-
|
| 387 |
-
|
| 388 |
-
|
| 389 |
-
|
| 390 |
-
|
| 391 |
-
|
| 392 |
-
|
| 393 |
-
|
| 394 |
-
|
| 395 |
-
|
| 396 |
-
|
| 397 |
-
|
| 398 |
-
|
| 399 |
-
|
| 400 |
-
|
| 401 |
-
|
| 402 |
-
|
| 403 |
-
|
| 404 |
-
|
| 405 |
-
|
| 406 |
-
|
| 407 |
-
|
| 408 |
-
|
| 409 |
-
|
| 410 |
-
|
| 411 |
-
|
| 412 |
-
|
| 413 |
-
|
| 414 |
-
|
| 415 |
-
|
| 416 |
-
|
| 417 |
-
|
| 418 |
-
|
| 419 |
-
|
| 420 |
-
|
| 421 |
-
|
| 422 |
-
|
| 423 |
-
|
| 424 |
-
|
| 425 |
-
|
| 426 |
-
|
| 427 |
-
|
| 428 |
-
|
| 429 |
-
|
| 430 |
-
|
| 431 |
-
|
| 432 |
-
|
| 433 |
-
|
| 434 |
-
|
| 435 |
-
|
| 436 |
-
|
| 437 |
-
|
| 438 |
-
|
| 439 |
-
|
| 440 |
-
|
| 441 |
-
|
| 442 |
-
|
| 443 |
-
|
| 444 |
-
|
| 445 |
-
|
| 446 |
-
|
| 447 |
-
|
| 448 |
-
|
| 449 |
-
|
| 450 |
-
|
| 451 |
-
|
| 452 |
-
|
| 453 |
-
|
| 454 |
-
|
| 455 |
-
|
| 456 |
-
|
| 457 |
-
|
| 458 |
-
|
| 459 |
-
|
| 460 |
-
|
| 461 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 462 |
)
|
| 463 |
|
| 464 |
-
#
|
| 465 |
-
|
| 466 |
-
|
| 467 |
-
|
| 468 |
-
|
| 469 |
-
|
| 470 |
-
|
| 471 |
-
|
| 472 |
-
|
| 473 |
-
|
| 474 |
-
|
| 475 |
-
|
| 476 |
-
|
| 477 |
-
|
| 478 |
-
|
| 479 |
-
|
| 480 |
-
|
| 481 |
-
|
| 482 |
-
|
| 483 |
-
|
| 484 |
-
|
| 485 |
-
|
| 486 |
-
|
| 487 |
-
|
| 488 |
-
|
| 489 |
-
|
| 490 |
-
|
| 491 |
-
|
| 492 |
-
|
| 493 |
-
|
| 494 |
-
|
| 495 |
-
|
| 496 |
-
|
| 497 |
-
|
| 498 |
-
|
| 499 |
-
|
| 500 |
-
|
| 501 |
-
|
| 502 |
-
|
| 503 |
-
|
| 504 |
-
|
| 505 |
-
|
| 506 |
-
|
| 507 |
-
|
| 508 |
-
|
| 509 |
-
|
| 510 |
-
|
| 511 |
-
|
| 512 |
-
|
| 513 |
-
|
| 514 |
-
|
| 515 |
-
|
| 516 |
-
|
| 517 |
-
|
| 518 |
-
|
| 519 |
-
|
| 520 |
-
|
| 521 |
-
|
| 522 |
-
|
| 523 |
-
|
| 524 |
-
|
| 525 |
-
|
| 526 |
-
|
| 527 |
-
|
| 528 |
-
|
| 529 |
-
|
| 530 |
-
|
| 531 |
-
|
| 532 |
-
|
| 533 |
-
|
| 534 |
-
|
| 535 |
-
|
| 536 |
-
|
| 537 |
-
|
| 538 |
-
|
| 539 |
-
|
| 540 |
-
|
| 541 |
-
|
| 542 |
-
|
| 543 |
-
|
| 544 |
-
|
| 545 |
-
|
| 546 |
-
|
| 547 |
-
|
| 548 |
-
|
| 549 |
-
|
| 550 |
-
|
| 551 |
-
|
| 552 |
-
|
| 553 |
-
|
| 554 |
-
|
| 555 |
-
|
| 556 |
-
|
| 557 |
-
|
| 558 |
-
|
| 559 |
-
|
| 560 |
-
|
| 561 |
-
|
| 562 |
-
|
| 563 |
-
|
| 564 |
-
|
| 565 |
-
|
| 566 |
-
|
| 567 |
-
|
| 568 |
-
|
| 569 |
-
|
| 570 |
-
|
| 571 |
-
|
| 572 |
-
|
| 573 |
-
|
| 574 |
-
|
| 575 |
-
|
| 576 |
-
|
| 577 |
-
|
| 578 |
-
|
| 579 |
-
|
| 580 |
-
|
| 581 |
-
|
| 582 |
-
|
| 583 |
-
|
| 584 |
-
|
| 585 |
-
|
| 586 |
-
|
| 587 |
-
|
| 588 |
-
|
| 589 |
-
|
| 590 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 591 |
|
| 592 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 593 |
|
|
|
|
|
|
| 2 |
import random
|
| 3 |
import sys
|
| 4 |
from typing import Sequence, Mapping, Any, Union
|
| 5 |
+
from functools import partial
|
| 6 |
import torch
|
| 7 |
import gradio as gr
|
| 8 |
from PIL import Image
|
|
|
|
| 15 |
|
| 16 |
hf_hub_download(repo_id="stable-diffusion-v1-5/stable-diffusion-v1-5", filename="v1-5-pruned-emaonly.ckpt", local_dir="models/checkpoints")
|
| 17 |
hf_hub_download(repo_id="Lykon/DreamShaper", filename="DreamShaper_3.32_baked_vae_clip_fix_half.safetensors", local_dir="models/checkpoints")
|
| 18 |
+
hf_hub_download(repo_id="Lykon/DreamShaper", filename="DreamShaper_6.31_BakedVae_pruned.safetensors", local_dir="models/checkpoints")
|
| 19 |
hf_hub_download(repo_id="latentcat/latentcat-controlnet", filename="models/control_v1p_sd15_brightness.safetensors", local_dir="models/controlnet")
|
| 20 |
hf_hub_download(repo_id="comfyanonymous/ControlNet-v1-1_fp16_safetensors", filename="control_v11f1e_sd15_tile_fp16.safetensors", local_dir="models/controlnet")
|
| 21 |
hf_hub_download(repo_id="Lykon/dreamshaper-7", filename="vae/diffusion_pytorch_model.fp16.safetensors", local_dir="models")
|
| 22 |
hf_hub_download(repo_id="stabilityai/sd-vae-ft-mse-original", filename="vae-ft-mse-840000-ema-pruned.safetensors", local_dir="models/vae")
|
| 23 |
+
hf_hub_download(repo_id="lllyasviel/Annotators", filename="RealESRGAN_x4plus.pth", local_dir="models/upscale_models")
|
| 24 |
|
| 25 |
def get_value_at_index(obj: Union[Sequence, Mapping], index: int) -> Any:
|
| 26 |
"""Returns the value at the given index of a sequence or mapping.
|
|
|
|
| 131 |
|
| 132 |
from nodes import NODE_CLASS_MAPPINGS
|
| 133 |
|
| 134 |
+
# Initialize common nodes
|
| 135 |
checkpointloadersimple = NODE_CLASS_MAPPINGS["CheckpointLoaderSimple"]()
|
| 136 |
checkpointloadersimple_4 = checkpointloadersimple.load_checkpoint(
|
| 137 |
ckpt_name="DreamShaper_3.32_baked_vae_clip_fix_half.safetensors"
|
| 138 |
)
|
| 139 |
+
checkpointloadersimple_artistic = checkpointloadersimple.load_checkpoint(
|
| 140 |
+
ckpt_name="DreamShaper_6.31_BakedVae_pruned.safetensors"
|
| 141 |
+
)
|
| 142 |
+
|
| 143 |
emptylatentimage = NODE_CLASS_MAPPINGS["EmptyLatentImage"]()
|
| 144 |
cliptextencode = NODE_CLASS_MAPPINGS["CLIPTextEncode"]()
|
| 145 |
controlnetloader = NODE_CLASS_MAPPINGS["ControlNetLoader"]()
|
|
|
|
| 146 |
controlnetapplyadvanced = NODE_CLASS_MAPPINGS["ControlNetApplyAdvanced"]()
|
|
|
|
| 147 |
ksampler = NODE_CLASS_MAPPINGS["KSampler"]()
|
| 148 |
vaedecode = NODE_CLASS_MAPPINGS["VAEDecode"]()
|
| 149 |
|
|
|
|
| 151 |
comfy_qr_by_module_size = NODE_CLASS_MAPPINGS["comfy-qr-by-module-size"]()
|
| 152 |
tilepreprocessor = NODE_CLASS_MAPPINGS["TilePreprocessor"]()
|
| 153 |
|
| 154 |
+
# Load upscale model and additional nodes for artistic pipeline
|
| 155 |
+
upscalemodelloader = NODE_CLASS_MAPPINGS["UpscaleModelLoader"]()
|
| 156 |
+
upscalemodelloader_30 = upscalemodelloader.load_model(
|
| 157 |
+
model_name="RealESRGAN_x4plus.pth"
|
| 158 |
+
)
|
| 159 |
+
imageupscalewithmodel = NODE_CLASS_MAPPINGS["ImageUpscaleWithModel"]()
|
| 160 |
+
imagescale = NODE_CLASS_MAPPINGS["ImageScale"]()
|
| 161 |
+
latentupscaleby = NODE_CLASS_MAPPINGS["LatentUpscaleBy"]()
|
| 162 |
+
|
| 163 |
from comfy import model_management
|
| 164 |
|
| 165 |
+
# Add all the models that load a safetensors file
|
| 166 |
+
model_loaders = [checkpointloadersimple_4, checkpointloadersimple_artistic]
|
| 167 |
|
| 168 |
# Check which models are valid and how to best load them
|
| 169 |
valid_models = [
|
| 170 |
+
getattr(loader[0], 'patcher', loader[0])
|
| 171 |
for loader in model_loaders
|
| 172 |
if not isinstance(loader[0], dict) and not isinstance(getattr(loader[0], 'patcher', None), dict)
|
| 173 |
]
|
| 174 |
|
| 175 |
model_management.load_models_gpu(valid_models)
|
| 176 |
|
| 177 |
+
@spaces.GPU(duration=30)
|
| 178 |
+
def generate_qr_code_unified(prompt: str, text_input: str, input_type: str = "URL", image_size: int = 512, border_size: int = 4, error_correction: str = "Medium (15%)", module_size: int = 12, module_drawer: str = "Square", use_custom_seed: bool = False, seed: int = 0, pipeline: str = "standard"):
|
| 179 |
# Only manipulate the text if it's a URL input type
|
| 180 |
qr_text = text_input
|
| 181 |
if input_type == "URL":
|
|
|
|
| 184 |
if "http://" in qr_text:
|
| 185 |
qr_text = qr_text.replace("http://", "")
|
| 186 |
|
| 187 |
+
# Use custom seed or random
|
| 188 |
+
actual_seed = seed if use_custom_seed else random.randint(1, 2**64)
|
| 189 |
+
|
| 190 |
with torch.inference_mode():
|
| 191 |
+
if pipeline == "standard":
|
| 192 |
+
yield from _pipeline_standard(prompt, qr_text, input_type, image_size, border_size, error_correction, module_size, module_drawer, actual_seed)
|
| 193 |
+
else: # artistic
|
| 194 |
+
yield from _pipeline_artistic(prompt, qr_text, input_type, image_size, border_size, error_correction, module_size, module_drawer, actual_seed)
|
| 195 |
+
|
| 196 |
+
def add_noise_to_border_only(image_tensor, seed: int, border_size: int, image_size: int, noise_strength: float = 0.5):
|
| 197 |
+
"""
|
| 198 |
+
Add random dark noise ONLY to the border region of a QR code image.
|
| 199 |
+
|
| 200 |
+
Args:
|
| 201 |
+
image_tensor: ComfyUI image tensor (batch, height, width, channels) with values 0-1
|
| 202 |
+
seed: Random seed for reproducible noise
|
| 203 |
+
border_size: Border size in QR modules (from QR generation settings)
|
| 204 |
+
image_size: Image size in pixels
|
| 205 |
+
noise_strength: Strength of noise to add (0-1 range, 0.5 = medium dark noise)
|
| 206 |
+
|
| 207 |
+
Returns:
|
| 208 |
+
Modified tensor with dark noise added only to border region
|
| 209 |
+
"""
|
| 210 |
+
# Convert to numpy for manipulation
|
| 211 |
+
img_np = image_tensor.cpu().numpy()
|
| 212 |
+
|
| 213 |
+
# Set random seed for reproducibility (ensure it's within numpy's valid range)
|
| 214 |
+
np.random.seed(seed % (2**32))
|
| 215 |
+
|
| 216 |
+
# Work with first image in batch
|
| 217 |
+
img = img_np[0] # (height, width, channels)
|
| 218 |
+
height, width, channels = img.shape
|
| 219 |
+
|
| 220 |
+
# Calculate border region in pixels
|
| 221 |
+
# Rough estimation: border_size modules out of total image
|
| 222 |
+
# We'll use a simple approach: outer X% of the image
|
| 223 |
+
border_thickness = max(int(height * 0.08), 20) # At least 20 pixels or 8% of image
|
| 224 |
+
|
| 225 |
+
# Create border mask (1 for border region, 0 for QR code interior)
|
| 226 |
+
border_mask = np.zeros((height, width), dtype=bool)
|
| 227 |
+
|
| 228 |
+
# Top border
|
| 229 |
+
border_mask[0:border_thickness, :] = True
|
| 230 |
+
# Bottom border
|
| 231 |
+
border_mask[height-border_thickness:height, :] = True
|
| 232 |
+
# Left border
|
| 233 |
+
border_mask[:, 0:border_thickness] = True
|
| 234 |
+
# Right border
|
| 235 |
+
border_mask[:, width-border_thickness:width] = True
|
| 236 |
+
|
| 237 |
+
# Only apply to white/light areas in the border (threshold > 240)
|
| 238 |
+
img_255 = (img * 255).astype(np.uint8)
|
| 239 |
+
white_mask = np.all(img_255 > 240, axis=-1)
|
| 240 |
+
|
| 241 |
+
# Combine: only border AND white areas
|
| 242 |
+
final_mask = border_mask & white_mask
|
| 243 |
+
|
| 244 |
+
# Generate random dark noise - only grayscale (same value for all channels)
|
| 245 |
+
noise_amount = np.random.uniform(0, noise_strength, size=(height, width))
|
| 246 |
+
|
| 247 |
+
# Apply noise to all channels equally (creates grayscale noise - dark pixels)
|
| 248 |
+
for c in range(channels):
|
| 249 |
+
# Subtract noise to make it darker (0.5 means subtract up to 0.5 from white = dark gray to black)
|
| 250 |
+
img[:, :, c] = np.where(final_mask, np.maximum(img[:, :, c] - noise_amount, 0), img[:, :, c])
|
| 251 |
+
|
| 252 |
+
# Put modified image back into batch array
|
| 253 |
+
img_np[0] = img
|
| 254 |
+
|
| 255 |
+
# Convert back to tensor
|
| 256 |
+
return torch.from_numpy(img_np).to(image_tensor.device)
|
| 257 |
|
| 258 |
+
def _pipeline_standard(prompt: str, qr_text: str, input_type: str, image_size: int, border_size: int, error_correction: str, module_size: int, module_drawer: str, seed: int):
|
| 259 |
+
emptylatentimage_5 = emptylatentimage.generate(
|
| 260 |
+
width=image_size, height=image_size, batch_size=1
|
| 261 |
+
)
|
| 262 |
+
|
| 263 |
+
cliptextencode_6 = cliptextencode.encode(
|
| 264 |
+
text=prompt,
|
| 265 |
+
clip=get_value_at_index(checkpointloadersimple_4, 1),
|
| 266 |
+
)
|
| 267 |
+
|
| 268 |
+
cliptextencode_7 = cliptextencode.encode(
|
| 269 |
+
text="ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, closed eyes, text, logo",
|
| 270 |
+
clip=get_value_at_index(checkpointloadersimple_4, 1),
|
| 271 |
+
)
|
| 272 |
+
|
| 273 |
+
controlnetloader_10 = controlnetloader.load_controlnet(
|
| 274 |
+
control_net_name="models/control_v1p_sd15_brightness.safetensors"
|
| 275 |
+
)
|
| 276 |
+
|
| 277 |
+
controlnetloader_12 = controlnetloader.load_controlnet(
|
| 278 |
+
control_net_name="control_v11f1e_sd15_tile_fp16.safetensors"
|
| 279 |
+
)
|
| 280 |
+
|
| 281 |
+
# Set protocol based on input type: None for plain text, Https for URLs
|
| 282 |
+
qr_protocol = "None" if input_type == "Plain Text" else "Https"
|
| 283 |
+
|
| 284 |
+
try:
|
| 285 |
+
comfy_qr_by_module_size_15 = comfy_qr_by_module_size.generate_qr(
|
| 286 |
+
protocol=qr_protocol,
|
| 287 |
+
text=qr_text,
|
| 288 |
+
module_size=module_size,
|
| 289 |
+
max_image_size=image_size,
|
| 290 |
+
fill_hexcolor="#000000",
|
| 291 |
+
back_hexcolor="#FFFFFF",
|
| 292 |
+
error_correction=error_correction,
|
| 293 |
+
border=border_size,
|
| 294 |
+
module_drawer=module_drawer,
|
| 295 |
+
)
|
| 296 |
+
except RuntimeError as e:
|
| 297 |
+
error_msg = (
|
| 298 |
+
f"Error generating QR code: {str(e)}\n"
|
| 299 |
+
"Try with a shorter text, increase the image size, or decrease the border size, module size, and error correction level under Advanced Settings."
|
| 300 |
+
)
|
| 301 |
+
yield None, error_msg
|
| 302 |
+
return
|
| 303 |
+
|
| 304 |
+
# 1) Yield the base QR image as the first intermediate result
|
| 305 |
+
base_qr_tensor = get_value_at_index(comfy_qr_by_module_size_15, 0)
|
| 306 |
+
base_qr_np = (base_qr_tensor.cpu().numpy() * 255).astype(np.uint8)
|
| 307 |
+
base_qr_np = base_qr_np[0]
|
| 308 |
+
base_qr_pil = Image.fromarray(base_qr_np)
|
| 309 |
+
yield base_qr_pil, "Generated base QR pattern… enhancing with AI (step 1/3)"
|
| 310 |
+
|
| 311 |
+
emptylatentimage_17 = emptylatentimage.generate(
|
| 312 |
+
width=image_size*2, height=image_size*2, batch_size=1
|
| 313 |
+
)
|
| 314 |
+
|
| 315 |
+
controlnetloader_19 = controlnetloader.load_controlnet(
|
| 316 |
+
control_net_name="control_v11f1e_sd15_tile_fp16.safetensors"
|
| 317 |
+
)
|
| 318 |
+
|
| 319 |
+
for q in range(1):
|
| 320 |
+
controlnetapplyadvanced_11 = controlnetapplyadvanced.apply_controlnet(
|
| 321 |
+
strength=0.45,
|
| 322 |
+
start_percent=0,
|
| 323 |
+
end_percent=1,
|
| 324 |
+
positive=get_value_at_index(cliptextencode_6, 0),
|
| 325 |
+
negative=get_value_at_index(cliptextencode_7, 0),
|
| 326 |
+
control_net=get_value_at_index(controlnetloader_10, 0),
|
| 327 |
+
image=get_value_at_index(comfy_qr_by_module_size_15, 0),
|
| 328 |
+
vae=get_value_at_index(checkpointloadersimple_4, 2),
|
| 329 |
+
)
|
| 330 |
+
|
| 331 |
+
tilepreprocessor_14 = tilepreprocessor.execute(
|
| 332 |
+
pyrUp_iters=3,
|
| 333 |
+
resolution=image_size,
|
| 334 |
+
image=get_value_at_index(comfy_qr_by_module_size_15, 0),
|
| 335 |
)
|
| 336 |
|
| 337 |
+
controlnetapplyadvanced_13 = controlnetapplyadvanced.apply_controlnet(
|
| 338 |
+
strength=0.45,
|
| 339 |
+
start_percent=0,
|
| 340 |
+
end_percent=1,
|
| 341 |
+
positive=get_value_at_index(controlnetapplyadvanced_11, 0),
|
| 342 |
+
negative=get_value_at_index(controlnetapplyadvanced_11, 1),
|
| 343 |
+
control_net=get_value_at_index(controlnetloader_12, 0),
|
| 344 |
+
image=get_value_at_index(tilepreprocessor_14, 0),
|
| 345 |
+
vae=get_value_at_index(checkpointloadersimple_4, 2),
|
| 346 |
)
|
| 347 |
|
| 348 |
+
ksampler_3 = ksampler.sample(
|
| 349 |
+
seed=seed,
|
| 350 |
+
steps=20,
|
| 351 |
+
cfg=7,
|
| 352 |
+
sampler_name="dpmpp_2m",
|
| 353 |
+
scheduler="karras",
|
| 354 |
+
denoise=1,
|
| 355 |
+
model=get_value_at_index(checkpointloadersimple_4, 0),
|
| 356 |
+
positive=get_value_at_index(controlnetapplyadvanced_13, 0),
|
| 357 |
+
negative=get_value_at_index(controlnetapplyadvanced_13, 1),
|
| 358 |
+
latent_image=get_value_at_index(emptylatentimage_5, 0),
|
| 359 |
)
|
| 360 |
|
| 361 |
+
vaedecode_8 = vaedecode.decode(
|
| 362 |
+
samples=get_value_at_index(ksampler_3, 0),
|
| 363 |
+
vae=get_value_at_index(checkpointloadersimple_4, 2),
|
| 364 |
)
|
| 365 |
|
| 366 |
+
# 2) Yield the first decoded image as a second intermediate result
|
| 367 |
+
mid_tensor = get_value_at_index(vaedecode_8, 0)
|
| 368 |
+
mid_np = (mid_tensor.cpu().numpy() * 255).astype(np.uint8)
|
| 369 |
+
mid_np = mid_np[0]
|
| 370 |
+
mid_pil = Image.fromarray(mid_np)
|
| 371 |
+
yield mid_pil, "First enhancement pass complete (step 2/3)… refining details"
|
| 372 |
+
|
| 373 |
+
controlnetapplyadvanced_20 = controlnetapplyadvanced.apply_controlnet(
|
| 374 |
+
strength=1,
|
| 375 |
+
start_percent=0,
|
| 376 |
+
end_percent=1,
|
| 377 |
+
positive=get_value_at_index(cliptextencode_6, 0),
|
| 378 |
+
negative=get_value_at_index(cliptextencode_7, 0),
|
| 379 |
+
control_net=get_value_at_index(controlnetloader_19, 0),
|
| 380 |
+
image=get_value_at_index(vaedecode_8, 0),
|
| 381 |
+
vae=get_value_at_index(checkpointloadersimple_4, 2),
|
| 382 |
)
|
| 383 |
|
| 384 |
+
ksampler_18 = ksampler.sample(
|
| 385 |
+
seed=seed + 1,
|
| 386 |
+
steps=20,
|
| 387 |
+
cfg=7,
|
| 388 |
+
sampler_name="dpmpp_2m",
|
| 389 |
+
scheduler="karras",
|
| 390 |
+
denoise=1,
|
| 391 |
+
model=get_value_at_index(checkpointloadersimple_4, 0),
|
| 392 |
+
positive=get_value_at_index(controlnetapplyadvanced_20, 0),
|
| 393 |
+
negative=get_value_at_index(controlnetapplyadvanced_20, 1),
|
| 394 |
+
latent_image=get_value_at_index(emptylatentimage_17, 0),
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 395 |
)
|
| 396 |
|
| 397 |
+
vaedecode_21 = vaedecode.decode(
|
| 398 |
+
samples=get_value_at_index(ksampler_18, 0),
|
| 399 |
+
vae=get_value_at_index(checkpointloadersimple_4, 2),
|
| 400 |
)
|
| 401 |
|
| 402 |
+
# 3) Yield the final enhanced image
|
| 403 |
+
image_tensor = get_value_at_index(vaedecode_21, 0)
|
| 404 |
+
image_np = (image_tensor.cpu().numpy() * 255).astype(np.uint8)
|
| 405 |
+
image_np = image_np[0]
|
| 406 |
+
pil_image = Image.fromarray(image_np)
|
| 407 |
+
yield pil_image, "No errors, all good! Final QR art generated."
|
| 408 |
+
|
| 409 |
+
def _pipeline_artistic(prompt: str, qr_text: str, input_type: str, image_size: int, border_size: int, error_correction: str, module_size: int, module_drawer: str, seed: int):
    """Generator pipeline for the "Artistic QR" mode.

    Builds a QR code, optionally sprinkles dark noise over its border region,
    then runs a two-pass Stable Diffusion enhancement (brightness + tile
    ControlNets, upscale, refinement) and yields intermediate previews as
    (PIL.Image | None, status_message) tuples for the Gradio UI.

    NOTE(review): relies on module-level ComfyUI node instances
    (comfy_qr_by_module_size, emptylatentimage, cliptextencode,
    checkpointloadersimple_artistic, controlnetloader, controlnetapplyadvanced,
    tilepreprocessor, ksampler, vaedecode, imageupscalewithmodel,
    upscalemodelloader_30, imagescale, latentupscaleby) and the helper
    add_noise_to_border_only, all defined elsewhere in this file.

    Args:
        prompt: Positive text prompt for the diffusion model.
        qr_text: Content encoded into the QR code (URL or plain text).
        input_type: "Plain Text" disables URL protocol handling; anything else
            is treated as an https URL by the QR node.
        image_size: Base square image size in pixels; final output is 2x this.
        border_size: QR quiet-zone width in modules; 0 skips border noise.
        error_correction: QR error-correction label (e.g. "Medium (15%)").
        module_size: Pixel width of a single QR module.
        module_drawer: QR module style name (e.g. "Square", "Circle").
        seed: Base RNG seed; derived seeds (seed+1, seed+100) are used for the
            refinement pass and the border noise respectively.

    Yields:
        tuple[PIL.Image.Image | None, str]: progress image (None on QR
        generation failure) and a human-readable status message.
    """
    # Generate QR code
    qr_protocol = "None" if input_type == "Plain Text" else "Https"

    try:
        comfy_qr = comfy_qr_by_module_size.generate_qr(
            protocol=qr_protocol,
            text=qr_text,
            module_size=module_size,
            max_image_size=image_size,
            fill_hexcolor="#000000",
            back_hexcolor="#FFFFFF",
            error_correction=error_correction,
            border=border_size,
            module_drawer=module_drawer,
        )
    except RuntimeError as e:
        # The QR node raises RuntimeError when the payload cannot fit the
        # requested geometry; surface an actionable message instead of crashing.
        error_msg = (
            f"Error generating QR code: {str(e)}\n"
            "Try with a shorter text, increase the image size, or decrease the border size, module size, and error correction level under Advanced Settings."
        )
        yield None, error_msg
        return

    # Show the base QR code
    # Tensors from the node graph are float images in [0, 1]; scale to uint8
    # and drop the leading batch dimension before handing to PIL.
    base_qr_tensor = get_value_at_index(comfy_qr, 0)
    base_qr_np = (base_qr_tensor.cpu().numpy() * 255).astype(np.uint8)
    base_qr_np = base_qr_np[0]
    base_qr_pil = Image.fromarray(base_qr_np)

    # Only add noise if there's a border (border_size > 0)
    if border_size > 0:
        yield base_qr_pil, "Generated base QR pattern... adding border noise (step 1/5)"

        # Add dark noise ONLY to border region (not QR code interior)
        # so the diffusion model has texture to latch onto in the quiet zone
        # without harming scannability.
        qr_with_border_noise = add_noise_to_border_only(
            get_value_at_index(comfy_qr, 0),
            seed=seed + 100,
            border_size=border_size,
            image_size=image_size,
            noise_strength=0.5  # Dark gray to black pixels
        )

        # Show the noisy QR so you can see the border noise effect
        noisy_qr_np = (qr_with_border_noise.cpu().numpy() * 255).astype(np.uint8)
        noisy_qr_np = noisy_qr_np[0]
        noisy_qr_pil = Image.fromarray(noisy_qr_np)
        yield noisy_qr_pil, "Added dark noise to border only... enhancing with AI (step 2/5)"
    else:
        # No border, skip noise
        qr_with_border_noise = get_value_at_index(comfy_qr, 0)
        yield base_qr_pil, "Generated base QR pattern (no border)... enhancing with AI (step 1/4)"

    # Generate latent image
    latent_image = emptylatentimage.generate(
        width=image_size, height=image_size, batch_size=1
    )

    # Encode text prompts
    # Index 1 of the checkpoint tuple is the CLIP model; index 0 the UNet,
    # index 2 the VAE (ComfyUI CheckpointLoaderSimple convention).
    positive_prompt = cliptextencode.encode(
        text=prompt,
        clip=get_value_at_index(checkpointloadersimple_artistic, 1),
    )

    negative_prompt = cliptextencode.encode(
        text="ugly, tiling, poorly drawn hands, poorly drawn feet, poorly drawn face, out of frame, extra limbs, body out of frame, blurry, bad anatomy, blurred, watermark, grainy, signature, cut off, draft, closed eyes, text, logo",
        clip=get_value_at_index(checkpointloadersimple_artistic, 1),
    )

    # Load controlnets
    # NOTE(review): the "models/" prefix here mirrors the hf_hub_download
    # target layout at the top of the file — confirm it resolves under
    # models/controlnet/ at runtime.
    brightness_controlnet = controlnetloader.load_controlnet(
        control_net_name="models/control_v1p_sd15_brightness.safetensors"
    )

    tile_controlnet = controlnetloader.load_controlnet(
        control_net_name="control_v11f1e_sd15_tile_fp16.safetensors"
    )

    # First ControlNet pass (using noisy QR)
    controlnet_apply = controlnetapplyadvanced.apply_controlnet(
        strength=0.45,
        start_percent=0,
        end_percent=1,
        positive=get_value_at_index(positive_prompt, 0),
        negative=get_value_at_index(negative_prompt, 0),
        control_net=get_value_at_index(brightness_controlnet, 0),
        image=qr_with_border_noise,
        vae=get_value_at_index(checkpointloadersimple_artistic, 2),
    )

    # Tile preprocessor (using noisy QR)
    tile_processed = tilepreprocessor.execute(
        pyrUp_iters=3,
        resolution=image_size,
        image=qr_with_border_noise,
    )

    # Second ControlNet pass (using tile processed from noisy QR)
    # Chains on the conditioning produced by the first pass, so both
    # controlnets steer the same sampling run.
    controlnet_apply = controlnetapplyadvanced.apply_controlnet(
        strength=0.45,
        start_percent=0,
        end_percent=1,
        positive=get_value_at_index(controlnet_apply, 0),
        negative=get_value_at_index(controlnet_apply, 1),
        control_net=get_value_at_index(tile_controlnet, 0),
        image=get_value_at_index(tile_processed, 0),
        vae=get_value_at_index(checkpointloadersimple_artistic, 2),
    )

    # First sampling pass
    samples = ksampler.sample(
        seed=seed,
        steps=30,
        cfg=7,
        sampler_name="dpmpp_3m_sde",
        scheduler="karras",
        denoise=1,
        model=get_value_at_index(checkpointloadersimple_artistic, 0),
        positive=get_value_at_index(controlnet_apply, 0),
        negative=get_value_at_index(controlnet_apply, 1),
        latent_image=get_value_at_index(latent_image, 0),
    )

    # First decode
    decoded = vaedecode.decode(
        samples=get_value_at_index(samples, 0),
        vae=get_value_at_index(checkpointloadersimple_artistic, 2),
    )

    # Show first pass result
    first_pass_tensor = get_value_at_index(decoded, 0)
    first_pass_np = (first_pass_tensor.cpu().numpy() * 255).astype(np.uint8)
    first_pass_np = first_pass_np[0]
    first_pass_pil = Image.fromarray(first_pass_np)
    # Step numbering shifts by one depending on whether the border-noise
    # step ran above (5-step vs 4-step flow).
    step_msg = "First enhancement pass complete (step 3/5)... upscaling image" if border_size > 0 else "First enhancement pass complete (step 2/4)... upscaling image"
    yield first_pass_pil, step_msg

    # Upscale image with model
    upscaled = imageupscalewithmodel.upscale(
        upscale_model=get_value_at_index(upscalemodelloader_30, 0),
        image=get_value_at_index(decoded, 0),
    )

    # Resize to target size
    # The upscale model may overshoot; clamp back to exactly 2x the base size.
    resized = imagescale.upscale(
        upscale_method="area",
        width=image_size*2,
        height=image_size*2,
        crop="disabled",
        image=get_value_at_index(upscaled, 0),
    )

    # Show upscaled result
    upscaled_tensor = get_value_at_index(resized, 0)
    upscaled_np = (upscaled_tensor.cpu().numpy() * 255).astype(np.uint8)
    upscaled_np = upscaled_np[0]
    upscaled_pil = Image.fromarray(upscaled_np)
    step_msg = "Image upscaled (step 4/5)... final refinement pass" if border_size > 0 else "Image upscaled (step 3/4)... final refinement pass"
    yield upscaled_pil, step_msg

    # Final ControlNet pass
    # Stronger tile conditioning (0.7) on the upscaled image keeps the QR
    # structure intact during the high-resolution refinement.
    controlnet_apply_final = controlnetapplyadvanced.apply_controlnet(
        strength=0.7,
        start_percent=0,
        end_percent=1,
        positive=get_value_at_index(positive_prompt, 0),
        negative=get_value_at_index(negative_prompt, 0),
        control_net=get_value_at_index(tile_controlnet, 0),
        image=get_value_at_index(resized, 0),
        vae=get_value_at_index(checkpointloadersimple_artistic, 2),
    )

    # Upscale latent
    # Reuses the first-pass latents (scaled 2x) rather than re-encoding the
    # pixel-space upscale.
    upscaled_latent = latentupscaleby.upscale(
        upscale_method="area",
        scale_by=2.0,
        samples=get_value_at_index(samples, 0),
    )

    # Final sampling pass
    # denoise=0.8 preserves part of the first-pass structure; seed+1 keeps
    # the run deterministic but decorrelated from pass one.
    final_samples = ksampler.sample(
        seed=seed + 1,
        steps=30,
        cfg=7,
        sampler_name="dpmpp_3m_sde",
        scheduler="karras",
        denoise=0.8,
        model=get_value_at_index(checkpointloadersimple_artistic, 0),
        positive=get_value_at_index(controlnet_apply_final, 0),
        negative=get_value_at_index(controlnet_apply_final, 1),
        latent_image=get_value_at_index(upscaled_latent, 0),
    )

    # Final decode
    final_decoded = vaedecode.decode(
        samples=get_value_at_index(final_samples, 0),
        vae=get_value_at_index(checkpointloadersimple_artistic, 2),
    )

    # Convert to PIL Image and return
    image_tensor = get_value_at_index(final_decoded, 0)
    image_np = (image_tensor.cpu().numpy() * 255).astype(np.uint8)
    image_np = image_np[0]
    final_image = Image.fromarray(image_np)
    step_msg = "No errors, all good! Final artistic QR code generated. (step 5/5)" if border_size > 0 else "No errors, all good! Final artistic QR code generated. (step 4/4)"
    yield final_image, step_msg
|
| 615 |
|
| 616 |
|
| 617 |
if __name__ == "__main__":
|
|
|
|
| 621 |
# Add a title and description
|
| 622 |
gr.Markdown("# QR Code Art Generator")
|
| 623 |
gr.Markdown("""
|
| 624 |
+
This is an AI-powered QR code generator that creates artistic QR codes using Stable Diffusion 1.5 and ControlNet models.
|
| 625 |
The application uses a custom ComfyUI workflow to generate QR codes.
|
| 626 |
|
| 627 |
### Tips:
|
|
|
|
| 630 |
- Choose **URL** mode for web links or **Plain Text** mode for VCARD, WiFi credentials, calendar events, etc.
|
| 631 |
- Try the examples below for inspiration
|
| 632 |
|
| 633 |
+
### Two Modes:
|
| 634 |
+
- **Standard QR**: Stable, accurate QR code generation (faster, more scannable)
|
| 635 |
+
- **Artistic QR**: More artistic and creative results with upscaling (slower, more creative)
|
| 636 |
+
|
| 637 |
### Note:
|
| 638 |
Feel free to share your suggestions or feedback on how to improve the app! Thanks!
|
| 639 |
+
""")
|
| 640 |
+
|
| 641 |
+
# Add tabs for different generation methods
|
| 642 |
+
with gr.Tabs():
|
| 643 |
+
# STANDARD QR TAB
|
| 644 |
+
with gr.TabItem("Standard QR"):
|
| 645 |
+
with gr.Row():
|
| 646 |
+
with gr.Column():
|
| 647 |
+
# Add input type selector
|
| 648 |
+
input_type = gr.Radio(
|
| 649 |
+
choices=["URL", "Plain Text"],
|
| 650 |
+
value="URL",
|
| 651 |
+
label="Input Type",
|
| 652 |
+
info="URL: For web links (auto-removes https://). Plain Text: For VCARD, WiFi, calendar, location, etc. (no manipulation)"
|
| 653 |
+
)
|
| 654 |
+
|
| 655 |
+
# Add inputs
|
| 656 |
+
prompt_input = gr.Textbox(
|
| 657 |
+
label="Prompt",
|
| 658 |
+
placeholder="Describe the image you want to generate (check examples below for inspiration)",
|
| 659 |
+
value="Enter your prompt here... For example: 'a beautiful sunset over mountains, photorealistic, detailed landscape'",
|
| 660 |
+
lines=3
|
| 661 |
+
)
|
| 662 |
+
text_input = gr.Textbox(
|
| 663 |
+
label="QR Code Content",
|
| 664 |
+
placeholder="Enter URL or plain text",
|
| 665 |
+
value="Enter your URL or text here... For example: https://github.com",
|
| 666 |
+
lines=3
|
| 667 |
+
)
|
| 668 |
+
|
| 669 |
+
with gr.Accordion("Advanced Settings", open=False):
|
| 670 |
+
# Add image size slider
|
| 671 |
+
image_size = gr.Slider(
|
| 672 |
+
minimum=512,
|
| 673 |
+
maximum=1024,
|
| 674 |
+
step=64,
|
| 675 |
+
value=512,
|
| 676 |
+
label="Image Size",
|
| 677 |
+
info="Base size of the generated image. Final output will be 2x this size (e.g., 512 → 1024) due to the two-step enhancement process. Higher values use more VRAM and take longer to process."
|
| 678 |
+
)
|
| 679 |
+
|
| 680 |
+
# Add border size slider
|
| 681 |
+
border_size = gr.Slider(
|
| 682 |
+
minimum=0,
|
| 683 |
+
maximum=8,
|
| 684 |
+
step=1,
|
| 685 |
+
value=4,
|
| 686 |
+
label="QR Code Border Size",
|
| 687 |
+
info="Number of modules (squares) to use as border around the QR code. Higher values add more whitespace."
|
| 688 |
+
)
|
| 689 |
+
|
| 690 |
+
# Add error correction dropdown
|
| 691 |
+
error_correction = gr.Dropdown(
|
| 692 |
+
choices=["Low (7%)", "Medium (15%)", "Quartile (25%)", "High (30%)"],
|
| 693 |
+
value="Medium (15%)",
|
| 694 |
+
label="Error Correction Level",
|
| 695 |
+
info="Higher error correction makes the QR code more scannable when damaged or obscured, but increases its size and complexity. Medium (15%) is a good starting point for most uses."
|
| 696 |
+
)
|
| 697 |
+
|
| 698 |
+
# Add module size slider
|
| 699 |
+
module_size = gr.Slider(
|
| 700 |
+
minimum=4,
|
| 701 |
+
maximum=16,
|
| 702 |
+
step=1,
|
| 703 |
+
value=12,
|
| 704 |
+
label="QR Module Size",
|
| 705 |
+
info="Pixel width of the smallest QR code unit. Larger values improve readability but require a larger image size. 12 is a good starting point."
|
| 706 |
+
)
|
| 707 |
+
|
| 708 |
+
# Add module drawer dropdown with style examples
|
| 709 |
+
module_drawer = gr.Dropdown(
|
| 710 |
+
choices=["Square", "Gapped Square", "Circle", "Rounded", "Vertical bars", "Horizontal bars"],
|
| 711 |
+
value="Square",
|
| 712 |
+
label="QR Code Style",
|
| 713 |
+
info="Select the style of the QR code modules (squares). See examples below. Different styles can give your QR code a unique look while maintaining scannability."
|
| 714 |
+
)
|
| 715 |
+
|
| 716 |
+
# Add seed controls
|
| 717 |
+
use_custom_seed = gr.Checkbox(
|
| 718 |
+
label="Use Custom Seed",
|
| 719 |
+
value=False,
|
| 720 |
+
info="Enable to use a specific seed for reproducible results"
|
| 721 |
+
)
|
| 722 |
+
seed = gr.Slider(
|
| 723 |
+
minimum=0,
|
| 724 |
+
maximum=2000000,
|
| 725 |
+
step=1,
|
| 726 |
+
value=0,
|
| 727 |
+
label="Seed",
|
| 728 |
+
info="Seed value for reproducibility. Same seed with same settings will produce the same result."
|
| 729 |
+
)
|
| 730 |
+
|
| 731 |
+
# Add style examples with labels
|
| 732 |
+
gr.Markdown("### Style Examples:")
|
| 733 |
+
|
| 734 |
+
# First row of examples
|
| 735 |
+
with gr.Row():
|
| 736 |
+
with gr.Column(scale=1, min_width=0):
|
| 737 |
+
gr.Markdown("**Square**", show_label=False)
|
| 738 |
+
gr.Image("custom_nodes/ComfyQR/img/square.png", width=100, show_label=False, show_download_button=False)
|
| 739 |
+
with gr.Column(scale=1, min_width=0):
|
| 740 |
+
gr.Markdown("**Gapped Square**", show_label=False)
|
| 741 |
+
gr.Image("custom_nodes/ComfyQR/img/gapped_square.png", width=100, show_label=False, show_download_button=False)
|
| 742 |
+
with gr.Column(scale=1, min_width=0):
|
| 743 |
+
gr.Markdown("**Circle**", show_label=False)
|
| 744 |
+
gr.Image("custom_nodes/ComfyQR/img/circle.png", width=100, show_label=False, show_download_button=False)
|
| 745 |
+
|
| 746 |
+
# Second row of examples
|
| 747 |
+
with gr.Row():
|
| 748 |
+
with gr.Column(scale=1, min_width=0):
|
| 749 |
+
gr.Markdown("**Rounded**", show_label=False)
|
| 750 |
+
gr.Image("custom_nodes/ComfyQR/img/rounded.png", width=100, show_label=False, show_download_button=False)
|
| 751 |
+
with gr.Column(scale=1, min_width=0):
|
| 752 |
+
gr.Markdown("**Vertical Bars**", show_label=False)
|
| 753 |
+
gr.Image("custom_nodes/ComfyQR/img/vertical-bars.png", width=100, show_label=False, show_download_button=False)
|
| 754 |
+
with gr.Column(scale=1, min_width=0):
|
| 755 |
+
gr.Markdown("**Horizontal Bars**", show_label=False)
|
| 756 |
+
gr.Image("custom_nodes/ComfyQR/img/horizontal-bars.png", width=100, show_label=False, show_download_button=False)
|
| 757 |
+
|
| 758 |
+
# The generate button
|
| 759 |
+
generate_btn = gr.Button("Generate Standard QR", variant="primary")
|
| 760 |
+
|
| 761 |
+
with gr.Column():
|
| 762 |
+
# The output image
|
| 763 |
+
output_image = gr.Image(label="Generated Standard QR Code")
|
| 764 |
+
error_message = gr.Textbox(
|
| 765 |
+
label="Status / Errors",
|
| 766 |
+
interactive=False,
|
| 767 |
+
lines=3,
|
| 768 |
+
)
|
| 769 |
+
|
| 770 |
+
# When clicking the button, it will trigger the main function
|
| 771 |
+
generate_btn.click(
|
| 772 |
+
fn=partial(generate_qr_code_unified, pipeline="standard"),
|
| 773 |
+
inputs=[prompt_input, text_input, input_type, image_size, border_size, error_correction, module_size, module_drawer, use_custom_seed, seed],
|
| 774 |
+
outputs=[output_image, error_message]
|
| 775 |
)
|
| 776 |
|
| 777 |
+
# Add examples
|
| 778 |
+
examples = [
|
| 779 |
+
[
|
| 780 |
+
"some clothes spread on ropes, realistic, great details, out in the open air sunny day realistic, great details,absence of people, Detailed and Intricate, CGI, Photoshoot,rim light, 8k, 16k, ultra detail",
|
| 781 |
+
"https://www.google.com",
|
| 782 |
+
"URL",
|
| 783 |
+
512,
|
| 784 |
+
4,
|
| 785 |
+
"Medium (15%)",
|
| 786 |
+
12,
|
| 787 |
+
"Square"
|
| 788 |
+
],
|
| 789 |
+
[
|
| 790 |
+
"some cards on poker tale, realistic, great details, realistic, great details,absence of people, Detailed and Intricate, CGI, Photoshoot,rim light, 8k, 16k, ultra detail",
|
| 791 |
+
"https://store.steampowered.com",
|
| 792 |
+
"URL",
|
| 793 |
+
512,
|
| 794 |
+
4,
|
| 795 |
+
"Medium (15%)",
|
| 796 |
+
12,
|
| 797 |
+
"Square"
|
| 798 |
+
],
|
| 799 |
+
[
|
| 800 |
+
"a beautiful sunset over mountains, photorealistic, detailed landscape, golden hour, dramatic lighting, 8k, ultra detailed",
|
| 801 |
+
"https://github.com",
|
| 802 |
+
"URL",
|
| 803 |
+
512,
|
| 804 |
+
4,
|
| 805 |
+
"Medium (15%)",
|
| 806 |
+
12,
|
| 807 |
+
"Square"
|
| 808 |
+
],
|
| 809 |
+
[
|
| 810 |
+
"underwater scene with coral reef and tropical fish, photorealistic, detailed, crystal clear water, sunlight rays, 8k, ultra detailed",
|
| 811 |
+
"https://twitter.com",
|
| 812 |
+
"URL",
|
| 813 |
+
512,
|
| 814 |
+
4,
|
| 815 |
+
"Medium (15%)",
|
| 816 |
+
12,
|
| 817 |
+
"Square"
|
| 818 |
+
],
|
| 819 |
+
[
|
| 820 |
+
"futuristic cityscape with flying cars and neon lights, cyberpunk style, detailed architecture, night scene, 8k, ultra detailed",
|
| 821 |
+
"https://linkedin.com",
|
| 822 |
+
"URL",
|
| 823 |
+
512,
|
| 824 |
+
4,
|
| 825 |
+
"Medium (15%)",
|
| 826 |
+
12,
|
| 827 |
+
"Square"
|
| 828 |
+
],
|
| 829 |
+
[
|
| 830 |
+
"vintage camera on wooden table, photorealistic, detailed textures, soft lighting, bokeh background, 8k, ultra detailed",
|
| 831 |
+
"https://instagram.com",
|
| 832 |
+
"URL",
|
| 833 |
+
512,
|
| 834 |
+
4,
|
| 835 |
+
"Medium (15%)",
|
| 836 |
+
12,
|
| 837 |
+
"Square"
|
| 838 |
+
],
|
| 839 |
+
[
|
| 840 |
+
"business card design, professional, modern, clean layout, corporate style, detailed, 8k, ultra detailed",
|
| 841 |
+
"BEGIN:VCARD\nVERSION:3.0\nFN:John Doe\nORG:Acme Corporation\nTITLE:Software Engineer\nTEL:+1-555-123-4567\nEMAIL:john.doe@example.com\nEND:VCARD",
|
| 842 |
+
"Plain Text",
|
| 843 |
+
832,
|
| 844 |
+
4,
|
| 845 |
+
"Medium (15%)",
|
| 846 |
+
12,
|
| 847 |
+
"Square"
|
| 848 |
+
],
|
| 849 |
+
[
|
| 850 |
+
"wifi network symbol, modern tech, digital art, glowing blue, detailed, 8k, ultra detailed",
|
| 851 |
+
"WIFI:T:WPA;S:MyNetwork;P:MyPassword123;;",
|
| 852 |
+
"Plain Text",
|
| 853 |
+
512,
|
| 854 |
+
4,
|
| 855 |
+
"Medium (15%)",
|
| 856 |
+
12,
|
| 857 |
+
"Square"
|
| 858 |
+
],
|
| 859 |
+
[
|
| 860 |
+
"calendar appointment reminder, organized planner, professional office, detailed, 8k, ultra detailed",
|
| 861 |
+
"BEGIN:VEVENT\nSUMMARY:Team Meeting\nDTSTART:20251115T140000Z\nDTEND:20251115T150000Z\nLOCATION:Conference Room A\nEND:VEVENT",
|
| 862 |
+
"Plain Text",
|
| 863 |
+
832,
|
| 864 |
+
4,
|
| 865 |
+
"Medium (15%)",
|
| 866 |
+
12,
|
| 867 |
+
"Square"
|
| 868 |
+
],
|
| 869 |
+
[
|
| 870 |
+
"location pin on map, travel destination, scenic view, detailed cartography, 8k, ultra detailed",
|
| 871 |
+
"geo:37.7749,-122.4194",
|
| 872 |
+
"Plain Text",
|
| 873 |
+
512,
|
| 874 |
+
4,
|
| 875 |
+
"Medium (15%)",
|
| 876 |
+
12,
|
| 877 |
+
"Square"
|
| 878 |
+
]
|
| 879 |
+
]
|
| 880 |
+
|
| 881 |
+
gr.Examples(
|
| 882 |
+
examples=examples,
|
| 883 |
+
inputs=[
|
| 884 |
+
prompt_input,
|
| 885 |
+
text_input,
|
| 886 |
+
input_type,
|
| 887 |
+
image_size,
|
| 888 |
+
border_size,
|
| 889 |
+
error_correction,
|
| 890 |
+
module_size,
|
| 891 |
+
module_drawer
|
| 892 |
+
],
|
| 893 |
+
outputs=[output_image, error_message],
|
| 894 |
+
fn=partial(generate_qr_code_unified, pipeline="standard"),
|
| 895 |
+
cache_examples=False
|
| 896 |
)
|
| 897 |
|
| 898 |
+
# ARTISTIC QR TAB
|
| 899 |
+
with gr.TabItem("Artistic QR"):
|
| 900 |
+
with gr.Row():
|
| 901 |
+
with gr.Column():
|
| 902 |
+
# Add input type selector for artistic QR
|
| 903 |
+
artistic_input_type = gr.Radio(
|
| 904 |
+
choices=["URL", "Plain Text"],
|
| 905 |
+
value="URL",
|
| 906 |
+
label="Input Type",
|
| 907 |
+
info="URL: For web links (auto-removes https://). Plain Text: For VCARD, WiFi, calendar, location, etc. (no manipulation)"
|
| 908 |
+
)
|
| 909 |
+
|
| 910 |
+
# Add inputs for artistic QR
|
| 911 |
+
artistic_prompt_input = gr.Textbox(
|
| 912 |
+
label="Prompt",
|
| 913 |
+
placeholder="Describe the image you want to generate (check examples below for inspiration)",
|
| 914 |
+
value="Enter your prompt here... For example: 'a beautiful sunset over mountains, photorealistic, detailed landscape'",
|
| 915 |
+
lines=3
|
| 916 |
+
)
|
| 917 |
+
artistic_text_input = gr.Textbox(
|
| 918 |
+
label="QR Code Content",
|
| 919 |
+
placeholder="Enter URL or plain text",
|
| 920 |
+
value="Enter your URL or text here... For example: https://github.com",
|
| 921 |
+
lines=3
|
| 922 |
+
)
|
| 923 |
+
|
| 924 |
+
with gr.Accordion("Advanced Settings", open=False):
|
| 925 |
+
# Add image size slider for artistic QR
|
| 926 |
+
artistic_image_size = gr.Slider(
|
| 927 |
+
minimum=512,
|
| 928 |
+
maximum=1024,
|
| 929 |
+
step=64,
|
| 930 |
+
value=512,
|
| 931 |
+
label="Image Size",
|
| 932 |
+
info="Base size of the generated image. Final output will be 2x this size (e.g., 512 → 1024) due to the two-step enhancement process. Higher values use more VRAM and take longer to process."
|
| 933 |
+
)
|
| 934 |
+
|
| 935 |
+
# Add border size slider for artistic QR
|
| 936 |
+
artistic_border_size = gr.Slider(
|
| 937 |
+
minimum=0,
|
| 938 |
+
maximum=8,
|
| 939 |
+
step=1,
|
| 940 |
+
value=4,
|
| 941 |
+
label="QR Code Border Size",
|
| 942 |
+
info="Number of modules (squares) to use as border around the QR code. Higher values add more whitespace."
|
| 943 |
+
)
|
| 944 |
+
|
| 945 |
+
# Add error correction dropdown for artistic QR
|
| 946 |
+
artistic_error_correction = gr.Dropdown(
|
| 947 |
+
choices=["Low (7%)", "Medium (15%)", "Quartile (25%)", "High (30%)"],
|
| 948 |
+
value="Medium (15%)",
|
| 949 |
+
label="Error Correction Level",
|
| 950 |
+
info="Higher error correction makes the QR code more scannable when damaged or obscured, but increases its size and complexity. Medium (15%) is a good starting point for most uses."
|
| 951 |
+
)
|
| 952 |
+
|
| 953 |
+
# Add module size slider for artistic QR
|
| 954 |
+
artistic_module_size = gr.Slider(
|
| 955 |
+
minimum=4,
|
| 956 |
+
maximum=16,
|
| 957 |
+
step=1,
|
| 958 |
+
value=12,
|
| 959 |
+
label="QR Module Size",
|
| 960 |
+
info="Pixel width of the smallest QR code unit. Larger values improve readability but require a larger image size. 12 is a good starting point."
|
| 961 |
+
)
|
| 962 |
+
|
| 963 |
+
# Add module drawer dropdown with style examples for artistic QR
|
| 964 |
+
artistic_module_drawer = gr.Dropdown(
|
| 965 |
+
choices=["Square", "Gapped Square", "Circle", "Rounded", "Vertical bars", "Horizontal bars"],
|
| 966 |
+
value="Square",
|
| 967 |
+
label="QR Code Style",
|
| 968 |
+
info="Select the style of the QR code modules (squares). See examples below. Different styles can give your QR code a unique look while maintaining scannability."
|
| 969 |
+
)
|
| 970 |
+
|
| 971 |
+
# Add seed controls for artistic QR
|
| 972 |
+
artistic_use_custom_seed = gr.Checkbox(
|
| 973 |
+
label="Use Custom Seed",
|
| 974 |
+
value=False,
|
| 975 |
+
info="Enable to use a specific seed for reproducible results"
|
| 976 |
+
)
|
| 977 |
+
artistic_seed = gr.Slider(
|
| 978 |
+
minimum=0,
|
| 979 |
+
maximum=2000000,
|
| 980 |
+
step=1,
|
| 981 |
+
value=0,
|
| 982 |
+
label="Seed",
|
| 983 |
+
info="Seed value for reproducibility. Same seed with same settings will produce the same result."
|
| 984 |
+
)
|
| 985 |
+
|
| 986 |
+
# Add style examples with labels
|
| 987 |
+
gr.Markdown("### Style Examples:")
|
| 988 |
+
|
| 989 |
+
# First row of examples
|
| 990 |
+
with gr.Row():
|
| 991 |
+
with gr.Column(scale=1, min_width=0):
|
| 992 |
+
gr.Markdown("**Square**", show_label=False)
|
| 993 |
+
gr.Image("custom_nodes/ComfyQR/img/square.png", width=100, show_label=False, show_download_button=False)
|
| 994 |
+
with gr.Column(scale=1, min_width=0):
|
| 995 |
+
gr.Markdown("**Gapped Square**", show_label=False)
|
| 996 |
+
gr.Image("custom_nodes/ComfyQR/img/gapped_square.png", width=100, show_label=False, show_download_button=False)
|
| 997 |
+
with gr.Column(scale=1, min_width=0):
|
| 998 |
+
gr.Markdown("**Circle**", show_label=False)
|
| 999 |
+
gr.Image("custom_nodes/ComfyQR/img/circle.png", width=100, show_label=False, show_download_button=False)
|
| 1000 |
+
|
| 1001 |
+
# Second row of examples
|
| 1002 |
+
with gr.Row():
|
| 1003 |
+
with gr.Column(scale=1, min_width=0):
|
| 1004 |
+
gr.Markdown("**Rounded**", show_label=False)
|
| 1005 |
+
gr.Image("custom_nodes/ComfyQR/img/rounded.png", width=100, show_label=False, show_download_button=False)
|
| 1006 |
+
with gr.Column(scale=1, min_width=0):
|
| 1007 |
+
gr.Markdown("**Vertical Bars**", show_label=False)
|
| 1008 |
+
gr.Image("custom_nodes/ComfyQR/img/vertical-bars.png", width=100, show_label=False, show_download_button=False)
|
| 1009 |
+
with gr.Column(scale=1, min_width=0):
|
| 1010 |
+
gr.Markdown("**Horizontal Bars**", show_label=False)
|
| 1011 |
+
gr.Image("custom_nodes/ComfyQR/img/horizontal-bars.png", width=100, show_label=False, show_download_button=False)
|
| 1012 |
+
|
| 1013 |
+
# The generate button for artistic QR
|
| 1014 |
+
artistic_generate_btn = gr.Button("Generate Artistic QR", variant="primary")
|
| 1015 |
+
|
| 1016 |
+
with gr.Column():
|
| 1017 |
+
# The output image for artistic QR
|
| 1018 |
+
artistic_output_image = gr.Image(label="Generated Artistic QR Code")
|
| 1019 |
+
artistic_error_message = gr.Textbox(
|
| 1020 |
+
label="Status / Errors",
|
| 1021 |
+
interactive=False,
|
| 1022 |
+
lines=3,
|
| 1023 |
+
)
|
| 1024 |
+
|
| 1025 |
+
# When clicking the button, it will trigger the artistic function
|
| 1026 |
+
artistic_generate_btn.click(
|
| 1027 |
+
fn=partial(generate_qr_code_unified, pipeline="artistic"),
|
| 1028 |
+
inputs=[artistic_prompt_input, artistic_text_input, artistic_input_type, artistic_image_size, artistic_border_size, artistic_error_correction, artistic_module_size, artistic_module_drawer, artistic_use_custom_seed, artistic_seed],
|
| 1029 |
+
outputs=[artistic_output_image, artistic_error_message]
|
| 1030 |
+
)
|
| 1031 |
|
| 1032 |
+
# Add examples for artistic QR
|
| 1033 |
+
artistic_examples = [
|
| 1034 |
+
[
|
| 1035 |
+
"some clothes spread on ropes, realistic, great details, out in the open air sunny day realistic, great details, absence of people, Detailed and Intricate, CGI, Photoshoot, rim light, 8k, 16k, ultra detail",
|
| 1036 |
+
"https://www.google.com",
|
| 1037 |
+
"URL",
|
| 1038 |
+
512,
|
| 1039 |
+
4,
|
| 1040 |
+
"Medium (15%)",
|
| 1041 |
+
12,
|
| 1042 |
+
"Square"
|
| 1043 |
+
],
|
| 1044 |
+
[
|
| 1045 |
+
"some cards on poker tale, realistic, great details, realistic, great details,absence of people, Detailed and Intricate, CGI, Photoshoot,rim light, 8k, 16k, ultra detail",
|
| 1046 |
+
"https://store.steampowered.com",
|
| 1047 |
+
"URL",
|
| 1048 |
+
512,
|
| 1049 |
+
4,
|
| 1050 |
+
"Medium (15%)",
|
| 1051 |
+
12,
|
| 1052 |
+
"Square"
|
| 1053 |
+
],
|
| 1054 |
+
[
|
| 1055 |
+
"a beautiful sunset over mountains, photorealistic, detailed landscape, golden hour, dramatic lighting, 8k, ultra detailed",
|
| 1056 |
+
"https://github.com",
|
| 1057 |
+
"URL",
|
| 1058 |
+
512,
|
| 1059 |
+
4,
|
| 1060 |
+
"Medium (15%)",
|
| 1061 |
+
12,
|
| 1062 |
+
"Square"
|
| 1063 |
+
],
|
| 1064 |
+
[
|
| 1065 |
+
"underwater scene with coral reef and tropical fish, photorealistic, detailed, crystal clear water, sunlight rays, 8k, ultra detailed",
|
| 1066 |
+
"https://twitter.com",
|
| 1067 |
+
"URL",
|
| 1068 |
+
512,
|
| 1069 |
+
4,
|
| 1070 |
+
"Medium (15%)",
|
| 1071 |
+
12,
|
| 1072 |
+
"Square"
|
| 1073 |
+
],
|
| 1074 |
+
[
|
| 1075 |
+
"futuristic cityscape with flying cars and neon lights, cyberpunk style, detailed architecture, night scene, 8k, ultra detailed",
|
| 1076 |
+
"https://linkedin.com",
|
| 1077 |
+
"URL",
|
| 1078 |
+
512,
|
| 1079 |
+
4,
|
| 1080 |
+
"Medium (15%)",
|
| 1081 |
+
12,
|
| 1082 |
+
"Square"
|
| 1083 |
+
],
|
| 1084 |
+
[
|
| 1085 |
+
"vintage camera on wooden table, photorealistic, detailed textures, soft lighting, bokeh background, 8k, ultra detailed",
|
| 1086 |
+
"https://instagram.com",
|
| 1087 |
+
"URL",
|
| 1088 |
+
512,
|
| 1089 |
+
4,
|
| 1090 |
+
"Medium (15%)",
|
| 1091 |
+
12,
|
| 1092 |
+
"Square"
|
| 1093 |
+
],
|
| 1094 |
+
[
|
| 1095 |
+
"business card design, professional, modern, clean layout, corporate style, detailed, 8k, ultra detailed",
|
| 1096 |
+
"BEGIN:VCARD\nVERSION:3.0\nFN:John Doe\nORG:Acme Corporation\nTITLE:Software Engineer\nTEL:+1-555-123-4567\nEMAIL:john.doe@example.com\nEND:VCARD",
|
| 1097 |
+
"Plain Text",
|
| 1098 |
+
832,
|
| 1099 |
+
4,
|
| 1100 |
+
"Medium (15%)",
|
| 1101 |
+
12,
|
| 1102 |
+
"Square"
|
| 1103 |
+
],
|
| 1104 |
+
[
|
| 1105 |
+
"wifi network symbol, modern tech, digital art, glowing blue, detailed, 8k, ultra detailed",
|
| 1106 |
+
"WIFI:T:WPA;S:MyNetwork;P:MyPassword123;;",
|
| 1107 |
+
"Plain Text",
|
| 1108 |
+
512,
|
| 1109 |
+
4,
|
| 1110 |
+
"Medium (15%)",
|
| 1111 |
+
12,
|
| 1112 |
+
"Square"
|
| 1113 |
+
],
|
| 1114 |
+
[
|
| 1115 |
+
"calendar appointment reminder, organized planner, professional office, detailed, 8k, ultra detailed",
|
| 1116 |
+
"BEGIN:VEVENT\nSUMMARY:Team Meeting\nDTSTART:20251115T140000Z\nDTEND:20251115T150000Z\nLOCATION:Conference Room A\nEND:VEVENT",
|
| 1117 |
+
"Plain Text",
|
| 1118 |
+
832,
|
| 1119 |
+
4,
|
| 1120 |
+
"Medium (15%)",
|
| 1121 |
+
12,
|
| 1122 |
+
"Square"
|
| 1123 |
+
],
|
| 1124 |
+
[
|
| 1125 |
+
"location pin on map, travel destination, scenic view, detailed cartography, 8k, ultra detailed",
|
| 1126 |
+
"geo:37.7749,-122.4194",
|
| 1127 |
+
"Plain Text",
|
| 1128 |
+
512,
|
| 1129 |
+
4,
|
| 1130 |
+
"Medium (15%)",
|
| 1131 |
+
12,
|
| 1132 |
+
"Square"
|
| 1133 |
+
]
|
| 1134 |
+
]
|
| 1135 |
+
|
| 1136 |
+
gr.Examples(
|
| 1137 |
+
examples=artistic_examples,
|
| 1138 |
+
inputs=[
|
| 1139 |
+
artistic_prompt_input,
|
| 1140 |
+
artistic_text_input,
|
| 1141 |
+
artistic_input_type,
|
| 1142 |
+
artistic_image_size,
|
| 1143 |
+
artistic_border_size,
|
| 1144 |
+
artistic_error_correction,
|
| 1145 |
+
artistic_module_size,
|
| 1146 |
+
artistic_module_drawer
|
| 1147 |
+
],
|
| 1148 |
+
outputs=[artistic_output_image, artistic_error_message],
|
| 1149 |
+
fn=partial(generate_qr_code_unified, pipeline="artistic"),
|
| 1150 |
+
cache_examples=False
|
| 1151 |
+
)
|
| 1152 |
|
| 1153 |
+
app.launch(share=False, mcp_server=True)
|