stablediffusiontutorials committed on
Commit
8333013
·
verified ·
1 Parent(s): 52d8787

Upload 3 files

Browse files
Qwen_Image_Controlnet_Patch.json ADDED
@@ -0,0 +1,1053 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "id": "91f6bbe2-ed41-4fd6-bac7-71d5b5864ecb",
3
+ "revision": 0,
4
+ "last_node_id": 83,
5
+ "last_link_id": 149,
6
+ "nodes": [
7
+ {
8
+ "id": 75,
9
+ "type": "ImageScaleToTotalPixels",
10
+ "pos": [
11
+ -60,
12
+ 995
13
+ ],
14
+ "size": [
15
+ 270,
16
+ 82
17
+ ],
18
+ "flags": {},
19
+ "order": 11,
20
+ "mode": 0,
21
+ "inputs": [
22
+ {
23
+ "name": "image",
24
+ "type": "IMAGE",
25
+ "link": 140
26
+ }
27
+ ],
28
+ "outputs": [
29
+ {
30
+ "name": "IMAGE",
31
+ "type": "IMAGE",
32
+ "links": [
33
+ 141,
34
+ 143
35
+ ]
36
+ }
37
+ ],
38
+ "properties": {
39
+ "cnr_id": "comfy-core",
40
+ "ver": "0.3.51",
41
+ "Node name for S&R": "ImageScaleToTotalPixels"
42
+ },
43
+ "widgets_values": [
44
+ "area",
45
+ 1.68
46
+ ]
47
+ },
48
+ {
49
+ "id": 69,
50
+ "type": "QwenImageDiffsynthControlnet",
51
+ "pos": [
52
+ 810,
53
+ 70
54
+ ],
55
+ "size": [
56
+ 310,
57
+ 138
58
+ ],
59
+ "flags": {},
60
+ "order": 15,
61
+ "mode": 0,
62
+ "inputs": [
63
+ {
64
+ "name": "model",
65
+ "type": "MODEL",
66
+ "link": 130
67
+ },
68
+ {
69
+ "name": "model_patch",
70
+ "type": "MODEL_PATCH",
71
+ "link": 129
72
+ },
73
+ {
74
+ "name": "vae",
75
+ "type": "VAE",
76
+ "link": 132
77
+ },
78
+ {
79
+ "name": "image",
80
+ "type": "IMAGE",
81
+ "link": 135
82
+ },
83
+ {
84
+ "name": "mask",
85
+ "shape": 7,
86
+ "type": "MASK",
87
+ "link": null
88
+ }
89
+ ],
90
+ "outputs": [
91
+ {
92
+ "name": "MODEL",
93
+ "type": "MODEL",
94
+ "links": [
95
+ 131
96
+ ]
97
+ }
98
+ ],
99
+ "properties": {
100
+ "cnr_id": "comfy-core",
101
+ "ver": "0.3.51",
102
+ "Node name for S&R": "QwenImageDiffsynthControlnet"
103
+ },
104
+ "widgets_values": [
105
+ 1
106
+ ]
107
+ },
108
+ {
109
+ "id": 66,
110
+ "type": "ModelSamplingAuraFlow",
111
+ "pos": [
112
+ 810,
113
+ -40
114
+ ],
115
+ "size": [
116
+ 310,
117
+ 58
118
+ ],
119
+ "flags": {},
120
+ "order": 12,
121
+ "mode": 0,
122
+ "inputs": [
123
+ {
124
+ "name": "model",
125
+ "type": "MODEL",
126
+ "link": 149
127
+ }
128
+ ],
129
+ "outputs": [
130
+ {
131
+ "name": "MODEL",
132
+ "type": "MODEL",
133
+ "links": [
134
+ 130
135
+ ]
136
+ }
137
+ ],
138
+ "properties": {
139
+ "cnr_id": "comfy-core",
140
+ "ver": "0.3.51",
141
+ "Node name for S&R": "ModelSamplingAuraFlow"
142
+ },
143
+ "widgets_values": [
144
+ 3.1000000000000005
145
+ ]
146
+ },
147
+ {
148
+ "id": 79,
149
+ "type": "MarkdownNote",
150
+ "pos": [
151
+ 810,
152
+ 760
153
+ ],
154
+ "size": [
155
+ 310,
156
+ 140
157
+ ],
158
+ "flags": {},
159
+ "order": 0,
160
+ "mode": 0,
161
+ "inputs": [],
162
+ "outputs": [],
163
+ "title": "KSampler settings",
164
+ "properties": {},
165
+ "widgets_values": [
166
+ "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 Steps lightning LoRA | 4 | 1.0 |\n"
167
+ ],
168
+ "color": "#432",
169
+ "bgcolor": "#653"
170
+ },
171
+ {
172
+ "id": 8,
173
+ "type": "VAEDecode",
174
+ "pos": [
175
+ 810,
176
+ 950
177
+ ],
178
+ "size": [
179
+ 310,
180
+ 46
181
+ ],
182
+ "flags": {},
183
+ "order": 18,
184
+ "mode": 0,
185
+ "inputs": [
186
+ {
187
+ "name": "samples",
188
+ "type": "LATENT",
189
+ "link": 128
190
+ },
191
+ {
192
+ "name": "vae",
193
+ "type": "VAE",
194
+ "link": 76
195
+ }
196
+ ],
197
+ "outputs": [
198
+ {
199
+ "name": "IMAGE",
200
+ "type": "IMAGE",
201
+ "slot_index": 0,
202
+ "links": [
203
+ 110
204
+ ]
205
+ }
206
+ ],
207
+ "properties": {
208
+ "cnr_id": "comfy-core",
209
+ "ver": "0.3.51",
210
+ "Node name for S&R": "VAEDecode"
211
+ },
212
+ "widgets_values": []
213
+ },
214
+ {
215
+ "id": 70,
216
+ "type": "ModelPatchLoader",
217
+ "pos": [
218
+ -120,
219
+ 130
220
+ ],
221
+ "size": [
222
+ 380,
223
+ 60
224
+ ],
225
+ "flags": {},
226
+ "order": 1,
227
+ "mode": 0,
228
+ "inputs": [],
229
+ "outputs": [
230
+ {
231
+ "name": "MODEL_PATCH",
232
+ "type": "MODEL_PATCH",
233
+ "links": [
234
+ 129
235
+ ]
236
+ }
237
+ ],
238
+ "properties": {
239
+ "cnr_id": "comfy-core",
240
+ "ver": "0.3.51",
241
+ "Node name for S&R": "ModelPatchLoader",
242
+ "models": [
243
+ {
244
+ "name": "qwen_image_canny_diffsynth_controlnet.safetensors",
245
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_canny_diffsynth_controlnet.safetensors",
246
+ "directory": "model_patches"
247
+ }
248
+ ]
249
+ },
250
+ "widgets_values": [
251
+ "qwen_image_canny_diffsynth_controlnet.safetensors"
252
+ ]
253
+ },
254
+ {
255
+ "id": 39,
256
+ "type": "VAELoader",
257
+ "pos": [
258
+ -120,
259
+ 400
260
+ ],
261
+ "size": [
262
+ 380,
263
+ 58
264
+ ],
265
+ "flags": {},
266
+ "order": 2,
267
+ "mode": 0,
268
+ "inputs": [],
269
+ "outputs": [
270
+ {
271
+ "name": "VAE",
272
+ "type": "VAE",
273
+ "slot_index": 0,
274
+ "links": [
275
+ 76,
276
+ 132,
277
+ 144
278
+ ]
279
+ }
280
+ ],
281
+ "properties": {
282
+ "cnr_id": "comfy-core",
283
+ "ver": "0.3.51",
284
+ "Node name for S&R": "VAELoader",
285
+ "models": [
286
+ {
287
+ "name": "qwen_image_vae.safetensors",
288
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
289
+ "directory": "vae"
290
+ }
291
+ ]
292
+ },
293
+ "widgets_values": [
294
+ "qwen_image_vae.safetensors"
295
+ ]
296
+ },
297
+ {
298
+ "id": 37,
299
+ "type": "UNETLoader",
300
+ "pos": [
301
+ -120,
302
+ 0
303
+ ],
304
+ "size": [
305
+ 380,
306
+ 82
307
+ ],
308
+ "flags": {},
309
+ "order": 3,
310
+ "mode": 0,
311
+ "inputs": [],
312
+ "outputs": [
313
+ {
314
+ "name": "MODEL",
315
+ "type": "MODEL",
316
+ "slot_index": 0,
317
+ "links": [
318
+ 145
319
+ ]
320
+ }
321
+ ],
322
+ "properties": {
323
+ "cnr_id": "comfy-core",
324
+ "ver": "0.3.51",
325
+ "Node name for S&R": "UNETLoader",
326
+ "models": [
327
+ {
328
+ "name": "qwen_image_fp8_e4m3fn.safetensors",
329
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
330
+ "directory": "diffusion_models"
331
+ }
332
+ ]
333
+ },
334
+ "widgets_values": [
335
+ "qwen_image_fp8_e4m3fn.safetensors",
336
+ "default"
337
+ ]
338
+ },
339
+ {
340
+ "id": 76,
341
+ "type": "VAEEncode",
342
+ "pos": [
343
+ 640,
344
+ 630
345
+ ],
346
+ "size": [
347
+ 140,
348
+ 46
349
+ ],
350
+ "flags": {
351
+ "collapsed": true
352
+ },
353
+ "order": 14,
354
+ "mode": 0,
355
+ "inputs": [
356
+ {
357
+ "name": "pixels",
358
+ "type": "IMAGE",
359
+ "link": 143
360
+ },
361
+ {
362
+ "name": "vae",
363
+ "type": "VAE",
364
+ "link": 144
365
+ }
366
+ ],
367
+ "outputs": [
368
+ {
369
+ "name": "LATENT",
370
+ "type": "LATENT",
371
+ "links": [
372
+ 142
373
+ ]
374
+ }
375
+ ],
376
+ "properties": {
377
+ "cnr_id": "comfy-core",
378
+ "ver": "0.3.51",
379
+ "Node name for S&R": "VAEEncode"
380
+ },
381
+ "widgets_values": []
382
+ },
383
+ {
384
+ "id": 68,
385
+ "type": "Note",
386
+ "pos": [
387
+ 810,
388
+ -180
389
+ ],
390
+ "size": [
391
+ 310,
392
+ 90
393
+ ],
394
+ "flags": {},
395
+ "order": 4,
396
+ "mode": 0,
397
+ "inputs": [],
398
+ "outputs": [],
399
+ "properties": {},
400
+ "widgets_values": [
401
+ "Increase the shift if you get too many blurry/dark/bad images. Decrease if you want to try increasing detail."
402
+ ],
403
+ "color": "#432",
404
+ "bgcolor": "#653"
405
+ },
406
+ {
407
+ "id": 38,
408
+ "type": "CLIPLoader",
409
+ "pos": [
410
+ -120,
411
+ 240
412
+ ],
413
+ "size": [
414
+ 380,
415
+ 106
416
+ ],
417
+ "flags": {},
418
+ "order": 5,
419
+ "mode": 0,
420
+ "inputs": [],
421
+ "outputs": [
422
+ {
423
+ "name": "CLIP",
424
+ "type": "CLIP",
425
+ "slot_index": 0,
426
+ "links": [
427
+ 74,
428
+ 75
429
+ ]
430
+ }
431
+ ],
432
+ "properties": {
433
+ "cnr_id": "comfy-core",
434
+ "ver": "0.3.51",
435
+ "Node name for S&R": "CLIPLoader",
436
+ "models": [
437
+ {
438
+ "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
439
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
440
+ "directory": "text_encoders"
441
+ }
442
+ ]
443
+ },
444
+ "widgets_values": [
445
+ "qwen_2.5_vl_7b_fp8_scaled.safetensors",
446
+ "qwen_image",
447
+ "default"
448
+ ]
449
+ },
450
+ {
451
+ "id": 71,
452
+ "type": "LoadImage",
453
+ "pos": [
454
+ -70,
455
+ 635
456
+ ],
457
+ "size": [
458
+ 274.080078125,
459
+ 314.00006103515625
460
+ ],
461
+ "flags": {},
462
+ "order": 6,
463
+ "mode": 0,
464
+ "inputs": [],
465
+ "outputs": [
466
+ {
467
+ "name": "IMAGE",
468
+ "type": "IMAGE",
469
+ "links": [
470
+ 140
471
+ ]
472
+ },
473
+ {
474
+ "name": "MASK",
475
+ "type": "MASK",
476
+ "links": null
477
+ }
478
+ ],
479
+ "properties": {
480
+ "cnr_id": "comfy-core",
481
+ "ver": "0.3.51",
482
+ "Node name for S&R": "LoadImage"
483
+ },
484
+ "widgets_values": [
485
+ "ComfyUI_00944_.png",
486
+ "image"
487
+ ]
488
+ },
489
+ {
490
+ "id": 73,
491
+ "type": "PreviewImage",
492
+ "pos": [
493
+ 340,
494
+ 770
495
+ ],
496
+ "size": [
497
+ 380,
498
+ 320
499
+ ],
500
+ "flags": {},
501
+ "order": 16,
502
+ "mode": 0,
503
+ "inputs": [
504
+ {
505
+ "name": "images",
506
+ "type": "IMAGE",
507
+ "link": 136
508
+ }
509
+ ],
510
+ "outputs": [],
511
+ "properties": {
512
+ "cnr_id": "comfy-core",
513
+ "ver": "0.3.51",
514
+ "Node name for S&R": "PreviewImage"
515
+ },
516
+ "widgets_values": []
517
+ },
518
+ {
519
+ "id": 72,
520
+ "type": "Canny",
521
+ "pos": [
522
+ 340,
523
+ 630
524
+ ],
525
+ "size": [
526
+ 240,
527
+ 90
528
+ ],
529
+ "flags": {},
530
+ "order": 13,
531
+ "mode": 0,
532
+ "inputs": [
533
+ {
534
+ "name": "image",
535
+ "type": "IMAGE",
536
+ "link": 141
537
+ }
538
+ ],
539
+ "outputs": [
540
+ {
541
+ "name": "IMAGE",
542
+ "type": "IMAGE",
543
+ "links": [
544
+ 135,
545
+ 136
546
+ ]
547
+ }
548
+ ],
549
+ "properties": {
550
+ "cnr_id": "comfy-core",
551
+ "ver": "0.3.51",
552
+ "Node name for S&R": "Canny"
553
+ },
554
+ "widgets_values": [
555
+ 0.1,
556
+ 0.2
557
+ ],
558
+ "color": "#322",
559
+ "bgcolor": "#533"
560
+ },
561
+ {
562
+ "id": 60,
563
+ "type": "SaveImage",
564
+ "pos": [
565
+ 1150,
566
+ -40
567
+ ],
568
+ "size": [
569
+ 970,
570
+ 1030
571
+ ],
572
+ "flags": {},
573
+ "order": 19,
574
+ "mode": 0,
575
+ "inputs": [
576
+ {
577
+ "name": "images",
578
+ "type": "IMAGE",
579
+ "link": 110
580
+ }
581
+ ],
582
+ "outputs": [],
583
+ "properties": {
584
+ "cnr_id": "comfy-core",
585
+ "ver": "0.3.51"
586
+ },
587
+ "widgets_values": [
588
+ "ComfyUI"
589
+ ]
590
+ },
591
+ {
592
+ "id": 80,
593
+ "type": "LoraLoaderModelOnly",
594
+ "pos": [
595
+ 320,
596
+ -10
597
+ ],
598
+ "size": [
599
+ 430,
600
+ 82
601
+ ],
602
+ "flags": {},
603
+ "order": 8,
604
+ "mode": 4,
605
+ "inputs": [
606
+ {
607
+ "name": "model",
608
+ "type": "MODEL",
609
+ "link": 145
610
+ }
611
+ ],
612
+ "outputs": [
613
+ {
614
+ "name": "MODEL",
615
+ "type": "MODEL",
616
+ "links": [
617
+ 149
618
+ ]
619
+ }
620
+ ],
621
+ "properties": {
622
+ "cnr_id": "comfy-core",
623
+ "ver": "0.3.51",
624
+ "Node name for S&R": "LoraLoaderModelOnly",
625
+ "models": [
626
+ {
627
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
628
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
629
+ "directory": "loras"
630
+ }
631
+ ]
632
+ },
633
+ "widgets_values": [
634
+ "Qwen-Image-Lightning-4steps-V1.0.safetensors",
635
+ 1
636
+ ]
637
+ },
638
+ {
639
+ "id": 6,
640
+ "type": "CLIPTextEncode",
641
+ "pos": [
642
+ 300,
643
+ 170
644
+ ],
645
+ "size": [
646
+ 460,
647
+ 164.31304931640625
648
+ ],
649
+ "flags": {},
650
+ "order": 9,
651
+ "mode": 0,
652
+ "inputs": [
653
+ {
654
+ "name": "clip",
655
+ "type": "CLIP",
656
+ "link": 74
657
+ }
658
+ ],
659
+ "outputs": [
660
+ {
661
+ "name": "CONDITIONING",
662
+ "type": "CONDITIONING",
663
+ "slot_index": 0,
664
+ "links": [
665
+ 46
666
+ ]
667
+ }
668
+ ],
669
+ "title": "CLIP Text Encode (Positive Prompt)",
670
+ "properties": {
671
+ "cnr_id": "comfy-core",
672
+ "ver": "0.3.51",
673
+ "Node name for S&R": "CLIPTextEncode"
674
+ },
675
+ "widgets_values": [
676
+ "Conceptual makeup, a fairy girl with pink hair, pink shimmery scales dotted at the corners of her eyes, starry eyeshadow, makeup painting, thin eyebrows, three-dimensional features, a glossy finish, dazzling gold powder, silver fine glitter, a sense of layering and depth in the makeup, decorated with gold ornaments, pearlescent eyeshadow, dreamy makeup, soft pastels and subtle sparkles, a mysterious and fantasy-filled atmosphere, high-end makeup, dappled light on the face, soft lighting, optimal shadows, complex depth of field, dramatic lighting, clear focus, 8k, high quality, Fujifilm filter, surreal, a dreamy pastel wonderland, bright colors, a starry pink background, realistic.\n"
677
+ ],
678
+ "color": "#232",
679
+ "bgcolor": "#353"
680
+ },
681
+ {
682
+ "id": 7,
683
+ "type": "CLIPTextEncode",
684
+ "pos": [
685
+ 300,
686
+ 380
687
+ ],
688
+ "size": [
689
+ 460,
690
+ 140
691
+ ],
692
+ "flags": {},
693
+ "order": 10,
694
+ "mode": 0,
695
+ "inputs": [
696
+ {
697
+ "name": "clip",
698
+ "type": "CLIP",
699
+ "link": 75
700
+ }
701
+ ],
702
+ "outputs": [
703
+ {
704
+ "name": "CONDITIONING",
705
+ "type": "CONDITIONING",
706
+ "slot_index": 0,
707
+ "links": [
708
+ 52
709
+ ]
710
+ }
711
+ ],
712
+ "title": "CLIP Text Encode (Negative Prompt)",
713
+ "properties": {
714
+ "cnr_id": "comfy-core",
715
+ "ver": "0.3.51",
716
+ "Node name for S&R": "CLIPTextEncode"
717
+ },
718
+ "widgets_values": [
719
+ " "
720
+ ],
721
+ "color": "#223",
722
+ "bgcolor": "#335"
723
+ },
724
+ {
725
+ "id": 3,
726
+ "type": "KSampler",
727
+ "pos": [
728
+ 810,
729
+ 260
730
+ ],
731
+ "size": [
732
+ 310,
733
+ 430
734
+ ],
735
+ "flags": {},
736
+ "order": 17,
737
+ "mode": 0,
738
+ "inputs": [
739
+ {
740
+ "name": "model",
741
+ "type": "MODEL",
742
+ "link": 131
743
+ },
744
+ {
745
+ "name": "positive",
746
+ "type": "CONDITIONING",
747
+ "link": 46
748
+ },
749
+ {
750
+ "name": "negative",
751
+ "type": "CONDITIONING",
752
+ "link": 52
753
+ },
754
+ {
755
+ "name": "latent_image",
756
+ "type": "LATENT",
757
+ "link": 142
758
+ }
759
+ ],
760
+ "outputs": [
761
+ {
762
+ "name": "LATENT",
763
+ "type": "LATENT",
764
+ "slot_index": 0,
765
+ "links": [
766
+ 128
767
+ ]
768
+ }
769
+ ],
770
+ "properties": {
771
+ "cnr_id": "comfy-core",
772
+ "ver": "0.3.51",
773
+ "Node name for S&R": "KSampler"
774
+ },
775
+ "widgets_values": [
776
+ 91832422759220,
777
+ "randomize",
778
+ 20,
779
+ 2.5,
780
+ "euler",
781
+ "simple",
782
+ 1
783
+ ]
784
+ },
785
+ {
786
+ "id": 78,
787
+ "type": "MarkdownNote",
788
+ "pos": [
789
+ -690,
790
+ -50
791
+ ],
792
+ "size": [
793
+ 540,
794
+ 630
795
+ ],
796
+ "flags": {},
797
+ "order": 7,
798
+ "mode": 0,
799
+ "inputs": [],
800
+ "outputs": [],
801
+ "title": "Model links",
802
+ "properties": {
803
+ "widget_ue_connectable": {}
804
+ },
805
+ "widgets_values": [
806
+ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**Model patches**\n\n- [qwen_image_canny_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_canny_diffsynth_controlnet.safetensors)\n- [qwen_image_depth_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_depth_diffsynth_controlnet.safetensors)\n- [qwen_image_inpaint_diffsynth_controlnet.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/model_patches/qwen_image_inpaint_diffsynth_controlnet.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-4steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 
model_patches/ # create one if you can't find it\n│ │ ├── qwen_image_depth_diffsynth_controlnet.safetensors\n│ │ ├── qwen_image_canny_diffsynth_controlnet.safetensors\n│ │ └── qwen_image_inpaint_diffsynth_controlnet.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
807
+ ],
808
+ "color": "#432",
809
+ "bgcolor": "#653"
810
+ }
811
+ ],
812
+ "links": [
813
+ [
814
+ 46,
815
+ 6,
816
+ 0,
817
+ 3,
818
+ 1,
819
+ "CONDITIONING"
820
+ ],
821
+ [
822
+ 52,
823
+ 7,
824
+ 0,
825
+ 3,
826
+ 2,
827
+ "CONDITIONING"
828
+ ],
829
+ [
830
+ 74,
831
+ 38,
832
+ 0,
833
+ 6,
834
+ 0,
835
+ "CLIP"
836
+ ],
837
+ [
838
+ 75,
839
+ 38,
840
+ 0,
841
+ 7,
842
+ 0,
843
+ "CLIP"
844
+ ],
845
+ [
846
+ 76,
847
+ 39,
848
+ 0,
849
+ 8,
850
+ 1,
851
+ "VAE"
852
+ ],
853
+ [
854
+ 110,
855
+ 8,
856
+ 0,
857
+ 60,
858
+ 0,
859
+ "IMAGE"
860
+ ],
861
+ [
862
+ 128,
863
+ 3,
864
+ 0,
865
+ 8,
866
+ 0,
867
+ "LATENT"
868
+ ],
869
+ [
870
+ 129,
871
+ 70,
872
+ 0,
873
+ 69,
874
+ 1,
875
+ "MODEL_PATCH"
876
+ ],
877
+ [
878
+ 130,
879
+ 66,
880
+ 0,
881
+ 69,
882
+ 0,
883
+ "MODEL"
884
+ ],
885
+ [
886
+ 131,
887
+ 69,
888
+ 0,
889
+ 3,
890
+ 0,
891
+ "MODEL"
892
+ ],
893
+ [
894
+ 132,
895
+ 39,
896
+ 0,
897
+ 69,
898
+ 2,
899
+ "VAE"
900
+ ],
901
+ [
902
+ 135,
903
+ 72,
904
+ 0,
905
+ 69,
906
+ 3,
907
+ "IMAGE"
908
+ ],
909
+ [
910
+ 136,
911
+ 72,
912
+ 0,
913
+ 73,
914
+ 0,
915
+ "IMAGE"
916
+ ],
917
+ [
918
+ 140,
919
+ 71,
920
+ 0,
921
+ 75,
922
+ 0,
923
+ "IMAGE"
924
+ ],
925
+ [
926
+ 141,
927
+ 75,
928
+ 0,
929
+ 72,
930
+ 0,
931
+ "IMAGE"
932
+ ],
933
+ [
934
+ 142,
935
+ 76,
936
+ 0,
937
+ 3,
938
+ 3,
939
+ "LATENT"
940
+ ],
941
+ [
942
+ 143,
943
+ 75,
944
+ 0,
945
+ 76,
946
+ 0,
947
+ "IMAGE"
948
+ ],
949
+ [
950
+ 144,
951
+ 39,
952
+ 0,
953
+ 76,
954
+ 1,
955
+ "VAE"
956
+ ],
957
+ [
958
+ 145,
959
+ 37,
960
+ 0,
961
+ 80,
962
+ 0,
963
+ "MODEL"
964
+ ],
965
+ [
966
+ 149,
967
+ 80,
968
+ 0,
969
+ 66,
970
+ 0,
971
+ "MODEL"
972
+ ]
973
+ ],
974
+ "groups": [
975
+ {
976
+ "id": 1,
977
+ "title": "Step 1 - Upload models",
978
+ "bounding": [
979
+ -130,
980
+ -80,
981
+ 400,
982
+ 610
983
+ ],
984
+ "color": "#3f789e",
985
+ "font_size": 24,
986
+ "flags": {}
987
+ },
988
+ {
989
+ "id": 2,
990
+ "title": "Step 2 - Upload reference image",
991
+ "bounding": [
992
+ -130,
993
+ 550,
994
+ 400,
995
+ 550
996
+ ],
997
+ "color": "#3f789e",
998
+ "font_size": 24,
999
+ "flags": {}
1000
+ },
1001
+ {
1002
+ "id": 3,
1003
+ "title": "Image processing",
1004
+ "bounding": [
1005
+ 290,
1006
+ 550,
1007
+ 490,
1008
+ 550
1009
+ ],
1010
+ "color": "#3f789e",
1011
+ "font_size": 24,
1012
+ "flags": {}
1013
+ },
1014
+ {
1015
+ "id": 4,
1016
+ "title": "Step 3 - Prompt",
1017
+ "bounding": [
1018
+ 290,
1019
+ 100,
1020
+ 490,
1021
+ 430
1022
+ ],
1023
+ "color": "#3f789e",
1024
+ "font_size": 24,
1025
+ "flags": {}
1026
+ },
1027
+ {
1028
+ "id": 5,
1029
+ "title": "4 steps lightning LoRA",
1030
+ "bounding": [
1031
+ 290,
1032
+ -80,
1033
+ 490,
1034
+ 160
1035
+ ],
1036
+ "color": "#3f789e",
1037
+ "font_size": 24,
1038
+ "flags": {}
1039
+ }
1040
+ ],
1041
+ "config": {},
1042
+ "extra": {
1043
+ "ds": {
1044
+ "scale": 0.48559562289012265,
1045
+ "offset": [
1046
+ 1846.044139609573,
1047
+ 391.24067543168553
1048
+ ]
1049
+ },
1050
+ "frontendVersion": "1.26.6"
1051
+ },
1052
+ "version": 0.4
1053
+ }
Qwen_Image_InstantX_Controlnet.json ADDED
@@ -0,0 +1,2048 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "id": "91f6bbe2-ed41-4fd6-bac7-71d5b5864ecb",
3
+ "revision": 0,
4
+ "last_node_id": 107,
5
+ "last_link_id": 185,
6
+ "nodes": [
7
+ {
8
+ "id": 71,
9
+ "type": "LoadImage",
10
+ "pos": [
11
+ -70,
12
+ 635
13
+ ],
14
+ "size": [
15
+ 274.080078125,
16
+ 314.00006103515625
17
+ ],
18
+ "flags": {},
19
+ "order": 0,
20
+ "mode": 0,
21
+ "inputs": [],
22
+ "outputs": [
23
+ {
24
+ "name": "IMAGE",
25
+ "type": "IMAGE",
26
+ "links": [
27
+ 140
28
+ ]
29
+ },
30
+ {
31
+ "name": "MASK",
32
+ "type": "MASK",
33
+ "links": null
34
+ }
35
+ ],
36
+ "properties": {
37
+ "Node name for S&R": "LoadImage",
38
+ "cnr_id": "comfy-core",
39
+ "ver": "0.3.51"
40
+ },
41
+ "widgets_values": [
42
+ "Zoomable image (3).jpg",
43
+ "image"
44
+ ]
45
+ },
46
+ {
47
+ "id": 7,
48
+ "type": "CLIPTextEncode",
49
+ "pos": [
50
+ 300,
51
+ 380
52
+ ],
53
+ "size": [
54
+ 460,
55
+ 140
56
+ ],
57
+ "flags": {},
58
+ "order": 14,
59
+ "mode": 0,
60
+ "inputs": [
61
+ {
62
+ "name": "clip",
63
+ "type": "CLIP",
64
+ "link": 75
65
+ }
66
+ ],
67
+ "outputs": [
68
+ {
69
+ "name": "CONDITIONING",
70
+ "type": "CONDITIONING",
71
+ "slot_index": 0,
72
+ "links": [
73
+ 152
74
+ ]
75
+ }
76
+ ],
77
+ "title": "CLIP Text Encode (Negative Prompt)",
78
+ "properties": {
79
+ "Node name for S&R": "CLIPTextEncode",
80
+ "cnr_id": "comfy-core",
81
+ "ver": "0.3.51"
82
+ },
83
+ "widgets_values": [
84
+ " "
85
+ ],
86
+ "color": "#223",
87
+ "bgcolor": "#335"
88
+ },
89
+ {
90
+ "id": 38,
91
+ "type": "CLIPLoader",
92
+ "pos": [
93
+ -120,
94
+ 130
95
+ ],
96
+ "size": [
97
+ 380,
98
+ 106
99
+ ],
100
+ "flags": {},
101
+ "order": 1,
102
+ "mode": 0,
103
+ "inputs": [],
104
+ "outputs": [
105
+ {
106
+ "name": "CLIP",
107
+ "type": "CLIP",
108
+ "slot_index": 0,
109
+ "links": [
110
+ 74,
111
+ 75
112
+ ]
113
+ }
114
+ ],
115
+ "properties": {
116
+ "Node name for S&R": "CLIPLoader",
117
+ "cnr_id": "comfy-core",
118
+ "ver": "0.3.51",
119
+ "models": [
120
+ {
121
+ "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
122
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
123
+ "directory": "text_encoders"
124
+ }
125
+ ]
126
+ },
127
+ "widgets_values": [
128
+ "qwen_2.5_vl_7b_fp8_scaled.safetensors",
129
+ "qwen_image",
130
+ "default"
131
+ ]
132
+ },
133
+ {
134
+ "id": 39,
135
+ "type": "VAELoader",
136
+ "pos": [
137
+ -120,
138
+ 290
139
+ ],
140
+ "size": [
141
+ 380,
142
+ 58
143
+ ],
144
+ "flags": {},
145
+ "order": 2,
146
+ "mode": 0,
147
+ "inputs": [],
148
+ "outputs": [
149
+ {
150
+ "name": "VAE",
151
+ "type": "VAE",
152
+ "slot_index": 0,
153
+ "links": [
154
+ 76,
155
+ 144,
156
+ 153
157
+ ]
158
+ }
159
+ ],
160
+ "properties": {
161
+ "Node name for S&R": "VAELoader",
162
+ "cnr_id": "comfy-core",
163
+ "ver": "0.3.51",
164
+ "models": [
165
+ {
166
+ "name": "qwen_image_vae.safetensors",
167
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
168
+ "directory": "vae"
169
+ }
170
+ ]
171
+ },
172
+ "widgets_values": [
173
+ "qwen_image_vae.safetensors"
174
+ ]
175
+ },
176
+ {
177
+ "id": 84,
178
+ "type": "ControlNetLoader",
179
+ "pos": [
180
+ -120,
181
+ 400
182
+ ],
183
+ "size": [
184
+ 380,
185
+ 58
186
+ ],
187
+ "flags": {},
188
+ "order": 3,
189
+ "mode": 0,
190
+ "inputs": [],
191
+ "outputs": [
192
+ {
193
+ "name": "CONTROL_NET",
194
+ "type": "CONTROL_NET",
195
+ "links": [
196
+ 150
197
+ ]
198
+ }
199
+ ],
200
+ "properties": {
201
+ "Node name for S&R": "ControlNetLoader",
202
+ "cnr_id": "comfy-core",
203
+ "ver": "0.3.51",
204
+ "models": [
205
+ {
206
+ "name": "Qwen-Image-InstantX-ControlNet-Union.safetensors",
207
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Union.safetensors",
208
+ "directory": "controlnet"
209
+ }
210
+ ]
211
+ },
212
+ "widgets_values": [
213
+ "Qwen-Image-InstantX-ControlNet-Union.safetensors"
214
+ ]
215
+ },
216
+ {
217
+ "id": 37,
218
+ "type": "UNETLoader",
219
+ "pos": [
220
+ -120,
221
+ 0
222
+ ],
223
+ "size": [
224
+ 380,
225
+ 82
226
+ ],
227
+ "flags": {},
228
+ "order": 4,
229
+ "mode": 0,
230
+ "inputs": [],
231
+ "outputs": [
232
+ {
233
+ "name": "MODEL",
234
+ "type": "MODEL",
235
+ "slot_index": 0,
236
+ "links": [
237
+ 145
238
+ ]
239
+ }
240
+ ],
241
+ "properties": {
242
+ "Node name for S&R": "UNETLoader",
243
+ "cnr_id": "comfy-core",
244
+ "ver": "0.3.51",
245
+ "models": [
246
+ {
247
+ "name": "qwen_image_fp8_e4m3fn.safetensors",
248
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
249
+ "directory": "diffusion_models"
250
+ }
251
+ ]
252
+ },
253
+ "widgets_values": [
254
+ "qwen_image_fp8_e4m3fn.safetensors",
255
+ "default"
256
+ ]
257
+ },
258
+ {
259
+ "id": 66,
260
+ "type": "ModelSamplingAuraFlow",
261
+ "pos": [
262
+ 800,
263
+ 10
264
+ ],
265
+ "size": [
266
+ 310,
267
+ 58
268
+ ],
269
+ "flags": {},
270
+ "order": 18,
271
+ "mode": 0,
272
+ "inputs": [
273
+ {
274
+ "name": "model",
275
+ "type": "MODEL",
276
+ "link": 149
277
+ }
278
+ ],
279
+ "outputs": [
280
+ {
281
+ "name": "MODEL",
282
+ "type": "MODEL",
283
+ "links": [
284
+ 156
285
+ ]
286
+ }
287
+ ],
288
+ "properties": {
289
+ "Node name for S&R": "ModelSamplingAuraFlow",
290
+ "cnr_id": "comfy-core",
291
+ "ver": "0.3.51"
292
+ },
293
+ "widgets_values": [
294
+ 3.1000000000000005
295
+ ]
296
+ },
297
+ {
298
+ "id": 8,
299
+ "type": "VAEDecode",
300
+ "pos": [
301
+ 1150,
302
+ 480
303
+ ],
304
+ "size": [
305
+ 310,
306
+ 46
307
+ ],
308
+ "flags": {},
309
+ "order": 22,
310
+ "mode": 0,
311
+ "inputs": [
312
+ {
313
+ "name": "samples",
314
+ "type": "LATENT",
315
+ "link": 128
316
+ },
317
+ {
318
+ "name": "vae",
319
+ "type": "VAE",
320
+ "link": 76
321
+ }
322
+ ],
323
+ "outputs": [
324
+ {
325
+ "name": "IMAGE",
326
+ "type": "IMAGE",
327
+ "slot_index": 0,
328
+ "links": [
329
+ 110
330
+ ]
331
+ }
332
+ ],
333
+ "properties": {
334
+ "Node name for S&R": "VAEDecode",
335
+ "cnr_id": "comfy-core",
336
+ "ver": "0.3.51"
337
+ },
338
+ "widgets_values": []
339
+ },
340
+ {
341
+ "id": 68,
342
+ "type": "Note",
343
+ "pos": [
344
+ 810,
345
+ -140
346
+ ],
347
+ "size": [
348
+ 310,
349
+ 90
350
+ ],
351
+ "flags": {},
352
+ "order": 5,
353
+ "mode": 0,
354
+ "inputs": [],
355
+ "outputs": [],
356
+ "properties": {},
357
+ "widgets_values": [
358
+ "Increase the shift if you get too many blurry/dark/bad images. Decrease if you want to try increasing detail."
359
+ ],
360
+ "color": "#432",
361
+ "bgcolor": "#653"
362
+ },
363
+ {
364
+ "id": 86,
365
+ "type": "Note",
366
+ "pos": [
367
+ 1150,
368
+ 580
369
+ ],
370
+ "size": [
371
+ 307.4002380371094,
372
+ 127.38092803955078
373
+ ],
374
+ "flags": {},
375
+ "order": 6,
376
+ "mode": 0,
377
+ "inputs": [],
378
+ "outputs": [],
379
+ "properties": {},
380
+ "widgets_values": [
381
+ "Set cfg to 1.0 for a speed boost at the cost of consistency. Samplers like res_multistep work pretty well at cfg 1.0\n\nThe official number of steps is 50 but I think that's too much. Even just 10 steps seems to work."
382
+ ],
383
+ "color": "#432",
384
+ "bgcolor": "#653"
385
+ },
386
+ {
387
+ "id": 79,
388
+ "type": "MarkdownNote",
389
+ "pos": [
390
+ 1160,
391
+ 770
392
+ ],
393
+ "size": [
394
+ 310,
395
+ 140
396
+ ],
397
+ "flags": {},
398
+ "order": 7,
399
+ "mode": 0,
400
+ "inputs": [],
401
+ "outputs": [],
402
+ "title": "KSampler settings",
403
+ "properties": {},
404
+ "widgets_values": [
405
+ "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 Steps lightning LoRA | 4 | 1.0 |\n"
406
+ ],
407
+ "color": "#432",
408
+ "bgcolor": "#653"
409
+ },
410
+ {
411
+ "id": 75,
412
+ "type": "ImageScaleToTotalPixels",
413
+ "pos": [
414
+ -60,
415
+ 995
416
+ ],
417
+ "size": [
418
+ 270,
419
+ 82
420
+ ],
421
+ "flags": {},
422
+ "order": 12,
423
+ "mode": 0,
424
+ "inputs": [
425
+ {
426
+ "name": "image",
427
+ "type": "IMAGE",
428
+ "link": 140
429
+ }
430
+ ],
431
+ "outputs": [
432
+ {
433
+ "name": "IMAGE",
434
+ "type": "IMAGE",
435
+ "links": [
436
+ 143,
437
+ 183
438
+ ]
439
+ }
440
+ ],
441
+ "properties": {
442
+ "Node name for S&R": "ImageScaleToTotalPixels",
443
+ "cnr_id": "comfy-core",
444
+ "ver": "0.3.51"
445
+ },
446
+ "widgets_values": [
447
+ "area",
448
+ 1.68
449
+ ]
450
+ },
451
+ {
452
+ "id": 107,
453
+ "type": "MarkdownNote",
454
+ "pos": [
455
+ 320,
456
+ 840
457
+ ],
458
+ "size": [
459
+ 410,
460
+ 110
461
+ ],
462
+ "flags": {},
463
+ "order": 8,
464
+ "mode": 0,
465
+ "inputs": [],
466
+ "outputs": [],
467
+ "title": "About Lotus Depth",
468
+ "properties": {},
469
+ "widgets_values": [
470
+ "It's a subgraph. Double-click on the node or click the icon on the top-left to learn how it works.\n\nYou can use any SD1.5 VAE.\n\nOr you can use the canny node if you want to use the canny control.\n"
471
+ ],
472
+ "color": "#432",
473
+ "bgcolor": "#653"
474
+ },
475
+ {
476
+ "id": 72,
477
+ "type": "Canny",
478
+ "pos": [
479
+ 320,
480
+ 1000
481
+ ],
482
+ "size": [
483
+ 240,
484
+ 90
485
+ ],
486
+ "flags": {},
487
+ "order": 9,
488
+ "mode": 4,
489
+ "inputs": [
490
+ {
491
+ "name": "image",
492
+ "type": "IMAGE",
493
+ "link": null
494
+ }
495
+ ],
496
+ "outputs": [
497
+ {
498
+ "name": "IMAGE",
499
+ "type": "IMAGE",
500
+ "links": []
501
+ }
502
+ ],
503
+ "properties": {
504
+ "Node name for S&R": "Canny",
505
+ "cnr_id": "comfy-core",
506
+ "ver": "0.3.51"
507
+ },
508
+ "widgets_values": [
509
+ 0.1,
510
+ 0.2
511
+ ]
512
+ },
513
+ {
514
+ "id": 85,
515
+ "type": "ControlNetApplyAdvanced",
516
+ "pos": [
517
+ 820,
518
+ 620
519
+ ],
520
+ "size": [
521
+ 220,
522
+ 186
523
+ ],
524
+ "flags": {},
525
+ "order": 20,
526
+ "mode": 0,
527
+ "inputs": [
528
+ {
529
+ "name": "positive",
530
+ "type": "CONDITIONING",
531
+ "link": 151
532
+ },
533
+ {
534
+ "name": "negative",
535
+ "type": "CONDITIONING",
536
+ "link": 152
537
+ },
538
+ {
539
+ "name": "control_net",
540
+ "type": "CONTROL_NET",
541
+ "link": 150
542
+ },
543
+ {
544
+ "name": "image",
545
+ "type": "IMAGE",
546
+ "link": 185
547
+ },
548
+ {
549
+ "name": "vae",
550
+ "shape": 7,
551
+ "type": "VAE",
552
+ "link": 153
553
+ }
554
+ ],
555
+ "outputs": [
556
+ {
557
+ "name": "positive",
558
+ "type": "CONDITIONING",
559
+ "links": [
560
+ 154
561
+ ]
562
+ },
563
+ {
564
+ "name": "negative",
565
+ "type": "CONDITIONING",
566
+ "links": [
567
+ 155
568
+ ]
569
+ }
570
+ ],
571
+ "properties": {
572
+ "Node name for S&R": "ControlNetApplyAdvanced",
573
+ "cnr_id": "comfy-core",
574
+ "ver": "0.3.51"
575
+ },
576
+ "widgets_values": [
577
+ 1,
578
+ 0,
579
+ 1
580
+ ]
581
+ },
582
+ {
583
+ "id": 106,
584
+ "type": "ef3b4b73-ce32-4a60-a60e-d7f278bf6b14",
585
+ "pos": [
586
+ 320,
587
+ 640
588
+ ],
589
+ "size": [
590
+ 400,
591
+ 150
592
+ ],
593
+ "flags": {},
594
+ "order": 17,
595
+ "mode": 0,
596
+ "inputs": [
597
+ {
598
+ "name": "pixels",
599
+ "type": "IMAGE",
600
+ "link": 183
601
+ }
602
+ ],
603
+ "outputs": [
604
+ {
605
+ "name": "IMAGE",
606
+ "type": "IMAGE",
607
+ "links": [
608
+ 184,
609
+ 185
610
+ ]
611
+ }
612
+ ],
613
+ "properties": {
614
+ "cnr_id": "comfy-core",
615
+ "ver": "0.3.51"
616
+ },
617
+ "widgets_values": [
618
+ "vae-ft-mse-840000-ema-pruned.safetensors",
619
+ "lotus-depth-d-v1-1.safetensors",
620
+ 10000,
621
+ "euler"
622
+ ]
623
+ },
624
+ {
625
+ "id": 76,
626
+ "type": "VAEEncode",
627
+ "pos": [
628
+ 880,
629
+ 290
630
+ ],
631
+ "size": [
632
+ 140,
633
+ 46
634
+ ],
635
+ "flags": {
636
+ "collapsed": false
637
+ },
638
+ "order": 16,
639
+ "mode": 0,
640
+ "inputs": [
641
+ {
642
+ "name": "pixels",
643
+ "type": "IMAGE",
644
+ "link": 143
645
+ },
646
+ {
647
+ "name": "vae",
648
+ "type": "VAE",
649
+ "link": 144
650
+ }
651
+ ],
652
+ "outputs": [
653
+ {
654
+ "name": "LATENT",
655
+ "type": "LATENT",
656
+ "links": [
657
+ 142
658
+ ]
659
+ }
660
+ ],
661
+ "properties": {
662
+ "Node name for S&R": "VAEEncode",
663
+ "cnr_id": "comfy-core",
664
+ "ver": "0.3.51"
665
+ },
666
+ "widgets_values": []
667
+ },
668
+ {
669
+ "id": 73,
670
+ "type": "PreviewImage",
671
+ "pos": [
672
+ 820,
673
+ 850
674
+ ],
675
+ "size": [
676
+ 230,
677
+ 258
678
+ ],
679
+ "flags": {},
680
+ "order": 19,
681
+ "mode": 0,
682
+ "inputs": [
683
+ {
684
+ "name": "images",
685
+ "type": "IMAGE",
686
+ "link": 184
687
+ }
688
+ ],
689
+ "outputs": [],
690
+ "properties": {
691
+ "Node name for S&R": "PreviewImage",
692
+ "cnr_id": "comfy-core",
693
+ "ver": "0.3.51"
694
+ },
695
+ "widgets_values": []
696
+ },
697
+ {
698
+ "id": 80,
699
+ "type": "LoraLoaderModelOnly",
700
+ "pos": [
701
+ 320,
702
+ -10
703
+ ],
704
+ "size": [
705
+ 430,
706
+ 82
707
+ ],
708
+ "flags": {},
709
+ "order": 15,
710
+ "mode": 4,
711
+ "inputs": [
712
+ {
713
+ "name": "model",
714
+ "type": "MODEL",
715
+ "link": 145
716
+ }
717
+ ],
718
+ "outputs": [
719
+ {
720
+ "name": "MODEL",
721
+ "type": "MODEL",
722
+ "links": [
723
+ 149
724
+ ]
725
+ }
726
+ ],
727
+ "properties": {
728
+ "Node name for S&R": "LoraLoaderModelOnly",
729
+ "cnr_id": "comfy-core",
730
+ "ver": "0.3.51",
731
+ "models": [
732
+ {
733
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
734
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
735
+ "directory": "loras"
736
+ }
737
+ ]
738
+ },
739
+ "widgets_values": [
740
+ "Qwen-Image-Lightning-4steps-V1.0.safetensors",
741
+ 1
742
+ ]
743
+ },
744
+ {
745
+ "id": 3,
746
+ "type": "KSampler",
747
+ "pos": [
748
+ 1150,
749
+ 0
750
+ ],
751
+ "size": [
752
+ 310,
753
+ 430
754
+ ],
755
+ "flags": {},
756
+ "order": 21,
757
+ "mode": 0,
758
+ "inputs": [
759
+ {
760
+ "name": "model",
761
+ "type": "MODEL",
762
+ "link": 156
763
+ },
764
+ {
765
+ "name": "positive",
766
+ "type": "CONDITIONING",
767
+ "link": 154
768
+ },
769
+ {
770
+ "name": "negative",
771
+ "type": "CONDITIONING",
772
+ "link": 155
773
+ },
774
+ {
775
+ "name": "latent_image",
776
+ "type": "LATENT",
777
+ "link": 142
778
+ }
779
+ ],
780
+ "outputs": [
781
+ {
782
+ "name": "LATENT",
783
+ "type": "LATENT",
784
+ "slot_index": 0,
785
+ "links": [
786
+ 128
787
+ ]
788
+ }
789
+ ],
790
+ "properties": {
791
+ "Node name for S&R": "KSampler",
792
+ "cnr_id": "comfy-core",
793
+ "ver": "0.3.51"
794
+ },
795
+ "widgets_values": [
796
+ 452179129219851,
797
+ "randomize",
798
+ 20,
799
+ 2.5,
800
+ "euler",
801
+ "simple",
802
+ 1
803
+ ]
804
+ },
805
+ {
806
+ "id": 60,
807
+ "type": "SaveImage",
808
+ "pos": [
809
+ 1510,
810
+ 0
811
+ ],
812
+ "size": [
813
+ 970,
814
+ 1030
815
+ ],
816
+ "flags": {},
817
+ "order": 23,
818
+ "mode": 0,
819
+ "inputs": [
820
+ {
821
+ "name": "images",
822
+ "type": "IMAGE",
823
+ "link": 110
824
+ }
825
+ ],
826
+ "outputs": [],
827
+ "properties": {
828
+ "cnr_id": "comfy-core",
829
+ "ver": "0.3.51"
830
+ },
831
+ "widgets_values": [
832
+ "ComfyUI"
833
+ ]
834
+ },
835
+ {
836
+ "id": 78,
837
+ "type": "MarkdownNote",
838
+ "pos": [
839
+ -690,
840
+ -50
841
+ ],
842
+ "size": [
843
+ 540,
844
+ 630
845
+ ],
846
+ "flags": {},
847
+ "order": 10,
848
+ "mode": 0,
849
+ "inputs": [],
850
+ "outputs": [],
851
+ "title": "Model links",
852
+ "properties": {
853
+ "widget_ue_connectable": {}
854
+ },
855
+ "widgets_values": [
856
+ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**ControlNet**\n\n- [Qwen-Image-InstantX-ControlNet-Union.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-InstantX-ControlNets/resolve/main/split_files/controlnet/Qwen-Image-InstantX-ControlNet-Union.safetensors)\n\n\n**LoRA**\n\n- [Qwen-Image-Lightning-4steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 controlnet/ \n│ │ └── Qwen-Image-InstantX-ControlNet-Union.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
857
+ ],
858
+ "color": "#432",
859
+ "bgcolor": "#653"
860
+ },
861
+ {
862
+ "id": 105,
863
+ "type": "MarkdownNote",
864
+ "pos": [
865
+ -680,
866
+ 630
867
+ ],
868
+ "size": [
869
+ 530,
870
+ 250
871
+ ],
872
+ "flags": {},
873
+ "order": 11,
874
+ "mode": 0,
875
+ "inputs": [],
876
+ "outputs": [],
877
+ "title": "Lotus Depth",
878
+ "properties": {},
879
+ "widgets_values": [
880
+ "**Diffusion Model**\n\nDownload [lotus-depth-d-v1-1.safetensors](https://huggingface.co/Comfy-Org/lotus/resolve/main/lotus-depth-d-v1-1.safetensors) \n and place it in **ComfyUI/models/diffusion_models**\n\n**VAE Model**\n\nDownload [vae-ft-mse-840000-ema-pruned.safetensors](https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors) and place it in **ComfyUI/models/vae** or you can use any SD1.5 VAE if you prefer.\n\n\n```\nComfyUI/\n├── models/\n│ ├── diffusion_models/\n│ │ └─── lotus-depth-d-v1-1.safetensors\n│ └── vae/\n│ └── vae-ft-mse-840000-ema-pruned.safetensors\n```\n\n"
881
+ ],
882
+ "color": "#432",
883
+ "bgcolor": "#653"
884
+ },
885
+ {
886
+ "id": 6,
887
+ "type": "CLIPTextEncode",
888
+ "pos": [
889
+ 300,
890
+ 170
891
+ ],
892
+ "size": [
893
+ 460,
894
+ 164.31304931640625
895
+ ],
896
+ "flags": {},
897
+ "order": 13,
898
+ "mode": 0,
899
+ "inputs": [
900
+ {
901
+ "name": "clip",
902
+ "type": "CLIP",
903
+ "link": 74
904
+ }
905
+ ],
906
+ "outputs": [
907
+ {
908
+ "name": "CONDITIONING",
909
+ "type": "CONDITIONING",
910
+ "slot_index": 0,
911
+ "links": [
912
+ 151
913
+ ]
914
+ }
915
+ ],
916
+ "title": "CLIP Text Encode (Positive Prompt)",
917
+ "properties": {
918
+ "Node name for S&R": "CLIPTextEncode",
919
+ "cnr_id": "comfy-core",
920
+ "ver": "0.3.51"
921
+ },
922
+ "widgets_values": [
923
+ "Post-apocalyptic style clothing, long wavy hair, rough texture, exotic woman, tattered coarse-woven linen fabric, wearing a hood, mechanical aesthetics, mainly in dark gray tones, low-saturation earthy yellow, sense of impact and rebellion, doomsday aesthetics, grotesque aesthetics, works of art, backlighting, film photography, professional photography works, clear visible face, emotional and atmospheric dynamic photography, Fujichrome color positive film, shot with a 17mm Hasselblad ultra-wide-angle lens, f/1.2 large aperture, side backlighting, artistic light, hair light, Rembrandt light, 8K high-definition image quality, delicate real human skin texture."
924
+ ],
925
+ "color": "#232",
926
+ "bgcolor": "#353"
927
+ }
928
+ ],
929
+ "links": [
930
+ [
931
+ 74,
932
+ 38,
933
+ 0,
934
+ 6,
935
+ 0,
936
+ "CLIP"
937
+ ],
938
+ [
939
+ 75,
940
+ 38,
941
+ 0,
942
+ 7,
943
+ 0,
944
+ "CLIP"
945
+ ],
946
+ [
947
+ 76,
948
+ 39,
949
+ 0,
950
+ 8,
951
+ 1,
952
+ "VAE"
953
+ ],
954
+ [
955
+ 110,
956
+ 8,
957
+ 0,
958
+ 60,
959
+ 0,
960
+ "IMAGE"
961
+ ],
962
+ [
963
+ 128,
964
+ 3,
965
+ 0,
966
+ 8,
967
+ 0,
968
+ "LATENT"
969
+ ],
970
+ [
971
+ 140,
972
+ 71,
973
+ 0,
974
+ 75,
975
+ 0,
976
+ "IMAGE"
977
+ ],
978
+ [
979
+ 142,
980
+ 76,
981
+ 0,
982
+ 3,
983
+ 3,
984
+ "LATENT"
985
+ ],
986
+ [
987
+ 143,
988
+ 75,
989
+ 0,
990
+ 76,
991
+ 0,
992
+ "IMAGE"
993
+ ],
994
+ [
995
+ 144,
996
+ 39,
997
+ 0,
998
+ 76,
999
+ 1,
1000
+ "VAE"
1001
+ ],
1002
+ [
1003
+ 145,
1004
+ 37,
1005
+ 0,
1006
+ 80,
1007
+ 0,
1008
+ "MODEL"
1009
+ ],
1010
+ [
1011
+ 149,
1012
+ 80,
1013
+ 0,
1014
+ 66,
1015
+ 0,
1016
+ "MODEL"
1017
+ ],
1018
+ [
1019
+ 150,
1020
+ 84,
1021
+ 0,
1022
+ 85,
1023
+ 2,
1024
+ "CONTROL_NET"
1025
+ ],
1026
+ [
1027
+ 151,
1028
+ 6,
1029
+ 0,
1030
+ 85,
1031
+ 0,
1032
+ "CONDITIONING"
1033
+ ],
1034
+ [
1035
+ 152,
1036
+ 7,
1037
+ 0,
1038
+ 85,
1039
+ 1,
1040
+ "CONDITIONING"
1041
+ ],
1042
+ [
1043
+ 153,
1044
+ 39,
1045
+ 0,
1046
+ 85,
1047
+ 4,
1048
+ "VAE"
1049
+ ],
1050
+ [
1051
+ 154,
1052
+ 85,
1053
+ 0,
1054
+ 3,
1055
+ 1,
1056
+ "CONDITIONING"
1057
+ ],
1058
+ [
1059
+ 155,
1060
+ 85,
1061
+ 1,
1062
+ 3,
1063
+ 2,
1064
+ "CONDITIONING"
1065
+ ],
1066
+ [
1067
+ 156,
1068
+ 66,
1069
+ 0,
1070
+ 3,
1071
+ 0,
1072
+ "MODEL"
1073
+ ],
1074
+ [
1075
+ 183,
1076
+ 75,
1077
+ 0,
1078
+ 106,
1079
+ 0,
1080
+ "IMAGE"
1081
+ ],
1082
+ [
1083
+ 184,
1084
+ 106,
1085
+ 0,
1086
+ 73,
1087
+ 0,
1088
+ "IMAGE"
1089
+ ],
1090
+ [
1091
+ 185,
1092
+ 106,
1093
+ 0,
1094
+ 85,
1095
+ 3,
1096
+ "IMAGE"
1097
+ ]
1098
+ ],
1099
+ "groups": [
1100
+ {
1101
+ "id": 1,
1102
+ "title": "Step 1 - Upload models",
1103
+ "bounding": [
1104
+ -130,
1105
+ -80,
1106
+ 400,
1107
+ 610
1108
+ ],
1109
+ "color": "#3f789e",
1110
+ "font_size": 24,
1111
+ "flags": {}
1112
+ },
1113
+ {
1114
+ "id": 2,
1115
+ "title": "Step 2 - Upload reference image",
1116
+ "bounding": [
1117
+ -130,
1118
+ 550,
1119
+ 400,
1120
+ 550
1121
+ ],
1122
+ "color": "#3f789e",
1123
+ "font_size": 24,
1124
+ "flags": {}
1125
+ },
1126
+ {
1127
+ "id": 3,
1128
+ "title": "Image processing and applying ControlNet",
1129
+ "bounding": [
1130
+ 290,
1131
+ 550,
1132
+ 810,
1133
+ 550
1134
+ ],
1135
+ "color": "#3f789e",
1136
+ "font_size": 24,
1137
+ "flags": {}
1138
+ },
1139
+ {
1140
+ "id": 4,
1141
+ "title": "Step 3 - Prompt",
1142
+ "bounding": [
1143
+ 290,
1144
+ 100,
1145
+ 490,
1146
+ 430
1147
+ ],
1148
+ "color": "#3f789e",
1149
+ "font_size": 24,
1150
+ "flags": {}
1151
+ },
1152
+ {
1153
+ "id": 5,
1154
+ "title": "4 steps lightning LoRA",
1155
+ "bounding": [
1156
+ 290,
1157
+ -80,
1158
+ 490,
1159
+ 160
1160
+ ],
1161
+ "color": "#3f789e",
1162
+ "font_size": 24,
1163
+ "flags": {}
1164
+ }
1165
+ ],
1166
+ "definitions": {
1167
+ "subgraphs": [
1168
+ {
1169
+ "id": "ef3b4b73-ce32-4a60-a60e-d7f278bf6b14",
1170
+ "version": 1,
1171
+ "state": {
1172
+ "lastGroupId": 6,
1173
+ "lastNodeId": 105,
1174
+ "lastLinkId": 186,
1175
+ "lastRerouteId": 0
1176
+ },
1177
+ "revision": 0,
1178
+ "config": {},
1179
+ "name": "Lotus Depth",
1180
+ "inputNode": {
1181
+ "id": -10,
1182
+ "bounding": [
1183
+ -420,
1184
+ 1753,
1185
+ 120,
1186
+ 140
1187
+ ]
1188
+ },
1189
+ "outputNode": {
1190
+ "id": -20,
1191
+ "bounding": [
1192
+ 1045.2000122070312,
1193
+ 1753,
1194
+ 120,
1195
+ 60
1196
+ ]
1197
+ },
1198
+ "inputs": [
1199
+ {
1200
+ "id": "b946d075-34dd-429b-bbf4-46aa4c693ce1",
1201
+ "name": "pixels",
1202
+ "type": "IMAGE",
1203
+ "linkIds": [
1204
+ 175
1205
+ ],
1206
+ "localized_name": "pixels",
1207
+ "pos": {
1208
+ "0": -320,
1209
+ "1": 1773
1210
+ }
1211
+ },
1212
+ {
1213
+ "id": "25d3c1a2-cf9a-4785-8032-4cd13e275b02",
1214
+ "name": "vae_name",
1215
+ "type": "COMBO",
1216
+ "linkIds": [
1217
+ 183
1218
+ ],
1219
+ "pos": {
1220
+ "0": -320,
1221
+ "1": 1793
1222
+ }
1223
+ },
1224
+ {
1225
+ "id": "51418d0c-a7be-46cb-b995-aefc0c41133a",
1226
+ "name": "unet_name",
1227
+ "type": "COMBO",
1228
+ "linkIds": [
1229
+ 184
1230
+ ],
1231
+ "pos": {
1232
+ "0": -320,
1233
+ "1": 1813
1234
+ }
1235
+ },
1236
+ {
1237
+ "id": "e1521e97-d7b7-4b40-8d2c-37aa4938a67a",
1238
+ "name": "sigma",
1239
+ "type": "FLOAT",
1240
+ "linkIds": [
1241
+ 185
1242
+ ],
1243
+ "pos": {
1244
+ "0": -320,
1245
+ "1": 1833
1246
+ }
1247
+ },
1248
+ {
1249
+ "id": "4599e2fd-50fe-428b-88eb-f45862e22153",
1250
+ "name": "sampler_name",
1251
+ "type": "COMBO",
1252
+ "linkIds": [
1253
+ 186
1254
+ ],
1255
+ "pos": {
1256
+ "0": -320,
1257
+ "1": 1853
1258
+ }
1259
+ }
1260
+ ],
1261
+ "outputs": [
1262
+ {
1263
+ "id": "6889d480-8282-4e52-935b-952a148b23b7",
1264
+ "name": "IMAGE",
1265
+ "type": "IMAGE",
1266
+ "linkIds": [
1267
+ 179,
1268
+ 182
1269
+ ],
1270
+ "localized_name": "IMAGE",
1271
+ "pos": {
1272
+ "0": 1065.199951171875,
1273
+ "1": 1773
1274
+ }
1275
+ }
1276
+ ],
1277
+ "widgets": [],
1278
+ "nodes": [
1279
+ {
1280
+ "id": 92,
1281
+ "type": "SamplerCustomAdvanced",
1282
+ "pos": [
1283
+ 630,
1284
+ 1570
1285
+ ],
1286
+ "size": [
1287
+ 355.20001220703125,
1288
+ 326
1289
+ ],
1290
+ "flags": {},
1291
+ "order": 4,
1292
+ "mode": 0,
1293
+ "inputs": [
1294
+ {
1295
+ "localized_name": "noise",
1296
+ "name": "noise",
1297
+ "type": "NOISE",
1298
+ "link": 164
1299
+ },
1300
+ {
1301
+ "localized_name": "guider",
1302
+ "name": "guider",
1303
+ "type": "GUIDER",
1304
+ "link": 165
1305
+ },
1306
+ {
1307
+ "localized_name": "sampler",
1308
+ "name": "sampler",
1309
+ "type": "SAMPLER",
1310
+ "link": 166
1311
+ },
1312
+ {
1313
+ "localized_name": "sigmas",
1314
+ "name": "sigmas",
1315
+ "type": "SIGMAS",
1316
+ "link": 167
1317
+ },
1318
+ {
1319
+ "localized_name": "latent_image",
1320
+ "name": "latent_image",
1321
+ "type": "LATENT",
1322
+ "link": 168
1323
+ }
1324
+ ],
1325
+ "outputs": [
1326
+ {
1327
+ "localized_name": "output",
1328
+ "name": "output",
1329
+ "type": "LATENT",
1330
+ "slot_index": 0,
1331
+ "links": [
1332
+ 171
1333
+ ]
1334
+ },
1335
+ {
1336
+ "localized_name": "denoised_output",
1337
+ "name": "denoised_output",
1338
+ "type": "LATENT",
1339
+ "slot_index": 1,
1340
+ "links": []
1341
+ }
1342
+ ],
1343
+ "properties": {
1344
+ "Node name for S&R": "SamplerCustomAdvanced",
1345
+ "cnr_id": "comfy-core",
1346
+ "ver": "0.3.34",
1347
+ "widget_ue_connectable": {}
1348
+ },
1349
+ "widgets_values": []
1350
+ },
1351
+ {
1352
+ "id": 94,
1353
+ "type": "BasicGuider",
1354
+ "pos": [
1355
+ 370,
1356
+ 1640
1357
+ ],
1358
+ "size": [
1359
+ 210,
1360
+ 46
1361
+ ],
1362
+ "flags": {},
1363
+ "order": 5,
1364
+ "mode": 0,
1365
+ "inputs": [
1366
+ {
1367
+ "localized_name": "model",
1368
+ "name": "model",
1369
+ "type": "MODEL",
1370
+ "link": 173
1371
+ },
1372
+ {
1373
+ "localized_name": "conditioning",
1374
+ "name": "conditioning",
1375
+ "type": "CONDITIONING",
1376
+ "link": 169
1377
+ }
1378
+ ],
1379
+ "outputs": [
1380
+ {
1381
+ "localized_name": "GUIDER",
1382
+ "name": "GUIDER",
1383
+ "type": "GUIDER",
1384
+ "slot_index": 0,
1385
+ "links": [
1386
+ 165
1387
+ ]
1388
+ }
1389
+ ],
1390
+ "properties": {
1391
+ "Node name for S&R": "BasicGuider",
1392
+ "cnr_id": "comfy-core",
1393
+ "ver": "0.3.34",
1394
+ "widget_ue_connectable": {}
1395
+ },
1396
+ "widgets_values": []
1397
+ },
1398
+ {
1399
+ "id": 95,
1400
+ "type": "BasicScheduler",
1401
+ "pos": [
1402
+ 120,
1403
+ 1740
1404
+ ],
1405
+ "size": [
1406
+ 210,
1407
+ 106
1408
+ ],
1409
+ "flags": {},
1410
+ "order": 6,
1411
+ "mode": 0,
1412
+ "inputs": [
1413
+ {
1414
+ "localized_name": "model",
1415
+ "name": "model",
1416
+ "type": "MODEL",
1417
+ "link": 174
1418
+ }
1419
+ ],
1420
+ "outputs": [
1421
+ {
1422
+ "localized_name": "SIGMAS",
1423
+ "name": "SIGMAS",
1424
+ "type": "SIGMAS",
1425
+ "slot_index": 0,
1426
+ "links": [
1427
+ 170
1428
+ ]
1429
+ }
1430
+ ],
1431
+ "properties": {
1432
+ "Node name for S&R": "BasicScheduler",
1433
+ "cnr_id": "comfy-core",
1434
+ "ver": "0.3.34",
1435
+ "widget_ue_connectable": {}
1436
+ },
1437
+ "widgets_values": [
1438
+ "normal",
1439
+ 1,
1440
+ 1
1441
+ ]
1442
+ },
1443
+ {
1444
+ "id": 96,
1445
+ "type": "LotusConditioning",
1446
+ "pos": [
1447
+ 130,
1448
+ 1660
1449
+ ],
1450
+ "size": [
1451
+ 210,
1452
+ 26
1453
+ ],
1454
+ "flags": {},
1455
+ "order": 0,
1456
+ "mode": 0,
1457
+ "inputs": [],
1458
+ "outputs": [
1459
+ {
1460
+ "localized_name": "conditioning",
1461
+ "name": "conditioning",
1462
+ "type": "CONDITIONING",
1463
+ "slot_index": 0,
1464
+ "links": [
1465
+ 169
1466
+ ]
1467
+ }
1468
+ ],
1469
+ "properties": {
1470
+ "Node name for S&R": "LotusConditioning",
1471
+ "cnr_id": "comfy-core",
1472
+ "ver": "0.3.34",
1473
+ "widget_ue_connectable": {}
1474
+ },
1475
+ "widgets_values": []
1476
+ },
1477
+ {
1478
+ "id": 97,
1479
+ "type": "VAEEncode",
1480
+ "pos": [
1481
+ 130,
1482
+ 1900
1483
+ ],
1484
+ "size": [
1485
+ 210,
1486
+ 46
1487
+ ],
1488
+ "flags": {},
1489
+ "order": 7,
1490
+ "mode": 0,
1491
+ "inputs": [
1492
+ {
1493
+ "localized_name": "pixels",
1494
+ "name": "pixels",
1495
+ "type": "IMAGE",
1496
+ "link": 175
1497
+ },
1498
+ {
1499
+ "localized_name": "vae",
1500
+ "name": "vae",
1501
+ "type": "VAE",
1502
+ "link": 176
1503
+ }
1504
+ ],
1505
+ "outputs": [
1506
+ {
1507
+ "localized_name": "LATENT",
1508
+ "name": "LATENT",
1509
+ "type": "LATENT",
1510
+ "slot_index": 0,
1511
+ "links": [
1512
+ 168
1513
+ ]
1514
+ }
1515
+ ],
1516
+ "properties": {
1517
+ "Node name for S&R": "VAEEncode",
1518
+ "cnr_id": "comfy-core",
1519
+ "ver": "0.3.34",
1520
+ "widget_ue_connectable": {}
1521
+ },
1522
+ "widgets_values": []
1523
+ },
1524
+ {
1525
+ "id": 93,
1526
+ "type": "DisableNoise",
1527
+ "pos": [
1528
+ 370,
1529
+ 1570
1530
+ ],
1531
+ "size": [
1532
+ 210,
1533
+ 26
1534
+ ],
1535
+ "flags": {},
1536
+ "order": 1,
1537
+ "mode": 0,
1538
+ "inputs": [],
1539
+ "outputs": [
1540
+ {
1541
+ "localized_name": "NOISE",
1542
+ "name": "NOISE",
1543
+ "type": "NOISE",
1544
+ "slot_index": 0,
1545
+ "links": [
1546
+ 164
1547
+ ]
1548
+ }
1549
+ ],
1550
+ "properties": {
1551
+ "Node name for S&R": "DisableNoise",
1552
+ "cnr_id": "comfy-core",
1553
+ "ver": "0.3.34",
1554
+ "widget_ue_connectable": {}
1555
+ },
1556
+ "widgets_values": []
1557
+ },
1558
+ {
1559
+ "id": 98,
1560
+ "type": "VAEDecode",
1561
+ "pos": [
1562
+ 750,
1563
+ 1940
1564
+ ],
1565
+ "size": [
1566
+ 210,
1567
+ 46
1568
+ ],
1569
+ "flags": {},
1570
+ "order": 8,
1571
+ "mode": 0,
1572
+ "inputs": [
1573
+ {
1574
+ "localized_name": "samples",
1575
+ "name": "samples",
1576
+ "type": "LATENT",
1577
+ "link": 171
1578
+ },
1579
+ {
1580
+ "localized_name": "vae",
1581
+ "name": "vae",
1582
+ "type": "VAE",
1583
+ "link": 177
1584
+ }
1585
+ ],
1586
+ "outputs": [
1587
+ {
1588
+ "localized_name": "IMAGE",
1589
+ "name": "IMAGE",
1590
+ "type": "IMAGE",
1591
+ "slot_index": 0,
1592
+ "links": [
1593
+ 172
1594
+ ]
1595
+ }
1596
+ ],
1597
+ "properties": {
1598
+ "Node name for S&R": "VAEDecode",
1599
+ "cnr_id": "comfy-core",
1600
+ "ver": "0.3.34",
1601
+ "widget_ue_connectable": {}
1602
+ },
1603
+ "widgets_values": []
1604
+ },
1605
+ {
1606
+ "id": 99,
1607
+ "type": "ImageInvert",
1608
+ "pos": [
1609
+ 760,
1610
+ 2040
1611
+ ],
1612
+ "size": [
1613
+ 210,
1614
+ 26
1615
+ ],
1616
+ "flags": {},
1617
+ "order": 9,
1618
+ "mode": 0,
1619
+ "inputs": [
1620
+ {
1621
+ "localized_name": "image",
1622
+ "name": "image",
1623
+ "type": "IMAGE",
1624
+ "link": 172
1625
+ }
1626
+ ],
1627
+ "outputs": [
1628
+ {
1629
+ "localized_name": "IMAGE",
1630
+ "name": "IMAGE",
1631
+ "type": "IMAGE",
1632
+ "slot_index": 0,
1633
+ "links": [
1634
+ 179,
1635
+ 182
1636
+ ]
1637
+ }
1638
+ ],
1639
+ "properties": {
1640
+ "Node name for S&R": "ImageInvert",
1641
+ "cnr_id": "comfy-core",
1642
+ "ver": "0.3.34",
1643
+ "widget_ue_connectable": {}
1644
+ },
1645
+ "widgets_values": []
1646
+ },
1647
+ {
1648
+ "id": 89,
1649
+ "type": "VAELoader",
1650
+ "pos": [
1651
+ -230,
1652
+ 1570
1653
+ ],
1654
+ "size": [
1655
+ 305.93701171875,
1656
+ 58
1657
+ ],
1658
+ "flags": {},
1659
+ "order": 3,
1660
+ "mode": 0,
1661
+ "inputs": [
1662
+ {
1663
+ "localized_name": "vae_name",
1664
+ "name": "vae_name",
1665
+ "type": "COMBO",
1666
+ "widget": {
1667
+ "name": "vae_name"
1668
+ },
1669
+ "link": 183
1670
+ }
1671
+ ],
1672
+ "outputs": [
1673
+ {
1674
+ "localized_name": "VAE",
1675
+ "name": "VAE",
1676
+ "type": "VAE",
1677
+ "slot_index": 0,
1678
+ "links": [
1679
+ 176,
1680
+ 177
1681
+ ]
1682
+ }
1683
+ ],
1684
+ "properties": {
1685
+ "Node name for S&R": "VAELoader",
1686
+ "cnr_id": "comfy-core",
1687
+ "ver": "0.3.34",
1688
+ "models": [
1689
+ {
1690
+ "name": "vae-ft-mse-840000-ema-pruned.safetensors",
1691
+ "url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors",
1692
+ "directory": "vae"
1693
+ }
1694
+ ],
1695
+ "widget_ue_connectable": {}
1696
+ },
1697
+ "widgets_values": [
1698
+ "vae-ft-mse-840000-ema-pruned.safetensors"
1699
+ ]
1700
+ },
1701
+ {
1702
+ "id": 87,
1703
+ "type": "UNETLoader",
1704
+ "pos": [
1705
+ -230,
1706
+ 1690
1707
+ ],
1708
+ "size": [
1709
+ 305.93701171875,
1710
+ 82
1711
+ ],
1712
+ "flags": {},
1713
+ "order": 2,
1714
+ "mode": 0,
1715
+ "inputs": [
1716
+ {
1717
+ "localized_name": "unet_name",
1718
+ "name": "unet_name",
1719
+ "type": "COMBO",
1720
+ "widget": {
1721
+ "name": "unet_name"
1722
+ },
1723
+ "link": 184
1724
+ }
1725
+ ],
1726
+ "outputs": [
1727
+ {
1728
+ "localized_name": "MODEL",
1729
+ "name": "MODEL",
1730
+ "type": "MODEL",
1731
+ "slot_index": 0,
1732
+ "links": [
1733
+ 173,
1734
+ 174
1735
+ ]
1736
+ }
1737
+ ],
1738
+ "properties": {
1739
+ "Node name for S&R": "UNETLoader",
1740
+ "cnr_id": "comfy-core",
1741
+ "ver": "0.3.34",
1742
+ "models": [
1743
+ {
1744
+ "name": "lotus-depth-d-v1-1.safetensors",
1745
+ "url": "https://huggingface.co/Comfy-Org/lotus/resolve/main/lotus-depth-d-v1-1.safetensors",
1746
+ "directory": "diffusion_models"
1747
+ }
1748
+ ],
1749
+ "widget_ue_connectable": {}
1750
+ },
1751
+ "widgets_values": [
1752
+ "lotus-depth-d-v1-1.safetensors",
1753
+ "default"
1754
+ ]
1755
+ },
1756
+ {
1757
+ "id": 101,
1758
+ "type": "SetFirstSigma",
1759
+ "pos": [
1760
+ 370,
1761
+ 1830
1762
+ ],
1763
+ "size": [
1764
+ 210,
1765
+ 58
1766
+ ],
1767
+ "flags": {},
1768
+ "order": 11,
1769
+ "mode": 0,
1770
+ "inputs": [
1771
+ {
1772
+ "localized_name": "sigmas",
1773
+ "name": "sigmas",
1774
+ "type": "SIGMAS",
1775
+ "link": 170
1776
+ },
1777
+ {
1778
+ "localized_name": "sigma",
1779
+ "name": "sigma",
1780
+ "type": "FLOAT",
1781
+ "widget": {
1782
+ "name": "sigma"
1783
+ },
1784
+ "link": 185
1785
+ }
1786
+ ],
1787
+ "outputs": [
1788
+ {
1789
+ "localized_name": "SIGMAS",
1790
+ "name": "SIGMAS",
1791
+ "type": "SIGMAS",
1792
+ "slot_index": 0,
1793
+ "links": [
1794
+ 167
1795
+ ]
1796
+ }
1797
+ ],
1798
+ "properties": {
1799
+ "Node name for S&R": "SetFirstSigma",
1800
+ "cnr_id": "comfy-core",
1801
+ "ver": "0.3.34",
1802
+ "widget_ue_connectable": {}
1803
+ },
1804
+ "widgets_values": [
1805
+ 10000
1806
+ ]
1807
+ },
1808
+ {
1809
+ "id": 100,
1810
+ "type": "KSamplerSelect",
1811
+ "pos": [
1812
+ 370,
1813
+ 1730
1814
+ ],
1815
+ "size": [
1816
+ 210,
1817
+ 58
1818
+ ],
1819
+ "flags": {},
1820
+ "order": 10,
1821
+ "mode": 0,
1822
+ "inputs": [
1823
+ {
1824
+ "localized_name": "sampler_name",
1825
+ "name": "sampler_name",
1826
+ "type": "COMBO",
1827
+ "widget": {
1828
+ "name": "sampler_name"
1829
+ },
1830
+ "link": 186
1831
+ }
1832
+ ],
1833
+ "outputs": [
1834
+ {
1835
+ "localized_name": "SAMPLER",
1836
+ "name": "SAMPLER",
1837
+ "type": "SAMPLER",
1838
+ "slot_index": 0,
1839
+ "links": [
1840
+ 166
1841
+ ]
1842
+ }
1843
+ ],
1844
+ "properties": {
1845
+ "Node name for S&R": "KSamplerSelect",
1846
+ "cnr_id": "comfy-core",
1847
+ "ver": "0.3.34",
1848
+ "widget_ue_connectable": {}
1849
+ },
1850
+ "widgets_values": [
1851
+ "euler"
1852
+ ]
1853
+ }
1854
+ ],
1855
+ "groups": [
1856
+ {
1857
+ "id": 6,
1858
+ "title": "Load models",
1859
+ "bounding": [
1860
+ -240,
1861
+ 1500,
1862
+ 325.93701171875,
1863
+ 285.6000061035156
1864
+ ],
1865
+ "color": "#3f789e",
1866
+ "font_size": 24,
1867
+ "flags": {}
1868
+ }
1869
+ ],
1870
+ "links": [
1871
+ {
1872
+ "id": 164,
1873
+ "origin_id": 93,
1874
+ "origin_slot": 0,
1875
+ "target_id": 92,
1876
+ "target_slot": 0,
1877
+ "type": "NOISE"
1878
+ },
1879
+ {
1880
+ "id": 165,
1881
+ "origin_id": 94,
1882
+ "origin_slot": 0,
1883
+ "target_id": 92,
1884
+ "target_slot": 1,
1885
+ "type": "GUIDER"
1886
+ },
1887
+ {
1888
+ "id": 166,
1889
+ "origin_id": 100,
1890
+ "origin_slot": 0,
1891
+ "target_id": 92,
1892
+ "target_slot": 2,
1893
+ "type": "SAMPLER"
1894
+ },
1895
+ {
1896
+ "id": 167,
1897
+ "origin_id": 101,
1898
+ "origin_slot": 0,
1899
+ "target_id": 92,
1900
+ "target_slot": 3,
1901
+ "type": "SIGMAS"
1902
+ },
1903
+ {
1904
+ "id": 168,
1905
+ "origin_id": 97,
1906
+ "origin_slot": 0,
1907
+ "target_id": 92,
1908
+ "target_slot": 4,
1909
+ "type": "LATENT"
1910
+ },
1911
+ {
1912
+ "id": 173,
1913
+ "origin_id": 87,
1914
+ "origin_slot": 0,
1915
+ "target_id": 94,
1916
+ "target_slot": 0,
1917
+ "type": "MODEL"
1918
+ },
1919
+ {
1920
+ "id": 169,
1921
+ "origin_id": 96,
1922
+ "origin_slot": 0,
1923
+ "target_id": 94,
1924
+ "target_slot": 1,
1925
+ "type": "CONDITIONING"
1926
+ },
1927
+ {
1928
+ "id": 174,
1929
+ "origin_id": 87,
1930
+ "origin_slot": 0,
1931
+ "target_id": 95,
1932
+ "target_slot": 0,
1933
+ "type": "MODEL"
1934
+ },
1935
+ {
1936
+ "id": 176,
1937
+ "origin_id": 89,
1938
+ "origin_slot": 0,
1939
+ "target_id": 97,
1940
+ "target_slot": 1,
1941
+ "type": "VAE"
1942
+ },
1943
+ {
1944
+ "id": 170,
1945
+ "origin_id": 95,
1946
+ "origin_slot": 0,
1947
+ "target_id": 101,
1948
+ "target_slot": 0,
1949
+ "type": "SIGMAS"
1950
+ },
1951
+ {
1952
+ "id": 171,
1953
+ "origin_id": 92,
1954
+ "origin_slot": 0,
1955
+ "target_id": 98,
1956
+ "target_slot": 0,
1957
+ "type": "LATENT"
1958
+ },
1959
+ {
1960
+ "id": 177,
1961
+ "origin_id": 89,
1962
+ "origin_slot": 0,
1963
+ "target_id": 98,
1964
+ "target_slot": 1,
1965
+ "type": "VAE"
1966
+ },
1967
+ {
1968
+ "id": 172,
1969
+ "origin_id": 98,
1970
+ "origin_slot": 0,
1971
+ "target_id": 99,
1972
+ "target_slot": 0,
1973
+ "type": "IMAGE"
1974
+ },
1975
+ {
1976
+ "id": 175,
1977
+ "origin_id": -10,
1978
+ "origin_slot": 0,
1979
+ "target_id": 97,
1980
+ "target_slot": 0,
1981
+ "type": "IMAGE"
1982
+ },
1983
+ {
1984
+ "id": 179,
1985
+ "origin_id": 99,
1986
+ "origin_slot": 0,
1987
+ "target_id": -20,
1988
+ "target_slot": 0,
1989
+ "type": "IMAGE"
1990
+ },
1991
+ {
1992
+ "id": 182,
1993
+ "origin_id": 99,
1994
+ "origin_slot": 0,
1995
+ "target_id": -20,
1996
+ "target_slot": 0,
1997
+ "type": "IMAGE"
1998
+ },
1999
+ {
2000
+ "id": 183,
2001
+ "origin_id": -10,
2002
+ "origin_slot": 1,
2003
+ "target_id": 89,
2004
+ "target_slot": 0,
2005
+ "type": "COMBO"
2006
+ },
2007
+ {
2008
+ "id": 184,
2009
+ "origin_id": -10,
2010
+ "origin_slot": 2,
2011
+ "target_id": 87,
2012
+ "target_slot": 0,
2013
+ "type": "COMBO"
2014
+ },
2015
+ {
2016
+ "id": 185,
2017
+ "origin_id": -10,
2018
+ "origin_slot": 3,
2019
+ "target_id": 101,
2020
+ "target_slot": 1,
2021
+ "type": "FLOAT"
2022
+ },
2023
+ {
2024
+ "id": 186,
2025
+ "origin_id": -10,
2026
+ "origin_slot": 4,
2027
+ "target_id": 100,
2028
+ "target_slot": 0,
2029
+ "type": "COMBO"
2030
+ }
2031
+ ],
2032
+ "extra": {}
2033
+ }
2034
+ ]
2035
+ },
2036
+ "config": {},
2037
+ "extra": {
2038
+ "ds": {
2039
+ "scale": 1.143471730470958,
2040
+ "offset": [
2041
+ 272.78316057387894,
2042
+ -25.534779791767107
2043
+ ]
2044
+ },
2045
+ "frontendVersion": "1.26.7"
2046
+ },
2047
+ "version": 0.4
2048
+ }
Qwen_Image_Union_Control_Lora.json ADDED
@@ -0,0 +1,1097 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "id": "00000000-0000-0000-0000-000000000000",
3
+ "revision": 0,
4
+ "last_node_id": 82,
5
+ "last_link_id": 46,
6
+ "nodes": [
7
+ {
8
+ "id": 7,
9
+ "type": "CLIPTextEncode",
10
+ "pos": [
11
+ 420,
12
+ 710
13
+ ],
14
+ "size": [
15
+ 400,
16
+ 150
17
+ ],
18
+ "flags": {},
19
+ "order": 9,
20
+ "mode": 0,
21
+ "inputs": [
22
+ {
23
+ "name": "clip",
24
+ "type": "CLIP",
25
+ "link": 25
26
+ }
27
+ ],
28
+ "outputs": [
29
+ {
30
+ "name": "CONDITIONING",
31
+ "type": "CONDITIONING",
32
+ "links": [
33
+ 33
34
+ ]
35
+ }
36
+ ],
37
+ "title": "CLIP Text Encode (Negative Prompt)",
38
+ "properties": {
39
+ "Node name for S&R": "CLIPTextEncode",
40
+ "cnr_id": "comfy-core",
41
+ "ver": "0.3.51"
42
+ },
43
+ "widgets_values": [
44
+ " "
45
+ ],
46
+ "color": "#223",
47
+ "bgcolor": "#335"
48
+ },
49
+ {
50
+ "id": 73,
51
+ "type": "LoadImage",
52
+ "pos": [
53
+ 60,
54
+ 860
55
+ ],
56
+ "size": [
57
+ 274.080078125,
58
+ 314.00006103515625
59
+ ],
60
+ "flags": {},
61
+ "order": 0,
62
+ "mode": 0,
63
+ "inputs": [],
64
+ "outputs": [
65
+ {
66
+ "name": "IMAGE",
67
+ "type": "IMAGE",
68
+ "links": [
69
+ 41
70
+ ]
71
+ },
72
+ {
73
+ "name": "MASK",
74
+ "type": "MASK",
75
+ "links": null
76
+ }
77
+ ],
78
+ "properties": {
79
+ "Node name for S&R": "LoadImage",
80
+ "cnr_id": "comfy-core",
81
+ "ver": "0.3.51"
82
+ },
83
+ "widgets_values": [
84
+ "ComfyUI_00752_.png",
85
+ "image"
86
+ ]
87
+ },
88
+ {
89
+ "id": 74,
90
+ "type": "Canny",
91
+ "pos": [
92
+ 440,
93
+ 1000
94
+ ],
95
+ "size": [
96
+ 350,
97
+ 82
98
+ ],
99
+ "flags": {},
100
+ "order": 11,
101
+ "mode": 0,
102
+ "inputs": [
103
+ {
104
+ "name": "image",
105
+ "type": "IMAGE",
106
+ "link": 42
107
+ }
108
+ ],
109
+ "outputs": [
110
+ {
111
+ "name": "IMAGE",
112
+ "type": "IMAGE",
113
+ "links": [
114
+ 35,
115
+ 38
116
+ ]
117
+ }
118
+ ],
119
+ "properties": {
120
+ "Node name for S&R": "Canny",
121
+ "cnr_id": "comfy-core",
122
+ "ver": "0.3.51"
123
+ },
124
+ "widgets_values": [
125
+ 0.4,
126
+ 0.8
127
+ ]
128
+ },
129
+ {
130
+ "id": 75,
131
+ "type": "PreviewImage",
132
+ "pos": [
133
+ 450,
134
+ 1140
135
+ ],
136
+ "size": [
137
+ 330,
138
+ 290
139
+ ],
140
+ "flags": {},
141
+ "order": 14,
142
+ "mode": 0,
143
+ "inputs": [
144
+ {
145
+ "name": "images",
146
+ "type": "IMAGE",
147
+ "link": 38
148
+ }
149
+ ],
150
+ "outputs": [],
151
+ "properties": {
152
+ "Node name for S&R": "PreviewImage",
153
+ "cnr_id": "comfy-core",
154
+ "ver": "0.3.51"
155
+ },
156
+ "widgets_values": []
157
+ },
158
+ {
159
+ "id": 70,
160
+ "type": "ReferenceLatent",
161
+ "pos": [
162
+ 860,
163
+ 470
164
+ ],
165
+ "size": [
166
+ 197.712890625,
167
+ 46
168
+ ],
169
+ "flags": {},
170
+ "order": 16,
171
+ "mode": 0,
172
+ "inputs": [
173
+ {
174
+ "name": "conditioning",
175
+ "type": "CONDITIONING",
176
+ "link": 31
177
+ },
178
+ {
179
+ "name": "latent",
180
+ "shape": 7,
181
+ "type": "LATENT",
182
+ "link": 32
183
+ }
184
+ ],
185
+ "outputs": [
186
+ {
187
+ "name": "CONDITIONING",
188
+ "type": "CONDITIONING",
189
+ "links": [
190
+ 21
191
+ ]
192
+ }
193
+ ],
194
+ "properties": {
195
+ "Node name for S&R": "ReferenceLatent",
196
+ "cnr_id": "comfy-core",
197
+ "ver": "0.3.51"
198
+ },
199
+ "widgets_values": []
200
+ },
201
+ {
202
+ "id": 71,
203
+ "type": "ReferenceLatent",
204
+ "pos": [
205
+ 850,
206
+ 720
207
+ ],
208
+ "size": [
209
+ 197.712890625,
210
+ 46
211
+ ],
212
+ "flags": {},
213
+ "order": 17,
214
+ "mode": 0,
215
+ "inputs": [
216
+ {
217
+ "name": "conditioning",
218
+ "type": "CONDITIONING",
219
+ "link": 33
220
+ },
221
+ {
222
+ "name": "latent",
223
+ "shape": 7,
224
+ "type": "LATENT",
225
+ "link": 34
226
+ }
227
+ ],
228
+ "outputs": [
229
+ {
230
+ "name": "CONDITIONING",
231
+ "type": "CONDITIONING",
232
+ "links": [
233
+ 22
234
+ ]
235
+ }
236
+ ],
237
+ "properties": {
238
+ "Node name for S&R": "ReferenceLatent",
239
+ "cnr_id": "comfy-core",
240
+ "ver": "0.3.51"
241
+ },
242
+ "widgets_values": []
243
+ },
244
+ {
245
+ "id": 72,
246
+ "type": "VAEEncode",
247
+ "pos": [
248
+ 900,
249
+ 950
250
+ ],
251
+ "size": [
252
+ 140,
253
+ 46
254
+ ],
255
+ "flags": {},
256
+ "order": 13,
257
+ "mode": 0,
258
+ "inputs": [
259
+ {
260
+ "name": "pixels",
261
+ "type": "IMAGE",
262
+ "link": 35
263
+ },
264
+ {
265
+ "name": "vae",
266
+ "type": "VAE",
267
+ "link": 36
268
+ }
269
+ ],
270
+ "outputs": [
271
+ {
272
+ "name": "LATENT",
273
+ "type": "LATENT",
274
+ "links": [
275
+ 32,
276
+ 34,
277
+ 44
278
+ ]
279
+ }
280
+ ],
281
+ "properties": {
282
+ "Node name for S&R": "VAEEncode",
283
+ "cnr_id": "comfy-core",
284
+ "ver": "0.3.51"
285
+ },
286
+ "widgets_values": []
287
+ },
288
+ {
289
+ "id": 79,
290
+ "type": "LoraLoaderModelOnly",
291
+ "pos": [
292
+ 490,
293
+ 210
294
+ ],
295
+ "size": [
296
+ 470,
297
+ 82
298
+ ],
299
+ "flags": {},
300
+ "order": 12,
301
+ "mode": 4,
302
+ "inputs": [
303
+ {
304
+ "name": "model",
305
+ "type": "MODEL",
306
+ "link": 45
307
+ }
308
+ ],
309
+ "outputs": [
310
+ {
311
+ "name": "MODEL",
312
+ "type": "MODEL",
313
+ "links": [
314
+ 46
315
+ ]
316
+ }
317
+ ],
318
+ "properties": {
319
+ "Node name for S&R": "LoraLoaderModelOnly",
320
+ "cnr_id": "comfy-core",
321
+ "ver": "0.3.51",
322
+ "models": [
323
+ {
324
+ "name": "Qwen-Image-Lightning-4steps-V1.0.safetensors",
325
+ "url": "https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-4steps-V1.0.safetensors",
326
+ "directory": "loras"
327
+ }
328
+ ]
329
+ },
330
+ "widgets_values": [
331
+ "Qwen-Image-Lightning-4steps-V1.0.safetensors",
332
+ 1
333
+ ]
334
+ },
335
+ {
336
+ "id": 39,
337
+ "type": "VAELoader",
338
+ "pos": [
339
+ 30,
340
+ 650
341
+ ],
342
+ "size": [
343
+ 330,
344
+ 58
345
+ ],
346
+ "flags": {},
347
+ "order": 1,
348
+ "mode": 0,
349
+ "inputs": [],
350
+ "outputs": [
351
+ {
352
+ "name": "VAE",
353
+ "type": "VAE",
354
+ "links": [
355
+ 27,
356
+ 36
357
+ ]
358
+ }
359
+ ],
360
+ "properties": {
361
+ "Node name for S&R": "VAELoader",
362
+ "cnr_id": "comfy-core",
363
+ "ver": "0.3.51",
364
+ "models": [
365
+ {
366
+ "name": "qwen_image_vae.safetensors",
367
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors",
368
+ "directory": "vae"
369
+ }
370
+ ]
371
+ },
372
+ "widgets_values": [
373
+ "qwen_image_vae.safetensors"
374
+ ]
375
+ },
376
+ {
377
+ "id": 38,
378
+ "type": "CLIPLoader",
379
+ "pos": [
380
+ 30,
381
+ 490
382
+ ],
383
+ "size": [
384
+ 330,
385
+ 110
386
+ ],
387
+ "flags": {},
388
+ "order": 2,
389
+ "mode": 0,
390
+ "inputs": [],
391
+ "outputs": [
392
+ {
393
+ "name": "CLIP",
394
+ "type": "CLIP",
395
+ "links": [
396
+ 24,
397
+ 25
398
+ ]
399
+ }
400
+ ],
401
+ "properties": {
402
+ "Node name for S&R": "CLIPLoader",
403
+ "cnr_id": "comfy-core",
404
+ "ver": "0.3.51",
405
+ "models": [
406
+ {
407
+ "name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
408
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
409
+ "directory": "text_encoders"
410
+ }
411
+ ]
412
+ },
413
+ "widgets_values": [
414
+ "qwen_2.5_vl_7b_fp8_scaled.safetensors",
415
+ "qwen_image",
416
+ "default"
417
+ ]
418
+ },
419
+ {
420
+ "id": 69,
421
+ "type": "LoraLoaderModelOnly",
422
+ "pos": [
423
+ 30,
424
+ 360
425
+ ],
426
+ "size": [
427
+ 330,
428
+ 82
429
+ ],
430
+ "flags": {},
431
+ "order": 10,
432
+ "mode": 0,
433
+ "inputs": [
434
+ {
435
+ "name": "model",
436
+ "type": "MODEL",
437
+ "link": 30
438
+ }
439
+ ],
440
+ "outputs": [
441
+ {
442
+ "name": "MODEL",
443
+ "type": "MODEL",
444
+ "links": [
445
+ 45
446
+ ]
447
+ }
448
+ ],
449
+ "properties": {
450
+ "Node name for S&R": "LoraLoaderModelOnly",
451
+ "cnr_id": "comfy-core",
452
+ "ver": "0.3.51",
453
+ "models": [
454
+ {
455
+ "name": "qwen_image_union_diffsynth_lora.safetensors",
456
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors",
457
+ "directory": "loras"
458
+ }
459
+ ]
460
+ },
461
+ "widgets_values": [
462
+ "qwen_image_union_diffsynth_lora.safetensors",
463
+ 1
464
+ ]
465
+ },
466
+ {
467
+ "id": 37,
468
+ "type": "UNETLoader",
469
+ "pos": [
470
+ 30,
471
+ 220
472
+ ],
473
+ "size": [
474
+ 330,
475
+ 82
476
+ ],
477
+ "flags": {},
478
+ "order": 3,
479
+ "mode": 0,
480
+ "inputs": [],
481
+ "outputs": [
482
+ {
483
+ "name": "MODEL",
484
+ "type": "MODEL",
485
+ "links": [
486
+ 30
487
+ ]
488
+ }
489
+ ],
490
+ "properties": {
491
+ "Node name for S&R": "UNETLoader",
492
+ "cnr_id": "comfy-core",
493
+ "ver": "0.3.51",
494
+ "models": [
495
+ {
496
+ "name": "qwen_image_fp8_e4m3fn.safetensors",
497
+ "url": "https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors",
498
+ "directory": "diffusion_models"
499
+ }
500
+ ]
501
+ },
502
+ "widgets_values": [
503
+ "qwen_image_fp8_e4m3fn.safetensors",
504
+ "default"
505
+ ]
506
+ },
507
+ {
508
+ "id": 77,
509
+ "type": "ImageScaleToTotalPixels",
510
+ "pos": [
511
+ 60,
512
+ 1220
513
+ ],
514
+ "size": [
515
+ 270,
516
+ 82
517
+ ],
518
+ "flags": {},
519
+ "order": 7,
520
+ "mode": 0,
521
+ "inputs": [
522
+ {
523
+ "name": "image",
524
+ "type": "IMAGE",
525
+ "link": 41
526
+ }
527
+ ],
528
+ "outputs": [
529
+ {
530
+ "name": "IMAGE",
531
+ "type": "IMAGE",
532
+ "links": [
533
+ 42
534
+ ]
535
+ }
536
+ ],
537
+ "properties": {
538
+ "Node name for S&R": "ImageScaleToTotalPixels",
539
+ "cnr_id": "comfy-core",
540
+ "ver": "0.3.51"
541
+ },
542
+ "widgets_values": [
543
+ "lanczos",
544
+ 1
545
+ ]
546
+ },
547
+ {
548
+ "id": 82,
549
+ "type": "MarkdownNote",
550
+ "pos": [
551
+ 60,
552
+ 1350
553
+ ],
554
+ "size": [
555
+ 270,
556
+ 120
557
+ ],
558
+ "flags": {},
559
+ "order": 4,
560
+ "mode": 0,
561
+ "inputs": [],
562
+ "outputs": [],
563
+ "title": "About Scale Image to Total Pixels",
564
+ "properties": {},
565
+ "widgets_values": [
566
+ "This node is to avoid poor output results caused by excessively large input image sizes. You can remove it or use **ctrl + B** to bypass it if you don't need it."
567
+ ],
568
+ "color": "#432",
569
+ "bgcolor": "#653"
570
+ },
571
+ {
572
+ "id": 3,
573
+ "type": "KSampler",
574
+ "pos": [
575
+ 1100,
576
+ 280
577
+ ],
578
+ "size": [
579
+ 260,
580
+ 450
581
+ ],
582
+ "flags": {},
583
+ "order": 18,
584
+ "mode": 0,
585
+ "inputs": [
586
+ {
587
+ "name": "model",
588
+ "type": "MODEL",
589
+ "link": 20
590
+ },
591
+ {
592
+ "name": "positive",
593
+ "type": "CONDITIONING",
594
+ "link": 21
595
+ },
596
+ {
597
+ "name": "negative",
598
+ "type": "CONDITIONING",
599
+ "link": 22
600
+ },
601
+ {
602
+ "name": "latent_image",
603
+ "type": "LATENT",
604
+ "link": 44
605
+ }
606
+ ],
607
+ "outputs": [
608
+ {
609
+ "name": "LATENT",
610
+ "type": "LATENT",
611
+ "links": [
612
+ 26
613
+ ]
614
+ }
615
+ ],
616
+ "properties": {
617
+ "Node name for S&R": "KSampler",
618
+ "cnr_id": "comfy-core",
619
+ "ver": "0.3.51"
620
+ },
621
+ "widgets_values": [
622
+ 70741926012422,
623
+ "randomize",
624
+ 20,
625
+ 2.5,
626
+ "euler",
627
+ "simple",
628
+ 1
629
+ ]
630
+ },
631
+ {
632
+ "id": 60,
633
+ "type": "SaveImage",
634
+ "pos": [
635
+ 1400,
636
+ 280
637
+ ],
638
+ "size": [
639
+ 1030,
640
+ 1150
641
+ ],
642
+ "flags": {},
643
+ "order": 20,
644
+ "mode": 0,
645
+ "inputs": [
646
+ {
647
+ "name": "images",
648
+ "type": "IMAGE",
649
+ "link": 28
650
+ }
651
+ ],
652
+ "outputs": [],
653
+ "properties": {
654
+ "cnr_id": "comfy-core",
655
+ "ver": "0.3.51"
656
+ },
657
+ "widgets_values": [
658
+ "ComfyUI"
659
+ ]
660
+ },
661
+ {
662
+ "id": 81,
663
+ "type": "MarkdownNote",
664
+ "pos": [
665
+ 1100,
666
+ 780
667
+ ],
668
+ "size": [
669
+ 260,
670
+ 150
671
+ ],
672
+ "flags": {},
673
+ "order": 5,
674
+ "mode": 0,
675
+ "inputs": [],
676
+ "outputs": [],
677
+ "title": "KSampler settings",
678
+ "properties": {},
679
+ "widgets_values": [
680
+ "You can test and find the best setting by yourself. The following table is for reference.\n\n| model | steps | cfg |\n|---------------------|---------------|---------------|\n| fp8_e4m3fn | 20 | 2.5 |\n| fp8_e4m3fn + 4 steps LoRA | 4 | 1.0 |\n"
681
+ ],
682
+ "color": "#432",
683
+ "bgcolor": "#653"
684
+ },
685
+ {
686
+ "id": 66,
687
+ "type": "ModelSamplingAuraFlow",
688
+ "pos": [
689
+ 1100,
690
+ 170
691
+ ],
692
+ "size": [
693
+ 260,
694
+ 58
695
+ ],
696
+ "flags": {},
697
+ "order": 15,
698
+ "mode": 0,
699
+ "inputs": [
700
+ {
701
+ "name": "model",
702
+ "type": "MODEL",
703
+ "link": 46
704
+ }
705
+ ],
706
+ "outputs": [
707
+ {
708
+ "name": "MODEL",
709
+ "type": "MODEL",
710
+ "links": [
711
+ 20
712
+ ]
713
+ }
714
+ ],
715
+ "properties": {
716
+ "Node name for S&R": "ModelSamplingAuraFlow",
717
+ "cnr_id": "comfy-core",
718
+ "ver": "0.3.51"
719
+ },
720
+ "widgets_values": [
721
+ 3.1
722
+ ]
723
+ },
724
+ {
725
+ "id": 6,
726
+ "type": "CLIPTextEncode",
727
+ "pos": [
728
+ 420,
729
+ 460
730
+ ],
731
+ "size": [
732
+ 400,
733
+ 200
734
+ ],
735
+ "flags": {},
736
+ "order": 8,
737
+ "mode": 0,
738
+ "inputs": [
739
+ {
740
+ "name": "clip",
741
+ "type": "CLIP",
742
+ "link": 24
743
+ }
744
+ ],
745
+ "outputs": [
746
+ {
747
+ "name": "CONDITIONING",
748
+ "type": "CONDITIONING",
749
+ "links": [
750
+ 31
751
+ ]
752
+ }
753
+ ],
754
+ "title": "CLIP Text Encode (Positive Prompt)",
755
+ "properties": {
756
+ "Node name for S&R": "CLIPTextEncode",
757
+ "cnr_id": "comfy-core",
758
+ "ver": "0.3.51"
759
+ },
760
+ "widgets_values": [
761
+ "Extreme close-up shot, realistic digital illustration, close eyes, peaceful,oil painting with thick application, girl with curly hair, large black flower, black nail polish, ring details, soft light and shadow, dark green backdrop, delicate hair texture, smooth skin rendering, fine artistic details, dreamy and elegant atmosphere, dark style, grotesque. White hair, huge black flower behind her (with yellow stamens, green stems and leaves), black turtleneck clothing, green leaves and black flowers around, artistic illustration style, sharp color contrast, mysterious atmosphere, delicate brushstrokes, thick oil painting, thickly applied oil painting, the whole picture is filled with layered flowers, huge, petals spreading, beautiful composition, unexpected angle, layered background. Macro, eyes looking down, thick application, brushstrokes, splatters, mottled, old, extremely romantic, light and shadow, strong contrast, maximalist style, full-frame composition."
762
+ ],
763
+ "color": "#232",
764
+ "bgcolor": "#353"
765
+ },
766
+ {
767
+ "id": 8,
768
+ "type": "VAEDecode",
769
+ "pos": [
770
+ 1400,
771
+ 170
772
+ ],
773
+ "size": [
774
+ 140,
775
+ 46
776
+ ],
777
+ "flags": {},
778
+ "order": 19,
779
+ "mode": 0,
780
+ "inputs": [
781
+ {
782
+ "name": "samples",
783
+ "type": "LATENT",
784
+ "link": 26
785
+ },
786
+ {
787
+ "name": "vae",
788
+ "type": "VAE",
789
+ "link": 27
790
+ }
791
+ ],
792
+ "outputs": [
793
+ {
794
+ "name": "IMAGE",
795
+ "type": "IMAGE",
796
+ "links": [
797
+ 28
798
+ ]
799
+ }
800
+ ],
801
+ "properties": {
802
+ "Node name for S&R": "VAEDecode",
803
+ "cnr_id": "comfy-core",
804
+ "ver": "0.3.51"
805
+ },
806
+ "widgets_values": []
807
+ },
808
+ {
809
+ "id": 80,
810
+ "type": "MarkdownNote",
811
+ "pos": [
812
+ -560,
813
+ 160
814
+ ],
815
+ "size": [
816
+ 540,
817
+ 630
818
+ ],
819
+ "flags": {},
820
+ "order": 6,
821
+ "mode": 0,
822
+ "inputs": [],
823
+ "outputs": [],
824
+ "title": "Model links",
825
+ "properties": {
826
+ "widget_ue_connectable": {}
827
+ },
828
+ "widgets_values": [
829
+ "[Tutorial](https://docs.comfy.org/tutorials/image/qwen/qwen-image) | [教程](https://docs.comfy.org/zh-CN/tutorials/image/qwen/qwen-image)\n\n\n## Model links\n\nYou can find all the models on [Huggingface](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/tree/main) or [Modelscope](https://modelscope.cn/models/Comfy-Org/Qwen-Image_ComfyUI/files)\n\n**Diffusion model**\n\n- [qwen_image_fp8_e4m3fn.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/diffusion_models/qwen_image_fp8_e4m3fn.safetensors)\n\n**LoRA**\n\n- [Qwen-Image-Lightning-8steps-V1.0.safetensors](https://huggingface.co/lightx2v/Qwen-Image-Lightning/resolve/main/Qwen-Image-Lightning-8steps-V1.0.safetensors)\n- [qwen_image_union_diffsynth_lora.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image-DiffSynth-ControlNets/resolve/main/split_files/loras/qwen_image_union_diffsynth_lora.safetensors)\n\n**Text encoder**\n\n- [qwen_2.5_vl_7b_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors)\n\n**VAE**\n\n- [qwen_image_vae.safetensors](https://huggingface.co/Comfy-Org/Qwen-Image_ComfyUI/resolve/main/split_files/vae/qwen_image_vae.safetensors)\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 diffusion_models/\n│ │ ├── qwen_image_fp8_e4m3fn.safetensors\n│ │ └── qwen_image_distill_full_fp8_e4m3fn.safetensors\n│ ├── 📂 loras/\n│ │ ├── qwen_image_union_diffsynth_lora.safetensors\n│ │ └── Qwen-Image-Lightning-8steps-V1.0.safetensors\n│ ├── 📂 vae/\n│ │ └── qwen_image_vae.safetensors\n│ └── 📂 text_encoders/\n│ └── qwen_2.5_vl_7b_fp8_scaled.safetensors\n```\n"
830
+ ],
831
+ "color": "#432",
832
+ "bgcolor": "#653"
833
+ }
834
+ ],
835
+ "links": [
836
+ [
837
+ 20,
838
+ 66,
839
+ 0,
840
+ 3,
841
+ 0,
842
+ "MODEL"
843
+ ],
844
+ [
845
+ 21,
846
+ 70,
847
+ 0,
848
+ 3,
849
+ 1,
850
+ "CONDITIONING"
851
+ ],
852
+ [
853
+ 22,
854
+ 71,
855
+ 0,
856
+ 3,
857
+ 2,
858
+ "CONDITIONING"
859
+ ],
860
+ [
861
+ 24,
862
+ 38,
863
+ 0,
864
+ 6,
865
+ 0,
866
+ "CLIP"
867
+ ],
868
+ [
869
+ 25,
870
+ 38,
871
+ 0,
872
+ 7,
873
+ 0,
874
+ "CLIP"
875
+ ],
876
+ [
877
+ 26,
878
+ 3,
879
+ 0,
880
+ 8,
881
+ 0,
882
+ "LATENT"
883
+ ],
884
+ [
885
+ 27,
886
+ 39,
887
+ 0,
888
+ 8,
889
+ 1,
890
+ "VAE"
891
+ ],
892
+ [
893
+ 28,
894
+ 8,
895
+ 0,
896
+ 60,
897
+ 0,
898
+ "IMAGE"
899
+ ],
900
+ [
901
+ 30,
902
+ 37,
903
+ 0,
904
+ 69,
905
+ 0,
906
+ "MODEL"
907
+ ],
908
+ [
909
+ 31,
910
+ 6,
911
+ 0,
912
+ 70,
913
+ 0,
914
+ "CONDITIONING"
915
+ ],
916
+ [
917
+ 32,
918
+ 72,
919
+ 0,
920
+ 70,
921
+ 1,
922
+ "LATENT"
923
+ ],
924
+ [
925
+ 33,
926
+ 7,
927
+ 0,
928
+ 71,
929
+ 0,
930
+ "CONDITIONING"
931
+ ],
932
+ [
933
+ 34,
934
+ 72,
935
+ 0,
936
+ 71,
937
+ 1,
938
+ "LATENT"
939
+ ],
940
+ [
941
+ 35,
942
+ 74,
943
+ 0,
944
+ 72,
945
+ 0,
946
+ "IMAGE"
947
+ ],
948
+ [
949
+ 36,
950
+ 39,
951
+ 0,
952
+ 72,
953
+ 1,
954
+ "VAE"
955
+ ],
956
+ [
957
+ 38,
958
+ 74,
959
+ 0,
960
+ 75,
961
+ 0,
962
+ "IMAGE"
963
+ ],
964
+ [
965
+ 41,
966
+ 73,
967
+ 0,
968
+ 77,
969
+ 0,
970
+ "IMAGE"
971
+ ],
972
+ [
973
+ 42,
974
+ 77,
975
+ 0,
976
+ 74,
977
+ 0,
978
+ "IMAGE"
979
+ ],
980
+ [
981
+ 44,
982
+ 72,
983
+ 0,
984
+ 3,
985
+ 3,
986
+ "LATENT"
987
+ ],
988
+ [
989
+ 45,
990
+ 69,
991
+ 0,
992
+ 79,
993
+ 0,
994
+ "MODEL"
995
+ ],
996
+ [
997
+ 46,
998
+ 79,
999
+ 0,
1000
+ 66,
1001
+ 0,
1002
+ "MODEL"
1003
+ ]
1004
+ ],
1005
+ "groups": [
1006
+ {
1007
+ "id": 1,
1008
+ "title": "Step 1 - Load models",
1009
+ "bounding": [
1010
+ 10,
1011
+ 130,
1012
+ 370,
1013
+ 620
1014
+ ],
1015
+ "color": "#3f789e",
1016
+ "font_size": 24,
1017
+ "flags": {}
1018
+ },
1019
+ {
1020
+ "id": 2,
1021
+ "title": "Step 2 - Upload reference image",
1022
+ "bounding": [
1023
+ 10,
1024
+ 770,
1025
+ 370,
1026
+ 730
1027
+ ],
1028
+ "color": "#3f789e",
1029
+ "font_size": 24,
1030
+ "flags": {}
1031
+ },
1032
+ {
1033
+ "id": 5,
1034
+ "title": "Conditioning",
1035
+ "bounding": [
1036
+ 400,
1037
+ 330,
1038
+ 680,
1039
+ 570
1040
+ ],
1041
+ "color": "#3f789e",
1042
+ "font_size": 24,
1043
+ "flags": {}
1044
+ },
1045
+ {
1046
+ "id": 3,
1047
+ "title": "Step 3 - Prompt",
1048
+ "bounding": [
1049
+ 410,
1050
+ 390,
1051
+ 420,
1052
+ 490
1053
+ ],
1054
+ "color": "#3f789e",
1055
+ "font_size": 24,
1056
+ "flags": {}
1057
+ },
1058
+ {
1059
+ "id": 4,
1060
+ "title": "Image Processing",
1061
+ "bounding": [
1062
+ 410,
1063
+ 920,
1064
+ 410,
1065
+ 573.5999755859375
1066
+ ],
1067
+ "color": "#3f789e",
1068
+ "font_size": 24,
1069
+ "flags": {}
1070
+ },
1071
+ {
1072
+ "id": 6,
1073
+ "title": "4 steps lighting LoRA",
1074
+ "bounding": [
1075
+ 400,
1076
+ 130,
1077
+ 680,
1078
+ 180
1079
+ ],
1080
+ "color": "#3f789e",
1081
+ "font_size": 24,
1082
+ "flags": {}
1083
+ }
1084
+ ],
1085
+ "config": {},
1086
+ "extra": {
1087
+ "ds": {
1088
+ "scale": 0.7213855104977631,
1089
+ "offset": [
1090
+ 235.0958937828103,
1091
+ -225.58323513433564
1092
+ ]
1093
+ },
1094
+ "frontendVersion": "1.26.6"
1095
+ },
1096
+ "version": 0.4
1097
+ }