stablediffusiontutorials committed on
Commit
3b5946c
·
verified ·
1 Parent(s): 42597b2

Upload Flux.1_Krea_Dev_workflow.json

Browse files
Files changed (1) hide show
  1. Flux.1_Krea_Dev_workflow.json +543 -0
Flux.1_Krea_Dev_workflow.json ADDED
@@ -0,0 +1,543 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "id": "908d0bfb-e192-4627-9b57-147496e6e2dd",
3
+ "revision": 0,
4
+ "last_node_id": 51,
5
+ "last_link_id": 70,
6
+ "nodes": [
7
+ {
8
+ "id": 9,
9
+ "type": "SaveImage",
10
+ "pos": [
11
+ 380,
12
+ 110
13
+ ],
14
+ "size": [
15
+ 640,
16
+ 660
17
+ ],
18
+ "flags": {},
19
+ "order": 9,
20
+ "mode": 0,
21
+ "inputs": [
22
+ {
23
+ "name": "images",
24
+ "type": "IMAGE",
25
+ "link": 9
26
+ }
27
+ ],
28
+ "outputs": [],
29
+ "properties": {
30
+ "cnr_id": "comfy-core",
31
+ "ver": "0.3.40"
32
+ },
33
+ "widgets_values": [
34
+ "flux_krea/flux_krea"
35
+ ]
36
+ },
37
+ {
38
+ "id": 40,
39
+ "type": "DualCLIPLoader",
40
+ "pos": [
41
+ -320,
42
+ 290
43
+ ],
44
+ "size": [
45
+ 270,
46
+ 130
47
+ ],
48
+ "flags": {},
49
+ "order": 0,
50
+ "mode": 0,
51
+ "inputs": [],
52
+ "outputs": [
53
+ {
54
+ "name": "CLIP",
55
+ "type": "CLIP",
56
+ "links": [
57
+ 64
58
+ ]
59
+ }
60
+ ],
61
+ "properties": {
62
+ "Node name for S&R": "DualCLIPLoader",
63
+ "cnr_id": "comfy-core",
64
+ "ver": "0.3.40",
65
+ "models": [
66
+ {
67
+ "name": "clip_l.safetensors",
68
+ "url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors",
69
+ "directory": "text_encoders"
70
+ },
71
+ {
72
+ "name": "t5xxl_fp16.safetensors",
73
+ "url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors",
74
+ "directory": "text_encoders"
75
+ }
76
+ ]
77
+ },
78
+ "widgets_values": [
79
+ "clip_l.safetensors",
80
+ "t5xxl_fp16.safetensors",
81
+ "flux",
82
+ "default"
83
+ ]
84
+ },
85
+ {
86
+ "id": 39,
87
+ "type": "VAELoader",
88
+ "pos": [
89
+ -320,
90
+ 470
91
+ ],
92
+ "size": [
93
+ 270,
94
+ 58
95
+ ],
96
+ "flags": {},
97
+ "order": 1,
98
+ "mode": 0,
99
+ "inputs": [],
100
+ "outputs": [
101
+ {
102
+ "name": "VAE",
103
+ "type": "VAE",
104
+ "links": [
105
+ 58
106
+ ]
107
+ }
108
+ ],
109
+ "properties": {
110
+ "Node name for S&R": "VAELoader",
111
+ "cnr_id": "comfy-core",
112
+ "ver": "0.3.40",
113
+ "models": [
114
+ {
115
+ "name": "ae.safetensors",
116
+ "url": "https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors",
117
+ "directory": "vae"
118
+ }
119
+ ]
120
+ },
121
+ "widgets_values": [
122
+ "ae.safetensors"
123
+ ]
124
+ },
125
+ {
126
+ "id": 42,
127
+ "type": "ConditioningZeroOut",
128
+ "pos": [
129
+ -10,
130
+ 460
131
+ ],
132
+ "size": [
133
+ 200,
134
+ 30
135
+ ],
136
+ "flags": {
137
+ "collapsed": false
138
+ },
139
+ "order": 6,
140
+ "mode": 0,
141
+ "inputs": [
142
+ {
143
+ "name": "conditioning",
144
+ "type": "CONDITIONING",
145
+ "link": 66
146
+ }
147
+ ],
148
+ "outputs": [
149
+ {
150
+ "name": "CONDITIONING",
151
+ "type": "CONDITIONING",
152
+ "links": [
153
+ 63
154
+ ]
155
+ }
156
+ ],
157
+ "properties": {
158
+ "Node name for S&R": "ConditioningZeroOut",
159
+ "cnr_id": "comfy-core",
160
+ "ver": "0.3.40"
161
+ },
162
+ "widgets_values": []
163
+ },
164
+ {
165
+ "id": 8,
166
+ "type": "VAEDecode",
167
+ "pos": [
168
+ 230,
169
+ 470
170
+ ],
171
+ "size": [
172
+ 210,
173
+ 46
174
+ ],
175
+ "flags": {
176
+ "collapsed": true
177
+ },
178
+ "order": 8,
179
+ "mode": 0,
180
+ "inputs": [
181
+ {
182
+ "name": "samples",
183
+ "type": "LATENT",
184
+ "link": 52
185
+ },
186
+ {
187
+ "name": "vae",
188
+ "type": "VAE",
189
+ "link": 58
190
+ }
191
+ ],
192
+ "outputs": [
193
+ {
194
+ "name": "IMAGE",
195
+ "type": "IMAGE",
196
+ "slot_index": 0,
197
+ "links": [
198
+ 9
199
+ ]
200
+ }
201
+ ],
202
+ "properties": {
203
+ "Node name for S&R": "VAEDecode",
204
+ "cnr_id": "comfy-core",
205
+ "ver": "0.3.40"
206
+ },
207
+ "widgets_values": []
208
+ },
209
+ {
210
+ "id": 27,
211
+ "type": "EmptySD3LatentImage",
212
+ "pos": [
213
+ -320,
214
+ 630
215
+ ],
216
+ "size": [
217
+ 270,
218
+ 120
219
+ ],
220
+ "flags": {},
221
+ "order": 2,
222
+ "mode": 0,
223
+ "inputs": [],
224
+ "outputs": [
225
+ {
226
+ "name": "LATENT",
227
+ "type": "LATENT",
228
+ "slot_index": 0,
229
+ "links": [
230
+ 51
231
+ ]
232
+ }
233
+ ],
234
+ "properties": {
235
+ "Node name for S&R": "EmptySD3LatentImage",
236
+ "cnr_id": "comfy-core",
237
+ "ver": "0.3.40"
238
+ },
239
+ "widgets_values": [
240
+ 1024,
241
+ 1024,
242
+ 1
243
+ ]
244
+ },
245
+ {
246
+ "id": 45,
247
+ "type": "CLIPTextEncode",
248
+ "pos": [
249
+ 10,
250
+ 170
251
+ ],
252
+ "size": [
253
+ 330,
254
+ 210
255
+ ],
256
+ "flags": {},
257
+ "order": 5,
258
+ "mode": 0,
259
+ "inputs": [
260
+ {
261
+ "name": "clip",
262
+ "type": "CLIP",
263
+ "link": 64
264
+ }
265
+ ],
266
+ "outputs": [
267
+ {
268
+ "name": "CONDITIONING",
269
+ "type": "CONDITIONING",
270
+ "links": [
271
+ 65,
272
+ 66
273
+ ]
274
+ }
275
+ ],
276
+ "properties": {
277
+ "Node name for S&R": "CLIPTextEncode",
278
+ "cnr_id": "comfy-core",
279
+ "ver": "0.3.47"
280
+ },
281
+ "widgets_values": [
282
+ "Highly realistic portrait of a Nordic woman with blonde hair and blue eyes, very few freckles on her face, gaze sharp and intellectual. The lighting should reflect the unique coolness of Northern Europe. Outfit is minimalist and modern, background is blurred in cool tones. Needs to perfectly capture the characteristics of a Scandinavian woman. solo, Centered composition\n"
283
+ ]
284
+ },
285
+ {
286
+ "id": 31,
287
+ "type": "KSampler",
288
+ "pos": [
289
+ 10,
290
+ 550
291
+ ],
292
+ "size": [
293
+ 315,
294
+ 262
295
+ ],
296
+ "flags": {},
297
+ "order": 7,
298
+ "mode": 0,
299
+ "inputs": [
300
+ {
301
+ "name": "model",
302
+ "type": "MODEL",
303
+ "link": 61
304
+ },
305
+ {
306
+ "name": "positive",
307
+ "type": "CONDITIONING",
308
+ "link": 65
309
+ },
310
+ {
311
+ "name": "negative",
312
+ "type": "CONDITIONING",
313
+ "link": 63
314
+ },
315
+ {
316
+ "name": "latent_image",
317
+ "type": "LATENT",
318
+ "link": 51
319
+ }
320
+ ],
321
+ "outputs": [
322
+ {
323
+ "name": "LATENT",
324
+ "type": "LATENT",
325
+ "slot_index": 0,
326
+ "links": [
327
+ 52
328
+ ]
329
+ }
330
+ ],
331
+ "properties": {
332
+ "Node name for S&R": "KSampler",
333
+ "cnr_id": "comfy-core",
334
+ "ver": "0.3.40"
335
+ },
336
+ "widgets_values": [
337
+ 277251746703202,
338
+ "randomize",
339
+ 20,
340
+ 1,
341
+ "euler",
342
+ "simple",
343
+ 1
344
+ ]
345
+ },
346
+ {
347
+ "id": 38,
348
+ "type": "UNETLoader",
349
+ "pos": [
350
+ -320,
351
+ 150
352
+ ],
353
+ "size": [
354
+ 270,
355
+ 82
356
+ ],
357
+ "flags": {},
358
+ "order": 3,
359
+ "mode": 0,
360
+ "inputs": [],
361
+ "outputs": [
362
+ {
363
+ "name": "MODEL",
364
+ "type": "MODEL",
365
+ "links": [
366
+ 61
367
+ ]
368
+ }
369
+ ],
370
+ "properties": {
371
+ "Node name for S&R": "UNETLoader",
372
+ "cnr_id": "comfy-core",
373
+ "ver": "0.3.40",
374
+ "models": [
375
+ {
376
+ "name": "flux1-krea-dev_fp8_scaled.safetensors",
377
+ "url": "https://huggingface.co/Comfy-Org/FLUX.1-Krea-dev_ComfyUI/resolve/main/split_files/diffusion_models/flux1-krea-dev_fp8_scaled.safetensors",
378
+ "directory": "diffusion_models"
379
+ }
380
+ ]
381
+ },
382
+ "widgets_values": [
383
+ "flux1-krea-dev_fp8_scaled.safetensors",
384
+ "default"
385
+ ]
386
+ },
387
+ {
388
+ "id": 43,
389
+ "type": "MarkdownNote",
390
+ "pos": [
391
+ -870,
392
+ 110
393
+ ],
394
+ "size": [
395
+ 520,
396
+ 390
397
+ ],
398
+ "flags": {},
399
+ "order": 4,
400
+ "mode": 0,
401
+ "inputs": [],
402
+ "outputs": [],
403
+ "title": "Model links",
404
+ "properties": {},
405
+ "widgets_values": [
406
+ "## Model links\n\n**Diffusion Model**\n\n- [flux1-krea-dev_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/FLUX.1-Krea-dev_ComfyUI/resolve/main/split_files/diffusion_models/flux1-krea-dev_fp8_scaled.safetensors)\n\nIf you need the original weights, head to [black-forest-labs/FLUX.1-Krea-dev](https://huggingface.co/black-forest-labs/FLUX.1-Krea-dev/), accept the agreement in the repo, then click the link below to download the models:\n\n- [flux1-krea-dev.safetensors](https://huggingface.co/black-forest-labs/FLUX.1-Krea-dev/resolve/main/flux1-krea-dev.safetensors)\n\n**Text Encoder**\n\n- [clip_l.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors)\n\n- [t5xxl_fp16.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors) or [t5xxl_fp8_e4m3fn_scaled.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp8_e4m3fn_scaled.safetensors)\n\n**VAE**\n\n- [ae.safetensors](https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors)\n\n\n```\nComfyUI/\n├── models/\n│ ├── diffusion_models/\n│ │ └─── flux1-krea-dev_fp8_scaled.safetensors\n│ ├── text_encoders/\n│ │ ├── clip_l.safetensors\n│ │ └─── t5xxl_fp16.safetensors # or t5xxl_fp8_e4m3fn_scaled.safetensors\n│ └── vae/\n│ └── ae.safetensors\n```\n"
407
+ ],
408
+ "color": "#432",
409
+ "bgcolor": "#653"
410
+ }
411
+ ],
412
+ "links": [
413
+ [
414
+ 9,
415
+ 8,
416
+ 0,
417
+ 9,
418
+ 0,
419
+ "IMAGE"
420
+ ],
421
+ [
422
+ 51,
423
+ 27,
424
+ 0,
425
+ 31,
426
+ 3,
427
+ "LATENT"
428
+ ],
429
+ [
430
+ 52,
431
+ 31,
432
+ 0,
433
+ 8,
434
+ 0,
435
+ "LATENT"
436
+ ],
437
+ [
438
+ 58,
439
+ 39,
440
+ 0,
441
+ 8,
442
+ 1,
443
+ "VAE"
444
+ ],
445
+ [
446
+ 61,
447
+ 38,
448
+ 0,
449
+ 31,
450
+ 0,
451
+ "MODEL"
452
+ ],
453
+ [
454
+ 63,
455
+ 42,
456
+ 0,
457
+ 31,
458
+ 2,
459
+ "CONDITIONING"
460
+ ],
461
+ [
462
+ 64,
463
+ 40,
464
+ 0,
465
+ 45,
466
+ 0,
467
+ "CLIP"
468
+ ],
469
+ [
470
+ 65,
471
+ 45,
472
+ 0,
473
+ 31,
474
+ 1,
475
+ "CONDITIONING"
476
+ ],
477
+ [
478
+ 66,
479
+ 45,
480
+ 0,
481
+ 42,
482
+ 0,
483
+ "CONDITIONING"
484
+ ]
485
+ ],
486
+ "groups": [
487
+ {
488
+ "id": 1,
489
+ "title": "Step 1 - Load Models Here",
490
+ "bounding": [
491
+ -330,
492
+ 80,
493
+ 300,
494
+ 460
495
+ ],
496
+ "color": "#3f789e",
497
+ "font_size": 24,
498
+ "flags": {}
499
+ },
500
+ {
501
+ "id": 2,
502
+ "title": "Step 2 - Image Size",
503
+ "bounding": [
504
+ -330,
505
+ 560,
506
+ 300,
507
+ 200
508
+ ],
509
+ "color": "#3f789e",
510
+ "font_size": 24,
511
+ "flags": {}
512
+ },
513
+ {
514
+ "id": 3,
515
+ "title": "Step 3 - Prompt",
516
+ "bounding": [
517
+ -10,
518
+ 80,
519
+ 360,
520
+ 333.6000061035156
521
+ ],
522
+ "color": "#3f789e",
523
+ "font_size": 24,
524
+ "flags": {}
525
+ }
526
+ ],
527
+ "config": {},
528
+ "extra": {
529
+ "ds": {
530
+ "scale": 0.6534031413612565,
531
+ "offset": [
532
+ 1016.3535547042429,
533
+ 202.5185688696577
534
+ ]
535
+ },
536
+ "frontendVersion": "1.25.3",
537
+ "VHS_latentpreview": false,
538
+ "VHS_latentpreviewrate": 0,
539
+ "VHS_MetadataImage": true,
540
+ "VHS_KeepIntermediate": true
541
+ },
542
+ "version": 0.4
543
+ }