SudoInstallAI commited on
Commit
bd8f612
·
verified ·
1 Parent(s): 909084f

Upload ChronoEdit_GGUF.json

Browse files
Files changed (1) hide show
  1. ChronoEdit_GGUF.json +1151 -0
ChronoEdit_GGUF.json ADDED
@@ -0,0 +1,1151 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "id": "1805b8e4-0356-4384-adec-44f12a18f32e",
3
+ "revision": 0,
4
+ "last_node_id": 93,
5
+ "last_link_id": 163,
6
+ "nodes": [
7
+ {
8
+ "id": 58,
9
+ "type": "ScaleROPE",
10
+ "pos": [
11
+ 570,
12
+ -150
13
+ ],
14
+ "size": [
15
+ 320,
16
+ 178
17
+ ],
18
+ "flags": {},
19
+ "order": 17,
20
+ "mode": 0,
21
+ "inputs": [
22
+ {
23
+ "name": "model",
24
+ "type": "MODEL",
25
+ "link": 116
26
+ }
27
+ ],
28
+ "outputs": [
29
+ {
30
+ "name": "MODEL",
31
+ "type": "MODEL",
32
+ "links": [
33
+ 117
34
+ ]
35
+ }
36
+ ],
37
+ "properties": {
38
+ "cnr_id": "comfy-core",
39
+ "ver": "0.3.67",
40
+ "Node name for S&R": "ScaleROPE"
41
+ },
42
+ "widgets_values": [
43
+ 1,
44
+ 0,
45
+ 1,
46
+ 0,
47
+ 7,
48
+ 0
49
+ ]
50
+ },
51
+ {
52
+ "id": 8,
53
+ "type": "VAEDecode",
54
+ "pos": [
55
+ 570,
56
+ 600
57
+ ],
58
+ "size": [
59
+ 320,
60
+ 46
61
+ ],
62
+ "flags": {},
63
+ "order": 19,
64
+ "mode": 0,
65
+ "inputs": [
66
+ {
67
+ "name": "samples",
68
+ "type": "LATENT",
69
+ "link": 35
70
+ },
71
+ {
72
+ "name": "vae",
73
+ "type": "VAE",
74
+ "link": 76
75
+ }
76
+ ],
77
+ "outputs": [
78
+ {
79
+ "name": "IMAGE",
80
+ "type": "IMAGE",
81
+ "slot_index": 0,
82
+ "links": [
83
+ 118
84
+ ]
85
+ }
86
+ ],
87
+ "properties": {
88
+ "cnr_id": "comfy-core",
89
+ "ver": "0.3.67",
90
+ "Node name for S&R": "VAEDecode"
91
+ },
92
+ "widgets_values": []
93
+ },
94
+ {
95
+ "id": 59,
96
+ "type": "ImageFromBatch",
97
+ "pos": [
98
+ 920,
99
+ -250
100
+ ],
101
+ "size": [
102
+ 270,
103
+ 82
104
+ ],
105
+ "flags": {},
106
+ "order": 20,
107
+ "mode": 0,
108
+ "inputs": [
109
+ {
110
+ "name": "image",
111
+ "type": "IMAGE",
112
+ "link": 118
113
+ }
114
+ ],
115
+ "outputs": [
116
+ {
117
+ "name": "IMAGE",
118
+ "type": "IMAGE",
119
+ "links": [
120
+ 119
121
+ ]
122
+ }
123
+ ],
124
+ "properties": {
125
+ "cnr_id": "comfy-core",
126
+ "ver": "0.3.67",
127
+ "Node name for S&R": "ImageFromBatch"
128
+ },
129
+ "widgets_values": [
130
+ 4,
131
+ 1
132
+ ]
133
+ },
134
+ {
135
+ "id": 50,
136
+ "type": "WanImageToVideo",
137
+ "pos": [
138
+ 150,
139
+ 290
140
+ ],
141
+ "size": [
142
+ 342.5999755859375,
143
+ 210
144
+ ],
145
+ "flags": {},
146
+ "order": 15,
147
+ "mode": 0,
148
+ "inputs": [
149
+ {
150
+ "name": "positive",
151
+ "type": "CONDITIONING",
152
+ "link": 97
153
+ },
154
+ {
155
+ "name": "negative",
156
+ "type": "CONDITIONING",
157
+ "link": 98
158
+ },
159
+ {
160
+ "name": "vae",
161
+ "type": "VAE",
162
+ "link": 99
163
+ },
164
+ {
165
+ "name": "clip_vision_output",
166
+ "shape": 7,
167
+ "type": "CLIP_VISION_OUTPUT",
168
+ "link": 120
169
+ },
170
+ {
171
+ "name": "start_image",
172
+ "shape": 7,
173
+ "type": "IMAGE",
174
+ "link": 160
175
+ }
176
+ ],
177
+ "outputs": [
178
+ {
179
+ "name": "positive",
180
+ "type": "CONDITIONING",
181
+ "slot_index": 0,
182
+ "links": [
183
+ 101
184
+ ]
185
+ },
186
+ {
187
+ "name": "negative",
188
+ "type": "CONDITIONING",
189
+ "slot_index": 1,
190
+ "links": [
191
+ 102
192
+ ]
193
+ },
194
+ {
195
+ "name": "latent",
196
+ "type": "LATENT",
197
+ "slot_index": 2,
198
+ "links": [
199
+ 103
200
+ ]
201
+ }
202
+ ],
203
+ "properties": {
204
+ "cnr_id": "comfy-core",
205
+ "ver": "0.3.67",
206
+ "Node name for S&R": "WanImageToVideo"
207
+ },
208
+ "widgets_values": [
209
+ 1024,
210
+ 1024,
211
+ 5,
212
+ 1
213
+ ]
214
+ },
215
+ {
216
+ "id": 7,
217
+ "type": "CLIPTextEncode",
218
+ "pos": [
219
+ 110,
220
+ 10
221
+ ],
222
+ "size": [
223
+ 425.27801513671875,
224
+ 180.6060791015625
225
+ ],
226
+ "flags": {},
227
+ "order": 10,
228
+ "mode": 0,
229
+ "inputs": [
230
+ {
231
+ "name": "clip",
232
+ "type": "CLIP",
233
+ "link": 75
234
+ }
235
+ ],
236
+ "outputs": [
237
+ {
238
+ "name": "CONDITIONING",
239
+ "type": "CONDITIONING",
240
+ "slot_index": 0,
241
+ "links": [
242
+ 98
243
+ ]
244
+ }
245
+ ],
246
+ "title": "CLIP Text Encode (Negative Prompt)",
247
+ "properties": {
248
+ "cnr_id": "comfy-core",
249
+ "ver": "0.3.67",
250
+ "Node name for S&R": "CLIPTextEncode"
251
+ },
252
+ "widgets_values": [
253
 + "色调艳丽,过曝,静态,细节模糊不清,字幕,风格,作品,画作,画面,静止,整体发灰,最差质量,低质量,JPEG压缩残留,丑陋的,残缺的,多余的手指,画得不好的手部,画得不好的脸部,畸形的,毁容的,形态畸形的肢体,手指融合,静止不动的画面,杂乱的背景,三条腿,背景人很多,倒着走"
254
+ ],
255
+ "color": "#223",
256
+ "bgcolor": "#335"
257
+ },
258
+ {
259
+ "id": 60,
260
+ "type": "SaveImage",
261
+ "pos": [
262
+ 930,
263
+ -120
264
+ ],
265
+ "size": [
266
+ 730,
267
+ 760
268
+ ],
269
+ "flags": {},
270
+ "order": 21,
271
+ "mode": 0,
272
+ "inputs": [
273
+ {
274
+ "name": "images",
275
+ "type": "IMAGE",
276
+ "link": 119
277
+ }
278
+ ],
279
+ "outputs": [],
280
+ "properties": {
281
+ "cnr_id": "comfy-core",
282
+ "ver": "0.3.67"
283
+ },
284
+ "widgets_values": [
285
+ "Chrono_Edit_14B"
286
+ ]
287
+ },
288
+ {
289
+ "id": 87,
290
+ "type": "MarkdownNote",
291
+ "pos": [
292
+ 570,
293
+ 690
294
+ ],
295
+ "size": [
296
+ 340,
297
+ 200
298
+ ],
299
+ "flags": {},
300
+ "order": 0,
301
+ "mode": 0,
302
+ "inputs": [],
303
+ "outputs": [],
304
+ "title": "Settings",
305
+ "properties": {},
306
+ "widgets_values": [
307
+ "The distilled LoRA can speed up this workflow, but it will also sacrifice some of the final output quality.\n\nIf you enable the distilled LoRA, don't forget to change the KSampler settings.\n\n| Parameters | original | With distill LoRA enable |\n|----------------|---------------------|------------------------|\n| ModelSamplingSD3 - shift | 5.00 | 2.0 |\n| KSampler - steps | 20 | 8 |\n| KSampler - cfg | 4.0 | 1.0 |"
308
+ ],
309
+ "color": "#432",
310
+ "bgcolor": "#653"
311
+ },
312
+ {
313
+ "id": 64,
314
+ "type": "MarkdownNote",
315
+ "pos": [
316
+ -850,
317
+ -60
318
+ ],
319
+ "size": [
320
+ 490,
321
+ 540
322
+ ],
323
+ "flags": {},
324
+ "order": 1,
325
+ "mode": 0,
326
+ "inputs": [],
327
+ "outputs": [],
328
+ "title": "Model links (for local users)",
329
+ "properties": {},
330
+ "widgets_values": [
331
+ "\n## Model links\n\n**text_encoders**\n\n- [umt5_xxl_fp8_e4m3fn_scaled.safetensors](https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors)\n\n**clip_vision**\n\n- [clip_vision_h.safetensors](https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/clip_vision/clip_vision_h.safetensors)\n\n**loras**\n\n- [chronoedit_distill_lora.safetensors](https://huggingface.co/nvidia/ChronoEdit-14B-Diffusers/resolve/main/lora/chronoedit_distill_lora.safetensors)\n\n**diffusion_models**\n\n- [chrono_edit_14B_fp16.safetensors](https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/chrono_edit_14B_fp16.safetensors)\n\n**vae**\n\n- [wan_2.1_vae.safetensors](https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors)\n\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 text_encoders/\n│ │ └── umt5_xxl_fp8_e4m3fn_scaled.safetensors\n│ ├── 📂 clip_vision/\n│ │ └── clip_vision_h.safetensors\n│ ├── 📂 loras/\n│ │ └── chronoedit_distill_lora.safetensors\n│ ├── 📂 diffusion_models/\n│ │ └── chrono_edit_14B_fp16.safetensors\n│ └── 📂 vae/\n│ └── wan_2.1_vae.safetensors\n```\n\n## Report issue\n\nIf you have any problems while using this workflow, please report template-related issues via this link: [report the template issue here](https://github.com/Comfy-Org/workflow_templates/issues)."
332
+ ],
333
+ "color": "#432",
334
+ "bgcolor": "#653"
335
+ },
336
+ {
337
+ "id": 89,
338
+ "type": "ImageScaleToMaxDimension",
339
+ "pos": [
340
+ -290,
341
+ 1080
342
+ ],
343
+ "size": [
344
+ 330,
345
+ 90
346
+ ],
347
+ "flags": {},
348
+ "order": 11,
349
+ "mode": 4,
350
+ "inputs": [
351
+ {
352
+ "name": "image",
353
+ "type": "IMAGE",
354
+ "link": 159
355
+ }
356
+ ],
357
+ "outputs": [
358
+ {
359
+ "name": "IMAGE",
360
+ "type": "IMAGE",
361
+ "links": [
362
+ 160,
363
+ 161
364
+ ]
365
+ }
366
+ ],
367
+ "properties": {
368
+ "cnr_id": "comfy-core",
369
+ "ver": "0.3.67",
370
+ "Node name for S&R": "ImageScaleToMaxDimension"
371
+ },
372
+ "widgets_values": [
373
+ "area",
374
+ 1280
375
+ ]
376
+ },
377
+ {
378
+ "id": 91,
379
+ "type": "MarkdownNote",
380
+ "pos": [
381
+ -310,
382
+ 1250
383
+ ],
384
+ "size": [
385
+ 390,
386
+ 130
387
+ ],
388
+ "flags": {},
389
+ "order": 2,
390
+ "mode": 0,
391
+ "inputs": [],
392
+ "outputs": [],
393
+ "title": "Note: Image size",
394
+ "properties": {},
395
+ "widgets_values": [
396
 + "This model is fine-tuned from Wan2.1-I2V-14B 720P (1280x720). So, please don't upload images whose size is too large; that might take up a very large amount of VRAM or lead to bad results.\n\nYou can use `ImageScaleToMaxDimension` to scale it down."
397
+ ],
398
+ "color": "#432",
399
+ "bgcolor": "#653"
400
+ },
401
+ {
402
+ "id": 88,
403
+ "type": "MarkdownNote",
404
+ "pos": [
405
+ -850,
406
+ -250
407
+ ],
408
+ "size": [
409
+ 490,
410
+ 140
411
+ ],
412
+ "flags": {},
413
+ "order": 3,
414
+ "mode": 0,
415
+ "inputs": [],
416
+ "outputs": [],
417
+ "title": "About ChronoEdit 14B",
418
+ "properties": {},
419
+ "widgets_values": [
420
+ "[ChronoEdit-14B](https://huggingface.co/nvidia/ChronoEdit-14B-Diffusers) is finetuned from the pretrain model of Wan2.1-I2V-14B 720P\n\n[ChronoEdit](https://research.nvidia.com/labs/toronto-ai/chronoedit/), a framework developed by teams from NVIDIA and the University of Toronto, reframes image editing as a two-frame video generation task. It leverages the temporal priors of pretrained video generative models and incorporates a temporal reasoning mechanism to achieve editing results with both visual fidelity and physical consistency. Additionally, it comes with the PBench-Edit benchmark for evaluating physical consistency, making it suitable for scenarios like world simulation that require strict adherence to physical laws."
421
+ ],
422
+ "color": "#432",
423
+ "bgcolor": "#653"
424
+ },
425
+ {
426
+ "id": 51,
427
+ "type": "CLIPVisionEncode",
428
+ "pos": [
429
+ 150,
430
+ 580
431
+ ],
432
+ "size": [
433
+ 340,
434
+ 78
435
+ ],
436
+ "flags": {},
437
+ "order": 13,
438
+ "mode": 0,
439
+ "inputs": [
440
+ {
441
+ "name": "clip_vision",
442
+ "type": "CLIP_VISION",
443
+ "link": 94
444
+ },
445
+ {
446
+ "name": "image",
447
+ "type": "IMAGE",
448
+ "link": 161
449
+ }
450
+ ],
451
+ "outputs": [
452
+ {
453
+ "name": "CLIP_VISION_OUTPUT",
454
+ "type": "CLIP_VISION_OUTPUT",
455
+ "slot_index": 0,
456
+ "links": [
457
+ 120
458
+ ]
459
+ }
460
+ ],
461
+ "properties": {
462
+ "cnr_id": "comfy-core",
463
+ "ver": "0.3.67",
464
+ "Node name for S&R": "CLIPVisionEncode"
465
+ },
466
+ "widgets_values": [
467
+ "none"
468
+ ]
469
+ },
470
+ {
471
+ "id": 49,
472
+ "type": "CLIPVisionLoader",
473
+ "pos": [
474
+ -319.99999999999994,
475
+ 403.13716389237646
476
+ ],
477
+ "size": [
478
+ 390,
479
+ 58
480
+ ],
481
+ "flags": {},
482
+ "order": 4,
483
+ "mode": 0,
484
+ "inputs": [],
485
+ "outputs": [
486
+ {
487
+ "name": "CLIP_VISION",
488
+ "type": "CLIP_VISION",
489
+ "slot_index": 0,
490
+ "links": [
491
+ 94
492
+ ]
493
+ }
494
+ ],
495
+ "properties": {
496
+ "cnr_id": "comfy-core",
497
+ "ver": "0.3.67",
498
+ "Node name for S&R": "CLIPVisionLoader",
499
+ "models": [
500
+ {
501
+ "name": "clip_vision_h.safetensors",
502
+ "url": "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/clip_vision/clip_vision_h.safetensors",
503
+ "directory": "clip_vision"
504
+ }
505
+ ]
506
+ },
507
+ "widgets_values": [
508
 + "clip-vision_vit-h.safetensors"
509
+ ]
510
+ },
511
+ {
512
+ "id": 39,
513
+ "type": "VAELoader",
514
+ "pos": [
515
+ -320.9467761112108,
516
+ 304.0839400035871
517
+ ],
518
+ "size": [
519
+ 390,
520
+ 58
521
+ ],
522
+ "flags": {},
523
+ "order": 5,
524
+ "mode": 0,
525
+ "inputs": [],
526
+ "outputs": [
527
+ {
528
+ "name": "VAE",
529
+ "type": "VAE",
530
+ "slot_index": 0,
531
+ "links": [
532
+ 76,
533
+ 99
534
+ ]
535
+ }
536
+ ],
537
+ "properties": {
538
+ "cnr_id": "comfy-core",
539
+ "ver": "0.3.67",
540
+ "Node name for S&R": "VAELoader",
541
+ "models": [
542
+ {
543
+ "name": "wan_2.1_vae.safetensors",
544
+ "url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors",
545
+ "directory": "vae"
546
+ }
547
+ ]
548
+ },
549
+ "widgets_values": [
550
+ "wan_2.1_vae.safetensors"
551
+ ]
552
+ },
553
+ {
554
+ "id": 38,
555
+ "type": "CLIPLoader",
556
+ "pos": [
557
+ -320.9467761112108,
558
+ 153.13716389237678
559
+ ],
560
+ "size": [
561
+ 390,
562
+ 106
563
+ ],
564
+ "flags": {},
565
+ "order": 6,
566
+ "mode": 0,
567
+ "inputs": [],
568
+ "outputs": [
569
+ {
570
+ "name": "CLIP",
571
+ "type": "CLIP",
572
+ "slot_index": 0,
573
+ "links": [
574
+ 74,
575
+ 75
576
+ ]
577
+ }
578
+ ],
579
+ "properties": {
580
+ "cnr_id": "comfy-core",
581
+ "ver": "0.3.67",
582
+ "Node name for S&R": "CLIPLoader",
583
+ "models": [
584
+ {
585
+ "name": "umt5_xxl_fp8_e4m3fn_scaled.safetensors",
586
+ "url": "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors",
587
+ "directory": "text_encoders"
588
+ }
589
+ ]
590
+ },
591
+ "widgets_values": [
592
+ "umt5_xxl_fp8_e4m3fn_scaled.safetensors",
593
+ "wan",
594
+ "default"
595
+ ]
596
+ },
597
+ {
598
+ "id": 93,
599
+ "type": "LoraLoaderModelOnly",
600
+ "pos": [
601
+ -316.1319726630361,
602
+ -100.36046334450923
603
+ ],
604
+ "size": [
605
+ 355.2098500089686,
606
+ 82
607
+ ],
608
+ "flags": {},
609
+ "order": 12,
610
+ "mode": 0,
611
+ "inputs": [
612
+ {
613
+ "name": "model",
614
+ "type": "MODEL",
615
+ "link": 162
616
+ }
617
+ ],
618
+ "outputs": [
619
+ {
620
+ "name": "MODEL",
621
+ "type": "MODEL",
622
+ "links": [
623
+ 163
624
+ ]
625
+ }
626
+ ],
627
+ "properties": {
628
+ "cnr_id": "comfy-core",
629
+ "ver": "0.3.68",
630
+ "Node name for S&R": "LoraLoaderModelOnly"
631
+ },
632
+ "widgets_values": [
633
+ "chronoedit_distill_lora.safetensors",
634
+ 1
635
+ ]
636
+ },
637
+ {
638
+ "id": 3,
639
+ "type": "KSampler",
640
+ "pos": [
641
+ 570,
642
+ 70
643
+ ],
644
+ "size": [
645
+ 320,
646
+ 480
647
+ ],
648
+ "flags": {},
649
+ "order": 18,
650
+ "mode": 0,
651
+ "inputs": [
652
+ {
653
+ "name": "model",
654
+ "type": "MODEL",
655
+ "link": 117
656
+ },
657
+ {
658
+ "name": "positive",
659
+ "type": "CONDITIONING",
660
+ "link": 101
661
+ },
662
+ {
663
+ "name": "negative",
664
+ "type": "CONDITIONING",
665
+ "link": 102
666
+ },
667
+ {
668
+ "name": "latent_image",
669
+ "type": "LATENT",
670
+ "link": 103
671
+ }
672
+ ],
673
+ "outputs": [
674
+ {
675
+ "name": "LATENT",
676
+ "type": "LATENT",
677
+ "slot_index": 0,
678
+ "links": [
679
+ 35
680
+ ]
681
+ }
682
+ ],
683
+ "properties": {
684
+ "cnr_id": "comfy-core",
685
+ "ver": "0.3.67",
686
+ "Node name for S&R": "KSampler"
687
+ },
688
+ "widgets_values": [
689
+ 746408637863328,
690
+ "randomize",
691
+ 8,
692
+ 1,
693
+ "uni_pc",
694
+ "simple",
695
+ 1
696
+ ]
697
+ },
698
+ {
699
+ "id": 54,
700
+ "type": "ModelSamplingSD3",
701
+ "pos": [
702
+ 570,
703
+ -250
704
+ ],
705
+ "size": [
706
+ 320,
707
+ 58
708
+ ],
709
+ "flags": {},
710
+ "order": 16,
711
+ "mode": 0,
712
+ "inputs": [
713
+ {
714
+ "name": "model",
715
+ "type": "MODEL",
716
+ "link": 115
717
+ }
718
+ ],
719
+ "outputs": [
720
+ {
721
+ "name": "MODEL",
722
+ "type": "MODEL",
723
+ "slot_index": 0,
724
+ "links": [
725
+ 116
726
+ ]
727
+ }
728
+ ],
729
+ "properties": {
730
+ "cnr_id": "comfy-core",
731
+ "ver": "0.3.67",
732
+ "Node name for S&R": "ModelSamplingSD3"
733
+ },
734
+ "widgets_values": [
735
+ 2
736
+ ]
737
+ },
738
+ {
739
+ "id": 52,
740
+ "type": "LoadImage",
741
+ "pos": [
742
+ -300,
743
+ 590
744
+ ],
745
+ "size": [
746
+ 360,
747
+ 440
748
+ ],
749
+ "flags": {},
750
+ "order": 7,
751
+ "mode": 0,
752
+ "inputs": [],
753
+ "outputs": [
754
+ {
755
+ "name": "IMAGE",
756
+ "type": "IMAGE",
757
+ "slot_index": 0,
758
+ "links": [
759
+ 159
760
+ ]
761
+ },
762
+ {
763
+ "name": "MASK",
764
+ "type": "MASK",
765
+ "slot_index": 1,
766
+ "links": null
767
+ }
768
+ ],
769
+ "properties": {
770
+ "cnr_id": "comfy-core",
771
+ "ver": "0.3.67",
772
+ "Node name for S&R": "LoadImage"
773
+ },
774
+ "widgets_values": [
775
+ "Profile2_512.jpg",
776
+ "image"
777
+ ]
778
+ },
779
+ {
780
+ "id": 92,
781
+ "type": "UnetLoaderGGUF",
782
+ "pos": [
783
+ -309.1258294400758,
784
+ -205.45261168890357
785
+ ],
786
+ "size": [
787
+ 345.47722354586904,
788
+ 59.67727163435262
789
+ ],
790
+ "flags": {},
791
+ "order": 8,
792
+ "mode": 0,
793
+ "inputs": [],
794
+ "outputs": [
795
+ {
796
+ "name": "MODEL",
797
+ "type": "MODEL",
798
+ "links": [
799
+ 162
800
+ ]
801
+ }
802
+ ],
803
+ "properties": {
804
+ "cnr_id": "ComfyUI-GGUF",
805
+ "ver": "02dac863ee1b65852d39ce6b9180bf5d9bc8a636",
806
+ "Node name for S&R": "UnetLoaderGGUF"
807
+ },
808
+ "widgets_values": [
809
+ "ChronoEdit-14B-Q4_K_S.gguf"
810
+ ]
811
+ },
812
+ {
813
+ "id": 57,
814
+ "type": "LoraLoaderModelOnly",
815
+ "pos": [
816
+ -313.78710444484284,
817
+ 23.96623833901335
818
+ ],
819
+ "size": [
820
+ 360,
821
+ 82
822
+ ],
823
+ "flags": {},
824
+ "order": 14,
825
+ "mode": 4,
826
+ "inputs": [
827
+ {
828
+ "name": "model",
829
+ "type": "MODEL",
830
+ "link": 163
831
+ }
832
+ ],
833
+ "outputs": [
834
+ {
835
+ "name": "MODEL",
836
+ "type": "MODEL",
837
+ "links": [
838
+ 115
839
+ ]
840
+ }
841
+ ],
842
+ "properties": {
843
+ "cnr_id": "comfy-core",
844
+ "ver": "0.3.67",
845
+ "Node name for S&R": "LoraLoaderModelOnly",
846
+ "models": [
847
+ {
848
+ "name": "chronoedit_distill_lora.safetensors",
849
+ "url": "https://huggingface.co/nvidia/ChronoEdit-14B-Diffusers/resolve/main/lora/chronoedit_distill_lora.safetensors",
850
+ "directory": "loras"
851
+ }
852
+ ]
853
+ },
854
+ "widgets_values": [
855
+ "upsample_lora_diffusers.safetensors",
856
+ 1
857
+ ]
858
+ },
859
+ {
860
+ "id": 6,
861
+ "type": "CLIPTextEncode",
862
+ "pos": [
863
+ 110,
864
+ -210
865
+ ],
866
+ "size": [
867
+ 422.84503173828125,
868
+ 164.31304931640625
869
+ ],
870
+ "flags": {},
871
+ "order": 9,
872
+ "mode": 0,
873
+ "inputs": [
874
+ {
875
+ "name": "clip",
876
+ "type": "CLIP",
877
+ "link": 74
878
+ }
879
+ ],
880
+ "outputs": [
881
+ {
882
+ "name": "CONDITIONING",
883
+ "type": "CONDITIONING",
884
+ "slot_index": 0,
885
+ "links": [
886
+ 97
887
+ ]
888
+ }
889
+ ],
890
+ "title": "CLIP Text Encode (Positive Prompt)",
891
+ "properties": {
892
+ "cnr_id": "comfy-core",
893
+ "ver": "0.3.67",
894
+ "Node name for S&R": "CLIPTextEncode"
895
+ },
896
+ "widgets_values": [
897
+ "Give this penguin a pair of glasses."
898
+ ],
899
+ "color": "#232",
900
+ "bgcolor": "#353"
901
+ }
902
+ ],
903
+ "links": [
904
+ [
905
+ 35,
906
+ 3,
907
+ 0,
908
+ 8,
909
+ 0,
910
+ "LATENT"
911
+ ],
912
+ [
913
+ 74,
914
+ 38,
915
+ 0,
916
+ 6,
917
+ 0,
918
+ "CLIP"
919
+ ],
920
+ [
921
+ 75,
922
+ 38,
923
+ 0,
924
+ 7,
925
+ 0,
926
+ "CLIP"
927
+ ],
928
+ [
929
+ 76,
930
+ 39,
931
+ 0,
932
+ 8,
933
+ 1,
934
+ "VAE"
935
+ ],
936
+ [
937
+ 94,
938
+ 49,
939
+ 0,
940
+ 51,
941
+ 0,
942
+ "CLIP_VISION"
943
+ ],
944
+ [
945
+ 97,
946
+ 6,
947
+ 0,
948
+ 50,
949
+ 0,
950
+ "CONDITIONING"
951
+ ],
952
+ [
953
+ 98,
954
+ 7,
955
+ 0,
956
+ 50,
957
+ 1,
958
+ "CONDITIONING"
959
+ ],
960
+ [
961
+ 99,
962
+ 39,
963
+ 0,
964
+ 50,
965
+ 2,
966
+ "VAE"
967
+ ],
968
+ [
969
+ 101,
970
+ 50,
971
+ 0,
972
+ 3,
973
+ 1,
974
+ "CONDITIONING"
975
+ ],
976
+ [
977
+ 102,
978
+ 50,
979
+ 1,
980
+ 3,
981
+ 2,
982
+ "CONDITIONING"
983
+ ],
984
+ [
985
+ 103,
986
+ 50,
987
+ 2,
988
+ 3,
989
+ 3,
990
+ "LATENT"
991
+ ],
992
+ [
993
+ 115,
994
+ 57,
995
+ 0,
996
+ 54,
997
+ 0,
998
+ "MODEL"
999
+ ],
1000
+ [
1001
+ 116,
1002
+ 54,
1003
+ 0,
1004
+ 58,
1005
+ 0,
1006
+ "MODEL"
1007
+ ],
1008
+ [
1009
+ 117,
1010
+ 58,
1011
+ 0,
1012
+ 3,
1013
+ 0,
1014
+ "MODEL"
1015
+ ],
1016
+ [
1017
+ 118,
1018
+ 8,
1019
+ 0,
1020
+ 59,
1021
+ 0,
1022
+ "IMAGE"
1023
+ ],
1024
+ [
1025
+ 119,
1026
+ 59,
1027
+ 0,
1028
+ 60,
1029
+ 0,
1030
+ "IMAGE"
1031
+ ],
1032
+ [
1033
+ 120,
1034
+ 51,
1035
+ 0,
1036
+ 50,
1037
+ 3,
1038
+ "CLIP_VISION_OUTPUT"
1039
+ ],
1040
+ [
1041
+ 159,
1042
+ 52,
1043
+ 0,
1044
+ 89,
1045
+ 0,
1046
+ "IMAGE"
1047
+ ],
1048
+ [
1049
+ 160,
1050
+ 89,
1051
+ 0,
1052
+ 50,
1053
+ 4,
1054
+ "IMAGE"
1055
+ ],
1056
+ [
1057
+ 161,
1058
+ 89,
1059
+ 0,
1060
+ 51,
1061
+ 1,
1062
+ "IMAGE"
1063
+ ],
1064
+ [
1065
+ 162,
1066
+ 92,
1067
+ 0,
1068
+ 93,
1069
+ 0,
1070
+ "MODEL"
1071
+ ],
1072
+ [
1073
+ 163,
1074
+ 93,
1075
+ 0,
1076
+ 57,
1077
+ 0,
1078
+ "MODEL"
1079
+ ]
1080
+ ],
1081
+ "groups": [
1082
+ {
1083
+ "id": 1,
1084
+ "title": "Step 4: Prompt",
1085
+ "bounding": [
1086
+ 100,
1087
+ -280,
1088
+ 445.27801513671875,
1089
+ 484.2060791015625
1090
+ ],
1091
+ "color": "#3f789e",
1092
+ "font_size": 24,
1093
+ "flags": {}
1094
+ },
1095
+ {
1096
+ "id": 2,
1097
+ "title": "Step 1 - Load models (for local users)",
1098
+ "bounding": [
1099
+ -330,
1100
+ -280,
1101
+ 410,
1102
+ 760
1103
+ ],
1104
+ "color": "#3f789e",
1105
+ "font_size": 24,
1106
+ "flags": {}
1107
+ },
1108
+ {
1109
+ "id": 3,
1110
+ "title": "Step 2 - Upload image",
1111
+ "bounding": [
1112
+ -330,
1113
+ 510,
1114
+ 410,
1115
+ 690
1116
+ ],
1117
+ "color": "#3f789e",
1118
+ "font_size": 24,
1119
+ "flags": {}
1120
+ },
1121
+ {
1122
+ "id": 4,
1123
+ "title": "Step3: Image size",
1124
+ "bounding": [
1125
+ 110,
1126
+ 220,
1127
+ 430,
1128
+ 290
1129
+ ],
1130
+ "color": "#3f789e",
1131
+ "font_size": 24,
1132
+ "flags": {}
1133
+ }
1134
+ ],
1135
+ "config": {},
1136
+ "extra": {
1137
+ "ds": {
1138
+ "scale": 0.5962063505509394,
1139
+ "offset": [
1140
+ 938.0999919824351,
1141
+ 286.1851644443301
1142
+ ]
1143
+ },
1144
+ "frontendVersion": "1.28.8",
1145
+ "VHS_latentpreview": false,
1146
+ "VHS_latentpreviewrate": 0,
1147
+ "VHS_MetadataImage": true,
1148
+ "VHS_KeepIntermediate": true
1149
+ },
1150
+ "version": 0.4
1151
+ }