Diwashbarla committed on
Commit
79c0bfc
·
verified ·
1 Parent(s): 1fe12b5

Update workflow.json

Browse files
Files changed (1) hide show
  1. workflow.json +7 -8
workflow.json CHANGED
@@ -1,19 +1,18 @@
1
  {
2
- "1": { "class_type": "LoadImage", "inputs": { "image": "default.jpg" } },
3
  "2": { "class_type": "CLIPLoader", "inputs": { "clip_name": "umt5_xxl_fp8_e4m3fn_scaled.safetensors", "type": "wan" } },
4
  "3": { "class_type": "VAELoader", "inputs": { "vae_name": "wan_2.1_vae.safetensors" } },
5
- "4": { "class_type": "UNETLoader", "inputs": { "unet_name": "wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors", "weight_dtype": "default" } },
6
- "5": { "class_type": "UNETLoader", "inputs": { "unet_name": "wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors", "weight_dtype": "default" } },
7
  "6": { "class_type": "CLIPTextEncode", "inputs": { "text": "Positive prompt...", "clip": ["2", 0] } },
8
  "7": { "class_type": "CLIPTextEncode", "inputs": { "text": "色调艳丽, 过曝, 静态, 细节模糊不清, 字幕", "clip": ["2", 0] } },
9
- "8": { "class_type": "WanImageToVideo", "inputs": { "width": 640, "height": 640, "length": 81, "batch_size": 1, "positive": ["6", 0], "negative": ["7", 0], "vae": ["3", 0], "start_image": ["1", 0] } },
10
- "9": { "class_type": "LoraLoaderModelOnly", "inputs": { "lora_name": "wan2.2_i2v_lightx2v_4steps_lora_v1_high_noise.safetensors", "strength_model": 1.0, "model": ["4", 0] } },
11
- "10": { "class_type": "LoraLoaderModelOnly", "inputs": { "lora_name": "wan2.2_i2v_lightx2v_4steps_lora_v1_low_noise.safetensors", "strength_model": 1.0, "model": ["5", 0] } },
12
  "11": { "class_type": "ModelSamplingSD3", "inputs": { "shift": 5.0, "model": ["9", 0] } },
13
  "12": { "class_type": "ModelSamplingSD3", "inputs": { "shift": 5.0, "model": ["10", 0] } },
14
  "13": { "class_type": "KSamplerAdvanced", "inputs": { "add_noise": "enable", "noise_seed": 12345, "control_after_generate": "randomize", "steps": 4, "cfg": 1.0, "sampler_name": "euler", "scheduler": "simple", "start_at_step": 0, "end_at_step": 2, "return_with_leftover_noise": "enable", "model": ["11", 0], "positive": ["8", 0], "negative": ["8", 1], "latent_image": ["8", 2] } },
15
  "14": { "class_type": "KSamplerAdvanced", "inputs": { "add_noise": "disable", "noise_seed": 12345, "control_after_generate": "randomize", "steps": 4, "cfg": 1.0, "sampler_name": "euler", "scheduler": "simple", "start_at_step": 2, "end_at_step": 4, "return_with_leftover_noise": "disable", "model": ["12", 0], "positive": ["8", 0], "negative": ["8", 1], "latent_image": ["13", 0] } },
16
  "15": { "class_type": "VAEDecode", "inputs": { "samples": ["14", 0], "vae": ["3", 0] } },
17
  "16": { "class_type": "CreateVideo", "inputs": { "fps": 16, "images": ["15", 0] } },
18
- "17": { "class_type": "SaveVideo", "inputs": { "filename_prefix": "video/Wan2.2_i2v", "fps": 16, "video": ["16", 0] } }
19
- }
 
1
  {
 
2
  "2": { "class_type": "CLIPLoader", "inputs": { "clip_name": "umt5_xxl_fp8_e4m3fn_scaled.safetensors", "type": "wan" } },
3
  "3": { "class_type": "VAELoader", "inputs": { "vae_name": "wan_2.1_vae.safetensors" } },
4
+ "4": { "class_type": "UNETLoader", "inputs": { "unet_name": "wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors", "weight_dtype": "default" } },
5
+ "5": { "class_type": "UNETLoader", "inputs": { "unet_name": "wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors", "weight_dtype": "default" } },
6
  "6": { "class_type": "CLIPTextEncode", "inputs": { "text": "Positive prompt...", "clip": ["2", 0] } },
7
  "7": { "class_type": "CLIPTextEncode", "inputs": { "text": "色调艳丽, 过曝, 静态, 细节模糊不清, 字幕", "clip": ["2", 0] } },
8
+ "8": { "class_type": "WanTextToVideo", "inputs": { "width": 640, "height": 640, "length": 81, "batch_size": 1, "positive": ["6", 0], "negative": ["7", 0], "vae": ["3", 0] } },
9
+ "9": { "class_type": "LoraLoaderModelOnly", "inputs": { "lora_name": "wan2.2_t2v_lightx2v_4steps_lora_v1.1_high_noise.safetensors", "strength_model": 1.0, "model": ["4", 0] } },
10
+ "10": { "class_type": "LoraLoaderModelOnly", "inputs": { "lora_name": "wan2.2_t2v_lightx2v_4steps_lora_v1.1_low_noise.safetensors", "strength_model": 1.0, "model": ["5", 0] } },
11
  "11": { "class_type": "ModelSamplingSD3", "inputs": { "shift": 5.0, "model": ["9", 0] } },
12
  "12": { "class_type": "ModelSamplingSD3", "inputs": { "shift": 5.0, "model": ["10", 0] } },
13
  "13": { "class_type": "KSamplerAdvanced", "inputs": { "add_noise": "enable", "noise_seed": 12345, "control_after_generate": "randomize", "steps": 4, "cfg": 1.0, "sampler_name": "euler", "scheduler": "simple", "start_at_step": 0, "end_at_step": 2, "return_with_leftover_noise": "enable", "model": ["11", 0], "positive": ["8", 0], "negative": ["8", 1], "latent_image": ["8", 2] } },
14
  "14": { "class_type": "KSamplerAdvanced", "inputs": { "add_noise": "disable", "noise_seed": 12345, "control_after_generate": "randomize", "steps": 4, "cfg": 1.0, "sampler_name": "euler", "scheduler": "simple", "start_at_step": 2, "end_at_step": 4, "return_with_leftover_noise": "disable", "model": ["12", 0], "positive": ["8", 0], "negative": ["8", 1], "latent_image": ["13", 0] } },
15
  "15": { "class_type": "VAEDecode", "inputs": { "samples": ["14", 0], "vae": ["3", 0] } },
16
  "16": { "class_type": "CreateVideo", "inputs": { "fps": 16, "images": ["15", 0] } },
17
+ "17": { "class_type": "SaveVideo", "inputs": { "filename_prefix": "video/Wan2.2_t2v", "fps": 16, "video": ["16", 0], "format": "video/h264-mp4", "codec": "h264" } }
18
+ }