// constants/defaultWorkflow.ts
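/**
 * Default text-to-image workflow for ComfyUI, stored in the API ("prompt")
 * format that ComfyUI accepts over HTTP. Top-level keys are node IDs, and
 * each link input is a [sourceNodeId, outputIndex] pair.
 *
 * Graph: UNETLoader (37) -> LoraLoaderModelOnly (73) -> ModelSamplingAuraFlow
 * (66) -> KSampler (3); CLIPLoader (38) feeds the positive (6) and negative
 * (7) text encoders; EmptySD3LatentImage (58) supplies the 1024x1024 latent;
 * VAEDecode (8) and SaveImage (60) produce the output file.
 */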
export const DEFAULT_COMFY_WORKFLOW = {
  // Node 3: KSampler. steps=8 / cfg=1 match the Lightning 8-step LoRA (node 73).
  "3": {
    "inputs": {
      "seed": 912870720080529,
      "steps": 8,
      "cfg": 1,
      "sampler_name": "euler",
      "scheduler": "simple",
      "denoise": 1,
      "model": ["66", 0],
      "positive": ["6", 0],
      "negative": ["7", 0],
      "latent_image": ["58", 0]
    },
    "class_type": "KSampler",
    "_meta": { "title": "KSampler" }
  },
  // Nodes 6/7: prompt encoders; "text" is left blank in this default workflow.
  "6": {
    "inputs": {
      "text": "",
      "clip": ["38", 0]
    },
    "class_type": "CLIPTextEncode",
    "_meta": { "title": "CLIP Text Encode (Positive Prompt)" }
  },
  "7": {
    "inputs": {
      "text": "",
      "clip": ["38", 0]
    },
    "class_type": "CLIPTextEncode",
    "_meta": { "title": "CLIP Text Encode (Negative Prompt)" }
  },
  // Node 8: decodes the sampled latent back into pixels.
  "8": {
    "inputs": {
      "samples": ["3", 0],
      "vae": ["39", 0]
    },
    "class_type": "VAEDecode",
    "_meta": { "title": "VAE Decode" }
  },
  // Node 37: Qwen-Image diffusion model (fp8_e4m3fn checkpoint).
  "37": {
    "inputs": {
      "unet_name": "qwen_image_fp8_e4m3fn.safetensors",
      "weight_dtype": "default"
    },
    "class_type": "UNETLoader",
    "_meta": { "title": "Load Diffusion Model" }
  },
  // Node 38: Qwen2.5-VL 7B text encoder, shared by nodes 6 and 7.
  "38": {
    "inputs": {
      "clip_name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
      "type": "qwen_image",
      "device": "default"
    },
    "class_type": "CLIPLoader",
    "_meta": { "title": "Load CLIP" }
  },
  "39": {
    "inputs": {
      "vae_name": "qwen_image_vae.safetensors"
    },
    "class_type": "VAELoader",
    "_meta": { "title": "Load VAE" }
  },
  // Node 58: 1024x1024 empty latent, batch size 1.
  "58": {
    "inputs": {
      "width": 1024,
      "height": 1024,
      "batch_size": 1
    },
    "class_type": "EmptySD3LatentImage",
    "_meta": { "title": "EmptySD3LatentImage" }
  },
  "60": {
    "inputs": {
      "filename_prefix": "ComfyUI",
      "images": ["8", 0]
    },
    "class_type": "SaveImage",
    "_meta": { "title": "Save Image" }
  },
  // Node 66: AuraFlow-style sampling shift applied to the LoRA-patched model.
  "66": {
    "inputs": {
      "shift": 3,
      "model": ["73", 0]
    },
    "class_type": "ModelSamplingAuraFlow",
    "_meta": { "title": "ModelSamplingAuraFlow" }
  },
  // Node 73: Qwen-Image Lightning 8-step V2.0 LoRA at full model strength.
  "73": {
    "inputs": {
      "lora_name": "Qwen-Image-Lightning-8steps-V2.0.safetensors",
      "strength_model": 1,
      "model": ["37", 0]
    },
    "class_type": "LoraLoaderModelOnly",
    "_meta": { "title": "LoraLoaderModelOnly" }
  }
};
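
// ---------------------------------------------------------------------------
// Usage sketch (not part of the original workflow): queues this workflow
// against a local ComfyUI server. POST /prompt with a { prompt, client_id }
// body is ComfyUI's standard HTTP API, but the base URL, the helper name
// `queueDefaultWorkflow`, and the seed range here are assumptions chosen for
// illustration, not something this repo defines.
// ---------------------------------------------------------------------------
export async function queueDefaultWorkflow(
  promptText: string,
  baseUrl = "http://127.0.0.1:8188" // assumption: ComfyUI's default port
): Promise<string> {
  // Deep-copy so the shared constant is never mutated between requests.
  const workflow = structuredClone(DEFAULT_COMFY_WORKFLOW);
  workflow["6"].inputs.text = promptText; // fill the blank positive prompt (node 6)
  workflow["3"].inputs.seed = Math.floor(Math.random() * 2 ** 48); // fresh seed (node 3)

  const res = await fetch(`${baseUrl}/prompt`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ prompt: workflow, client_id: crypto.randomUUID() }),
  });
  if (!res.ok) throw new Error(`ComfyUI /prompt failed: ${res.status}`);

  // ComfyUI answers with a prompt_id; results can then be fetched by polling
  // GET /history/{prompt_id} once the queue has processed the job.
  const { prompt_id } = await res.json();
  return prompt_id;
}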