{
"2": {
"inputs": {
"ckpt_name": "XLWedding.safetensors"
},
"class_type": "CheckpointLoaderSimple",
"_meta": {
"title": "Load Checkpoint"
}
},
"5": {
"inputs": {
"text": [
"106",
0
],
"clip": [
"2",
1
]
},
"class_type": "CLIPTextEncode",
"_meta": {
"title": "CLIP Text Encode (Prompt)"
}
},
"6": {
"inputs": {
"samples": [
"111",
0
],
"vae": [
"2",
2
]
},
"class_type": "VAEDecode",
"_meta": {
"title": "VAE Decode"
}
},
"8": {
"inputs": {
"image": "431052095_451611487221025_867692776290485785_n.png",
"upload": "image"
},
"class_type": "LoadImage",
"_meta": {
"title": "Load Image"
}
},
"10": {
"inputs": {
"prompt": "object, food, rice, white disk",
"threshold": 0.35,
"sam_model": [
"91",
0
],
"grounding_dino_model": [
"11",
0
],
"image": [
"86",
0
]
},
"class_type": "GroundingDinoSAMSegment (segment anything)",
"_meta": {
"title": "GroundingDinoSAMSegment (segment anything)"
}
},
"11": {
"inputs": {
"model_name": "GroundingDINO_SwinT_OGC (694MB)"
},
"class_type": "GroundingDinoModelLoader (segment anything)",
"_meta": {
"title": "GroundingDinoModelLoader (segment anything)"
}
},
"12": {
"inputs": {
"images": [
"13",
0
]
},
"class_type": "PreviewImage",
"_meta": {
"title": "Preview Image"
}
},
"13": {
"inputs": {
"mask": [
"73",
0
]
},
"class_type": "MaskToImage",
"_meta": {
"title": "Convert Mask to Image"
}
},
"15": {
"inputs": {
"images": [
"6",
0
]
},
"class_type": "PreviewImage",
"_meta": {
"title": "Preview Image"
}
},
"16": {
"inputs": {
"grow_mask_by": 6,
"pixels": [
"10",
0
],
"vae": [
"2",
2
],
"mask": [
"73",
0
]
},
"class_type": "VAEEncodeForInpaint",
"_meta": {
"title": "VAE Encode (for Inpainting)"
}
},
"17": {
"inputs": {
"low_threshold": 100,
"high_threshold": 200,
"resolution": 512,
"image": [
"10",
0
]
},
"class_type": "CannyEdgePreprocessor",
"_meta": {
"title": "Canny Edge"
}
},
"18": {
"inputs": {
"strength": 0.1,
"conditioning": [
"33",
0
],
"control_net": [
"19",
0
],
"image": [
"17",
0
]
},
"class_type": "ControlNetApply",
"_meta": {
"title": "Apply ControlNet"
}
},
"19": {
"inputs": {
"control_net_name": "XLControlnet/sai_xl_canny_128lora.safetensors"
},
"class_type": "ControlNetLoader",
"_meta": {
"title": "Load ControlNet Model"
}
},
"20": {
"inputs": {
"images": [
"10",
0
]
},
"class_type": "PreviewImage",
"_meta": {
"title": "Preview Image"
}
},
"33": {
"inputs": {
"strength": 0.13,
"conditioning": [
"104",
0
],
"control_net": [
"34",
0
],
"image": [
"35",
0
]
},
"class_type": "ControlNetApply",
"_meta": {
"title": "Apply ControlNet"
}
},
"34": {
"inputs": {
"control_net_name": "XLControlnet/t2i-adapter_diffusers_xl_depth_zoe.safetensors"
},
"class_type": "ControlNetLoader",
"_meta": {
"title": "Load ControlNet Model"
}
},
"35": {
"inputs": {
"rm_nearest": 0,
"rm_background": 2.5,
"boost": "disable",
"resolution": 640,
"image": [
"10",
0
]
},
"class_type": "LeReS-DepthMapPreprocessor",
"_meta": {
"title": "LeReS Depth Map (enable boost for leres++)"
}
},
"36": {
"inputs": {
"images": [
"35",
0
]
},
"class_type": "PreviewImage",
"_meta": {
"title": "Preview Image"
}
},
"73": {
"inputs": {
"mask": [
"10",
1
]
},
"class_type": "InvertMask (segment anything)",
"_meta": {
"title": "InvertMask (segment anything)"
}
},
"86": {
"inputs": {
"mode": "rescale",
"supersample": "true",
"resampling": "lanczos",
"rescale_factor": 2,
"resize_width": 768,
"resize_height": 1024,
"image": [
"8",
0
]
},
"class_type": "Image Resize",
"_meta": {
"title": "Image Resize"
}
},
"87": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": [
"6",
0
]
},
"class_type": "SaveImage",
"_meta": {
"title": "Save Image"
}
},
"91": {
"inputs": {
"model_name": "sam_vit_l (1.25GB)"
},
"class_type": "SAMModelLoader (segment anything)",
"_meta": {
"title": "SAMModelLoader (segment anything)"
}
},
"93": {
"inputs": {
"images": [
"17",
0
]
},
"class_type": "PreviewImage",
"_meta": {
"title": "Preview Image"
}
},
"98": {
"inputs": {
"lora_name": "sdxlmonan.safetensors",
"strength_model": 1.2,
"model": [
"2",
0
]
},
"class_type": "LoraLoaderModelOnly",
"_meta": {
"title": "LoraLoaderModelOnly"
}
},
"102": {
"inputs": {
"text": "(no food behind:1.5), garnish, winter theme background, snow, highly detailed, ultra-high resolutions, 32K UHD, best quality, masterpiece,",
"seed": 1512,
"autorefresh": "No"
},
"class_type": "DPRandomGenerator",
"_meta": {
"title": "Random Prompts"
}
},
"104": {
"inputs": {
"text": [
"102",
0
],
"clip": [
"2",
1
]
},
"class_type": "CLIPTextEncode",
"_meta": {
"title": "CLIP Text Encode (Prompt)"
}
},
"106": {
"inputs": {
"text": "(object behind:1.5), (shadow:1.5), (worst quality:1.5), (low quality:1.5), (normal quality:1.5), lowres, bad anatomy, bad hands, multiple eyebrow, (cropped), extra limb, missing limbs, deformed hands, long neck, long body, (bad hands), signature, username, artist name, conjoined fingers, deformed fingers, ugly eyes, imperfect eyes, skewed eyes, unnatural face, unnatural body, error, painting by bad-artist ",
"seed": 547,
"autorefresh": "No"
},
"class_type": "DPRandomGenerator",
"_meta": {
"title": "Random Prompts"
}
},
"111": {
"inputs": {
"seed": 934163702532617,
"steps": 50,
"cfg": 9,
"sampler_name": "euler",
"scheduler": "normal",
"denoise": 1,
"model": [
"98",
0
],
"positive": [
"18",
0
],
"negative": [
"5",
0
],
"latent_image": [
"16",
0
]
},
"class_type": "KSampler",
"_meta": {
"title": "KSampler"
}
}
}