{
"13": {
"inputs": {
"prompt": "fruit, plate,",
"threshold": 0.2,
"sam_model": [
"14",
0
],
"grounding_dino_model": [
"15",
0
],
"image": [
"60",
0
]
},
"class_type": "GroundingDinoSAMSegment (segment anything)",
"_meta": {
"title": "GroundingDinoSAMSegment (segment anything)"
}
},
"14": {
"inputs": {
"model_name": "sam_hq_vit_h (2.57GB)"
},
"class_type": "SAMModelLoader (segment anything)",
"_meta": {
"title": "SAMModelLoader (segment anything)"
}
},
"15": {
"inputs": {
"model_name": "GroundingDINO_SwinT_OGC (694MB)"
},
"class_type": "GroundingDinoModelLoader (segment anything)",
"_meta": {
"title": "GroundingDinoModelLoader (segment anything)"
}
},
"16": {
"inputs": {
"images": [
"13",
0
]
},
"class_type": "PreviewImage",
"_meta": {
"title": "Preview Image"
}
},
"17": {
"inputs": {
"mask": [
"13",
1
]
},
"class_type": "InvertMask",
"_meta": {
"title": "InvertMask"
}
},
"18": {
"inputs": {
"expand": 1,
"tapered_corners": true,
"mask": [
"17",
0
]
},
"class_type": "GrowMask",
"_meta": {
"title": "GrowMask"
}
},
"29": {
"inputs": {
"positive": [
"54:1",
0
],
"negative": [
"54:1",
1
],
"vae": [
"58",
2
],
"pixels": [
"60",
0
],
"mask": [
"18",
0
]
},
"class_type": "InpaintModelConditioning",
"_meta": {
"title": "InpaintModelConditioning"
}
},
"35": {
"inputs": {
"seed": 1105431360287181,
"steps": 8,
"cfg": 0.96,
"sampler_name": "euler_ancestral",
"scheduler": "normal",
"denoise": 1,
"model": [
"61",
0
],
"positive": [
"29",
0
],
"negative": [
"29",
1
],
"latent_image": [
"29",
2
]
},
"class_type": "KSampler",
"_meta": {
"title": "KSampler"
}
},
"36": {
"inputs": {
"samples": [
"35",
0
],
"vae": [
"58",
2
]
},
"class_type": "VAEDecode",
"_meta": {
"title": "VAE Decode"
}
},
"39": {
"inputs": {
"filename_prefix": "ComfyUI",
"images": [
"36",
0
]
},
"class_type": "SaveImage",
"_meta": {
"title": "Save Image"
}
},
"56": {
"inputs": {
"image": "IMG20240313112329.jpg",
"upload": "image"
},
"class_type": "LoadImage",
"_meta": {
"title": "Load Image"
}
},
"57": {
"inputs": {
"text": "(worst quality, low quality, normal quality:2),Text,Watermark,logo,nsfw,text,lowres,bad anatomy,bad hands,text,error,missing fingers,extra",
"seed": 1665,
"autorefresh": "No"
},
"class_type": "DPRandomGenerator",
"_meta": {
"title": "Negative Prompts"
}
},
"58": {
"inputs": {
"ckpt_name": "XLWedding.safetensors"
},
"class_type": "CheckpointLoaderSimple",
"_meta": {
"title": "Load Checkpoint"
}
},
"59": {
"inputs": {
"text": "A stunning intricate shot of frontal view of tea package and white cup and coffee beans on an empty white wooden table with a modern kitch in the background, sharp focus, natural lighting, subsurface scattering, f2,35mm, film grain",
"seed": 726,
"autorefresh": "No"
},
"class_type": "DPRandomGenerator",
"_meta": {
"title": "Positive prompt"
}
},
"60": {
"inputs": {
"mode": "resize",
"supersample": "true",
"resampling": "nearest",
"rescale_factor": 2,
"resize_width": 768,
"resize_height": 1024,
"image": [
"56",
0
]
},
"class_type": "Image Resize",
"_meta": {
"title": "Image Resize"
}
},
"61": {
"inputs": {
"lora_name": "sdxl_lightning_8step_lora.safetensors",
"strength_model": 1,
"model": [
"58",
0
]
},
"class_type": "LoraLoaderModelOnly",
"_meta": {
"title": "LoraLoaderModelOnly"
}
},
"62": {
"inputs": {
"stop_at_clip_layer": -1,
"clip": [
"58",
1
]
},
"class_type": "CLIPSetLastLayer",
"_meta": {
"title": "CLIP Set Last Layer"
}
},
"63": {
"inputs": {
"text": [
"59",
0
],
"clip": [
"62",
0
]
},
"class_type": "CLIPTextEncode",
"_meta": {
"title": "CLIP Text Encode (Prompt)"
}
},
"64": {
"inputs": {
"text": [
"57",
0
],
"clip": [
"62",
0
]
},
"class_type": "CLIPTextEncode",
"_meta": {
"title": "CLIP Text Encode (Prompt)"
}
},
"54:0": {
"inputs": {
"control_net_name": "XLControlnet/sai_xl_depth_128lora.safetensors"
},
"class_type": "ControlNetLoader",
"_meta": {
"title": "Load ControlNet Model"
}
},
"54:1": {
"inputs": {
"strength": 1,
"start_percent": 0,
"end_percent": 1,
"positive": [
"63",
0
],
"negative": [
"64",
0
],
"control_net": [
"54:0",
0
],
"image": [
"13",
0
]
},
"class_type": "ControlNetApplyAdvanced",
"_meta": {
"title": "Apply ControlNet (Advanced)"
}
}
}