{
  "2": {
    "inputs": {
      "channel": "red",
      "image": [
        "22",
        0
      ]
    },
    "class_type": "ImageToMask",
    "_meta": {
      "title": "Convert Image to Mask"
    }
  },
  "7": {
    "inputs": {
      "text": "blur, text, watermark, CGI, Unreal, Airbrushed, Digital",
      "clip": [
        "12",
        1
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "8": {
    "inputs": {
      "samples": [
        "19",
        0
      ],
      "vae": [
        "12",
        2
      ]
    },
    "class_type": "VAEDecode",
    "_meta": {
      "title": "VAE Decode"
    }
  },
  "9": {
    "inputs": {
      "seed": 938412506294300
    },
    "class_type": "Seed Generator",
    "_meta": {
      "title": "Seed Generator"
    }
  },
  "12": {
    "inputs": {
      "ckpt_name": "XLHxzmixInteriorAnd_v10.safetensors"
    },
    "class_type": "CheckpointLoaderSimple",
    "_meta": {
      "title": "Load Checkpoint"
    }
  },
  "13": {
    "inputs": {
      "noise_mask": true,
      "positive": [
        "34",
        0
      ],
      "negative": [
        "7",
        0
      ],
      "vae": [
        "12",
        2
      ],
      "pixels": [
        "23",
        0
      ],
      "mask": [
        "2",
        0
      ]
    },
    "class_type": "InpaintModelConditioning",
    "_meta": {
      "title": "InpaintModelConditioning"
    }
  },
  "14": {
    "inputs": {
      "ipadapter_file": "ip-adapter_sdxl_vit-h.safetensors"
    },
    "class_type": "IPAdapterModelLoader",
    "_meta": {
      "title": "IPAdapter Model Loader"
    }
  },
  "17": {
    "inputs": {
      "preprocessor": "LineartStandardPreprocessor",
      "resolution": 320,
      "image": [
        "23",
        0
      ]
    },
    "class_type": "AIO_Preprocessor",
    "_meta": {
      "title": "AIO Aux Preprocessor"
    }
  },
  "18": {
    "inputs": {
      "ckpt_name": "depth_anything_vits14.pth",
      "resolution": 320,
      "image": [
        "23",
        0
      ]
    },
    "class_type": "DepthAnythingPreprocessor",
    "_meta": {
      "title": "Depth Anything"
    }
  },
  "19": {
    "inputs": {
      "seed": [
        "9",
        0
      ],
      "steps": 6,
      "cfg": 2,
      "sampler_name": "dpmpp_sde_gpu",
      "scheduler": "karras",
      "denoise": 1,
      "model": [
        "30",
        0
      ],
      "positive": [
        "20",
        0
      ],
      "negative": [
        "20",
        1
      ],
      "latent_image": [
        "13",
        2
      ]
    },
    "class_type": "KSampler",
    "_meta": {
      "title": "KSampler"
    }
  },
  "20": {
    "inputs": {
      "strength": 0.25000000000000006,
      "start_percent": 0,
      "end_percent": 0.30000000000000004,
      "positive": [
        "21",
        0
      ],
      "negative": [
        "21",
        1
      ],
      "control_net": [
        "39",
        0
      ],
      "image": [
        "17",
        0
      ],
      "model_optional": [
        "21",
        2
      ]
    },
    "class_type": "ACN_AdvancedControlNetApply",
    "_meta": {
      "title": "Apply Advanced ControlNet 🛂🅐🅒🅝"
    }
  },
  "21": {
    "inputs": {
      "strength": 0.25000000000000006,
      "start_percent": 0,
      "end_percent": 0.30000000000000004,
      "positive": [
        "13",
        0
      ],
      "negative": [
        "13",
        1
      ],
      "control_net": [
        "38",
        0
      ],
      "image": [
        "18",
        0
      ],
      "model_optional": [
        "12",
        0
      ]
    },
    "class_type": "ACN_AdvancedControlNetApply",
    "_meta": {
      "title": "Apply Advanced ControlNet 🛂🅐🅒🅝"
    }
  },
  "22": {
    "inputs": {
      "upscale_method": "nearest-exact",
      "width": 0,
      "height": 960,
      "crop": "disabled",
      "image": [
        "42",
        0
      ]
    },
    "class_type": "ImageScale",
    "_meta": {
      "title": "Upscale Image"
    }
  },
  "23": {
    "inputs": {
      "upscale_method": "nearest-exact",
      "width": 0,
      "height": 960,
      "crop": "disabled",
      "image": [
        "35",
        0
      ]
    },
    "class_type": "ImageScale",
    "_meta": {
      "title": "Upscale Image"
    }
  },
  "24": {
    "inputs": {
      "clip_name": "CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors"
    },
    "class_type": "CLIPVisionLoader",
    "_meta": {
      "title": "Load CLIP Vision"
    }
  },
  "30": {
    "inputs": {
      "weight": 1.4000000000000004,
      "weight_type": "strong style transfer",
      "combine_embeds": "concat",
      "start_at": 0,
      "end_at": 1,
      "embeds_scaling": "V only",
      "model": [
        "20",
        2
      ],
      "ipadapter": [
        "14",
        0
      ],
      "image": [
        "37",
        0
      ],
      "clip_vision": [
        "24",
        0
      ]
    },
    "class_type": "IPAdapterAdvanced",
    "_meta": {
      "title": "IPAdapter Advanced"
    }
  },
  "34": {
    "inputs": {
      "text": "cozy and artistic coastal-themed bedroom, decorated in a harmonious palette of sea blue, sand beige, and natural wood tones. The centerpiece is a wooden bed with a light oak finish, topped with layered bedding in various shades of blue—sky blue, turquoise, and denim—with a mix of textures including linen, cotton, and knit. The bed is adorned with an assortment of decorative pillows in coordinating oceanic patterns such as waves, corals, and stripes.\n",
      "clip": [
        "12",
        1
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "35": {
    "inputs": {
      "image": "ComfyUI_temp_qtbpb_00021_.png"
    },
    "class_type": "LoadImage",
    "_meta": {
      "title": "Room Photo"
    }
  },
  "37": {
    "inputs": {
      "image": "input3_custom.jpg"
    },
    "class_type": "LoadImage",
    "_meta": {
      "title": "Sample Material"
    }
  },
  "38": {
    "inputs": {
      "control_net_name": "diffusion_pytorch_model.fp16_depth.safetensors"
    },
    "class_type": "ControlNetLoader",
    "_meta": {
      "title": "Load ControlNet Model"
    }
  },
  "39": {
    "inputs": {
      "control_net_name": "diffusion_pytorch_model.fp16_canny.safetensors"
    },
    "class_type": "ControlNetLoader",
    "_meta": {
      "title": "Load ControlNet Model"
    }
  },
  "41": {
    "inputs": {
      "filename_prefix": "ComfyUI",
      "images": [
        "8",
        0
      ]
    },
    "class_type": "SaveImage",
    "_meta": {
      "title": "Save Image"
    }
  },
  "42": {
    "inputs": {
      "image": "mask_kitchen.png"
    },
    "class_type": "LoadImage",
    "_meta": {
      "title": "Mask Photo"
    }
  }
}