{
  "3": {
    "inputs": {
      "seed": 0,
      "steps": 20,
      "cfg": 4,
      "sampler_name": "dpmpp_2m_sde",
      "scheduler": "karras",
      "denoise": 1,
      "model": [
        "10",
        0
      ],
      "positive": [
        "6",
        0
      ],
      "negative": [
        "7",
        0
      ],
      "latent_image": [
        "5",
        0
      ]
    },
    "class_type": "KSampler",
    "_meta": {
      "title": "KSampler"
    }
  },
  "4": {
    "inputs": {
      "ckpt_name": "ghostxl_v10BakedVAE.safetensors"
    },
    "class_type": "CheckpointLoaderSimple",
    "_meta": {
      "title": "Load Checkpoint"
    }
  },
  "5": {
    "inputs": {
      "width": 768,
      "height": 1024,
      "batch_size": 1
    },
    "class_type": "EmptyLatentImage",
    "_meta": {
      "title": "Empty Latent Image"
    }
  },
  "6": {
    "inputs": {
      "text": "masterpiece, 8K, best quality, clean background",
      "clip": [
        "4",
        1
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "7": {
    "inputs": {
      "text": "nsfw,blurry, low quality, distorted, photo, frame, naked, horror,embedding:EasyNegative",
      "clip": [
        "4",
        1
      ]
    },
    "class_type": "CLIPTextEncode",
    "_meta": {
      "title": "CLIP Text Encode (Prompt)"
    }
  },
  "8": {
    "inputs": {
      "samples": [
        "3",
        0
      ],
      "vae": [
        "4",
        2
      ]
    },
    "class_type": "VAEDecode",
    "_meta": {
      "title": "VAE Decode"
    }
  },
  "9": {
    "inputs": {
      "filename_prefix": "ComfyUI",
      "images": [
        "8",
        0
      ]
    },
    "class_type": "SaveImage",
    "_meta": {
      "title": "Save Image"
    }
  },
  "10": {
    "inputs": {
      "weight": 1,
      "noise": 0,
      "weight_type": "original",
      "start_at": 0,
      "end_at": 1,
      "faceid_v2": true,
      "weight_v2": 1,
      "unfold_batch": false,
      "ipadapter": [
        "11",
        0
      ],
      "clip_vision": [
        "41",
        0
      ],
      "insightface": [
        "13",
        0
      ],
      "image": [
        "14",
        0
      ],
      "model": [
        "39",
        0
      ]
    },
    "class_type": "IPAdapterApplyFaceID",
    "_meta": {
      "title": "Apply IPAdapter FaceID"
    }
  },
  "11": {
    "inputs": {
      "ipadapter_file": "ip-adapter-faceid-plusv2_sdxl.bin"
    },
    "class_type": "IPAdapterModelLoader",
    "_meta": {
      "title": "Load IPAdapter Model"
    }
  },
  "13": {
    "inputs": {
      "provider": "CUDA"
    },
    "class_type": "InsightFaceLoader",
    "_meta": {
      "title": "Load InsightFace"
    }
  },
  "14": {
    "inputs": {
      "image": "comfyworkflows_f0942efd-fb40-422b-8cd4-cbaa39529fab (3).png",
      "upload": "image"
    },
    "class_type": "LoadImage",
    "_meta": {
      "title": "Load Image"
    }
  },
  "39": {
    "inputs": {
      "lora_name": "ip-adapter-faceid-plusv2_sdxl_lora.safetensors",
      "strength_model": 1,
      "model": [
        "4",
        0
      ]
    },
    "class_type": "LoraLoaderModelOnly",
    "_meta": {
      "title": "LoraLoaderModelOnly"
    }
  },
  "41": {
    "inputs": {
      "clip_name": "ipadpter1.5.safetensors"
    },
    "class_type": "CLIPVisionLoader",
    "_meta": {
      "title": "Load CLIP Vision"
    }
  }
}