{
  "templates": {
    "video": {
      "description": "Video generation setup with Hunyuan models",
      "diffusion_models": [
        "https://huggingface.co/Kijai/HunyuanVideo_comfy/resolve/main/hunyuan_video_720_cfgdistill_fp8_e4m3fn.safetensors"
      ],
      "vae": [
        "https://huggingface.co/Kijai/HunyuanVideo_comfy/resolve/main/hunyuan_video_vae_bf16.safetensors"
      ],
      "text_encoders": [
        "https://huggingface.co/calcuis/hunyuan-gguf/resolve/main/clip_l.safetensors",
        "https://huggingface.co/calcuis/hunyuan-gguf/resolve/main/llava_llama3_fp8_scaled.safetensors"
      ],
      "loras": [
        "https://huggingface.co/Kijai/HunyuanVideo_comfy/resolve/main/hyvideo_FastVideo_LoRA-fp8.safetensors",
        "https://huggingface.co/Kijai/Leapfusion-image2vid-comfy/resolve/main/leapfusion_img2vid544p_comfy.safetensors"
      ]
    },
    "image": {
      "description": "Image generation setup with FLUX models",
      "upscale_models": [
        "https://huggingface.co/uwg/upscaler/resolve/main/ESRGAN/8x_NMKD-Superscale_150000_G.pth"
      ],
      "vae": [
        "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors"
      ],
      "text_encoders": [
        "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors",
        "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp8_e4m3fn.safetensors"
      ],
      "checkpoints": [
        "https://huggingface.co/Patarapoom/model/resolve/main/juggernautXL_juggXIByRundiffusion.safetensors"
      ],
      "unet": [
        "https://huggingface.co/Kijai/flux-fp8/resolve/main/flux1-dev-fp8.safetensors",
        "https://huggingface.co/Patarapoom/model/resolve/main/flux1FillDevFp8_v10.safetensors"
      ],
      "controlnet": [
        "https://huggingface.co/xinsir/controlnet-union-sdxl-1.0/resolve/main/diffusion_pytorch_model.safetensors",
        "https://huggingface.co/dim/Shakker-Labs_FLUX.1-dev-ControlNet-Union-Pro-fp8.safetensors/resolve/main/Shakker-Labs_FLUX.1-dev-ControlNet-Union-Pro-fp8.safetensors"
      ],
      "clip_vision": [
        "https://huggingface.co/h94/IP-Adapter/resolve/main/models/image_encoder/model.safetensors",
        "https://huggingface.co/Comfy-Org/sigclip_vision_384/resolve/main/sigclip_vision_patch14_384.safetensors"
      ],
      "ipadapter": [
        "https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter_sdxl_vit-h.safetensors",
        "https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter-plus_sdxl_vit-h.safetensors",
        "https://huggingface.co/h94/IP-Adapter/resolve/main/sdxl_models/ip-adapter_sdxl.safetensors"
      ],
      "style_models": [
        "https://huggingface.co/Patarapoom/model/resolve/main/flux1-redux-dev.safetensors"
      ],
      "diffusion_models": []
    },
    "empty": {
      "description": "Empty template with no model downloads",
      "diffusion_models": [],
      "unet": [],
      "vae": [],
      "text_encoders": [],
      "upscale_models": [],
      "checkpoints": [],
      "controlnet": [],
      "clip_vision": [],
      "ipadapter": [],
      "style_models": []
    }
  }
}