{
"name": "VNCCS Core Models",
"models": [
{
"name": "Qwen-Image-Edit-2511-GGUF-Q4",
"type": "gguf",
"hf_repo": "unsloth/Qwen-Image-Edit-2511-GGUF",
"hf_path": "qwen-image-edit-2511-Q4_0.gguf",
"local_path": "models/unet/qwen-image-edit-2511-Q4_0.gguf",
"version": "1.0",
"description": "Quantized Q4 GGUF — lower VRAM, slightly lower quality."
},
{
"name": "Qwen-Image-Edit-2511-GGUF-Q5",
"type": "gguf",
"hf_repo": "unsloth/Qwen-Image-Edit-2511-GGUF",
"hf_path": "qwen-image-edit-2511-Q5_0.gguf",
"local_path": "models/unet/qwen-image-edit-2511-Q5_0.gguf",
"version": "1.0",
"description": "Quantized Q5 GGUF — lower VRAM, slightly lower quality."
},
{
"name": "Qwen-Image-Edit-2511-GGUF-Q8",
"type": "gguf",
"hf_repo": "unsloth/Qwen-Image-Edit-2511-GGUF",
"hf_path": "qwen-image-edit-2511-Q8_0.gguf",
"local_path": "models/unet/qwen-image-edit-2511-Q8_0.gguf",
"version": "1.0",
"description": "Quantized Q8 GGUF — near-lossless, higher VRAM than Q4."
},
{
"name": "Qwen-Image-Edit-2511-nunchaku-balance-fp4",
"type": "nunchaku",
"hf_repo": "QuantFunc/Nunchaku-Qwen-Image-EDIT-2511",
"hf_path": "nunchaku_qwen_image_edit_2511_balance_fp4.safetensors",
"local_path": "models/diffusion_models/nunchaku_qwen_image_edit_2511_balance_fp4.safetensors",
"version": "1.0",
"description": "(NVIDIA 5xxx series ONLY) Nunchaku Balance FP4 — balanced quality/speed."
},
{
"name": "Qwen-Image-Edit-2511-nunchaku-balance-int4",
"type": "nunchaku",
"hf_repo": "QuantFunc/Nunchaku-Qwen-Image-EDIT-2511",
"hf_path": "nunchaku_qwen_image_edit_2511_balance_int4.safetensors",
"local_path": "models/diffusion_models/nunchaku_qwen_image_edit_2511_balance_int4.safetensors",
"version": "1.0",
"description": "(NVIDIA 4xxx and lower) Nunchaku Balance INT4 — balanced quality/speed."
},
{
"name": "Qwen-Image-Edit-2511-nunchaku-best-quality-fp4",
"type": "nunchaku",
"hf_repo": "QuantFunc/Nunchaku-Qwen-Image-EDIT-2511",
"hf_path": "nunchaku_qwen_image_edit_2511_best_quality_fp4.safetensors",
"local_path": "models/diffusion_models/nunchaku_qwen_image_edit_2511_best_quality_fp4.safetensors",
"version": "1.0",
"description": "(NVIDIA 5xxx series ONLY) Nunchaku Best Quality FP4 — highest quality, higher VRAM."
},
{
"name": "Qwen-Image-Edit-2511-nunchaku-best-quality-int4",
"type": "nunchaku",
"hf_repo": "QuantFunc/Nunchaku-Qwen-Image-EDIT-2511",
"hf_path": "nunchaku_qwen_image_edit_2511_best_quality_int4.safetensors",
"local_path": "models/diffusion_models/nunchaku_qwen_image_edit_2511_best_quality_int4.safetensors",
"version": "1.0",
"description": "(NVIDIA 4xxx and lower) Nunchaku Best Quality INT4 — highest quality, higher VRAM."
},
{
"name": "Qwen-Image-Edit-2511-nunchaku-ultimate-speed-fp4",
"type": "nunchaku",
"hf_repo": "QuantFunc/Nunchaku-Qwen-Image-EDIT-2511",
"hf_path": "nunchaku_qwen_image_edit_2511_ultimate_speed_fp4.safetensors",
"local_path": "models/diffusion_models/nunchaku_qwen_image_edit_2511_ultimate_speed_fp4.safetensors",
"version": "1.0",
"description": "(NVIDIA 5xxx series ONLY) Nunchaku Ultimate Speed FP4 — fastest inference, lower VRAM."
},
{
"name": "Qwen-Image-Edit-2511-nunchaku-ultimate-speed-int4",
"type": "nunchaku",
"hf_repo": "QuantFunc/Nunchaku-Qwen-Image-EDIT-2511",
"hf_path": "nunchaku_qwen_image_edit_2511_ultimate_speed_int4.safetensors",
"local_path": "models/diffusion_models/nunchaku_qwen_image_edit_2511_ultimate_speed_int4.safetensors",
"version": "1.0",
"description": "(NVIDIA 4xxx and lower) Nunchaku Ultimate Speed INT4 — fastest inference, lower VRAM."
},
{
"name": "Qwen-Image-Edit-2511-NVFP4",
"type": "unet",
"hf_repo": "Bedovyy/Qwen-Image-Edit-2511-NVFP4",
"hf_path": "qwen_image_edit_2511_nvfp4.safetensors",
"local_path": "models/diffusion_models/qwen_image_edit_2511_nvfp4.safetensors",
"version": "1.0",
"description": "(NVIDIA 5xxx series ONLY) Native NVFP4 UNet — full quality at reduced VRAM."
}
],
"clip": [
{
"name": "QIE2511_Text_Encoder",
"clip_type": "qwen_image",
"hf_repo": "f5aiteam/CLIP",
"hf_path": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
"local_path": "models/text_encoders/qwen_2.5_vl_7b_fp8_scaled.safetensors",
"version": "1.0",
"description": "Text encoder for Qwen Image Edit 2511."
}
],
"vae": [
{
"name": "QIE2511 VAE",
"hf_repo": "Comfy-Org/Qwen-Image_ComfyUI",
"hf_path": "split_files/vae/qwen_image_vae.safetensors",
"local_path": "models/vae/qwen_image_vae.safetensors",
"version": "1.0",
"description": "Official VAE for Qwen Image Edit 2511."
}
],
"lora": [
{
"name": "VNCCS Clothes Core",
"hf_repo": "MIUProject/VNCCS_V2",
"hf_path": "models/loras/qwen/VNCCS/VNCCS_QIE2511_ClothesCore-RC2.safetensors",
"local_path": "models/loras/qwen/VNCCS/VNCCS_QIE2511_ClothesCore-RC2.safetensors",
"version": "0.2.0",
"description": "Helps maintain clothing consistency."
},
{
"name": "VNCCS Emotion Core",
"hf_repo": "MIUProject/VNCCS_V2",
"hf_path": "models/loras/qwen/VNCCS/VNCCS_QIE2511_EmotionCore-RC1.safetensors",
"local_path": "models/loras/qwen/VNCCS/VNCCS_QIE2511_EmotionCore-RC1.safetensors",
"version": "0.1.0",
"description": "Core LoRA for generating character emotions."
},
{
"name": "VNCCS Pose Studio ART",
"hf_repo": "MIUProject/VNCCS_V2",
"hf_path": "models/loras/qwen/VNCCS/VNCCS_QIE2511_PoseStudio_ART_V5.safetensors",
"local_path": "models/loras/qwen/VNCCS/VNCCS_QIE2511_PoseStudio_ART_V5.safetensors",
"version": "5.0.0",
"description": "LoRA for VNCCS Pose Studio node."
},
{
"name": "Qwen Image Edit 2511 Lightning",
"hf_repo": "MIUProject/VNCCS_V2",
"hf_path": "models/loras/qwen/Qwen-Image-Edit-2511-Lightning-4steps-V1.0-bf16.safetensors",
"local_path": "models/loras/qwen/Qwen-Image-Edit-2511-Lightning-4steps-V1.0-bf16.safetensors",
"version": "1.0.0",
"description": "4-Step Lightning LoRA for Qwen Image Edit."
},
{
"name": "DMD2 SDXL Lightning",
"hf_repo": "MIUProject/VNCCS_V2",
"hf_path": "models/loras/DMD2/dmd2_sdxl_4step_lora_fp16.safetensors",
"local_path": "models/loras/DMD2/dmd2_sdxl_4step_lora_fp16.safetensors",
"version": "1.0.0",
"description": "4-Step Lightning/Turbo LoRA for SDXL models."
},
{
"name": "Mimimeter",
"hf_repo": "MIUProject/VNCCS_V2",
"hf_path": "models/loras/IL/mimimeter.safetensors",
"local_path": "models/loras/IL/mimimeter.safetensors",
"version": "1.0.0",
"description": "SDXL LoRA for age control."
}
],
"controlnet": [
{
"name": "ControlNet AnyTest",
"hf_repo": "MIUProject/VNCCS_V2",
"hf_path": "models/controlnet/SDXL/AnytestV4.safetensors",
"local_path": "models/controlnet/SDXL/AnytestV4.safetensors",
"version": "4.0.0",
"description": "SDXL ControlNet for high-quality pose guidance."
},
{
"name": "ControlNet OpenPose Illustrious",
"hf_repo": "MIUProject/VNCCS_V2",
"hf_path": "models/controlnet/SDXL/IllustriousXL_openpose.safetensors",
"local_path": "models/controlnet/SDXL/IllustriousXL_openpose.safetensors",
"version": "1.0.0",
"description": "OpenPose ControlNet optimized for illustrious models."
}
],
"other": [
{
"name": "2x_APISR_RRDB_GAN",
"hf_repo": "MIUProject/VNCCS_V2",
"hf_path": "models/upscale_models/2x_APISR_RRDB_GAN_generator.pth",
"local_path": "models/upscale_models/2x_APISR_RRDB_GAN_generator.pth",
"version": "1.0.0",
"description": "Anime Production Inspired Real-World Anime Super-Resolution."
},
{
"name": "4x_APISR_GRL_GAN",
"hf_repo": "MIUProject/VNCCS_V2",
"hf_path": "models/upscale_models/4x_APISR_GRL_GAN_generator.pth",
"local_path": "models/upscale_models/4x_APISR_GRL_GAN_generator.pth",
"version": "1.0.0",
"description": "4x Anime Production Inspired GRL GAN Super-Resolution."
}
]
}