{
"id": "062bcb7a-6ffe-4748-95f8-a55736c587f3",
"revision": 0,
"last_node_id": 19,
"last_link_id": 15,
"nodes": [
{
"id": 13,
"type": "UNETLoader",
"pos": [
60.51125727779435,
963.2787917855696
],
"size": [
270,
82
],
"flags": {},
"order": 0,
"mode": 4,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": null
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.75",
"Node name for S&R": "UNETLoader"
},
"widgets_values": [
"z_image_turbo_bf16.safetensors",
"default"
]
},
{
"id": 7,
"type": "CLIPLoaderGGUF",
"pos": [
55.06271452353618,
1243.511966336947
],
"size": [
270,
82
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [
2,
8
]
}
],
"properties": {
"cnr_id": "ComfyUI-GGUF",
"ver": "d3e14efda6bb891de0406d2debffcb07e88d4231",
"Node name for S&R": "CLIPLoaderGGUF"
},
"widgets_values": [
"Qwen3-4B-UD-Q8_K_XL.gguf",
"lumina2"
]
},
{
"id": 11,
"type": "ModelSamplingAuraFlow",
"pos": [
386.1151500227518,
980.8767169837448
],
"size": [
315,
58
],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 3
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"slot_index": 0,
"links": [
4
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.75",
"Node name for S&R": "ModelSamplingAuraFlow"
},
"widgets_values": [
3
]
},
{
"id": 3,
"type": "CLIPTextEncode",
"pos": [
372.10654442724336,
1381.4525729399613
],
"size": [
425.27801513671875,
180.6060791015625
],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 2
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"slot_index": 0,
"links": [
6
]
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.75",
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"blurry ugly bad"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 6,
"type": "VAEDecode",
"pos": [
1164.6819973885608,
1171.8362082475069
],
"size": [
210,
46
],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 9
},
{
"name": "vae",
"type": "VAE",
"link": 10
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"slot_index": 0,
"links": [
14
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.75",
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 4,
"type": "VAELoader",
"pos": [
1166.7410141886078,
1065.0144512421527
],
"size": [
210,
58
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [
10
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.75",
"Node name for S&R": "VAELoader"
},
"widgets_values": [
"ae.safetensors"
]
},
{
"id": 18,
"type": "SaveImage",
"pos": [
1396.4213324200182,
1167.700771943608
],
"size": [
270,
270
],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 14
}
],
"outputs": [],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.75",
"Node name for S&R": "SaveImage"
},
"widgets_values": [
"ComfyUI"
]
},
{
"id": 8,
"type": "CLIPTextEncode",
"pos": [
378.06563269610695,
1157.150761094689
],
"size": [
423.83001708984375,
177.11770629882812
],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 8
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"slot_index": 0,
"links": [
5
]
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.75",
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"一名年轻的亚裔女性独自站立在室内,背景为朴素的灰色纹理墙面,地毯地面。她身穿浅色系汉服(如米白、浅杏或淡青色),衣料轻盈飘逸,设计简约典雅,袖口宽松,体现传统东方美感。女性留着自然披散的长发或半扎发型,佩戴小巧的古风耳饰。她面带温和微笑,直视镜头,一只手向镜头比出和平手势(V 手势),姿态自然放松。自然光照明,柔和阴影,中性色调,传统与现代融合的摄影风格,中等景深,面部和上半身对焦清晰,氛围宁静、优雅、温柔。"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 10,
"type": "KSampler",
"pos": [
829.4083562725156,
1171.0280259012795
],
"size": [
315,
663.4255319148936
],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 4
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 5
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 7
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"slot_index": 0,
"links": [
9
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.75",
"Node name for S&R": "KSampler"
},
"widgets_values": [
526213216603097,
"fixed",
25,
4,
"res_multistep",
"simple",
1
]
},
{
"id": 5,
"type": "EmptySD3LatentImage",
"pos": [
377.22659089524416,
1605.0280259012798
],
"size": [
315,
106
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"slot_index": 0,
"links": [
7
]
}
],
"properties": {
"cnr_id": "comfy-core",
"ver": "0.3.75",
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [
1024,
1024,
1
]
},
{
"id": 16,
"type": "MarkdownNote",
"pos": [
-281.9143857960713,
1332.2874217087465
],
"size": [
317.08330880163544,
376.7915207153559
],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Resources (BF16)",
"properties": {},
"widgets_values": [
"## Model links (BF16)\n\n**text_encoders**\n\n- [qwen_3_4b.safetensors](https://huggingface.co/Comfy-Org/z_image/tree/main/split_files/text_encoders)\n\n**diffusion_models**\n\n- [z_image_bf16.safetensors](https://huggingface.co/Comfy-Org/z_image/resolve/main/split_files/diffusion_models/z_image_bf16.safetensors)\n\n**vae**\n\n- [ae.safetensors](https://huggingface.co/Comfy-Org/z_image/resolve/main/split_files/vae/ae.safetensors)\n\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 text_encoders/\n│ │ └── qwen_3_4b.safetensors\n│ ├── 📂 diffusion_models/\n│ │ └── z_image_bf16.safetensors\n│ └── 📂 vae/\n│ └── ae.safetensors\n```\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 9,
"type": "UnetLoaderGGUF",
"pos": [
64.17096039991915,
1097.4653220593357
],
"size": [
270,
58
],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
3
]
}
],
"properties": {
"cnr_id": "ComfyUI-GGUF",
"ver": "d3e14efda6bb891de0406d2debffcb07e88d4231",
"Node name for S&R": "UnetLoaderGGUF"
},
"widgets_values": [
"z_image-Q8_0.gguf"
]
},
{
"id": 17,
"type": "MarkdownNote",
"pos": [
-285.66104024749944,
910.7495208635008
],
"size": [
317.08330880163544,
376.7915207153559
],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Resources (GGUF)",
"properties": {},
"widgets_values": [
"## Model links (GGUF)\n\n**text_encoders**\n\n- [qwen_3_4b-QX.gguf](https://huggingface.co/unsloth/Qwen3-4B-GGUF/tree/main) \n\n**diffusion_models**\n\n- [z_image-QX.gguf](https://huggingface.co/jayn7/Z-Image-GGUF)\n\n**vae**\n\n- [ae.safetensors](https://huggingface.co/Comfy-Org/z_image/resolve/main/split_files/vae/ae.safetensors)\n\n**custom_nodes**\n- [ComfyUI-GGUF](https://github.com/city96/ComfyUI-GGUF)\n\nModel Storage Location\n\n```\n📂 ComfyUI/\n├── 📂 models/\n│ ├── 📂 text_encoders/\n│ │ └── qwen_3_4b-QX.gguf\n│ ├── 📂 diffusion_models/\n│ │ └── z_image-QX.gguf\n│ └── 📂 vae/\n│ └── ae.safetensors\n```\n"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[
2,
7,
0,
3,
0,
"CLIP"
],
[
3,
9,
0,
11,
0,
"MODEL"
],
[
4,
11,
0,
10,
0,
"MODEL"
],
[
5,
8,
0,
10,
1,
"CONDITIONING"
],
[
6,
3,
0,
10,
2,
"CONDITIONING"
],
[
7,
5,
0,
10,
3,
"LATENT"
],
[
8,
7,
0,
8,
0,
"CLIP"
],
[
9,
10,
0,
6,
0,
"LATENT"
],
[
10,
4,
0,
6,
1,
"VAE"
],
[
14,
6,
0,
18,
0,
"IMAGE"
]
],
"groups": [
{
"id": 1,
"title": "Load Model",
"bounding": [
50.51125727779435,
889.6787917855696,
293.6597031221248,
275.78653027376606
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.839054528882405,
"offset": [
144.08673151862217,
-834.2820206068768
]
},
"frontendVersion": "1.37.11",
"workflowRendererVersion": "LG",
"VHS_latentpreview": false,
"VHS_latentpreviewrate": 0,
"VHS_MetadataImage": false,
"VHS_KeepIntermediate": false
},
"version": 0.4
}