{
"model_type": "HunyuanImage-3.0-Instruct-Distil",
"quantization_method": "bitsandbytes_nf4",
"load_in_4bit": true,
"bnb_4bit_quant_type": "nf4",
"bnb_4bit_use_double_quant": true,
"bnb_4bit_compute_dtype": "torch.bfloat16",
"expected_vram_gb": 12,
"total_params": "80B",
"active_params": "13B (MoE)",
"modules_kept_bf16": [
"vae",
"vision_model",
"vision_aligner",
"patch_embed",
"final_layer",
"time_embed",
"time_embed_2",
"timestep_emb",
"guidance_emb",
"timestep_r_emb",
"attention_projections"
],
"distil_features": {
"cfg_distilled": true,
"meanflow": true,
"description": "Single-step CFG-free generation with meanflow"
},
"notes": "NF4 quantized Distil model - extremely compact, single GPU friendly.",
"attention_layers_quantized": false
}