{
"bits": 4,
"data_type": "int",
"group_size": 128,
"sym": true,
"autoround_version": "0.12.0",
"block_name_to_quantize": "model.language_model.layers",
"quant_method": "auto-round",
"packing_format": "auto_round:auto_gptq",
"extra_config": {
"model.language_model.layers.0.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.0.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.1.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.1.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.2.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.2.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.4.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.4.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.5.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.5.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.6.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.6.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.8.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.8.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.9.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.9.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.10.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.10.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.12.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.12.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.13.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.13.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.14.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.14.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.16.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.16.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.17.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.17.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.18.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.18.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.20.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.20.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.21.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.21.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.22.linear_attn.in_proj_b": {
"bits": 16,
"data_type": "fp"
},
"model.language_model.layers.22.linear_attn.in_proj_a": {
"bits": 16,
"data_type": "fp"
}
}
}