{
"comp_cgenerate_active": false,
"comp_ctranslate_active": false,
"comp_cwhisper_active": false,
"comp_diffusers2_active": false,
"comp_ifw_active": false,
"comp_onediff_active": false,
"comp_step_caching_active": false,
"comp_torch_compile_active": false,
"comp_ws2t_active": false,
"comp_x-fast_active": false,
"prune_torch-structured_active": false,
"quant_aqlm_active": false,
"quant_awq_active": false,
"quant_gptq_active": false,
"quant_half_active": false,
"quant_hqq_active": false,
"quant_llm-int8_active": true,
"quant_quanto_active": false,
"quant_torch_dynamic_active": false,
"quant_torch_static_active": false,
"quant_llm-int8_compute_dtype": "bfloat16",
"quant_llm-int8_double_quant": false,
"quant_llm-int8_enable_fp32_cpu_offload": false,
"quant_llm-int8_has_fp16_weight": false,
"quant_llm-int8_quant_type": "fp4",
"quant_llm-int8_threshold": 6.0,
"quant_llm-int8_weight_bits": 8,
"max_batch_size": 1,
"device": "cuda",
"cache_dir": "/covalent/.cache/models/tmpwcpi1f2n",
"task": "",
"save_load_fn": "bitsandbytes",
"save_load_fn_args": {
"weight_quantization_bits": "param.dtype"
}
}