krishnateja95's picture
Upload folder using huggingface_hub
da46321 verified
raw
history blame contribute delete
256 Bytes
# GPTQ weight-quantization recipe (llmcompressor stage/modifier layout).
# NOTE(review): the source had all indentation stripped (flat mapping);
# nesting restored from the key names — values are unchanged.
default_stage:
  default_modifiers:
    GPTQModifier:
      # Apply GPTQ to every Linear layer...
      targets: [Linear]
      # ...except the output head and MoE router projections ('re:' marks a regex).
      # NOTE(review): the '.' before "router" is an unescaped regex dot (matches
      # any character) — presumably meant as a literal '.'; harmless but confirm.
      ignore: [lm_head, 're:.*block_sparse_moe.router']
      # W4A16: 4-bit weights, 16-bit activations.
      scheme: W4A16
      block_size: 128
      # Fraction of the mean Hessian diagonal added as damping for stability.
      dampening_frac: 0.01
      # Static activation ordering for column quantization order.
      actorder: static
      # Keep Hessians on-device rather than offloading to CPU.
      offload_hessians: false