# Provenance (from Hugging Face file viewer): uploaded by Shams03 via
# huggingface_hub ("Upload folder using huggingface_hub"), commit 3ce1533 (verified).
---
# AWQ quantization recipe (llm-compressor / compressed-tensors stage format).
# NOTE(review): original indentation was lost in extraction; nesting below is
# reconstructed from the standard recipe schema — confirm against the consumer.
default_stage:
  default_modifiers:
    AWQModifier:
      # Quantize every Linear module except the output head.
      targets: [Linear]
      ignore: [lm_head]
      # W4A16_ASYM: 4-bit asymmetric weights, 16-bit activations.
      scheme: W4A16_ASYM
      bypass_divisibility_checks: false
      # Smoothing/balancing groups. Each entry names a "smooth" layer and the
      # downstream layers its scales are balanced against; 're:' prefixes mark
      # regex matches on module names. Quoted so ':', '*', '$' stay literal.
      mappings:
        - smooth_layer: 're:.*input_layernorm$'
          balance_layers:
            - 're:.*self_attn.q_proj$'
            - 're:.*self_attn.k_proj$'
            - 're:.*self_attn.v_proj$'
            - 're:.*mlp.gate_proj$'
            - 're:.*mlp.up_proj$'
          activation_hook_target: null
        - smooth_layer: 're:.*v_proj$'
          balance_layers: ['re:.*o_proj$']
          activation_hook_target: null
        - smooth_layer: 're:.*up_proj$'
          balance_layers: ['re:.*down_proj$']
          activation_hook_target: null
      # AWQ scale-search options: presumably a 20-point search grid with
      # duo (weight+activation) scaling enabled — TODO confirm semantics
      # against the AWQModifier documentation.
      duo_scaling: true
      n_grid: 20