default_stage:
  default_modifiers:
    GPTQModifier:
      targets: ['re:.*self_attn\.(k|q|o|v)_proj$']
      ignore: []
      scheme: W8A8
      sequential_update: true
      block_size: 128
      dampening_frac: 0.01
      actorder: static
      offload_hessians: false
    AWQModifier:
      targets: ['re:.*mlp\.(down|gate|up)_proj$']
      ignore: []
      scheme: W4A16
      mappings:
        - smooth_layer: re:.*post_attention_layernorm$
          balance_layers: ['re:.*gate_proj$', 're:.*up_proj$']
        - smooth_layer: re:.*up_proj$
          balance_layers: ['re:.*down_proj$']
      duo_scaling: true