{
  "fp8": {
    "model\\.layers\\.\\d+": [
      "mlp.down_proj",
      "mlp.gate_proj",
      "mlp.up_proj",
      "self_attn.k_proj",
      "self_attn.o_proj",
      "self_attn.q_proj",
      "self_attn.v_proj"
    ]
  },
  "bf16": {
    "model\\.embed_tokens": [],
    "lm_head": []
  }
}
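
A minimal sketch of how a precision map like this could be consumed, assuming the top-level keys name target dtypes, the inner keys are regexes over module-path prefixes, and each listed suffix names a submodule under a matched prefix (an empty suffix list assigns the dtype to the prefix pattern itself). The function resolve_dtype and the example module paths are illustrative, not part of the file:

import re

# Mirrors the JSON above: dtype -> {prefix regex -> submodule suffixes}.
config = {
    "fp8": {
        r"model\.layers\.\d+": [
            "mlp.down_proj", "mlp.gate_proj", "mlp.up_proj",
            "self_attn.k_proj", "self_attn.o_proj",
            "self_attn.q_proj", "self_attn.v_proj",
        ],
    },
    "bf16": {
        r"model\.embed_tokens": [],
        "lm_head": [],
    },
}

def resolve_dtype(config: dict, module_path: str, default: str = "bf16") -> str:
    """Return the dtype whose pattern set matches module_path."""
    for dtype, patterns in config.items():
        for prefix_re, suffixes in patterns.items():
            if not suffixes:
                # Empty list: the prefix pattern itself gets this dtype.
                if re.fullmatch(prefix_re, module_path):
                    return dtype
                continue
            for suffix in suffixes:
                # Suffixes contain literal dots, so escape them.
                if re.fullmatch(prefix_re + r"\." + re.escape(suffix), module_path):
                    return dtype
    return default

print(resolve_dtype(config, "model.layers.3.mlp.down_proj"))  # fp8
print(resolve_dtype(config, "model.embed_tokens"))            # bf16
print(resolve_dtype(config, "lm_head"))                       # bf16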