{
"weight_map": {
"model.layers.8.self_attn.sinks": "model-00004-of-00008.safetensors",
"model.layers.8.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.8.self_attn.q_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.8.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.8.self_attn.k_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.8.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.8.self_attn.v_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.8.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.8.self_attn.o_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.8.mlp.router.weight": "model-00004-of-00008.safetensors",
"model.layers.8.mlp.router.bias": "model-00004-of-00008.safetensors",
"model.layers.8.mlp.experts.gate_up_proj": "model-00004-of-00008.safetensors",
"model.layers.8.mlp.experts.gate_up_proj_bias": "model-00004-of-00008.safetensors",
"model.layers.8.mlp.experts.down_proj": "model-00004-of-00008.safetensors",
"model.layers.8.mlp.experts.down_proj_bias": "model-00004-of-00008.safetensors",
"model.layers.8.input_layernorm.weight": "model-00004-of-00008.safetensors",
"model.layers.8.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
"model.layers.9.self_attn.sinks": "model-00004-of-00008.safetensors",
"model.layers.9.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.9.self_attn.q_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.9.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.9.self_attn.k_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.9.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.9.self_attn.v_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.9.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.9.self_attn.o_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.9.mlp.router.weight": "model-00004-of-00008.safetensors",
"model.layers.9.mlp.router.bias": "model-00004-of-00008.safetensors",
"model.layers.9.mlp.experts.gate_up_proj": "model-00004-of-00008.safetensors",
"model.layers.9.mlp.experts.gate_up_proj_bias": "model-00004-of-00008.safetensors",
"model.layers.9.mlp.experts.down_proj": "model-00004-of-00008.safetensors",
"model.layers.9.mlp.experts.down_proj_bias": "model-00004-of-00008.safetensors",
"model.layers.9.input_layernorm.weight": "model-00004-of-00008.safetensors",
"model.layers.9.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
"model.layers.10.self_attn.sinks": "model-00004-of-00008.safetensors",
"model.layers.10.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.10.self_attn.q_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.10.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.10.self_attn.k_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.10.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.10.self_attn.v_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.10.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.10.self_attn.o_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.10.mlp.router.weight": "model-00004-of-00008.safetensors",
"model.layers.10.mlp.router.bias": "model-00004-of-00008.safetensors",
"model.layers.10.mlp.experts.gate_up_proj": "model-00004-of-00008.safetensors",
"model.layers.10.mlp.experts.gate_up_proj_bias": "model-00004-of-00008.safetensors",
"model.layers.10.mlp.experts.down_proj": "model-00004-of-00008.safetensors",
"model.layers.10.mlp.experts.down_proj_bias": "model-00004-of-00008.safetensors",
"model.layers.10.input_layernorm.weight": "model-00004-of-00008.safetensors",
"model.layers.10.post_attention_layernorm.weight": "model-00004-of-00008.safetensors",
"model.layers.11.self_attn.sinks": "model-00004-of-00008.safetensors",
"model.layers.11.self_attn.q_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.11.self_attn.q_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.11.self_attn.k_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.11.self_attn.k_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.11.self_attn.v_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.11.self_attn.v_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.11.self_attn.o_proj.weight": "model-00004-of-00008.safetensors",
"model.layers.11.self_attn.o_proj.bias": "model-00004-of-00008.safetensors",
"model.layers.11.mlp.router.weight": "model-00004-of-00008.safetensors",
"model.layers.11.mlp.router.bias": "model-00004-of-00008.safetensors",
"model.layers.11.mlp.experts.gate_up_proj": "model-00005-of-00008.safetensors",
"model.layers.11.mlp.experts.gate_up_proj_bias": "model-00005-of-00008.safetensors",
"model.layers.11.mlp.experts.down_proj": "model-00005-of-00008.safetensors",
"model.layers.11.mlp.experts.down_proj_bias": "model-00005-of-00008.safetensors",
"model.layers.11.input_layernorm.weight": "model-00005-of-00008.safetensors",
"model.layers.11.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
"model.layers.12.self_attn.sinks": "model-00005-of-00008.safetensors",
"model.layers.12.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.12.self_attn.q_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.12.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.12.self_attn.k_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.12.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.12.self_attn.v_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.12.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.12.self_attn.o_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.12.mlp.router.weight": "model-00005-of-00008.safetensors",
"model.layers.12.mlp.router.bias": "model-00005-of-00008.safetensors",
"model.layers.12.mlp.experts.gate_up_proj": "model-00005-of-00008.safetensors",
"model.layers.12.mlp.experts.gate_up_proj_bias": "model-00005-of-00008.safetensors",
"model.layers.12.mlp.experts.down_proj": "model-00005-of-00008.safetensors",
"model.layers.12.mlp.experts.down_proj_bias": "model-00005-of-00008.safetensors",
"model.layers.12.input_layernorm.weight": "model-00005-of-00008.safetensors",
"model.layers.12.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
"model.layers.13.self_attn.sinks": "model-00005-of-00008.safetensors",
"model.layers.13.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.13.self_attn.q_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.13.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.13.self_attn.k_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.13.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.13.self_attn.v_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.13.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.13.self_attn.o_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.13.mlp.router.weight": "model-00005-of-00008.safetensors",
"model.layers.13.mlp.router.bias": "model-00005-of-00008.safetensors",
"model.layers.13.mlp.experts.gate_up_proj": "model-00005-of-00008.safetensors",
"model.layers.13.mlp.experts.gate_up_proj_bias": "model-00005-of-00008.safetensors",
"model.layers.13.mlp.experts.down_proj": "model-00005-of-00008.safetensors",
"model.layers.13.mlp.experts.down_proj_bias": "model-00005-of-00008.safetensors",
"model.layers.13.input_layernorm.weight": "model-00005-of-00008.safetensors",
"model.layers.13.post_attention_layernorm.weight": "model-00005-of-00008.safetensors",
"model.layers.14.self_attn.sinks": "model-00005-of-00008.safetensors",
"model.layers.14.self_attn.q_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.14.self_attn.q_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.14.self_attn.k_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.14.self_attn.k_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.14.self_attn.v_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.14.self_attn.v_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.14.self_attn.o_proj.weight": "model-00005-of-00008.safetensors",
"model.layers.14.self_attn.o_proj.bias": "model-00005-of-00008.safetensors",
"model.layers.14.mlp.router.weight": "model-00005-of-00008.safetensors",
"model.layers.14.mlp.router.bias": "model-00005-of-00008.safetensors",
"model.layers.14.mlp.experts.gate_up_proj": "model-00006-of-00008.safetensors",
"model.layers.14.mlp.experts.gate_up_proj_bias": "model-00006-of-00008.safetensors",
"model.layers.14.mlp.experts.down_proj": "model-00006-of-00008.safetensors",
"model.layers.14.mlp.experts.down_proj_bias": "model-00006-of-00008.safetensors",
"model.layers.14.input_layernorm.weight": "model-00006-of-00008.safetensors",
"model.layers.14.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
"model.layers.15.self_attn.sinks": "model-00006-of-00008.safetensors",
"model.layers.15.self_attn.q_proj.weight": "model-00006-of-00008.safetensors",
"model.layers.15.self_attn.q_proj.bias": "model-00006-of-00008.safetensors",
"model.layers.15.self_attn.k_proj.weight": "model-00006-of-00008.safetensors",
"model.layers.15.self_attn.k_proj.bias": "model-00006-of-00008.safetensors",
"model.layers.15.self_attn.v_proj.weight": "model-00006-of-00008.safetensors",
"model.layers.15.self_attn.v_proj.bias": "model-00006-of-00008.safetensors",
"model.layers.15.self_attn.o_proj.weight": "model-00006-of-00008.safetensors",
"model.layers.15.self_attn.o_proj.bias": "model-00006-of-00008.safetensors",
"model.layers.15.mlp.router.weight": "model-00006-of-00008.safetensors",
"model.layers.15.mlp.router.bias": "model-00006-of-00008.safetensors",
"model.layers.15.mlp.experts.gate_up_proj": "model-00006-of-00008.safetensors",
"model.layers.15.mlp.experts.gate_up_proj_bias": "model-00006-of-00008.safetensors",
"model.layers.15.mlp.experts.down_proj": "model-00006-of-00008.safetensors",
"model.layers.15.mlp.experts.down_proj_bias": "model-00006-of-00008.safetensors",
"model.layers.15.input_layernorm.weight": "model-00006-of-00008.safetensors",
"model.layers.15.post_attention_layernorm.weight": "model-00006-of-00008.safetensors",
"model.layers.16.self_attn.sinks": "model-00006-of-00008.safetensors"
},
"shard_count": 3
}
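Note: the weight_map above is the lookup a sharded-checkpoint loader uses to find which .safetensors shard stores each tensor. Below is a minimal sketch of resolving and loading one tensor through such an index; the local directory name checkpoint_dir is an illustrative assumption, while json and safetensors.safe_open are real APIs.

import json
from pathlib import Path

from safetensors import safe_open  # pip install safetensors

# Hypothetical local directory containing the index and its shard files.
checkpoint_dir = Path("checkpoint_dir")

# Parse the index and pull out the tensor-name -> shard-file mapping.
with open(checkpoint_dir / "model.safetensors.index.json") as f:
    index = json.load(f)
weight_map = index["weight_map"]

# Resolve which shard holds a given tensor, then read only that tensor
# (per the map above this resolves to model-00005-of-00008.safetensors).
name = "model.layers.12.mlp.experts.down_proj"
shard_file = checkpoint_dir / weight_map[name]

with safe_open(shard_file, framework="pt") as f:  # framework="pt" requires torch
    tensor = f.get_tensor(name)

print(name, tuple(tensor.shape), "from", shard_file.name)

Because safe_open memory-maps the shard and get_tensor reads a single entry, this loads one tensor without pulling the whole multi-gigabyte shard into memory.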