Fischerboot committed on
Commit f17da94 · verified · 1 Parent(s): 45ba674

Upload folder using huggingface_hub

README.md CHANGED
@@ -1,6 +1,6 @@
 ---
 base_model:
-- Fischerboot/LLama3-Lexi-Aura-3Some-SLERP-SLERP
+- Fischerboot/ll3-test-1
 library_name: transformers
 tags:
 - mergekit
@@ -19,7 +19,7 @@ This model was merged using the SLERP merge method.
 ### Models Merged
 
 The following models were included in the merge:
-* [Fischerboot/LLama3-Lexi-Aura-3Some-SLERP-SLERP](https://huggingface.co/Fischerboot/LLama3-Lexi-Aura-3Some-SLERP-SLERP)
+* [Fischerboot/ll3-test-1](https://huggingface.co/Fischerboot/ll3-test-1)
 
 ### Configuration
 
@@ -28,12 +28,12 @@ The following YAML configuration was used to produce this model:
 ```yaml
 slices:
 - sources:
-  - model: Fischerboot/LLama3-Lexi-Aura-3Some-SLERP-SLERP
-    layer_range: [0, 31]
-  - model: Fischerboot/LLama3-Lexi-Aura-3Some-SLERP-SLERP
-    layer_range: [1, 32]
+  - model: Fischerboot/ll3-test-1
+    layer_range: [0, 30]
+  - model: Fischerboot/ll3-test-1
+    layer_range: [1, 31]
 merge_method: slerp
-base_model: Fischerboot/LLama3-Lexi-Aura-3Some-SLERP-SLERP
+base_model: Fischerboot/ll3-test-1
 parameters:
   t:
   - filter: self_attn
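The substantive README change is the YAML block: both slice sources now point at Fischerboot/ll3-test-1, with layer_range narrowed from [0, 31]/[1, 32] to [0, 30]/[1, 31]. A minimal sketch of what that implies for model depth, assuming PyYAML, a local checkout of this repo, and mergekit's usual end-exclusive layer_range convention:

```python
# Sketch only: read the committed mergekit_config.yml and report how many
# layers each slice source contributes. Assumes PyYAML is installed and the
# repo is cloned locally; layer_range is treated as end-exclusive, which is
# the usual mergekit convention.
import yaml

with open("mergekit_config.yml") as f:
    cfg = yaml.safe_load(f)

for source in cfg["slices"][0]["sources"]:
    start, end = source["layer_range"]
    print(f'{source["model"]}: layers {start}..{end - 1} ({end - start} layers)')

# With layer_range [0, 30] the merged model keeps 30 hidden layers, which
# matches the num_hidden_layers change in config.json below.
```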
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "Fischerboot/LLama3-Lexi-Aura-3Some-SLERP-SLERP",
+  "_name_or_path": "Fischerboot/ll3-test-1",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -15,7 +15,7 @@
   "mlp_bias": false,
   "model_type": "llama",
   "num_attention_heads": 32,
-  "num_hidden_layers": 31,
+  "num_hidden_layers": 30,
   "num_key_value_heads": 8,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
mergekit_config.yml CHANGED
@@ -1,11 +1,11 @@
 slices:
 - sources:
-  - model: Fischerboot/LLama3-Lexi-Aura-3Some-SLERP-SLERP
-    layer_range: [0, 31]
-  - model: Fischerboot/LLama3-Lexi-Aura-3Some-SLERP-SLERP
-    layer_range: [1, 32]
+  - model: Fischerboot/ll3-test-1
+    layer_range: [0, 30]
+  - model: Fischerboot/ll3-test-1
+    layer_range: [1, 31]
 merge_method: slerp
-base_model: Fischerboot/LLama3-Lexi-Aura-3Some-SLERP-SLERP
+base_model: Fischerboot/ll3-test-1
 parameters:
   t:
   - filter: self_attn
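mergekit_config.yml carries the same configuration that the README embeds. To reproduce the merge, mergekit ships a mergekit-yaml CLI; a sketch of invoking it, assuming mergekit is installed and treating ./merged as a placeholder output directory:

```python
# Sketch only: re-run the merge from the committed config via mergekit's
# CLI. Assumes the `mergekit` package is installed; "./merged" is a
# placeholder output directory, not a path from this commit.
import subprocess

subprocess.run(["mergekit-yaml", "mergekit_config.yml", "./merged"], check=True)
```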
model-00003-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e9a913960ae71c057c6abe78ad59d9496400234c694661ed98c19ee8640627ac
+oid sha256:fd6332f90994ebad1e9a4355973620553c6aec2f51e37b79dd61c2ba0349eb68
 size 989899000
model-00004-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:702503ad699c408d299e0184ca8dd2df91f278b93fd46c6f8653fca9c517dfc1
+oid sha256:cc636acb723ec60df9d5e893151093c4d136a5e1da49631b9404af14873cab67
 size 989890728
model-00005-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a41028fba34615768a55a8d92a66d947ebc12e30201689f70ca0be64cb1b1c28
+oid sha256:757a0a96cbbb2e75ec0bf9978bdf40c02532792a417768891e088835bb96595f
 size 998287760
model-00006-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5d49b93ff8bbcf0bfe7a4b0c908e47ef23f4dfda5587048a4bde37c0a97706c2
+oid sha256:2d59d03f0941894fec5d51f51dbda88a03d92ae39b0590f6f7d8673afdf30cb3
 size 947956216
model-00007-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d4468bd5003e7e46265ad4bb0f352d484fb57410a577af61d49a4140168d47e7
+oid sha256:e6b6cb51a1e192cd7f476bf54373092944cb446dc06f10f68e4b1ffddd2acd0d
 size 989890712
model-00008-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3c80843217e7f1d199ba7629f672923d6b05388486c201ad7c4c867cb858a255
+oid sha256:b27a22c7205404a6805d7dac486fa517a4e170047d1460a74a045adb052a7645
 size 989890720
model-00009-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b76c5a8f5872c511b8937eca89035026e2fd52a1599f277aeea5dd09564cffad
+oid sha256:b0b7e715a8b3f0d432b08006546a2d568b7427974fb5a3fa81e049be5ee7ffb1
 size 998287760
model-00010-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:69b1e31846befe1db9ca56415e01e254ee586c66cdcd0f8c6124aa78cf32a076
+oid sha256:04d7cc7ee5399f42102dd9e1d3e38dc326db2d077e0107691392e1beda8b238c
 size 947956216
model-00011-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f513f4208c9d56f2a1ec9e97f8e638e52227bc8e926b828c0c3f76ce8dfe9a58
+oid sha256:c102095f82e8f4ef35319d0c8e55bef4c89a41dbc5d084b2ec62a75da6f66790
 size 989890728
model-00012-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:399c1d830557a636ad104bc99c85422d32f0e78b390c8252d7cc46f5b83b5760
+oid sha256:47dd99acd863ebaeb39a7fb9056ade37a668c43aa9abb902ab90835559059bf5
 size 989890712
model-00013-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2e21aae4f1d3ef65c93a74990c49bf57e949b4cda08193d9e46306fd96d0bf55
+oid sha256:8470a611e4d2172c33c1f023ec7a02f3a4f7796b877a76d6b399eccaf59638d2
 size 998287752
model-00014-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:61124904f3f5fec4ab57d8bc1173b9171446aa9cc6c2e363900b9ed4d11d6628
+oid sha256:19a38caa31ce107c1324a0fa2220d55318dab8e68aa0f2d32a8cc98f97d2f097
 size 947956192
model-00015-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:07da348a8d2f6d0f4b2f8cf6dc80c1051cbf8a356cf899da2e0cdfa134312b9a
+oid sha256:74b1017acb0125501e783a444d9b1d68cfd54857b911304e74042d2a75ac98e4
 size 989890712
model-00016-of-00016.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6544e0aa9d4a9454bab509b7e0036d7d6d77164f4e7dacedc4dba883ab0badad
-size 755009440
+oid sha256:08c54a15b53ccaf58768bb8ff50471e3c69428a7ffc085c9b39a31ac2dc15dcf
+size 318784416
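The shard diffs above only touch Git LFS pointers (oid and size); model-00016-of-00016.safetensors is the one shard that also shrinks, from 755009440 to 318784416 bytes. A standard-library sketch for verifying a downloaded shard against its updated pointer:

```python
# Sketch only: check a downloaded shard against its Git LFS pointer.
# Expected values are copied from the new pointer for
# model-00016-of-00016.safetensors shown above.
import hashlib
import os

path = "model-00016-of-00016.safetensors"
expected_oid = "08c54a15b53ccaf58768bb8ff50471e3c69428a7ffc085c9b39a31ac2dc15dcf"
expected_size = 318784416

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("shard matches its LFS pointer")
```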
model.safetensors.index.json CHANGED
@@ -1 +1 @@
- {"metadata": {"mergekit_version": "0.0.4.4", "total_size": 15624298496}, "weight_map": {"lm_head.weight": "model-00001-of-00016.safetensors", "model.embed_tokens.weight": "model-00002-of-00016.safetensors", "model.layers.0.input_layernorm.weight": "model-00003-of-00016.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00003-of-00016.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.input_layernorm.weight": "model-00003-of-00016.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00016.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00016.safetensors", "model.layers.10.input_layernorm.weight": "model-00003-of-00016.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00003-of-00016.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00004-of-00016.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00004-of-00016.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00004-of-00016.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00004-of-00016.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00004-of-00016.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00004-of-00016.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.input_layernorm.weight": "model-00004-of-00016.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00004-of-00016.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00004-of-00016.safetensors", "model.layers.12.input_layernorm.weight": "model-00004-of-00016.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00004-of-00016.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00004-of-00016.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00005-of-00016.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00005-of-00016.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00005-of-00016.safetensors", "model.layers.12.self_attn.o_proj.weight": 
"model-00005-of-00016.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00005-of-00016.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.input_layernorm.weight": "model-00005-of-00016.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00005-of-00016.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00005-of-00016.safetensors", "model.layers.14.input_layernorm.weight": "model-00005-of-00016.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00005-of-00016.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00005-of-00016.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00005-of-00016.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00005-of-00016.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00005-of-00016.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00006-of-00016.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00006-of-00016.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.input_layernorm.weight": "model-00006-of-00016.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00006-of-00016.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.input_layernorm.weight": "model-00006-of-00016.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00006-of-00016.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00006-of-00016.safetensors", "model.layers.17.input_layernorm.weight": "model-00006-of-00016.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00007-of-00016.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00007-of-00016.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00007-of-00016.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00007-of-00016.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00007-of-00016.safetensors", "model.layers.17.self_attn.o_proj.weight": 
"model-00007-of-00016.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00007-of-00016.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.input_layernorm.weight": "model-00007-of-00016.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00007-of-00016.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00007-of-00016.safetensors", "model.layers.1.input_layernorm.weight": "model-00007-of-00016.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00007-of-00016.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00008-of-00016.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00008-of-00016.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00008-of-00016.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00008-of-00016.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00008-of-00016.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00008-of-00016.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.input_layernorm.weight": "model-00008-of-00016.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00008-of-00016.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00008-of-00016.safetensors", "model.layers.20.input_layernorm.weight": "model-00008-of-00016.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00008-of-00016.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00008-of-00016.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00009-of-00016.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00009-of-00016.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00009-of-00016.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00009-of-00016.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00009-of-00016.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.input_layernorm.weight": "model-00009-of-00016.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00009-of-00016.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.self_attn.o_proj.weight": 
"model-00009-of-00016.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00009-of-00016.safetensors", "model.layers.22.input_layernorm.weight": "model-00009-of-00016.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00009-of-00016.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00009-of-00016.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00009-of-00016.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00009-of-00016.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00009-of-00016.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00010-of-00016.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00010-of-00016.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.input_layernorm.weight": "model-00010-of-00016.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00010-of-00016.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.input_layernorm.weight": "model-00010-of-00016.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00010-of-00016.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00010-of-00016.safetensors", "model.layers.25.input_layernorm.weight": "model-00010-of-00016.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00011-of-00016.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.input_layernorm.weight": "model-00011-of-00016.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00011-of-00016.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.self_attn.o_proj.weight": 
"model-00011-of-00016.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00011-of-00016.safetensors", "model.layers.27.input_layernorm.weight": "model-00011-of-00016.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00011-of-00016.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00012-of-00016.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00012-of-00016.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00012-of-00016.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00012-of-00016.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00012-of-00016.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00012-of-00016.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.input_layernorm.weight": "model-00012-of-00016.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00012-of-00016.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00012-of-00016.safetensors", "model.layers.2.input_layernorm.weight": "model-00012-of-00016.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00012-of-00016.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00012-of-00016.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00013-of-00016.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00013-of-00016.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00013-of-00016.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00013-of-00016.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00013-of-00016.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.input_layernorm.weight": "model-00013-of-00016.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00013-of-00016.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00013-of-00016.safetensors", "model.layers.30.input_layernorm.weight": "model-00013-of-00016.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00013-of-00016.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00013-of-00016.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00013-of-00016.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00013-of-00016.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00013-of-00016.safetensors", "model.layers.30.self_attn.o_proj.weight": 
"model-00014-of-00016.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00014-of-00016.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00014-of-00016.safetensors", "model.layers.3.input_layernorm.weight": "model-00014-of-00016.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00014-of-00016.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00014-of-00016.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00014-of-00016.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00014-of-00016.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00014-of-00016.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00014-of-00016.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00014-of-00016.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.input_layernorm.weight": "model-00014-of-00016.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00014-of-00016.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00014-of-00016.safetensors", "model.layers.5.input_layernorm.weight": "model-00014-of-00016.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00015-of-00016.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00015-of-00016.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00015-of-00016.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00015-of-00016.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00015-of-00016.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00015-of-00016.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00015-of-00016.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.input_layernorm.weight": "model-00015-of-00016.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00015-of-00016.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00015-of-00016.safetensors", "model.layers.7.input_layernorm.weight": "model-00015-of-00016.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00015-of-00016.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00016-of-00016.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00016-of-00016.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00016-of-00016.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00016-of-00016.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00016-of-00016.safetensors", 
"model.layers.7.self_attn.q_proj.weight": "model-00016-of-00016.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00016-of-00016.safetensors", "model.layers.8.input_layernorm.weight": "model-00016-of-00016.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00016-of-00016.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00016-of-00016.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00016-of-00016.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00016-of-00016.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00016-of-00016.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00016-of-00016.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00016-of-00016.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00016-of-00016.safetensors", "model.norm.weight": "model-00016-of-00016.safetensors"}}
 
+ {"metadata": {"mergekit_version": "0.0.4.4", "total_size": 15188074496}, "weight_map": {"lm_head.weight": "model-00001-of-00016.safetensors", "model.embed_tokens.weight": "model-00002-of-00016.safetensors", "model.layers.0.input_layernorm.weight": "model-00003-of-00016.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00003-of-00016.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00003-of-00016.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.input_layernorm.weight": "model-00003-of-00016.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00016.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00016.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00016.safetensors", "model.layers.10.input_layernorm.weight": "model-00003-of-00016.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00003-of-00016.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00004-of-00016.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00004-of-00016.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00004-of-00016.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00004-of-00016.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00004-of-00016.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00004-of-00016.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.input_layernorm.weight": "model-00004-of-00016.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00004-of-00016.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00004-of-00016.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00004-of-00016.safetensors", "model.layers.12.input_layernorm.weight": "model-00004-of-00016.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00004-of-00016.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00004-of-00016.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00005-of-00016.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00005-of-00016.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00005-of-00016.safetensors", "model.layers.12.self_attn.o_proj.weight": 
"model-00005-of-00016.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00005-of-00016.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.input_layernorm.weight": "model-00005-of-00016.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00005-of-00016.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00005-of-00016.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00005-of-00016.safetensors", "model.layers.14.input_layernorm.weight": "model-00005-of-00016.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00005-of-00016.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00005-of-00016.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00005-of-00016.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00005-of-00016.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00005-of-00016.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00006-of-00016.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00006-of-00016.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.input_layernorm.weight": "model-00006-of-00016.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00006-of-00016.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00006-of-00016.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.input_layernorm.weight": "model-00006-of-00016.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00006-of-00016.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00006-of-00016.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00006-of-00016.safetensors", "model.layers.17.input_layernorm.weight": "model-00006-of-00016.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00007-of-00016.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00007-of-00016.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00007-of-00016.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00007-of-00016.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00007-of-00016.safetensors", "model.layers.17.self_attn.o_proj.weight": 
"model-00007-of-00016.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00007-of-00016.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.input_layernorm.weight": "model-00007-of-00016.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00007-of-00016.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00007-of-00016.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00007-of-00016.safetensors", "model.layers.1.input_layernorm.weight": "model-00007-of-00016.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00007-of-00016.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00008-of-00016.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00008-of-00016.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00008-of-00016.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00008-of-00016.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00008-of-00016.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00008-of-00016.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.input_layernorm.weight": "model-00008-of-00016.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00008-of-00016.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00008-of-00016.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00008-of-00016.safetensors", "model.layers.20.input_layernorm.weight": "model-00008-of-00016.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00008-of-00016.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00008-of-00016.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00009-of-00016.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00009-of-00016.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00009-of-00016.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00009-of-00016.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00009-of-00016.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.input_layernorm.weight": "model-00009-of-00016.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00009-of-00016.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.self_attn.o_proj.weight": 
"model-00009-of-00016.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00009-of-00016.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00009-of-00016.safetensors", "model.layers.22.input_layernorm.weight": "model-00009-of-00016.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00009-of-00016.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00009-of-00016.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00009-of-00016.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00009-of-00016.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00009-of-00016.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00010-of-00016.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00010-of-00016.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.input_layernorm.weight": "model-00010-of-00016.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00010-of-00016.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00010-of-00016.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.input_layernorm.weight": "model-00010-of-00016.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00010-of-00016.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00010-of-00016.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00010-of-00016.safetensors", "model.layers.25.input_layernorm.weight": "model-00010-of-00016.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00011-of-00016.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00011-of-00016.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.input_layernorm.weight": "model-00011-of-00016.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00011-of-00016.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.self_attn.o_proj.weight": 
"model-00011-of-00016.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00011-of-00016.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00011-of-00016.safetensors", "model.layers.27.input_layernorm.weight": "model-00011-of-00016.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00011-of-00016.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00012-of-00016.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00012-of-00016.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00012-of-00016.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00012-of-00016.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00012-of-00016.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00012-of-00016.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.input_layernorm.weight": "model-00012-of-00016.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00012-of-00016.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00012-of-00016.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00012-of-00016.safetensors", "model.layers.2.input_layernorm.weight": "model-00012-of-00016.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00012-of-00016.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00012-of-00016.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00013-of-00016.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00013-of-00016.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00013-of-00016.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00013-of-00016.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00013-of-00016.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.input_layernorm.weight": "model-00013-of-00016.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00013-of-00016.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00013-of-00016.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00013-of-00016.safetensors", "model.layers.3.input_layernorm.weight": "model-00013-of-00016.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00013-of-00016.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00013-of-00016.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00013-of-00016.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00013-of-00016.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00013-of-00016.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00014-of-00016.safetensors", 
"model.layers.3.self_attn.q_proj.weight": "model-00014-of-00016.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.input_layernorm.weight": "model-00014-of-00016.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00014-of-00016.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00014-of-00016.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00014-of-00016.safetensors", "model.layers.5.input_layernorm.weight": "model-00014-of-00016.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00014-of-00016.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00014-of-00016.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00014-of-00016.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00014-of-00016.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00014-of-00016.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00014-of-00016.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00014-of-00016.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00014-of-00016.safetensors", "model.layers.6.input_layernorm.weight": "model-00014-of-00016.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00015-of-00016.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00015-of-00016.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00015-of-00016.safetensors", "model.layers.7.input_layernorm.weight": "model-00015-of-00016.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00015-of-00016.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00015-of-00016.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00015-of-00016.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00015-of-00016.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00015-of-00016.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00015-of-00016.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00015-of-00016.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00015-of-00016.safetensors", "model.layers.8.input_layernorm.weight": "model-00015-of-00016.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00015-of-00016.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00016-of-00016.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00016-of-00016.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00016-of-00016.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00016-of-00016.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00016-of-00016.safetensors", "model.layers.8.self_attn.q_proj.weight": 
"model-00016-of-00016.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00016-of-00016.safetensors", "model.norm.weight": "model-00016-of-00016.safetensors"}}