Upload LlamaForCausalLM

- config.json +1 -1
- pytorch_model-00001-of-00002.bin +2 -2
- pytorch_model-00002-of-00002.bin +2 -2
- pytorch_model.bin.index.json +4 -4
config.json CHANGED
@@ -22,5 +22,5 @@
   "torch_dtype": "float16",
   "transformers_version": "4.34.0.dev0",
   "use_cache": true,
-  "vocab_size":
+  "vocab_size": 42414
 }
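The only config change is vocab_size, now set to 42414 (the removed value is truncated in this view, so the previous size is not recoverable here). Whatever produced the new vocabulary, the value must match the row count of the embedding and lm_head tensors stored in the shards, or loading fails with a shape mismatch. A minimal sketch of that consistency check, assuming the transformers library and a hypothetical local checkout of this repo at ./model:

```python
# Sketch only: verify config.vocab_size against the embedding shapes.
# "./model" is a hypothetical local path holding this repo's files.
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("./model")
assert config.vocab_size == 42414  # the value set by this commit

model = AutoModelForCausalLM.from_pretrained("./model")
# One embedding row per vocabulary entry; lm_head mirrors that count.
assert model.get_input_embeddings().weight.shape[0] == config.vocab_size
assert model.get_output_embeddings().weight.shape[0] == config.vocab_size
```

When a vocabulary grows because tokens were added to the tokenizer, model.resize_token_embeddings(len(tokenizer)) is the usual transformers call for bringing the weights in line before saving a checkpoint like this one.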
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a206bc33ad9407f02e9819a4ecc2f6cb2f5d597317e96f31352ffdfb5f638933
+size 9971739657
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:8922244af2d785fea6c862d5364b863bb5939907bd106e5284b0042cb7bc09b0
+size 3675818233
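Both .bin diffs touch Git LFS pointer files rather than the weights themselves: three lines giving the spec version, the SHA-256 of the stored object, and its size in bytes (the removed oid and size values are truncated in this view). A downloaded shard can be checked against its pointer with the standard library alone; the filename below assumes the shard sits in the current directory:

```python
# Sketch only: verify a downloaded shard against its LFS pointer fields.
import hashlib
import os

def verify_lfs_object(path: str, expected_oid: str, expected_size: int) -> bool:
    """Return True if the file's size and SHA-256 match the pointer."""
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        # Hash in 1 MiB chunks to avoid reading ~10 GB into memory at once.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

print(verify_lfs_object(
    "pytorch_model-00001-of-00002.bin",
    "a206bc33ad9407f02e9819a4ecc2f6cb2f5d597317e96f31352ffdfb5f638933",
    9971739657,
))
```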
pytorch_model.bin.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size":
+    "total_size": 13647454208
   },
   "weight_map": {
     "lm_head.weight": "pytorch_model-00002-of-00002.bin",
@@ -149,11 +149,11 @@
     "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
     "model.layers.22.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
     "model.layers.22.self_attn.v_proj.weight": "pytorch_model-00001-of-00002.bin",
-    "model.layers.23.input_layernorm.weight": "pytorch_model-
-    "model.layers.23.mlp.down_proj.weight": "pytorch_model-
+    "model.layers.23.input_layernorm.weight": "pytorch_model-00002-of-00002.bin",
+    "model.layers.23.mlp.down_proj.weight": "pytorch_model-00002-of-00002.bin",
     "model.layers.23.mlp.gate_proj.weight": "pytorch_model-00001-of-00002.bin",
     "model.layers.23.mlp.up_proj.weight": "pytorch_model-00001-of-00002.bin",
-    "model.layers.23.post_attention_layernorm.weight": "pytorch_model-
+    "model.layers.23.post_attention_layernorm.weight": "pytorch_model-00002-of-00002.bin",
     "model.layers.23.self_attn.k_proj.weight": "pytorch_model-00001-of-00002.bin",
     "model.layers.23.self_attn.o_proj.weight": "pytorch_model-00001-of-00002.bin",
     "model.layers.23.self_attn.q_proj.weight": "pytorch_model-00001-of-00002.bin",
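pytorch_model.bin.index.json is the standard sharded-checkpoint index: metadata.total_size records the combined byte size of all tensors, and weight_map tells from_pretrained which shard file holds each parameter. Note that total_size (13647454208) is slightly below the sum of the two shard file sizes (13647557890), since the .bin files carry pickle serialization overhead on top of the raw tensor bytes. A minimal sketch for inspecting the index, assuming it sits in the current directory:

```python
# Sketch only: inspect the shard index written by this commit.
import json
from collections import Counter

with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])  # 13647454208 tensor bytes

# How many tensors each shard file holds.
print(Counter(index["weight_map"].values()))

# The layer-23 tensors this commit maps to shard 00002.
for name, shard in sorted(index["weight_map"].items()):
    if name.startswith("model.layers.23."):
        print(name, "->", shard)
```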