Upload LlamaForCausalLM

- config.json +3 -4
- pytorch_model.bin +1 -1
config.json
@@ -19,11 +19,10 @@
     "num_key_value_heads": 8,
     "pretraining_tp": 1,
     "quantization_config": {
-        "linear_class": "
+        "linear_class": "bitlinear",
         "modules_to_not_convert": [],
-        "quantization_mode": "online",
         "quant_method": "bitnet",
-        "
+        "quantization_mode": "offline",
         "with_rms_norm": true
     },
     "rms_norm_eps": 1e-05,
@@ -36,7 +35,7 @@
     },
     "rope_theta": 500000.0,
     "tie_word_embeddings": false,
-    "torch_dtype": "
+    "torch_dtype": "bfloat16",
     "transformers_version": "4.52.0.dev0",
     "use_cache": true,
     "vocab_size": 128256
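The quantization settings above are read automatically when the checkpoint is loaded with transformers. A minimal sketch follows, assuming the repo id is a placeholder and that the installed transformers build (at least the 4.52 dev version named in the config) includes the BitNet quantizer:

    # Minimal sketch: load a checkpoint whose config.json carries the BitNet
    # quantization_config shown above. The repo id is a placeholder, not the
    # actual repository name.
    from transformers import AutoModelForCausalLM, AutoTokenizer

    repo_id = "user/llama-bitnet"  # hypothetical repo id

    # from_pretrained picks up "quant_method": "bitnet" (with "linear_class":
    # "bitlinear" and "quantization_mode": "offline") from config.json, so no
    # explicit quantization arguments are passed here.
    model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype="bfloat16")
    tokenizer = AutoTokenizer.from_pretrained(repo_id)

    inputs = tokenizer("The capital of France is", return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=16)
    print(tokenizer.decode(outputs[0], skip_special_tokens=True))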
pytorch_model.bin
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b1071a4553fc5f0c2fa6959936abf141090ab06748eb37cd9067ad913511482a
 size 2930108766
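Because pytorch_model.bin is tracked with Git LFS, the file in this commit is only a pointer; the oid is the SHA-256 of the actual weight file. A minimal sketch for checking that a downloaded copy matches the pointer (the local path is a placeholder):

    # Sketch: verify a downloaded pytorch_model.bin against the LFS pointer above.
    # The expected oid and size are copied from the pointer file in this commit.
    import hashlib
    from pathlib import Path

    path = Path("pytorch_model.bin")  # placeholder path to the downloaded weights
    expected_oid = "b1071a4553fc5f0c2fa6959936abf141090ab06748eb37cd9067ad913511482a"
    expected_size = 2930108766

    h = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)

    assert path.stat().st_size == expected_size, "size mismatch"
    assert h.hexdigest() == expected_oid, "sha256 mismatch"
    print("pytorch_model.bin matches the LFS pointer")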