Upload config.json with huggingface_hub
config.json CHANGED: +2 -3
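This commit message is the default one written by huggingface_hub's upload helper. A minimal sketch of how such a commit is typically produced, assuming a hypothetical repo id your-username/your-model and a token already set up via huggingface-cli login:

from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="config.json",       # local file to push
    path_in_repo="config.json",          # destination path inside the repo
    repo_id="your-username/your-model",  # placeholder, not the actual repo
    commit_message="Upload config.json with huggingface_hub",
)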
@@ -22,7 +22,7 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
-  "pad_token_id":
+  "pad_token_id": 128256,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": {
@@ -36,7 +36,6 @@
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.48.0",
-  "unsloth_version": "2024.11.9",
   "use_cache": false,
-  "vocab_size":
+  "vocab_size": 128257
 }
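The two added values are consistent with appending one dedicated padding token to a Llama-3-style vocabulary: the 128,256 existing tokens occupy ids 0 through 128255, so the new pad token gets id 128256 and the vocabulary grows to 128257. A minimal sketch of how such a config typically comes about with transformers; the repo id and pad token string are again placeholders:

from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-username/your-model")
model = AutoModelForCausalLM.from_pretrained("your-username/your-model")

# Append a pad token at the end of the vocabulary (hypothetical token string).
tokenizer.add_special_tokens({"pad_token": "<|pad|>"})
model.resize_token_embeddings(len(tokenizer))       # vocab_size -> 128257
model.config.pad_token_id = tokenizer.pad_token_id  # pad_token_id -> 128256

model.save_pretrained("local-dir")  # writes the updated config.json alongside the weights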