Update config.json
config.json CHANGED (+18, -1)
--- a/config.json
+++ b/config.json
@@ -20,5 +20,22 @@
   "num_hidden_layers": 4,
   "pre_trained": "",
   "structure": [],
-  "type_vocab_size": 2
+  "type_vocab_size": 2,
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "head_dim": 128,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 32.0,
+    "high_freq_factor": 4.0,
+    "low_freq_factor": 1.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": true,
+  "transformers_version": "4.45.0.dev0",
+  "use_cache": true
 }
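The substantive addition here is the `rope_scaling` block with `"rope_type": "llama3"`. Under that scheme, the RoPE inverse frequencies are split into three bands by wavelength relative to the original context length (`original_max_position_embeddings: 8192`): high-frequency components are kept as-is, low-frequency components are divided by `factor` (32.0 here), and the band between `low_freq_factor` and `high_freq_factor` is linearly interpolated between the two. The sketch below recomputes the scaled frequencies from the exact values in this diff; it mirrors the scheme transformers applies for `rope_type="llama3"`, but the function name and structure are illustrative, not the library's API.

```python
import math

# Values taken from the config.json diff above.
HEAD_DIM = 128
ROPE_THETA = 500000.0
FACTOR = 32.0
LOW_FREQ_FACTOR = 1.0
HIGH_FREQ_FACTOR = 4.0
ORIGINAL_MAX_POSITION_EMBEDDINGS = 8192

def llama3_scaled_inv_freq():
    """Rescale RoPE inverse frequencies in the "llama3" style (sketch).

    Long-wavelength (low-frequency) components are divided by `factor`,
    short-wavelength (high-frequency) components are left untouched, and
    the band in between is linearly interpolated.
    """
    low_freq_wavelen = ORIGINAL_MAX_POSITION_EMBEDDINGS / LOW_FREQ_FACTOR
    high_freq_wavelen = ORIGINAL_MAX_POSITION_EMBEDDINGS / HIGH_FREQ_FACTOR

    scaled = []
    for i in range(0, HEAD_DIM, 2):
        inv_freq = 1.0 / (ROPE_THETA ** (i / HEAD_DIM))
        wavelen = 2 * math.pi / inv_freq
        if wavelen > low_freq_wavelen:        # low frequency: scale down
            scaled.append(inv_freq / FACTOR)
        elif wavelen < high_freq_wavelen:     # high frequency: keep as-is
            scaled.append(inv_freq)
        else:                                 # medium band: interpolate
            smooth = (ORIGINAL_MAX_POSITION_EMBEDDINGS / wavelen - LOW_FREQ_FACTOR) / (
                HIGH_FREQ_FACTOR - LOW_FREQ_FACTOR
            )
            scaled.append((1 - smooth) * inv_freq / FACTOR + smooth * inv_freq)
    return scaled

if __name__ == "__main__":
    freqs = llama3_scaled_inv_freq()
    print(f"{len(freqs)} rotary frequencies, "
          f"max {freqs[0]:.3g}, min {freqs[-1]:.3g}")
```

The point of the banded design is that only long-wavelength position components get stretched by the factor of 32, so a model pretrained at 8192 positions can attend over a much longer context while its short-range attention patterns stay intact.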