Upload config.json with huggingface_hub
config.json (+2 -2)
@@ -41,6 +41,7 @@
   "d_model": 768,
   "dense_act_fn": "relu",
   "dropout_rate": 0.1,
+  "dtype": "float32",
   "feed_forward_proj": "relu",
   "initializer_factor": 0.05,
   "is_gated_act": false,
@@ -51,8 +52,7 @@
   "pad_token_id": 0,
   "reg_token_id": 1,
   "rope_theta": 10000.0,
-  "torch_dtype": "float32",
-  "transformers_version": "4.55.4",
+  "transformers_version": "4.57.1",
   "use_cache": true,
   "vocab_size": 2
 }
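The edit tracks the config format used by newer transformers releases, which store the checkpoint precision under "dtype" rather than the older "torch_dtype" key and record the library version that wrote the file. Below is a minimal sketch of how a commit like this could be produced with huggingface_hub; the repo id is a hypothetical placeholder, and the script assumes a local config.json plus an authenticated session (e.g. via `huggingface-cli login`).

```python
import json

from huggingface_hub import HfApi

CONFIG_PATH = "config.json"
REPO_ID = "your-username/your-model"  # hypothetical repo id; replace with the real model repo

# Load the existing config and apply the same edits shown in the diff above.
with open(CONFIG_PATH) as f:
    config = json.load(f)

# Move the precision setting from the old "torch_dtype" key to "dtype"
# and bump the recorded transformers version.
config["dtype"] = config.pop("torch_dtype", config.get("dtype", "float32"))
config["transformers_version"] = "4.57.1"

# Write the file back with sorted keys, matching the alphabetical layout in the diff.
with open(CONFIG_PATH, "w") as f:
    json.dump(config, f, indent=2, sort_keys=True)
    f.write("\n")

# Push the updated file; the commit message matches the one on this commit.
api = HfApi()
api.upload_file(
    path_or_fileobj=CONFIG_PATH,
    path_in_repo="config.json",
    repo_id=REPO_ID,
    repo_type="model",
    commit_message="Upload config.json with huggingface_hub",
)
```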