- config.json +3 -3
config.json
CHANGED
|
@@ -1,15 +1,15 @@
|
|
| 1 |
{
|
| 2 |
"_name_or_path": "bart-base",
|
| 3 |
-
"activation_dropout": 0.0,
|
| 4 |
"activation_function": "gelu",
|
| 5 |
"add_bias_logits": false,
|
| 6 |
"add_final_layer_norm": false,
|
| 7 |
"architectures": [
|
| 8 |
"BartModel"
|
| 9 |
],
|
| 10 |
-
"attention_dropout": 0.0,
|
| 11 |
"bos_token_id": 0,
|
| 12 |
-
"classif_dropout": 0.0,
|
| 13 |
"classifier_dropout": 0.0,
|
| 14 |
"d_model": 1024,
|
| 15 |
"decoder_attention_heads": 16,
|
|
|
|
| 1 |
{
|
| 2 |
"_name_or_path": "bart-base",
|
| 3 |
+
"activation_dropout": 0.1,
|
| 4 |
"activation_function": "gelu",
|
| 5 |
"add_bias_logits": false,
|
| 6 |
"add_final_layer_norm": false,
|
| 7 |
"architectures": [
|
| 8 |
"BartModel"
|
| 9 |
],
|
| 10 |
+
"attention_dropout": 0.1,
|
| 11 |
"bos_token_id": 0,
|
| 12 |
+
"classif_dropout": 0.1,
|
| 13 |
"classifier_dropout": 0.0,
|
| 14 |
"d_model": 1024,
|
| 15 |
"decoder_attention_heads": 16,
|