Preeti Dave committed on
Commit ·
5fe7779
1
Parent(s): ce124f4
- config.json +2 -1
config.json
CHANGED
|
@@ -10,5 +10,6 @@
|
|
| 10 |
"initializer_range": 0.02,
|
| 11 |
"layer_norm_eps": 1e-12,
|
| 12 |
"hidden_dropout_prob": 0.1,
|
| 13 |
-
"attention_probs_dropout_prob": 0.1
|
|
|
|
| 14 |
}
|
|
|
|
| 10 |
"initializer_range": 0.02,
|
| 11 |
"layer_norm_eps": 1e-12,
|
| 12 |
"hidden_dropout_prob": 0.1,
|
| 13 |
+
"attention_probs_dropout_prob": 0.1,
|
| 14 |
+
"library_name": "transformers"
|
| 15 |
}
|