add config
Browse files — config.json (+9 −1)
config.json
CHANGED
|
@@ -1,5 +1,5 @@
|
|
| 1 |
{
|
| 2 |
-
"_name_or_path": "lingmess-coref",
|
| 3 |
"architectures": [
|
| 4 |
"LingMessCoref"
|
| 5 |
],
|
|
@@ -33,6 +33,14 @@
|
|
| 33 |
],
|
| 34 |
"bos_token_id": 0,
|
| 35 |
"classifier_dropout": null,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
"eos_token_id": 2,
|
| 37 |
"gradient_checkpointing": false,
|
| 38 |
"hidden_act": "gelu",
|
|
|
|
| 1 |
{
|
| 2 |
+
"_name_or_path": "biu-nlp/lingmess-coref",
|
| 3 |
"architectures": [
|
| 4 |
"LingMessCoref"
|
| 5 |
],
|
|
|
|
| 33 |
],
|
| 34 |
"bos_token_id": 0,
|
| 35 |
"classifier_dropout": null,
|
| 36 |
+
"coref_head": {
|
| 37 |
+
"dropout_prob": 0.3,
|
| 38 |
+
"ffnn_size": 2048,
|
| 39 |
+
"max_doc_len": 4096,
|
| 40 |
+
"max_segment_len": 512,
|
| 41 |
+
"max_span_length": 30,
|
| 42 |
+
"top_lambda": 0.4
|
| 43 |
+
},
|
| 44 |
"eos_token_id": 2,
|
| 45 |
"gradient_checkpointing": false,
|
| 46 |
"hidden_act": "gelu",
|