Upload folder using huggingface_hub
Browse files- modeling_havelock.py +3 -6
modeling_havelock.py
CHANGED
|
@@ -106,9 +106,9 @@ class HavelockTokenClassifier(PreTrainedModel):
 106
 107         # Accept injected backbone (from_pretrained path) or build from config
 108         if backbone is not None:
-109             self.[removed line truncated in page extraction]
+109             self.bert = backbone
 110         else:
-111             self.[removed line truncated in page extraction]
+111             self.bert = AutoModel.from_config(config)
 112
 113         self.dropout = nn.Dropout(getattr(config, "hidden_dropout_prob", 0.1))
 114         self.classifier = nn.Linear(config.hidden_size, config.num_types * 3)
|
@@ -135,16 +135,13 @@ class HavelockTokenClassifier(PreTrainedModel):
 135             use_crf=use_crf,
 136             **backbone_config.to_dict(),
 137         )
-    [removed blank line]
 138         model = cls(config, backbone=backbone)
-    [removed blank line]
 139         if use_crf and obi_bias is not None:
 140             model.crf.emission_bias = obi_bias.reshape(1, 1, 1, 3)
-    [removed blank line]
 141         return model
 142
 143     def forward(self, input_ids, attention_mask=None, **kwargs):
-144         hidden = self.[removed line truncated in page extraction]
+144         hidden = self.bert(
 145             input_ids=input_ids, attention_mask=attention_mask
 146         ).last_hidden_state
 147         hidden = self.dropout(hidden)
|
Resulting code after the change (added lines marked with +):

 106
 107         # Accept injected backbone (from_pretrained path) or build from config
 108         if backbone is not None:
+109             self.bert = backbone
 110         else:
+111             self.bert = AutoModel.from_config(config)
 112
 113         self.dropout = nn.Dropout(getattr(config, "hidden_dropout_prob", 0.1))
 114         self.classifier = nn.Linear(config.hidden_size, config.num_types * 3)

 135             use_crf=use_crf,
 136             **backbone_config.to_dict(),
 137         )
 138         model = cls(config, backbone=backbone)
 139         if use_crf and obi_bias is not None:
 140             model.crf.emission_bias = obi_bias.reshape(1, 1, 1, 3)
 141         return model
 142
 143     def forward(self, input_ids, attention_mask=None, **kwargs):
+144         hidden = self.bert(
 145             input_ids=input_ids, attention_mask=attention_mask
 146         ).last_hidden_state
 147         hidden = self.dropout(hidden)