File size: 942 Bytes
c94c8c9 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 |
import torch.nn as nn
from transformers import BertConfig, BertModel, BertTokenizer
from modules.build import LANGUAGE_REGISTRY
@LANGUAGE_REGISTRY.register()
class BERTLanguageEncoder(nn.Module):
    """Language encoder backed by a (truncated) pretrained BERT model.

    Builds a ``BertModel`` from pretrained *weights* but with a custom
    ``BertConfig`` (notably ``num_hidden_layers``, default 4), so only the
    matching subset of pretrained parameters is loaded. Also instantiates the
    corresponding lowercasing ``BertTokenizer`` for callers to use.

    Note: ``cfg`` is accepted for registry/construction compatibility but is
    not read here.
    """

    def __init__(self, cfg, weights="bert-base-uncased", hidden_size=768,
                 num_hidden_layers=4, num_attention_heads=12, type_vocab_size=2):
        super().__init__()
        # Tokenizer matching the pretrained checkpoint; inputs are lowercased.
        self.tokenizer = BertTokenizer.from_pretrained(weights, do_lower_case=True)
        # Custom architecture hyperparameters; layers beyond
        # num_hidden_layers of the checkpoint are simply not loaded.
        encoder_config = BertConfig(
            hidden_size=hidden_size,
            num_hidden_layers=num_hidden_layers,
            num_attention_heads=num_attention_heads,
            type_vocab_size=type_vocab_size,
        )
        self.bert_config = encoder_config
        self.model = BertModel.from_pretrained(weights, config=encoder_config)

    def forward(self, txt_ids, txt_masks, **kwargs):
        """Encode token ids into per-token hidden states.

        Args:
            txt_ids: token id tensor, passed as ``input_ids``.
            txt_masks: attention mask tensor, passed as ``attention_mask``.
            **kwargs: ignored (kept for interface compatibility).

        Returns:
            The encoder's ``last_hidden_state`` tensor.
        """
        outputs = self.model(input_ids=txt_ids, attention_mask=txt_masks)
        return outputs.last_hidden_state
|