MostHumble committed on
Commit
fdbf1b3
·
1 Parent(s): eae4556

rm weights download

Browse files
Files changed (1) hide show
  1. model.py +5 -5
model.py CHANGED
@@ -1,13 +1,13 @@
1
  from torch import nn
2
- from transformers import BertModel
3
- import logging
4
  from transformers.modeling_outputs import TokenClassifierOutput
5
 
6
 
7
  class BertClassifier(nn.Module):
8
- def __init__(self, bert_model="Sifal/dzarabert", num_labels=2, dropout=0.1):
9
  super().__init__()
10
- self.bert = BertModel.from_pretrained(bert_model)
 
11
  self.num_labels = num_labels
12
  self.classifier = nn.Sequential(
13
  nn.Linear(self.bert.config.hidden_size, self.bert.config.hidden_size),
@@ -15,7 +15,7 @@ class BertClassifier(nn.Module):
15
  nn.Dropout(dropout),
16
  nn.Linear(self.bert.config.hidden_size, num_labels))
17
 
18
- def forward(self, input_ids=None, attention_mask=None,labels=None, *args, **kwargs):
19
  output = self.bert(input_ids, attention_mask=attention_mask)
20
  logits = self.classifier(output.pooler_output)
21
  loss = None
 
1
  from torch import nn
2
+ from transformers import BertModel,BertConfig
 
3
  from transformers.modeling_outputs import TokenClassifierOutput
4
 
5
 
6
  class BertClassifier(nn.Module):
7
+ def __init__(self, num_labels=2, dropout=0.1):
8
  super().__init__()
9
+ config = BertConfig(vocab_size=34688, max_position_embeddings=512)
10
+ self.bert = BertModel(config=config)
11
  self.num_labels = num_labels
12
  self.classifier = nn.Sequential(
13
  nn.Linear(self.bert.config.hidden_size, self.bert.config.hidden_size),
 
15
  nn.Dropout(dropout),
16
  nn.Linear(self.bert.config.hidden_size, num_labels))
17
 
18
+ def forward(self, input_ids=None, attention_mask=None,labels=None):
19
  output = self.bert(input_ids, attention_mask=attention_mask)
20
  logits = self.classifier(output.pooler_output)
21
  loss = None