luerhard committed on
Commit
0e6cb92
·
1 Parent(s): 9e65093

Upload PopBERT

Browse files
Files changed (3) hide show
  1. config.json +4 -0
  2. module.py +52 -0
  3. pytorch_model.bin +1 -1
config.json CHANGED
@@ -2,6 +2,10 @@
2
  "architectures": [
3
  "PopBERT"
4
  ],
 
 
 
 
5
  "model_type": "popbert",
6
  "num_classes": 4,
7
  "torch_dtype": "float32",
 
2
  "architectures": [
3
  "PopBERT"
4
  ],
5
+ "auto_map": {
6
+ "AutoConfig": "module.PopBERTConfig",
7
+ "AutoModel": "module.PopBERT"
8
+ },
9
  "model_type": "popbert",
10
  "num_classes": 4,
11
  "torch_dtype": "float32",
module.py ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ from torch import nn
3
+ from torch.nn import BCEWithLogitsLoss
4
+ from transformers import AutoModelForSequenceClassification
5
+ from transformers import PretrainedConfig
6
+ from transformers import PreTrainedModel
7
+
8
+
9
class PopBERTConfig(PretrainedConfig):
    """Configuration for the PopBERT sequence classifier.

    Carries a single model-specific hyperparameter, ``num_classes`` (the
    number of output labels, default 4 — matching ``num_classes`` in the
    accompanying config.json); everything else is delegated to the
    ``PretrainedConfig`` base class.
    """

    # Registered via the "auto_map"/"model_type" entries in config.json so
    # that AutoConfig can resolve this class.
    model_type = "popbert"

    def __init__(self, num_classes: int = 4, **kwargs):
        """Create the config.

        Args:
            num_classes: Number of classification labels exposed by the model.
            **kwargs: Forwarded unchanged to ``PretrainedConfig``.
        """
        super().__init__(**kwargs)
        self.num_classes = num_classes
15
+
16
+
17
class PopBERT(PreTrainedModel):
    """Multi-label sequence classifier built on top of a German BERT.

    Wraps ``deepset/gbert-large`` (loaded through
    ``AutoModelForSequenceClassification``) with ``config.num_classes``
    output labels, and applies an element-wise sigmoid to the logits —
    i.e. each label is scored independently (multi-label setup).

    NOTE(review): ``__init__`` downloads the gbert-large weights from the
    Hub on every construction, even when this model is itself restored via
    ``from_pretrained`` (in which case the downloaded weights are then
    overwritten by the checkpoint) — confirm this double-load is intended.
    """

    config_class = PopBERTConfig

    def __init__(self, config):
        """Build the wrapped classifier from a ``PopBERTConfig``."""
        super().__init__(config)
        # Backbone with a classification head sized to config.num_classes.
        self.bert = AutoModelForSequenceClassification.from_pretrained(
            "deepset/gbert-large",
            num_labels=config.num_classes,
        )
        # Turns raw logits into independent per-label probabilities.
        self.sigmoid = nn.Sigmoid()

    def forward(
        self,
        input_ids: torch.Tensor | None = None,
        attention_mask: torch.Tensor | None = None,
        token_type_ids: torch.Tensor | None = None,
        position_ids: torch.Tensor | None = None,
        head_mask: torch.Tensor | None = None,
        inputs_embeds: torch.Tensor | None = None,
        labels: torch.Tensor | None = None,
    ):
        """Run a forward pass.

        All tensor arguments are forwarded unchanged to the underlying
        sequence-classification model.

        Args:
            labels: Optional multi-hot targets; cast to float for the BCE
                loss (presumably shaped like the logits,
                ``(batch, num_classes)`` — verify against the caller).

        Returns:
            A ``(loss, probabilities)`` tuple. ``loss`` is the
            BCE-with-logits loss when ``labels`` is given, otherwise
            ``None``; ``probabilities`` are the sigmoid-activated logits.
        """
        outputs = self.bert(
            input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
            position_ids=position_ids,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
        )

        if labels is None:
            loss = None
        else:
            # BCEWithLogitsLoss fuses sigmoid + BCE for numerical stability,
            # so it is fed the raw logits, not the activated outputs.
            loss = BCEWithLogitsLoss()(outputs.logits, labels.float())

        return loss, self.sigmoid(outputs.logits)
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:17b602ee2fa8553d861f68a2420e5c136b6dca34b5f77a5939417ab075feb06e
3
  size 1343094965
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6483ab1821245614459ff691f00ad8dd291e37e39c6952c3e67b5427f2c6ea22
3
  size 1343094965