PeteBleackley committed on
Commit
8172944
·
1 Parent(s): e095479

Removed unnecessary parameter

Browse files
Files changed (1) hide show
  1. qarac/models/QaracDecoderModel.py +2 -2
qarac/models/QaracDecoderModel.py CHANGED
@@ -11,7 +11,7 @@ import torch
11
 
12
  class QaracDecoderHead(torch.nn.Module):
13
 
14
- def __init__(self,base_model,config,input_embeddings):
15
  """
16
  Creates the Decoder head
17
 
@@ -25,7 +25,7 @@ class QaracDecoderHead(torch.nn.Module):
25
  None.
26
 
27
  """
28
- super(QaracDecoderHead,self).from_pretrained(base_model,config)
29
  self.layer_0 = transformers.models.roberta.modeling_roberta.RobertaLayer(config)
30
  self.layer_1 = transformers.models.roberta.modeling_roberta.RobertaLayer(config)
31
  self.head = transformers.models.roberta.modeling_roberta.RobertaLMHead(config,
 
11
 
12
  class QaracDecoderHead(torch.nn.Module):
13
 
14
+ def __init__(self,config,input_embeddings):
15
  """
16
  Creates the Decoder head
17
 
 
25
  None.
26
 
27
  """
28
+ super(QaracDecoderHead,self).__init__()
29
  self.layer_0 = transformers.models.roberta.modeling_roberta.RobertaLayer(config)
30
  self.layer_1 = transformers.models.roberta.modeling_roberta.RobertaLayer(config)
31
  self.head = transformers.models.roberta.modeling_roberta.RobertaLMHead(config,