PeteBleackley committed
Commit 7285c8a · 1 Parent(s): c833bc7

Head needed to know input embeddings

Files changed (1):
  1. qarac/models/QaracDecoderModel.py +5 -3
qarac/models/QaracDecoderModel.py CHANGED
@@ -12,7 +12,7 @@ import transformers
 
 class QaracDecoderHead(keras.layers.Layer):
 
-    def __init__(self,config):
+    def __init__(self,config,input_embeddings):
         """
         Creates the Decoder head
 
@@ -30,7 +30,8 @@ class QaracDecoderHead(keras.layers.Layer):
         self.concat = keras.layers.Concatenate(axis=1)
         self.layer_0 = transformers.models.roberta.modeling_tf_roberta.TFRobertaLayer(config)
         self.layer_1 = transformers.models.roberta.modeling_tf_roberta.TFRobertaLayer(config)
-        self.head = transformers.models.roberta.modeling_tf_roberta.TFRobertaLMHead(config)
+        self.head = transformers.models.roberta.modeling_tf_roberta.TFRobertaLMHead(config,
+                                                                                    input_embeddings)
 
     def build(self,input_shape):
         """
@@ -85,7 +86,8 @@ class QaracDecoderModel(transformers.TFPreTrainedModel,transformers.generation_t
         """
         super(QaracDecoderModel,self).__init__(base_model.config)
         self.base_model = base_model
-        self.decoder_head = QaracDecoderHead(self.base_model.config)
+        self.decoder_head = QaracDecoderHead(self.base_model.config,
+                                             self.base_model.roberta.get_input_embeddings())
         self.tokenizer = tokenizer
         self.start=None
         self.end=None