Spaces:
Build error
Build error
PeteBleackley
committed on
Commit
·
8172944
1
Parent(s):
e095479
Removed unnecessary parameter
Browse files
qarac/models/QaracDecoderModel.py
CHANGED
|
@@ -11,7 +11,7 @@ import torch
|
|
| 11 |
|
| 12 |
class QaracDecoderHead(torch.nn.Module):
|
| 13 |
|
| 14 |
-
def __init__(self,
|
| 15 |
"""
|
| 16 |
Creates the Decoder head
|
| 17 |
|
|
@@ -25,7 +25,7 @@ class QaracDecoderHead(torch.nn.Module):
|
|
| 25 |
None.
|
| 26 |
|
| 27 |
"""
|
| 28 |
-
super(QaracDecoderHead,self).
|
| 29 |
self.layer_0 = transformers.models.roberta.modeling_roberta.RobertaLayer(config)
|
| 30 |
self.layer_1 = transformers.models.roberta.modeling_roberta.RobertaLayer(config)
|
| 31 |
self.head = transformers.models.roberta.modeling_roberta.RobertaLMHead(config,
|
|
|
|
| 11 |
|
| 12 |
class QaracDecoderHead(torch.nn.Module):
|
| 13 |
|
| 14 |
+
def __init__(self,config,input_embeddings):
|
| 15 |
"""
|
| 16 |
Creates the Decoder head
|
| 17 |
|
|
|
|
| 25 |
None.
|
| 26 |
|
| 27 |
"""
|
| 28 |
+
super(QaracDecoderHead,self).__init__()
|
| 29 |
self.layer_0 = transformers.models.roberta.modeling_roberta.RobertaLayer(config)
|
| 30 |
self.layer_1 = transformers.models.roberta.modeling_roberta.RobertaLayer(config)
|
| 31 |
self.head = transformers.models.roberta.modeling_roberta.RobertaLMHead(config,
|