Transformers 5.0.0 compatibility
Browse files — modeling_gpjtgpt2.py (+2, −0)
modeling_gpjtgpt2.py
CHANGED
|
@@ -16,6 +16,7 @@ class GPJTGPT2Model(PreTrainedModel):
|
|
| 16 |
def __init__(self, config):
    """Build the wrapped GPT backbone from ``config.cfg``.

    Args:
        config: HF-style configuration object; its ``cfg`` attribute holds
            the hyperparameters consumed by ``GPTModel``.
    """
    super().__init__(config)
    self.model = GPTModel(config.cfg)
    # Transformers >= 5.0 requires post_init() after construction to run
    # weight initialization / final processing (this is the change the
    # surrounding diff introduces — see the '+' hunks in this commit).
    self.post_init()
|
|
|
|
| 19 |
|
| 20 |
|
| 21 |
def forward(self, input_ids, **kwargs):
|
|
@@ -31,6 +32,7 @@ class GPJTGPT2ModelForCausalLM(PreTrainedModel, GenerationMixin):
|
|
| 31 |
def __init__(self, config):
    """Build the wrapped GPT backbone for causal-LM use from ``config.cfg``.

    Args:
        config: HF-style configuration object; its ``cfg`` attribute holds
            the hyperparameters consumed by ``GPTModel``.
    """
    super().__init__(config)
    self.model = GPTModel(config.cfg)
    # Transformers >= 5.0 requires post_init() after construction to run
    # weight initialization / final processing (matches the '+' hunk this
    # commit adds for the causal-LM class).
    self.post_init()
|
|
|
|
| 34 |
|
| 35 |
|
| 36 |
def forward(self, input_ids, attention_mask=None, labels=None, **kwargs):
|
|
|
|
| 16 |
def __init__(self, config):
    """Initialize the model wrapper around ``GPTModel``.

    Delegates HF bookkeeping to the parent class, instantiates the inner
    backbone from ``config.cfg``, then finalizes via ``post_init()``
    (required by Transformers >= 5.0).
    """
    super().__init__(config)
    inner = GPTModel(config.cfg)
    self.model = inner
    self.post_init()
|
| 20 |
|
| 21 |
|
| 22 |
def forward(self, input_ids, **kwargs):
|
|
|
|
| 32 |
def __init__(self, config):
    """Initialize the causal-LM wrapper around ``GPTModel``.

    Delegates HF bookkeeping to the parent class, instantiates the inner
    backbone from ``config.cfg``, then finalizes via ``post_init()``
    (required by Transformers >= 5.0).
    """
    super().__init__(config)
    backbone = GPTModel(config.cfg)
    self.model = backbone
    self.post_init()
|
| 36 |
|
| 37 |
|
| 38 |
def forward(self, input_ids, attention_mask=None, labels=None, **kwargs):
|