Upload AbcTransformer

- config.json +3 -0
- pytorch_model.bin +1 -1
- transformers_model.py +48 -0
config.json CHANGED
@@ -2,6 +2,9 @@
   "architectures": [
     "AbcTransformer"
   ],
+  "auto_map": {
+    "AutoModelForCausalLM": "transformers_model.AbcTransformer"
+  },
   "block_size": 128,
   "device": "cpu",
   "dropout": 0.2,
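The new auto_map entry is what lets the Auto classes resolve the custom class from code shipped in the repo. A minimal loading sketch, assuming a hypothetical repo id your-username/abc-transformer; because the model class lives in the repo rather than in the transformers library, from_pretrained needs trust_remote_code=True:

    # Minimal sketch: load the custom model through the Auto API.
    # 'your-username/abc-transformer' is a hypothetical repo id.
    from transformers import AutoModelForCausalLM

    model = AutoModelForCausalLM.from_pretrained(
        'your-username/abc-transformer',
        trust_remote_code=True,  # auto_map points at transformers_model.py in the repo
    )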
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:682aafa0732ff611771441cd3059543c3b9fba5be2c0f6a0f851cc37baa8f075
 size 18965
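This file is a Git LFS pointer, not the weights themselves; the oid is the SHA-256 of the actual binary. A minimal sketch (hypothetical local path) for checking a downloaded pytorch_model.bin against the pointer:

    # Minimal sketch: verify a downloaded pytorch_model.bin against the
    # LFS pointer's oid. Assumes the file sits in the working directory.
    import hashlib

    with open('pytorch_model.bin', 'rb') as f:  # 18965 bytes per the pointer
        digest = hashlib.sha256(f.read()).hexdigest()

    assert digest == '682aafa0732ff611771441cd3059543c3b9fba5be2c0f6a0f851cc37baa8f075'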
transformers_model.py ADDED
@@ -0,0 +1,48 @@
+import transformers
+
+import model
+
+
+class AbcTransformerConfig(transformers.PretrainedConfig):
+    model_type = 'abc-transformer'
+    def __init__(
+        self,
+        vocab_size=113,
+        n_embd=384,
+        block_size=128,
+        n_heads=6,
+        n_layers=6,
+        dropout=0.2,
+        device=None,
+        **kwargs
+    ):
+        self.vocab_size = vocab_size
+        self.n_embd = n_embd
+        self.block_size = block_size
+        self.n_heads = n_heads
+        self.n_layers = n_layers
+        self.dropout = dropout
+        self.device = device
+        super().__init__(**kwargs)
+
+class AbcTransformer(transformers.PreTrainedModel):
+    config_class = AbcTransformerConfig
+
+    def __init__(self, config):
+        super().__init__(config)
+        self.model = model.AbcTransformer(
+            vocab_size=config.vocab_size,
+            n_embd=config.n_embd,
+            block_size=config.block_size,
+            n_heads=config.n_heads,
+            n_layers=config.n_layers,
+            dropout=config.dropout,
+            device=config.device,
+        )
+
+    def forward(self, tensor, labels):
+        return self.model(tensor, labels)
+
+transformers.AutoConfig.register('abc-transformer', AbcTransformerConfig)
+AbcTransformer.register_for_auto_class("AutoModelForCausalLM")
+transformers.AutoModel.register(AbcTransformerConfig, AbcTransformer)
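The register_* calls at the bottom run at import time, so importing this file is enough to wire the custom classes into the Auto machinery for local use. A minimal sketch, assuming transformers_model.py and the model module it imports are both on the Python path:

    # Minimal sketch: local use of the registered classes.
    import transformers
    import transformers_model  # noqa: F401 -- importing runs the register_* calls

    config = transformers.AutoConfig.for_model('abc-transformer')
    model = transformers.AutoModel.from_config(config)

One caveat worth noting: forward takes labels as a required positional argument, so any code path that calls the model without labels (such as generate) would need the signature relaxed to labels=None.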