first version
Browse files- another_module.py +4 -0
- config.json +13 -0
- model.safetensors +3 -0
- my_configuration.py +8 -0
- my_model.py +15 -0
another_module.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch.nn as nn
|
| 2 |
+
|
| 3 |
+
def create_super_module(size, out_features=1):
    """Build the model's core projection layer.

    Args:
        size: Number of input features to the linear layer.
        out_features: Output width of the layer. Defaults to ``1``,
            which preserves the original hard-coded behavior.

    Returns:
        An ``nn.Linear(size, out_features)`` module.
    """
    return nn.Linear(size, out_features)
|
config.json
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"architectures": [
|
| 3 |
+
"MyModel"
|
| 4 |
+
],
|
| 5 |
+
"auto_map": {
|
| 6 |
+
"AutoConfig": "my_configuration.MyConfig",
|
| 7 |
+
"AutoModel": "my_model.MyModel"
|
| 8 |
+
},
|
| 9 |
+
"model_type": "my_model",
|
| 10 |
+
"size": 256,
|
| 11 |
+
"torch_dtype": "float32",
|
| 12 |
+
"transformers_version": "4.48.3"
|
| 13 |
+
}
|
model.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:1ac953718d18f8167e2065e1b62b45066ad327d0e05bc2e507bbc68fe52286f6
|
| 3 |
+
size 1204
|
my_configuration.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from transformers import PretrainedConfig
|
| 2 |
+
|
| 3 |
+
class MyConfig(PretrainedConfig):
    """Configuration object for ``MyModel``.

    Holds a single model hyperparameter, ``size`` — the input width of the
    model's linear projection — on top of the generic fields managed by
    ``PretrainedConfig``.
    """

    # Registered model type; must match the "model_type" key in config.json.
    model_type = "my_model"

    def __init__(self, size: int = 512, **kwargs):
        # Let the transformers base class consume the generic kwargs first
        # (torch_dtype, architectures, auto_map, ...).
        super().__init__(**kwargs)
        # Input feature count for the model's linear layer.
        self.size = size
|
my_model.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import torch.nn as nn
|
| 2 |
+
from transformers import PreTrainedModel
|
| 3 |
+
from .my_configuration import MyConfig
|
| 4 |
+
from .another_module import create_super_module
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class MyModel(PreTrainedModel):
    """Minimal custom transformers model: one linear projection.

    The layer itself is built by ``create_super_module`` so the architecture
    definition lives in a single shared helper.
    """

    # Tells AutoModel which configuration class pairs with this model.
    config_class = MyConfig

    def __init__(self, config: MyConfig):
        super().__init__(config)
        # Projection from config.size features down to a single output.
        self.linear = create_super_module(config.size)

    def forward(self, input, **kwargs):
        # NOTE(review): `input` shadows the builtin, but the parameter name is
        # caller-visible (keyword calls), so it is kept for compatibility.
        projected = self.linear(input)
        return projected
|