Upload ProPrimeForPretraining
Browse files
- config.json +2 -1
- modeling_proprime.py +2 -2
config.json
CHANGED
|
@@ -1,11 +1,12 @@
|
|
| 1 |
{
|
| 2 |
-
"_name_or_path": "
|
| 3 |
"architectures": [
|
| 4 |
"ProPrimeForPretraining"
|
| 5 |
],
|
| 6 |
"attention_probs_dropout_prob": 0.0,
|
| 7 |
"auto_map": {
|
| 8 |
"AutoConfig": "configuration_proprime.ProPrimeConfig",
|
|
|
|
| 9 |
"AutoModelForMaskedLM": "AI4Protein/ProPrime_650M--modeling_proprime.ProPrimeForMaskedLM"
|
| 10 |
},
|
| 11 |
"emb_layer_norm_before": false,
|
|
|
|
| 1 |
{
|
| 2 |
+
"_name_or_path": "pretrain_checkpoint/ProPrime_650M_260K",
|
| 3 |
"architectures": [
|
| 4 |
"ProPrimeForPretraining"
|
| 5 |
],
|
| 6 |
"attention_probs_dropout_prob": 0.0,
|
| 7 |
"auto_map": {
|
| 8 |
"AutoConfig": "configuration_proprime.ProPrimeConfig",
|
| 9 |
+
"AutoModel": "modeling_proprime.ProPrimeForPretraining",
|
| 10 |
"AutoModelForMaskedLM": "AI4Protein/ProPrime_650M--modeling_proprime.ProPrimeForMaskedLM"
|
| 11 |
},
|
| 12 |
"emb_layer_norm_before": false,
|
modeling_proprime.py
CHANGED
|
@@ -18,7 +18,7 @@ from transformers.modeling_utils import (
|
|
| 18 |
prune_linear_layer,
|
| 19 |
)
|
| 20 |
from transformers.utils import logging
|
| 21 |
-
from
|
| 22 |
from torch.nn.functional import scaled_dot_product_attention
|
| 23 |
|
| 24 |
logger = logging.get_logger(__name__)
|
|
@@ -31,7 +31,7 @@ def consine_based_loss(x1, x2):
|
|
| 31 |
return 1 - cos(x1, x2).mean()
|
| 32 |
|
| 33 |
PROPRIME_PRETRAINED_MODEL_ARCHIVE_LIST = [
|
| 34 |
-
"AI4protein/
|
| 35 |
]
|
| 36 |
|
| 37 |
|
|
|
|
| 18 |
prune_linear_layer,
|
| 19 |
)
|
| 20 |
from transformers.utils import logging
|
| 21 |
+
from .configuration_proprime import ProPrimeConfig
|
| 22 |
from torch.nn.functional import scaled_dot_product_attention
|
| 23 |
|
| 24 |
logger = logging.get_logger(__name__)
|
|
|
|
| 31 |
return 1 - cos(x1, x2).mean()
|
| 32 |
|
| 33 |
PROPRIME_PRETRAINED_MODEL_ARCHIVE_LIST = [
|
| 34 |
+
"AI4protein/ProPrime_650M",
|
| 35 |
]
|
| 36 |
|
| 37 |
|