import os
from typing import Dict, List, Optional, Union
from transformers.configuration_utils import PretrainedConfig
from transformers.utils import logging
logger = logging.get_logger(__name__)
class PDeepPPConfig(PretrainedConfig):
    """Configuration for the PDeepPP model.

    Stores the hyperparameters used to instantiate a PDeepPP model. Inherits
    from :class:`~transformers.PretrainedConfig`, so it can be saved/loaded
    with ``save_pretrained`` / ``from_pretrained`` and used with the
    ``Auto*`` machinery.

    Args:
        input_size (int, optional, defaults to 1280):
            Dimensionality of the input features (e.g. ESM embedding size
            — assumed from the default; confirm against the model code).
        output_size (int, optional, defaults to 128):
            Dimensionality of the model's output representation.
        num_heads (int, optional, defaults to 8):
            Number of attention heads.
        hidden_size (int, optional, defaults to 256):
            Hidden dimension of the transformer layers.
        num_transformer_layers (int, optional, defaults to 4):
            Number of stacked transformer layers.
        dropout (float, optional, defaults to 0.3):
            Dropout probability.
        task_type (str, optional, defaults to ""):
            Left empty by default; filled in dynamically by the conversion
            script.
        esm_ratio (float, optional):
            Left empty (None) by default; filled in dynamically by the
            conversion script.
        lambda_ (float, optional):
            Left empty (None) by default; filled in dynamically by the
            conversion script.
        **kwargs:
            Forwarded to :class:`~transformers.PretrainedConfig`.
    """

    model_type = "PDeepPP"

    def __init__(
        self,
        input_size: int = 1280,
        output_size: int = 128,
        num_heads: int = 8,
        hidden_size: int = 256,
        num_transformer_layers: int = 4,
        dropout: float = 0.3,
        task_type: str = "",  # left empty; supplied dynamically by the convert script
        esm_ratio=None,  # left empty; supplied dynamically by the convert script
        lambda_=None,  # left empty; supplied dynamically by the convert script
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.input_size = input_size
        self.output_size = output_size
        self.num_heads = num_heads
        self.hidden_size = hidden_size
        self.num_transformer_layers = num_transformer_layers
        self.dropout = dropout
        self.task_type = task_type  # empty by default
        self.esm_ratio = esm_ratio  # None by default
        self.lambda_ = lambda_  # None by default
# Register the config so AutoConfig can resolve it via `trust_remote_code`.
# (A stray trailing "|" extraction artifact was removed — it made this line
# a syntax error.)
PDeepPPConfig.register_for_auto_class()