import os
from typing import Dict, List, Optional, Union

from transformers.configuration_utils import PretrainedConfig
from transformers.utils import logging

# Module-level logger, following the transformers logging convention.
# NOTE(review): `os` and the `typing` names (Dict, List, Optional, Union)
# appear unused in this chunk — confirm against the rest of the file
# before removing.
logger = logging.get_logger(__name__)
class PDeepPPConfig(PretrainedConfig):
    """Configuration class for the PDeepPP model, used for PTM prediction.

    Holds the hyper-parameters needed to instantiate a PDeepPP model.

    Args:
        input_size (int, defaults to 1280):
            Dimensionality of the input features (1280 matches the ESM
            embedding width — presumably ESM-derived inputs; confirm).
        output_size (int, defaults to 128):
            Dimensionality of the model's output projection.
        num_heads (int, defaults to 8):
            Number of attention heads.
        hidden_size (int, defaults to 256):
            Hidden dimension of the intermediate layers.
        num_transformer_layers (int, defaults to 4):
            Number of stacked transformer layers.
        dropout (float, defaults to 0.3):
            Dropout probability.
        ptm_type (str, defaults to "ACE"):
            Identifier of the PTM task variant this config targets.
        esm_ratio (float, defaults to 0.96):
            Ratio parameter — presumably the ESM-feature mixing weight;
            TODO confirm against the model implementation.
        lambda_ (int or float, defaults to 1):
            Weighting coefficient — presumably a loss-balancing term;
            TODO confirm against the model implementation.
        kwargs:
            Forwarded to ``PretrainedConfig``.
    """

    model_type = "pdeeppp"

    def __init__(
        self,
        input_size=1280,
        output_size=128,
        num_heads=8,
        hidden_size=256,
        num_transformer_layers=4,
        dropout=0.3,
        ptm_type="ACE",
        esm_ratio=0.96,
        lambda_=1,
        **kwargs
    ):
        # Let the PretrainedConfig base consume its own keyword arguments
        # (name_or_path, id2label, etc.) before we record ours.
        super().__init__(**kwargs)

        # Architecture hyper-parameters.
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.num_heads = num_heads
        self.num_transformer_layers = num_transformer_layers
        self.dropout = dropout

        # Task / weighting settings.
        self.ptm_type = ptm_type
        self.esm_ratio = esm_ratio
        self.lambda_ = lambda_
# Register the config class with the transformers Auto-class machinery
# (presumably so it can be saved/loaded via AutoConfig with custom code;
# confirm the intended auto class).
PDeepPPConfig.register_for_auto_class()