{
  "_name_or_path": "/rgzn/limc/ProSST/oracle_checkpoint3/ss_2051_0_aa2pos_pos2aa_aa2ss_ss2aa_False/ProSSTX-2048",
  "architectures": [
    "ProSSTXForMaskedLM"
  ],
  "attention_probs_dropout_prob": 0.1,
  "auto_map": {
    "AutoConfig": "configuration_prosst.ProSSTXConfig",
    "AutoModelForMaskedLM": "modeling_prosst.ProSSTXForMaskedLM"
  },
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 1024,
  "initializer_range": 0.02,
  "intermediate_size": 4096,
  "layer_norm_eps": 1e-05,
  "mask_token_id": 24,
  "max_position_embeddings": -1,
  "max_relative_positions": 1024,
  "mlm_probability": 0.15,
  "model_type": "ProSSTX",
  "num_attention_heads": 16,
  "num_hidden_layers": 24,
  "pad_token_id": 0,
  "pooler_dropout": 0.1,
  "pooler_hidden_act": "gelu",
  "pooler_hidden_size": 1024,
  "pooling_head": "mean",
  "pos_att_type": [
    "aa2pos",
    "pos2aa",
    "aa2ss",
    "ss2aa"
  ],
  "position_biased_input": false,
  "position_embedding_type": "relative",
  "relative_attention": true,
  "scale_hidden": 1,
  "ss_vocab_size": 2051,
  "token_dropout": true,
  "torch_dtype": "float32",
  "transformers_version": "4.44.2",
  "type_vocab_size": 0,
  "vocab_size": 25
}
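
Because this config carries "auto_map" entries, the checkpoint is loaded through the transformers custom-code path rather than a built-in architecture. Below is a minimal Python sketch of how such a config is typically consumed, assuming the checkpoint directory also ships the custom code files named in "auto_map" (configuration_prosst.py and modeling_prosst.py); the local path is simply the "_name_or_path" value from this file and is illustrative.

# Minimal loading sketch for a checkpoint with custom "auto_map" classes.
from transformers import AutoConfig, AutoModelForMaskedLM

checkpoint = "/rgzn/limc/ProSST/oracle_checkpoint3/ss_2051_0_aa2pos_pos2aa_aa2ss_ss2aa_False/ProSSTX-2048"

# trust_remote_code=True tells transformers to import ProSSTXConfig and
# ProSSTXForMaskedLM from the files referenced by "auto_map" in config.json.
config = AutoConfig.from_pretrained(checkpoint, trust_remote_code=True)
model = AutoModelForMaskedLM.from_pretrained(checkpoint, trust_remote_code=True)

print(config.model_type)     # "ProSSTX"
print(config.vocab_size)     # 25  (amino-acid token vocabulary)
print(config.ss_vocab_size)  # 2051 (structure token vocabulary)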