```python
from transformers import PretrainedConfig


class MyGPTConfig(PretrainedConfig):
    model_type = "my_gpt"

    def __init__(
        self,
        vocab_size=50257,
        hidden_size=256,
        num_attention_heads=4,
        num_hidden_layers=12,
        max_position_embeddings=256,
        drop_rate=0.1,
        qkv_bias=False,
        bos_token_id=50256,
        eos_token_id=50256,
        pad_token_id=50256,
        **kwargs,
    ):
        super().__init__(
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            pad_token_id=pad_token_id,
            **kwargs,
        )
        # 🔥 Standard HF field names
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_attention_heads = num_attention_heads
        self.num_hidden_layers = num_hidden_layers
        self.max_position_embeddings = max_position_embeddings
        # Custom fields
        self.drop_rate = drop_rate
        self.qkv_bias = qkv_bias
        self.is_decoder = True
        self.is_encoder_decoder = False
```
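For reference, here is a minimal usage sketch, assuming `MyGPTConfig` from above is in scope; the directory name `./my_gpt_config` is a placeholder:

```python
from transformers import AutoConfig

# Instantiate with an override; unspecified fields keep their defaults.
config = MyGPTConfig(hidden_size=512)

# Round-trip through disk: save_pretrained writes config.json,
# from_pretrained reads it back. "./my_gpt_config" is a placeholder path.
config.save_pretrained("./my_gpt_config")
reloaded = MyGPTConfig.from_pretrained("./my_gpt_config")
assert reloaded.hidden_size == 512

# Optionally register the class so AutoConfig can resolve
# model_type "my_gpt" from a saved config.json.
AutoConfig.register("my_gpt", MyGPTConfig)
```

Because the special token ids are forwarded through `super().__init__`, they serialize to `config.json` like any other `PretrainedConfig` field and survive the round-trip above.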