from transformers import PretrainedConfig
from typing import List


class ExGPTConfig(PretrainedConfig):
    """Configuration class for the ExGPT model (a GPT-2-style decoder).

    Stores the hyperparameters that define the model architecture. Inherits
    from HuggingFace ``PretrainedConfig`` so it can be serialized with
    ``save_pretrained`` / loaded with ``from_pretrained``.

    Args:
        block_size: Maximum context (sequence) length the model can attend over.
        vocab_size: Size of the token vocabulary. Default 50257 =
            50,000 BPE merges + 256 byte tokens + 1 ``<|endoftext|>`` token
            (the GPT-2 tokenizer vocabulary). NOTE: fixed from 50527, which
            contradicted this arithmetic (transposed digits).
        n_layer: Number of transformer decoder layers.
        n_head: Number of attention heads per layer.
        n_embd: Embedding / hidden dimension.
        **kwargs: Forwarded to ``PretrainedConfig.__init__`` (e.g. ``bos_token_id``).
    """

    model_type = "ExGPT"

    def __init__(
        self,
        block_size: int = 1024,
        vocab_size: int = 50257,
        n_layer: int = 12,
        n_head: int = 12,
        n_embd: int = 768,
        **kwargs,
    ):
        self.block_size = block_size
        self.vocab_size = vocab_size
        self.n_layer = n_layer
        self.n_head = n_head
        self.n_embd = n_embd
        super().__init__(**kwargs)