# configure.py
from transformers import PretrainedConfig


class RecombinationTransformerConfig(PretrainedConfig):
    """Configuration for the RecombinationTransformer model."""

    model_type = "RecombinationTransformer"

    def __init__(
        self,
        embed_dim=768,
        num_heads=8,
        num_layers=4,
        vocab_size=50280,
        eos_token_id=0,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.embed_dim = embed_dim    # hidden size of the token embeddings
        self.num_heads = num_heads    # attention heads per transformer layer
        self.num_layers = num_layers  # number of transformer blocks
        self.vocab_size = vocab_size  # tokenizer vocabulary size
        self.eos_token_id = eos_token_id  # id of the end-of-sequence token
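

# A minimal usage sketch (an assumption, not part of the original file): the
# save_pretrained / from_pretrained round trip below is standard behavior
# inherited from transformers.PretrainedConfig, which serializes the config
# to a config.json in the given directory.
if __name__ == "__main__":
    # Override one field; the rest keep the defaults defined above.
    config = RecombinationTransformerConfig(embed_dim=512)

    config.save_pretrained("recombination-transformer")  # writes config.json
    reloaded = RecombinationTransformerConfig.from_pretrained(
        "recombination-transformer"
    )
    assert reloaded.embed_dim == 512
    assert reloaded.model_type == "RecombinationTransformer"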