from transformers import PretrainedConfig
class Im2LatexTransformerConfig(PretrainedConfig):
    """Configuration for an image-to-LaTeX Transformer model.

    Follows the standard ``PretrainedConfig`` pattern: every hyperparameter
    is an explicit keyword argument with a default, and all values (plus any
    extra kwargs) are forwarded to ``super().__init__``.  The original
    version hard-coded every attribute and dropped ``**kwargs``, which meant
    values saved with ``save_pretrained`` were silently ignored when the
    config was reloaded via ``from_pretrained`` / ``from_dict``.

    Args:
        vocab_size: Size of the LaTeX token vocabulary.
        max_len: Maximum decoded sequence length.
        d_model: Transformer embedding/hidden dimension.
        nhead: Number of attention heads.
        num_layers: Number of Transformer layers.
        dim_feedforward: Inner dimension of the feed-forward sublayers.
        dropout: Dropout probability.
        in_channels: Input image channels (1 = grayscale).
        pad_token_id: Padding token id (default 0).
        sos_token_id: Start-of-sequence token id (default 1); stored on the
            config via kwargs — note HF's conventional name is
            ``bos_token_id``, kept as-is for backward compatibility.
        eos_token_id: End-of-sequence token id (default 2).
        **kwargs: Forwarded to ``PretrainedConfig.__init__``.
    """

    model_type = "Im2LatexTransformer"

    def __init__(
        self,
        vocab_size: int = 544,
        max_len: int = 512,
        d_model: int = 512,
        nhead: int = 8,
        num_layers: int = 6,
        dim_feedforward: int = 2048,
        dropout: float = 0.1,
        in_channels: int = 1,
        pad_token_id: int = 0,
        sos_token_id: int = 1,
        eos_token_id: int = 2,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.max_len = max_len
        self.d_model = d_model
        self.nhead = nhead
        self.num_layers = num_layers
        self.dim_feedforward = dim_feedforward
        self.dropout = dropout
        self.in_channels = in_channels
        # Forward token ids and any remaining kwargs so serialization
        # round-trips (from_dict re-passes saved values as kwargs).
        super().__init__(
            pad_token_id=pad_token_id,
            sos_token_id=sos_token_id,
            eos_token_id=eos_token_id,
            **kwargs,
        )