{
"architectures": [
"Im2LatexTransformer"
],
"d_model": 512,
"dim_feedforward": 2048,
"dropout": 0.1,
"eos_token_id": 2,
"in_channels": 1,
"max_len": 512,
"model_type": "Im2LatexTransformer",
"nhead": 8,
"num_layers": 6,
"pad_token_id": 0,
"sos_token_id": 1,
"torch_dtype": "float32",
"transformers_version": "4.53.2",
"vocab_size": 544
}