HF_NMT_FINETUNED / tokenizer_config.json
{
"eos_token": "</s>",
"model_max_length": 512,
"pad_token": "<pad>",
"separate_vocabs": false,
"source_lang": "eng",
"sp_model_kwargs": {},
"special_tokens_map_file": null,
"target_lang": "spa",
"tokenizer_class": "MarianTokenizer",
"unk_token": "<unk>"
}
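
A minimal sketch of how this configuration is consumed, assuming the hosting repo id is "zuu/HF_NMT_FINETUNED" (inferred from the page header, not confirmed): transformers reads tokenizer_config.json, resolves tokenizer_class to MarianTokenizer, and applies the special tokens and length limit declared above.

# A minimal sketch, assuming the repo id "zuu/HF_NMT_FINETUNED" and an
# installed `transformers` package.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("zuu/HF_NMT_FINETUNED")

# These values come straight from the JSON above.
assert tokenizer.eos_token == "</s>"
assert tokenizer.pad_token == "<pad>"
assert tokenizer.unk_token == "<unk>"
assert tokenizer.model_max_length == 512

# Encode an English (eng) source sentence for the eng -> spa model;
# truncation enforces the 512-token model_max_length.
encoded = tokenizer("Hello, how are you?", truncation=True)
print(encoded["input_ids"])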