{
"version": "1.0",
"truncation": null,
"padding": null,
"added_tokens": [],
"normalizer": {
"type": "Lowercase"
},
"pre_tokenizer": {
"type": "Whitespace"
},
"post_processor": null,
"decoder": {
"type": "WordPiece",
"cleanup": true
},
"model": {
"vocab": {
"one": 0,
"two": 1,
"three": 2,
"four": 3,
"five": 4,
"six": 5,
"seven": 6,
"eight": 7,
"nine": 8,
"ten": 9,
"eleven": 10,
"twelve": 11,
"thirteen": 12,
"fourteen": 13,
"fifteen": 14,
"sixteen": 15,
"seventeen": 16,
"eighteen": 17,
"nineteen": 18,
"twenty": 19,
"twenty-one": 20,
"twenty-two": 21,
"twenty-three": 22,
"twenty-four": 23,
"twenty-five": 24,
"twenty-six": 25,
"twenty-seven": 26,
"twenty-eight": 27,
"twenty-nine": 28,
"thirty": 29,
"forty": 30,
"fifty": 31,
"sixty": 32,
"seventy": 33,
"eighty": 34,
"ninety": 35,
"hundred": 36,
"[UNK]": 37
},
"type": "WordLevel",
"unk_token": "[UNK]"
}
}