{
"added_tokens_decoder": {
"42": {
"content": "[UNK]",
"lstrip": true,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": false
},
"43": {
"content": "[PAD]",
"lstrip": true,
"normalized": false,
"rstrip": true,
"single_word": false,
"special": false
},
"44": {
"content": "<s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
},
"45": {
"content": "</s>",
"lstrip": false,
"normalized": false,
"rstrip": false,
"single_word": false,
"special": true
}
},
"bos_token": "<s>",
"clean_up_tokenization_spaces": false,
"do_lower_case": false,
"eos_token": "</s>",
"extra_special_tokens": {},
"model_max_length": 1000000000000000019884624838656,
"pad_token": "[PAD]",
"replace_word_delimiter_char": " ",
"target_lang": null,
"tokenizer_class": "Wav2Vec2CTCTokenizer",
"unk_token": "[UNK]",
"word_delimiter_token": "|"
}