add tokenizer
- special_tokens_map.json +1 -0
- tokenizer_config.json +1 -0
- vocab.json +1 -0
special_tokens_map.json
ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "[UNK]", "pad_token": "[PAD]"}
tokenizer_config.json
ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "bos_token": "<s>", "eos_token": "</s>", "pad_token": "[PAD]", "do_lower_case": true, "word_delimiter_token": "|", "tokenizer_class": "Wav2Vec2CTCTokenizer"}
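
These two files mirror the constructor arguments of transformers' Wav2Vec2CTCTokenizer, so the tokenizer can also be built by hand from the vocabulary file added below. A minimal sketch, assuming the file names refer to a local clone of this repo:

from transformers import Wav2Vec2CTCTokenizer

# Build the tokenizer directly; each keyword mirrors a field from
# special_tokens_map.json / tokenizer_config.json above.
tokenizer = Wav2Vec2CTCTokenizer(
    "vocab.json",              # character-to-id map (added below)
    bos_token="<s>",
    eos_token="</s>",
    unk_token="[UNK]",
    pad_token="[PAD]",
    word_delimiter_token="|",  # "|" stands in for the space between words
    do_lower_case=True,
)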
vocab.json
ADDED
@@ -0,0 +1 @@
+ {"s": 0, "t": 1, "j": 2, "ö": 3, "h": 4, "w": 5, "f": 6, "i": 7, "o": 8, "p": 9, "e": 10, "r": 11, "y": 13, "x": 14, "d": 15, "m": 16, "ä": 17, "g": 18, "a": 19, "k": 20, "b": 21, "c": 22, "å": 23, "q": 24, "v": 25, "l": 26, "n": 27, "u": 28, "z": 29, "|": 12, "[UNK]": 30, "[PAD]": 31}
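
With all three files in one directory, the tokenizer loads via from_pretrained and maps text to character ids and back. A rough usage sketch; "path/to/this/repo" is a placeholder for a local clone (or the Hub repo id), and "hej världen" is just an example string covered by the character set (the å/ä/ö entries suggest Swedish):

from transformers import Wav2Vec2CTCTokenizer

# Placeholder path: substitute the local clone or the Hub repo id.
tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("path/to/this/repo")

ids = tokenizer("hej världen").input_ids   # lower-cased, spaces encoded as "|"
print(ids)
print(tokenizer.decode(ids))               # -> "hej världen"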