Mustafaa4a committed
Commit 10b6875 · Parent(s): 655163d

Upload tokenizer

Files changed (2)
  1. special_tokens_map.json +0 -28
  2. tokenizer_config.json +3 -1
special_tokens_map.json CHANGED
@@ -1,33 +1,5 @@
 {
   "additional_special_tokens": [
-    {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "<s>",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false
-    },
-    {
-      "content": "</s>",
-      "lstrip": false,
-      "normalized": true,
-      "rstrip": false,
-      "single_word": false
-    },
     {
       "content": "<s>",
       "lstrip": false,
tokenizer_config.json CHANGED
@@ -2,11 +2,13 @@
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
   "do_lower_case": false,
+  "do_normalize": false,
   "eos_token": "</s>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "[PAD]",
   "replace_word_delimiter_char": " ",
-  "tokenizer_class": "Wav2Vec2CTCTokenizer",
+  "return_attention_mask": false,
+  "tokenizer_class": "Wav2Vec2Tokenizer",
   "unk_token": "[UNK]",
   "word_delimiter_token": "|"
 }
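A minimal loading sketch for the updated config, again with a placeholder path since the model id is not shown here. `tokenizer_class` now names `Wav2Vec2Tokenizer` (the older combined class, deprecated in favor of `Wav2Vec2CTCTokenizer` plus a feature extractor), whose constructor accepts the two newly added keys:

```python
# Sketch: load the tokenizer and inspect the two settings added in this commit.
# "./tokenizer_dir" is a hypothetical local clone of this repo.
from transformers import Wav2Vec2Tokenizer

tok = Wav2Vec2Tokenizer.from_pretrained("./tokenizer_dir")

# Both config keys land as attributes on the tokenizer instance:
print(tok.do_normalize)           # False -> raw audio is not mean/variance normalized
print(tok.return_attention_mask)  # False -> calls omit the attention mask by default
```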