Reihaneh committed on
Commit
a88c3bc
·
verified ·
1 Parent(s): 39ac4ee

Upload tokenizer

Browse files
Files changed (3) hide show
  1. added_tokens.json +2 -2
  2. tokenizer_config.json +6 -5
  3. vocab.json +47 -52
added_tokens.json CHANGED
@@ -1,4 +1,4 @@
1
  {
2
- "</s>": 54,
3
- "<s>": 53
4
  }
 
1
  {
2
+ "</s>": 49,
3
+ "<s>": 48
4
  }
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "added_tokens_decoder": {
3
- "51": {
4
  "content": "[UNK]",
5
  "lstrip": true,
6
  "normalized": false,
@@ -8,7 +8,7 @@
8
  "single_word": false,
9
  "special": false
10
  },
11
- "52": {
12
  "content": "[PAD]",
13
  "lstrip": true,
14
  "normalized": false,
@@ -16,7 +16,7 @@
16
  "single_word": false,
17
  "special": false
18
  },
19
- "53": {
20
  "content": "<s>",
21
  "lstrip": false,
22
  "normalized": false,
@@ -24,7 +24,7 @@
24
  "single_word": false,
25
  "special": true
26
  },
27
- "54": {
28
  "content": "</s>",
29
  "lstrip": false,
30
  "normalized": false,
@@ -34,9 +34,10 @@
34
  }
35
  },
36
  "bos_token": "<s>",
37
- "clean_up_tokenization_spaces": true,
38
  "do_lower_case": false,
39
  "eos_token": "</s>",
 
40
  "model_max_length": 1000000000000000019884624838656,
41
  "pad_token": "[PAD]",
42
  "replace_word_delimiter_char": " ",
 
1
  {
2
  "added_tokens_decoder": {
3
+ "46": {
4
  "content": "[UNK]",
5
  "lstrip": true,
6
  "normalized": false,
 
8
  "single_word": false,
9
  "special": false
10
  },
11
+ "47": {
12
  "content": "[PAD]",
13
  "lstrip": true,
14
  "normalized": false,
 
16
  "single_word": false,
17
  "special": false
18
  },
19
+ "48": {
20
  "content": "<s>",
21
  "lstrip": false,
22
  "normalized": false,
 
24
  "single_word": false,
25
  "special": true
26
  },
27
+ "49": {
28
  "content": "</s>",
29
  "lstrip": false,
30
  "normalized": false,
 
34
  }
35
  },
36
  "bos_token": "<s>",
37
+ "clean_up_tokenization_spaces": false,
38
  "do_lower_case": false,
39
  "eos_token": "</s>",
40
+ "extra_special_tokens": {},
41
  "model_max_length": 1000000000000000019884624838656,
42
  "pad_token": "[PAD]",
43
  "replace_word_delimiter_char": " ",
vocab.json CHANGED
@@ -1,55 +1,50 @@
1
  {
2
- "'": 18,
3
- "[PAD]": 52,
4
- "[UNK]": 51,
5
- "_": 33,
6
- "a": 1,
7
- "b": 45,
8
- "c": 25,
9
- "d": 19,
10
- "e": 10,
11
- "f": 12,
12
- "g": 44,
13
- "h": 24,
14
- "i": 3,
15
- "j": 50,
16
- "k": 31,
17
- "l": 46,
18
- "m": 8,
19
- "n": 34,
20
- "o": 7,
21
- "p": 6,
22
- "q": 49,
23
- "r": 30,
24
- "s": 4,
25
- "t": 40,
26
- "u": 47,
27
- "v": 17,
28
- "w": 41,
29
- "x": 14,
30
- "y": 29,
31
- "z": 13,
32
- "|": 20,
33
- "¡": 21,
34
- "©": 26,
35
  "«": 11,
36
- "»": 16,
37
- "¿": 27,
38
- "á": 2,
39
- "ã": 35,
40
- "ç": 0,
41
- "é": 37,
42
- "í": 9,
43
- "ñ": 38,
44
- "ó": 28,
45
- "ú": 23,
46
- "ü": 22,
47
- "": 15,
48
- "": 42,
49
- "": 39,
50
- "": 43,
51
- "“": 5,
52
- "”": 36,
53
- "…": 32,
54
- "−": 48
55
  }
 
1
  {
2
+ "'": 28,
3
+ "[PAD]": 47,
4
+ "[UNK]": 46,
5
+ "_": 34,
6
+ "a": 39,
7
+ "b": 24,
8
+ "c": 6,
9
+ "d": 27,
10
+ "e": 8,
11
+ "f": 29,
12
+ "g": 14,
13
+ "h": 10,
14
+ "i": 1,
15
+ "j": 9,
16
+ "k": 38,
17
+ "l": 3,
18
+ "m": 5,
19
+ "n": 42,
20
+ "o": 35,
21
+ "p": 43,
22
+ "q": 4,
23
+ "r": 25,
24
+ "s": 30,
25
+ "t": 18,
26
+ "u": 7,
27
+ "v": 21,
28
+ "w": 0,
29
+ "x": 19,
30
+ "y": 26,
31
+ "z": 33,
32
+ "|": 15,
33
+ "¡": 13,
 
34
  "«": 11,
35
+ "»": 20,
36
+ "¿": 17,
37
+ "á": 45,
38
+ "é": 22,
39
+ "í": 12,
40
+ "ñ": 32,
41
+ "ó": 31,
42
+ "ú": 36,
43
+ "ü": 41,
44
+ "": 2,
45
+ "": 40,
46
+ "": 37,
47
+ "": 44,
48
+ "": 23,
49
+ "": 16
 
 
 
 
50
  }