Sayan01 committed on
Commit fe0529f · 1 Parent(s): 802308d

Upload tokenizer

added_tokens.json ADDED
@@ -0,0 +1,3 @@
+{
+  "<pad>": 32000
+}
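added_tokens.json records tokens added on top of the base vocabulary together with their ids; 32000 is the first free id after the 32,000-entry Llama SentencePiece vocabulary (ids 0-31999). A change set like the diffs on this page is what the transformers API typically produces when a pad token is registered and the tokenizer is re-saved. A minimal sketch, with placeholder input/output paths:

    from transformers import AutoTokenizer

    # Load a tokenizer that still lacks a pad token (placeholder path).
    tokenizer = AutoTokenizer.from_pretrained("path/to/base-model")

    # Register <pad>; it is appended after the existing vocabulary,
    # so it receives the next free id, 32000.
    tokenizer.add_special_tokens({"pad_token": "<pad>"})
    print(tokenizer.pad_token_id)  # 32000

    # save_pretrained writes added_tokens.json, special_tokens_map.json,
    # tokenizer.json, and tokenizer_config.json, as in the diffs here.
    tokenizer.save_pretrained("path/to/output")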
special_tokens_map.json CHANGED
@@ -13,6 +13,13 @@
     "rstrip": false,
     "single_word": false
   },
+  "pad_token": {
+    "content": "<pad>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -29,6 +29,15 @@
       "rstrip": false,
       "normalized": false,
       "special": true
+    },
+    {
+      "id": 32000,
+      "content": "<pad>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
     }
   ],
   "normalizer": {
tokenizer_config.json CHANGED
@@ -1,4 +1,6 @@
 {
+  "add_bos_token": true,
+  "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -23,6 +25,14 @@
       "rstrip": false,
       "single_word": false,
       "special": true
+    },
+    "32000": {
+      "content": "<pad>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
     }
   },
   "bos_token": "<s>",
@@ -30,7 +40,7 @@
   "eos_token": "</s>",
   "legacy": true,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": null,
+  "pad_token": "<pad>",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",