calbors committed on
Commit
59f8703
·
verified ·
1 Parent(s): 1bbf301

Upload tokenizer

Browse files
Files changed (2) hide show
  1. special_tokens_map.json +4 -0
  2. tokenizer_config.json +45 -0
special_tokens_map.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "pad_token": "_",
3
+ "unk_token": "?"
4
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "4": {
4
+ "content": "?",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "5": {
12
+ "content": "_",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ }
19
+ },
20
+ "auto_map": {
21
+ "AutoTokenizer": [
22
+ "minimal_hub_utils.Tokenizer",
23
+ null
24
+ ]
25
+ },
26
+ "clean_up_tokenization_spaces": false,
27
+ "extra_special_tokens": {},
28
+ "model_max_length": 1000000000000000019884624838656,
29
+ "pad_token": "_",
30
+ "special_tokens": {
31
+ "pad": "_",
32
+ "unk": "?"
33
+ },
34
+ "split_special_tokens": true,
35
+ "tokenizer_class": "Tokenizer",
36
+ "unk_token": "?",
37
+ "vocab": [
38
+ "a",
39
+ "b",
40
+ "c",
41
+ "d",
42
+ "?",
43
+ "_"
44
+ ]
45
+ }