Nuri-Tas committed on
Commit
8c63427
·
1 Parent(s): 458e6d0

Upload tokenizer

Browse files
Files changed (3) hide show
  1. tokenizer.json +0 -0
  2. tokenizer_config.json +3 -0
  3. vocab.json +0 -0
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -48,9 +48,12 @@
48
  "eos_token": "</s>",
49
  "errors": "replace",
50
  "mask_token": "<mask>",
 
51
  "model_max_length": 1000000000000000019884624838656,
52
  "pad_token": "<pad>",
 
53
  "sep_token": "</s>",
54
  "tokenizer_class": "RobertaTokenizer",
 
55
  "unk_token": "<unk>"
56
  }
 
48
  "eos_token": "</s>",
49
  "errors": "replace",
50
  "mask_token": "<mask>",
51
+ "max_length": 256,
52
  "model_max_length": 1000000000000000019884624838656,
53
  "pad_token": "<pad>",
54
+ "return_special_tokens_mask": true,
55
  "sep_token": "</s>",
56
  "tokenizer_class": "RobertaTokenizer",
57
+ "trim_offsets": true,
58
  "unk_token": "<unk>"
59
  }
vocab.json CHANGED
The diff for this file is too large to render. See raw diff