alenaa committed on
Commit
29f73bb
·
verified ·
1 Parent(s): 32c575f

Upload tokenizer

Browse files
Files changed (2) hide show
  1. tokenizer_config.json +2 -2
  2. vocab.txt +0 -0
tokenizer_config.json CHANGED
@@ -44,9 +44,9 @@
44
  "clean_up_tokenization_spaces": true,
45
  "cls_token": "[CLS]",
46
  "do_basic_tokenize": true,
47
- "do_lower_case": true,
48
  "mask_token": "[MASK]",
49
- "model_max_length": 512,
50
  "never_split": null,
51
  "pad_token": "[PAD]",
52
  "sep_token": "[SEP]",
 
44
  "clean_up_tokenization_spaces": true,
45
  "cls_token": "[CLS]",
46
  "do_basic_tokenize": true,
47
+ "do_lower_case": false,
48
  "mask_token": "[MASK]",
49
+ "model_max_length": 1000000000000000019884624838656,
50
  "never_split": null,
51
  "pad_token": "[PAD]",
52
  "sep_token": "[SEP]",
vocab.txt CHANGED
The diff for this file is too large to render. See raw diff