Upload tokenizer

#4
by ArthurZ (HF Staff) — opened
special_tokens_map.json CHANGED
@@ -227,7 +227,7 @@
227
  "mask_token": {
228
  "content": "<mask>",
229
  "lstrip": true,
230
- "normalized": true,
231
  "rstrip": false,
232
  "single_word": false
233
  },
 
227
  "mask_token": {
228
  "content": "<mask>",
229
  "lstrip": true,
230
+ "normalized": false,
231
  "rstrip": false,
232
  "single_word": false
233
  },
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:610501fe8857739dbb451ab69a0a795cb87dadcf8873d7e2227764d165e72e72
3
- size 17331379
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a9633f7d33a10432bf06e8865c3c7d4e4798ed432be7bbc7a4a29051a7f5e594
3
+ size 17331380
tokenizer_config.json CHANGED
@@ -1651,7 +1651,7 @@
1651
  "256203": {
1652
  "content": "<mask>",
1653
  "lstrip": true,
1654
- "normalized": true,
1655
  "rstrip": false,
1656
  "single_word": false,
1657
  "special": true
 
1651
  "256203": {
1652
  "content": "<mask>",
1653
  "lstrip": true,
1654
+ "normalized": false,
1655
  "rstrip": false,
1656
  "single_word": false,
1657
  "special": true