Upload OGBERT tokenizer (vocab_size=32768)
Browse files — tokenizer.json (+3, -1)
tokenizer.json
CHANGED
|
@@ -76,7 +76,9 @@
       "special": false
     }
   ],
-  "normalizer":
+  "normalizer": {
+    "type": "Lowercase"
+  },
   "pre_tokenizer": {
     "type": "Split",
     "pattern": {