Upload tokenizer.json
Browse files — tokenizer.json (+1 −1)
tokenizer.json
CHANGED
@@ -89,7 +89,7 @@
  89     "pre_tokenizer": {
  90       "type": "Split",
  91       "pattern": {
  92 -       "Regex": "([\\s\\S])"
  92 +       "Regex": "(\\[UNK\\]|[\\s\\S])"
  93       },
  94       "behavior": "Removed",
  95       "invert": true