Upload tokenizer
Browse files — tokenizer_config.json: +1 line, −0 lines
tokenizer_config.json
CHANGED
|
@@ -1,4 +1,5 @@
|
|
| 1 |
{
|
|
|
|
| 2 |
"name_or_path": "Gunulhona/tbbarttokenizer",
|
| 3 |
"special_tokens_map_file": "/root/.cache/huggingface/transformers/3e6abf40f4fadbea9e7b539c182868d979838d8f7e6cdcdf2ed52ddcf01420c0.15447ae63ad4a2eba8bc7a5146360711dc32b315b4f1488b4806debf35315e9a",
|
| 4 |
"tokenizer_class": "PreTrainedTokenizerFast"
|
|
|
|
| 1 |
{
|
| 2 |
+ "model_max_length": 1000000000000000019884624838656,
|
| 3 |
"name_or_path": "Gunulhona/tbbarttokenizer",
|
| 4 |
"special_tokens_map_file": "/root/.cache/huggingface/transformers/3e6abf40f4fadbea9e7b539c182868d979838d8f7e6cdcdf2ed52ddcf01420c0.15447ae63ad4a2eba8bc7a5146360711dc32b315b4f1488b4806debf35315e9a",
|
| 5 |
"tokenizer_class": "PreTrainedTokenizerFast"
|