tikanosa committed on
Commit
e52ffca
·
verified ·
1 Parent(s): 9f8d5cf

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. README.md +0 -0
  2. model.safetensors +1 -1
  3. tokenizer_config.json +7 -0
README.md CHANGED
The diff for this file is too large to render. See raw diff
 
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:8d8715a17c2d0de9dd405e210012c58cb9c52b1f62c16083a10ea1b82c3482c2
3
  size 90866412
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e1ac80013c430d1209ad9843e8c62f6cba7873ab2c9970c41ad2f3d0aa7acb19
3
  size 90866412
tokenizer_config.json CHANGED
@@ -47,12 +47,19 @@
47
  "do_lower_case": true,
48
  "extra_special_tokens": {},
49
  "mask_token": "[MASK]",
 
50
  "model_max_length": 512,
51
  "never_split": null,
 
52
  "pad_token": "[PAD]",
 
 
53
  "sep_token": "[SEP]",
 
54
  "strip_accents": null,
55
  "tokenize_chinese_chars": true,
56
  "tokenizer_class": "BertTokenizer",
 
 
57
  "unk_token": "[UNK]"
58
  }
 
47
  "do_lower_case": true,
48
  "extra_special_tokens": {},
49
  "mask_token": "[MASK]",
50
+ "max_length": 512,
51
  "model_max_length": 512,
52
  "never_split": null,
53
+ "pad_to_multiple_of": null,
54
  "pad_token": "[PAD]",
55
+ "pad_token_type_id": 0,
56
+ "padding_side": "right",
57
  "sep_token": "[SEP]",
58
+ "stride": 0,
59
  "strip_accents": null,
60
  "tokenize_chinese_chars": true,
61
  "tokenizer_class": "BertTokenizer",
62
+ "truncation_side": "right",
63
+ "truncation_strategy": "longest_first",
64
  "unk_token": "[UNK]"
65
  }