nosuchjihyun committed on
Commit
4d664b2
·
verified ·
1 Parent(s): ace55df

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. model.safetensors +1 -1
  2. tokenizer.json +6 -1
  3. tokenizer_config.json +4 -0
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:50b4b8c859f3d8fb4f5bdea5ccbb59b06d2dca1017564606af59a1e0953f1b1c
3
  size 503914648
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:584204b6484b226852feca624d854e6429346db32a8765e263557a7de7bc35ed
3
  size 503914648
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
1
  {
2
  "version": "1.0",
3
- "truncation": null,
 
 
 
 
 
4
  "padding": null,
5
  "added_tokens": [
6
  {
 
1
  {
2
  "version": "1.0",
3
+ "truncation": {
4
+ "direction": "Right",
5
+ "max_length": 2014,
6
+ "strategy": "LongestFirst",
7
+ "stride": 0
8
+ },
9
  "padding": null,
10
  "added_tokens": [
11
  {
tokenizer_config.json CHANGED
@@ -256,8 +256,12 @@
256
  "cls_token": "<classification>",
257
  "eos_token": "<eos>",
258
  "extra_special_tokens": {},
 
259
  "model_max_length": 1000000000000000019884624838656,
260
  "pad_token": "<pad>",
 
261
  "tokenizer_class": "GPT2Tokenizer",
 
 
262
  "unk_token": "<unk>"
263
  }
 
256
  "cls_token": "<classification>",
257
  "eos_token": "<eos>",
258
  "extra_special_tokens": {},
259
+ "max_length": 2014,
260
  "model_max_length": 1000000000000000019884624838656,
261
  "pad_token": "<pad>",
262
+ "stride": 0,
263
  "tokenizer_class": "GPT2Tokenizer",
264
+ "truncation_side": "right",
265
+ "truncation_strategy": "longest_first",
266
  "unk_token": "<unk>"
267
  }