albertmartinez committed
Commit 0f4ad0d · verified · 1 Parent(s): 584cacb

Training in progress, step 500

Files changed (3):
  1. model.safetensors (+1, -1)
  2. tokenizer_config.json (+7, -0)
  3. training_args.bin (+2, -2)
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:7966d4102590d17013c47301e1edca13a01bfbc58a1c3aa39e080e42f4a0abae
+ oid sha256:ba285bf4c4fe72efa5a64f224b6a869ce74d6610d064a8d73a9620aa40f321f8
  size 438001712
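Note that the file shown here is only a Git LFS pointer (sha256 oid plus size); the actual weights live in LFS storage. A minimal sketch of verifying and loading the downloaded object, assuming it has already been fetched locally as "model.safetensors" (the repository id and model head are not stated in this commit):

import hashlib

from safetensors.torch import load_file

# The LFS pointer records only the sha256 oid and byte size; the payload is
# resolved when the repo is cloned with LFS or downloaded from the Hub.
with open("model.safetensors", "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
print(digest)  # should match the "+ oid sha256:ba285bf4..." line above

state_dict = load_file("model.safetensors")          # tensor name -> torch.Tensor
print(sum(t.numel() for t in state_dict.values()))   # total parameter count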
tokenizer_config.json CHANGED
@@ -46,12 +46,19 @@
    "do_basic_tokenize": true,
    "do_lower_case": true,
    "mask_token": "[MASK]",
+   "max_length": 512,
    "model_max_length": 512,
    "never_split": null,
+   "pad_to_multiple_of": null,
    "pad_token": "[PAD]",
+   "pad_token_type_id": 0,
+   "padding_side": "right",
    "sep_token": "[SEP]",
+   "stride": 0,
    "strip_accents": null,
    "tokenize_chinese_chars": true,
    "tokenizer_class": "BertTokenizer",
+   "truncation_side": "right",
+   "truncation_strategy": "longest_first",
    "unk_token": "[UNK]"
  }
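The keys added here mirror call-time padding/truncation settings of the tokenizer. A minimal sketch of how they map onto an encoding call, assuming the checkpoint containing this tokenizer_config.json is available locally as "./checkpoint-500" (a hypothetical path, not named in this commit):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./checkpoint-500")

# Encode with arguments matching the new config keys in the diff above.
enc = tok(
    "an example sentence",
    padding="max_length",        # pads on the "right" side, per padding_side
    truncation="longest_first",  # matches truncation_strategy
    max_length=512,              # matches max_length / model_max_length
    stride=0,
    return_tensors="pt",
)
print(enc["input_ids"].shape)  # torch.Size([1, 512])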
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:53bc8014e93020f2b6690a7cf2c38c59c476d0a5b91777b8c2949fde8c0c562b
- size 5240
+ oid sha256:9b8e3a24dd99f0a7f00a946f5503fa7918ad78899f0c7d143c925cb79ff32051
+ size 5176
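training_args.bin is typically a pickled transformers TrainingArguments object written by Trainer; the changed oid and size indicate the run's hyperparameters were updated. A minimal sketch of inspecting it, assuming the file has been fetched locally (nothing else is taken from this commit):

import torch

# Pickled Python object, not tensors, so weights_only must be False.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # e.g. "TrainingArguments"
print(args.per_device_train_batch_size, args.learning_rate, args.num_train_epochs)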