Sayan01 committed
Commit aba4087
1 Parent(s): 20cb4d8

Training in progress, epoch 1
logs/1655971813.007557/events.out.tfevents.1655971813.9fd42316f55c.73.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:effb3c8df6abdc9b83ecb06d845710f6b2c52a07545015b913d2f748eb1a3449
+size 5347
logs/events.out.tfevents.1655971812.9fd42316f55c.73.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c0fdb19d085f59969e3b5961227a78389c9becd2568d2012a270077dd31c701a
+size 4155
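
Both new log files, like every binary artifact in this commit, are stored through Git LFS, so the diff shows only pointer files: a spec version line, the SHA-256 of the real content (oid), and its byte length (size). A minimal sketch of how those two fields are derived, assuming the log file has been pulled locally:

```python
import hashlib

# Minimal sketch: a Git LFS pointer records the SHA-256 digest of the
# file's raw bytes as "oid" and its length in bytes as "size".
def lfs_pointer(path):
    data = open(path, "rb").read()
    return ("version https://git-lfs.github.com/spec/v1\n"
            f"oid sha256:{hashlib.sha256(data).hexdigest()}\n"
            f"size {len(data)}\n")

print(lfs_pointer("logs/events.out.tfevents.1655971812.9fd42316f55c.73.0"))
```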
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ddfe4d331bdca9f9bdcf51e3c5617e8ad3f67af70dd99e97a3b039e4215e1fe7
+oid sha256:0ccabbce134e53c045eb9320fa7969dd973aa76b1167a8240db27743e81d354f
 size 17561831
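
Only the weights changed here; the checkpoint keeps the same size (17,561,831 bytes, so roughly 17.5 MB). A hedged sketch for inspecting it after `git lfs pull`, assuming the standard Transformers convention that pytorch_model.bin is a torch-pickled state dict:

```python
import torch

# Hedged sketch: a Transformers pytorch_model.bin unpickles to a state
# dict mapping parameter names to tensors.
state = torch.load("pytorch_model.bin", map_location="cpu")
n_params = sum(t.numel() for t in state.values())
print(f"{len(state)} tensors, {n_params:,} parameters")
```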
tokenizer.json CHANGED
@@ -1,6 +1,11 @@
 {
   "version": "1.0",
-  "truncation": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 512,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
   "padding": null,
   "added_tokens": [
     {
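
The only change to tokenizer.json is that truncation goes from null to an explicit policy: right-truncate to 512 tokens with the LongestFirst strategy and no stride. This is the block the tokenizers library writes once truncation has been enabled before saving; a minimal sketch of one way such a block can be produced (the exact call that produced this commit is an assumption):

```python
from tokenizers import Tokenizer

# Minimal sketch: enabling truncation on a fast tokenizer yields the
# "truncation" block shown in the diff when the tokenizer is re-saved.
# direction defaults to right, matching "Right" above.
tok = Tokenizer.from_file("tokenizer.json")
tok.enable_truncation(max_length=512, stride=0, strategy="longest_first")
tok.save("tokenizer.json")
```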
tokenizer_config.json CHANGED
@@ -1,14 +1,12 @@
 {
   "cls_token": "[CLS]",
-  "do_basic_tokenize": true,
   "do_lower_case": true,
   "mask_token": "[MASK]",
   "model_max_length": 512,
-  "name_or_path": "textattack/bert-base-uncased-SST-2",
-  "never_split": null,
+  "name_or_path": "bert-base-uncased",
   "pad_token": "[PAD]",
   "sep_token": "[SEP]",
-  "special_tokens_map_file": "/root/.cache/huggingface/transformers/c7c9b9c5d8bab3ba2ddaa08b138aa385f9790f30e8dce3bfe47e3f10bd97f4ad.dd8bd9bfd3664b530ea4e645105f557769387b3da9f79bdb55ed556bdd80611d",
+  "special_tokens_map_file": null,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "BertTokenizer",
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3fbc4afdf016b7660c7ccfabb4499eb33e5e2817ad9ae2c2cd3a4c2da2333c14
-size 3375
+oid sha256:29960c9880a7a1dfde93d1da31e4db3b0590e1d6d53c5ec8fa453761f56d052e
+size 3311
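
training_args.bin is the torch-pickled TrainingArguments object that the Trainer saves alongside each checkpoint; the size change (3375 to 3311 bytes) reflects different training hyperparameters, not weights. A hedged sketch for reading it back, assuming a matching transformers version is importable on the unpickling side:

```python
import torch

# Hedged sketch: training_args.bin unpickles to a TrainingArguments
# instance. On torch >= 2.6 pass weights_only=False, since the stricter
# default refuses to unpickle arbitrary objects.
args = torch.load("training_args.bin")
print(args.num_train_epochs, args.learning_rate)
```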