Sayan01 committed
Commit e4e6ef2 · 1 Parent(s): 274e9fb

Training in progress, epoch 1

logs/1655920549.4711905/events.out.tfevents.1655920549.851641e31c58.72.97 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e11a413c4afefd5a84d141887880780dbbccaf66fb17ca87447d66614b34f089
+ size 5347
logs/events.out.tfevents.1655918554.851641e31c58.72.94 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:de03844ce2f7ff26c403ad048732890b1b7b4a362be2eae54313940025b0876e
- size 8475
+ oid sha256:bef5ed971fcb620f055cc98f71e7570766b12e0bea4192c05ab1885317ed0b2c
+ size 8829
logs/events.out.tfevents.1655920549.851641e31c58.72.96 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc4738dd757b371f141a98c1b38b35acc2af3aedad6a831ae9090c59d541343a
+ size 4155
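
The events.out.tfevents.* files above are TensorBoard logs written during training, stored here as Git LFS pointers rather than raw binaries. A minimal sketch of how one could be inspected locally, assuming TensorBoard is installed and the actual event file (not just the pointer) has been downloaded to the path shown:

# Sketch: read scalar summaries from a downloaded TensorBoard event file.
# The path is illustrative; any event file from this commit would work.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("logs/events.out.tfevents.1655918554.851641e31c58.72.94")
acc.Reload()  # parse the event file from disk

# Print every recorded value for each scalar tag the trainer logged
# (e.g. training loss, learning rate).
for tag in acc.Tags()["scalars"]:
    for event in acc.Scalars(tag):
        print(tag, event.step, event.value)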
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:83e507ccd56c08089fb9e161ffce503cbd4919496245e2791dc181a49a9d1a5a
+ oid sha256:0eb5111e74968a1e30c285342cd61f894945b8349431c36d7bd3d0cf7c52216e
  size 17561831
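
Each binary here is tracked as a Git LFS pointer: a small text stub recording the SHA-256 of the real blob and its size in bytes. Note that pytorch_model.bin keeps the same size (17561831 bytes) while its oid changes, which is what you would expect when only the weight values are updated after an epoch. A minimal sketch for checking a downloaded blob against its pointer fields, assuming the real file sits at the local path shown:

# Sketch: verify a downloaded blob against its Git LFS pointer.
# Expected values are copied from the pytorch_model.bin pointer above.
import hashlib
import os

path = "pytorch_model.bin"  # assumed local download location
expected_oid = "0eb5111e74968a1e30c285342cd61f894945b8349431c36d7bd3d0cf7c52216e"
expected_size = 17561831

# Hash in chunks so large checkpoints are not loaded into memory at once.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("pytorch_model.bin matches its LFS pointer")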
tokenizer_config.json CHANGED
@@ -1,12 +1,14 @@
  {
  "cls_token": "[CLS]",
+ "do_basic_tokenize": true,
  "do_lower_case": true,
  "mask_token": "[MASK]",
  "model_max_length": 512,
- "name_or_path": "bert-base-uncased",
+ "name_or_path": "textattack/bert-base-uncased-SST-2",
+ "never_split": null,
  "pad_token": "[PAD]",
  "sep_token": "[SEP]",
- "special_tokens_map_file": null,
+ "special_tokens_map_file": "/root/.cache/huggingface/transformers/c7c9b9c5d8bab3ba2ddaa08b138aa385f9790f30e8dce3bfe47e3f10bd97f4ad.dd8bd9bfd3664b530ea4e645105f557769387b3da9f79bdb55ed556bdd80611d",
  "strip_accents": null,
  "tokenize_chinese_chars": true,
  "tokenizer_class": "BertTokenizer",