havens2 committed on
Commit
ee6cb7d
·
1 Parent(s): b0fa49a

Training in progress epoch 0

Browse files
Files changed (3) hide show
  1. config.json +1 -1
  2. pytorch_model.bin +1 -1
  3. tokenizer_config.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "allenai/scibert_scivocab_uncased",
3
  "architectures": [
4
  "BertForTokenClassification"
5
  ],
 
1
  {
2
+ "_name_or_path": "havens2/scBERT_SER",
3
  "architectures": [
4
  "BertForTokenClassification"
5
  ],
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d317745e9d683a8aa8b51b000f7a4594e62e6aa31af822f4193b793480d22a5d
3
  size 437411309
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:39499f1cd6da800d50df94ff5a5c26a749d52791d19f5ed212e6cba334c08630
3
  size 437411309
tokenizer_config.json CHANGED
@@ -3,7 +3,7 @@
3
  "do_basic_tokenize": true,
4
  "do_lower_case": true,
5
  "mask_token": "[MASK]",
6
- "name_or_path": "allenai/scibert_scivocab_uncased",
7
  "never_split": null,
8
  "pad_token": "[PAD]",
9
  "sep_token": "[SEP]",
 
3
  "do_basic_tokenize": true,
4
  "do_lower_case": true,
5
  "mask_token": "[MASK]",
6
+ "name_or_path": "havens2/scBERT_SER",
7
  "never_split": null,
8
  "pad_token": "[PAD]",
9
  "sep_token": "[SEP]",