ZON8955 committed on
Commit
3204f08
·
verified ·
1 Parent(s): b45262e

Training in progress, step 30

Browse files
config.json CHANGED
@@ -5,21 +5,19 @@
5
  "attention_probs_dropout_prob": 0.1,
6
  "classifier_dropout": null,
7
  "directionality": "bidi",
8
- "gradient_checkpointing": false,
9
  "hidden_act": "gelu",
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
- "0": "\u8cc7\u6599\u67e5\u8a62",
14
- "1": "\u8cc7\u683c\u67e5\u8a62",
15
- "2": "\u9032\u5ea6\u67e5\u8a62"
16
  },
17
  "initializer_range": 0.02,
18
  "intermediate_size": 3072,
19
  "label2id": {
20
- "\u8cc7\u6599\u67e5\u8a62": 0,
21
- "\u8cc7\u683c\u67e5\u8a62": 1,
22
- "\u9032\u5ea6\u67e5\u8a62": 2
23
  },
24
  "layer_norm_eps": 1e-12,
25
  "max_position_embeddings": 512,
@@ -34,8 +32,7 @@
34
  "pooler_type": "first_token_transform",
35
  "position_embedding_type": "absolute",
36
  "problem_type": "single_label_classification",
37
- "torch_dtype": "float32",
38
- "transformers_version": "4.51.3",
39
  "type_vocab_size": 2,
40
  "use_cache": true,
41
  "vocab_size": 21128
 
5
  "attention_probs_dropout_prob": 0.1,
6
  "classifier_dropout": null,
7
  "directionality": "bidi",
8
+ "dtype": "float32",
9
  "hidden_act": "gelu",
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
+ "0": "\u8001\u4eba\u5065\u4fdd\u88dc\u52a9",
14
+ "1": "\u91cd\u967d\u79ae\u91d1"
 
15
  },
16
  "initializer_range": 0.02,
17
  "intermediate_size": 3072,
18
  "label2id": {
19
+ "\u8001\u4eba\u5065\u4fdd\u88dc\u52a9": 0,
20
+ "\u91cd\u967d\u79ae\u91d1": 1
 
21
  },
22
  "layer_norm_eps": 1e-12,
23
  "max_position_embeddings": 512,
 
32
  "pooler_type": "first_token_transform",
33
  "position_embedding_type": "absolute",
34
  "problem_type": "single_label_classification",
35
+ "transformers_version": "4.57.3",
 
36
  "type_vocab_size": 2,
37
  "use_cache": true,
38
  "vocab_size": 21128
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:1d3f694a1e070d371437cee3da8d4a3b77039b5b6028890db017edfc1193134d
3
- size 409103316
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0f3ef910a1cfe74b25c49fd0d5209816f757aeffd95c54cf055ab18223fe1786
3
+ size 409100240
special_tokens_map.json CHANGED
@@ -1,37 +1,7 @@
1
  {
2
- "cls_token": {
3
- "content": "[CLS]",
4
- "lstrip": false,
5
- "normalized": false,
6
- "rstrip": false,
7
- "single_word": false
8
- },
9
- "mask_token": {
10
- "content": "[MASK]",
11
- "lstrip": false,
12
- "normalized": false,
13
- "rstrip": false,
14
- "single_word": false
15
- },
16
- "pad_token": {
17
- "content": "[PAD]",
18
- "lstrip": false,
19
- "normalized": false,
20
- "rstrip": false,
21
- "single_word": false
22
- },
23
- "sep_token": {
24
- "content": "[SEP]",
25
- "lstrip": false,
26
- "normalized": false,
27
- "rstrip": false,
28
- "single_word": false
29
- },
30
- "unk_token": {
31
- "content": "[UNK]",
32
- "lstrip": false,
33
- "normalized": false,
34
- "rstrip": false,
35
- "single_word": false
36
- }
37
  }
 
1
  {
2
+ "cls_token": "[CLS]",
3
+ "mask_token": "[MASK]",
4
+ "pad_token": "[PAD]",
5
+ "sep_token": "[SEP]",
6
+ "unk_token": "[UNK]"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
  }
tokenizer.json CHANGED
@@ -1,7 +1,21 @@
1
  {
2
  "version": "1.0",
3
- "truncation": null,
4
- "padding": null,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  "added_tokens": [
6
  {
7
  "id": 0,
@@ -54,7 +68,7 @@
54
  "clean_text": true,
55
  "handle_chinese_chars": true,
56
  "strip_accents": null,
57
- "lowercase": true
58
  },
59
  "pre_tokenizer": {
60
  "type": "BertPreTokenizer"
 
1
  {
2
  "version": "1.0",
3
+ "truncation": {
4
+ "direction": "Right",
5
+ "max_length": 512,
6
+ "strategy": "LongestFirst",
7
+ "stride": 0
8
+ },
9
+ "padding": {
10
+ "strategy": {
11
+ "Fixed": 512
12
+ },
13
+ "direction": "Right",
14
+ "pad_to_multiple_of": null,
15
+ "pad_id": 0,
16
+ "pad_type_id": 0,
17
+ "pad_token": "[PAD]"
18
+ },
19
  "added_tokens": [
20
  {
21
  "id": 0,
 
68
  "clean_text": true,
69
  "handle_chinese_chars": true,
70
  "strip_accents": null,
71
+ "lowercase": false
72
  },
73
  "pre_tokenizer": {
74
  "type": "BertPreTokenizer"
tokenizer_config.json CHANGED
@@ -43,10 +43,10 @@
43
  },
44
  "clean_up_tokenization_spaces": false,
45
  "cls_token": "[CLS]",
46
- "do_lower_case": true,
47
  "extra_special_tokens": {},
48
  "mask_token": "[MASK]",
49
- "model_max_length": 1000000000000000019884624838656,
50
  "pad_token": "[PAD]",
51
  "sep_token": "[SEP]",
52
  "strip_accents": null,
 
43
  },
44
  "clean_up_tokenization_spaces": false,
45
  "cls_token": "[CLS]",
46
+ "do_lower_case": false,
47
  "extra_special_tokens": {},
48
  "mask_token": "[MASK]",
49
+ "model_max_length": 512,
50
  "pad_token": "[PAD]",
51
  "sep_token": "[SEP]",
52
  "strip_accents": null,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:19a00ff81b3f0f4a0b6902742698cbf6f85dad7aea304c2b4fa88cad50bfb5fc
3
- size 5304
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c8d980749353593f888779985c2afcf68b488325292158b69b2c306613f6808d
3
+ size 5841