ZON8955 committed on
Commit
3c6d0e9
·
verified ·
1 Parent(s): 1665ac5

Training in progress, step 30

Browse files
Files changed (4) hide show
  1. config.json +8 -13
  2. model.safetensors +2 -2
  3. tokenizer.json +2 -2
  4. training_args.bin +2 -2
config.json CHANGED
@@ -1,28 +1,23 @@
1
  {
2
  "architectures": [
3
- "BertForTokenClassification"
4
  ],
5
  "attention_probs_dropout_prob": 0.1,
6
  "classifier_dropout": null,
7
  "directionality": "bidi",
 
8
  "hidden_act": "gelu",
9
  "hidden_dropout_prob": 0.1,
10
  "hidden_size": 768,
11
  "id2label": {
12
- "0": "B-CLASS",
13
- "1": "B-SUP",
14
- "2": "I-CLASS",
15
- "3": "I-SUP",
16
- "4": "O"
17
  },
18
  "initializer_range": 0.02,
19
  "intermediate_size": 3072,
20
  "label2id": {
21
- "B-CLASS": 0,
22
- "B-SUP": 1,
23
- "I-CLASS": 2,
24
- "I-SUP": 3,
25
- "O": 4
26
  },
27
  "layer_norm_eps": 1e-12,
28
  "max_position_embeddings": 512,
@@ -36,8 +31,8 @@
36
  "pooler_size_per_head": 128,
37
  "pooler_type": "first_token_transform",
38
  "position_embedding_type": "absolute",
39
- "torch_dtype": "float32",
40
- "transformers_version": "4.51.3",
41
  "type_vocab_size": 2,
42
  "use_cache": true,
43
  "vocab_size": 21128
 
1
  {
2
  "architectures": [
3
+ "BertForSequenceClassification"
4
  ],
5
  "attention_probs_dropout_prob": 0.1,
6
  "classifier_dropout": null,
7
  "directionality": "bidi",
8
+ "dtype": "float32",
9
  "hidden_act": "gelu",
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
+ "0": "\u8001\u4eba\u5065\u4fdd\u88dc\u52a9",
14
+ "1": "\u91cd\u967d\u79ae\u91d1"
 
 
 
15
  },
16
  "initializer_range": 0.02,
17
  "intermediate_size": 3072,
18
  "label2id": {
19
+ "\u8001\u4eba\u5065\u4fdd\u88dc\u52a9": 0,
20
+ "\u91cd\u967d\u79ae\u91d1": 1
 
 
 
21
  },
22
  "layer_norm_eps": 1e-12,
23
  "max_position_embeddings": 512,
 
31
  "pooler_size_per_head": 128,
32
  "pooler_type": "first_token_transform",
33
  "position_embedding_type": "absolute",
34
+ "problem_type": "single_label_classification",
35
+ "transformers_version": "4.57.3",
36
  "type_vocab_size": 2,
37
  "use_cache": true,
38
  "vocab_size": 21128
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b33564274b6a98f46c070acf890ed783533461d63f1212d1c63b06fccfc83cf3
3
- size 406746908
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5b86c06a0da00f5f50f9059dc9c0b9c93896109e6f33a9fe86f6d60757fa6811
3
+ size 409100240
tokenizer.json CHANGED
@@ -2,13 +2,13 @@
2
  "version": "1.0",
3
  "truncation": {
4
  "direction": "Right",
5
- "max_length": 128,
6
  "strategy": "LongestFirst",
7
  "stride": 0
8
  },
9
  "padding": {
10
  "strategy": {
11
- "Fixed": 128
12
  },
13
  "direction": "Right",
14
  "pad_to_multiple_of": null,
 
2
  "version": "1.0",
3
  "truncation": {
4
  "direction": "Right",
5
+ "max_length": 512,
6
  "strategy": "LongestFirst",
7
  "stride": 0
8
  },
9
  "padding": {
10
  "strategy": {
11
+ "Fixed": 512
12
  },
13
  "direction": "Right",
14
  "pad_to_multiple_of": null,
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:917a511b0819ebafe857efd5c15913ad7035f00504a54bded92a8615f4c88fba
3
- size 5304
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:79271b4ab200834f3f75873a16c83755ff5cc406b2a7e0d00ed0dec995b6737c
3
+ size 5841