Eraly-ml committed on
Commit
7f25454
·
verified ·
1 Parent(s): d05edcb

Upload BertForTokenClassification

Browse files
Files changed (1) hide show
  1. config.json +103 -104
config.json CHANGED
@@ -1,5 +1,4 @@
1
  {
2
- "_name_or_path": "Eraly-ml/KazBERT",
3
  "architectures": [
4
  "BertForTokenClassification"
5
  ],
@@ -10,112 +9,112 @@
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
- "0": "LABEL_0",
14
- "1": "LABEL_1",
15
- "2": "LABEL_2",
16
- "3": "LABEL_3",
17
- "4": "LABEL_4",
18
- "5": "LABEL_5",
19
- "6": "LABEL_6",
20
- "7": "LABEL_7",
21
- "8": "LABEL_8",
22
- "9": "LABEL_9",
23
- "10": "LABEL_10",
24
- "11": "LABEL_11",
25
- "12": "LABEL_12",
26
- "13": "LABEL_13",
27
- "14": "LABEL_14",
28
- "15": "LABEL_15",
29
- "16": "LABEL_16",
30
- "17": "LABEL_17",
31
- "18": "LABEL_18",
32
- "19": "LABEL_19",
33
- "20": "LABEL_20",
34
- "21": "LABEL_21",
35
- "22": "LABEL_22",
36
- "23": "LABEL_23",
37
- "24": "LABEL_24",
38
- "25": "LABEL_25",
39
- "26": "LABEL_26",
40
- "27": "LABEL_27",
41
- "28": "LABEL_28",
42
- "29": "LABEL_29",
43
- "30": "LABEL_30",
44
- "31": "LABEL_31",
45
- "32": "LABEL_32",
46
- "33": "LABEL_33",
47
- "34": "LABEL_34",
48
- "35": "LABEL_35",
49
- "36": "LABEL_36",
50
- "37": "LABEL_37",
51
- "38": "LABEL_38",
52
- "39": "LABEL_39",
53
- "40": "LABEL_40",
54
- "41": "LABEL_41",
55
- "42": "LABEL_42",
56
- "43": "LABEL_43",
57
- "44": "LABEL_44",
58
- "45": "LABEL_45",
59
- "46": "LABEL_46",
60
- "47": "LABEL_47",
61
- "48": "LABEL_48",
62
- "49": "LABEL_49",
63
- "50": "LABEL_50"
64
  },
65
  "initializer_range": 0.02,
66
  "intermediate_size": 3072,
67
  "label2id": {
68
- "LABEL_0": 0,
69
- "LABEL_1": 1,
70
- "LABEL_10": 10,
71
- "LABEL_11": 11,
72
- "LABEL_12": 12,
73
- "LABEL_13": 13,
74
- "LABEL_14": 14,
75
- "LABEL_15": 15,
76
- "LABEL_16": 16,
77
- "LABEL_17": 17,
78
- "LABEL_18": 18,
79
- "LABEL_19": 19,
80
- "LABEL_2": 2,
81
- "LABEL_20": 20,
82
- "LABEL_21": 21,
83
- "LABEL_22": 22,
84
- "LABEL_23": 23,
85
- "LABEL_24": 24,
86
- "LABEL_25": 25,
87
- "LABEL_26": 26,
88
- "LABEL_27": 27,
89
- "LABEL_28": 28,
90
- "LABEL_29": 29,
91
- "LABEL_3": 3,
92
- "LABEL_30": 30,
93
- "LABEL_31": 31,
94
- "LABEL_32": 32,
95
- "LABEL_33": 33,
96
- "LABEL_34": 34,
97
- "LABEL_35": 35,
98
- "LABEL_36": 36,
99
- "LABEL_37": 37,
100
- "LABEL_38": 38,
101
- "LABEL_39": 39,
102
- "LABEL_4": 4,
103
- "LABEL_40": 40,
104
- "LABEL_41": 41,
105
- "LABEL_42": 42,
106
- "LABEL_43": 43,
107
- "LABEL_44": 44,
108
- "LABEL_45": 45,
109
- "LABEL_46": 46,
110
- "LABEL_47": 47,
111
- "LABEL_48": 48,
112
- "LABEL_49": 49,
113
- "LABEL_5": 5,
114
- "LABEL_50": 50,
115
- "LABEL_6": 6,
116
- "LABEL_7": 7,
117
- "LABEL_8": 8,
118
- "LABEL_9": 9
119
  },
120
  "layer_norm_eps": 1e-12,
121
  "max_position_embeddings": 512,
@@ -125,7 +124,7 @@
125
  "pad_token_id": 0,
126
  "position_embedding_type": "absolute",
127
  "torch_dtype": "float32",
128
- "transformers_version": "4.47.0",
129
  "type_vocab_size": 2,
130
  "use_cache": true,
131
  "vocab_size": 32000
 
1
  {
 
2
  "architectures": [
3
  "BertForTokenClassification"
4
  ],
 
9
  "hidden_dropout_prob": 0.1,
10
  "hidden_size": 768,
11
  "id2label": {
12
+ "0": "O",
13
+ "1": "B-ADAGE",
14
+ "2": "I-ADAGE",
15
+ "3": "B-ART",
16
+ "4": "I-ART",
17
+ "5": "B-CARDINAL",
18
+ "6": "I-CARDINAL",
19
+ "7": "B-CONTACT",
20
+ "8": "I-CONTACT",
21
+ "9": "B-DATE",
22
+ "10": "I-DATE",
23
+ "11": "B-DISEASE",
24
+ "12": "I-DISEASE",
25
+ "13": "B-EVENT",
26
+ "14": "I-EVENT",
27
+ "15": "B-FACILITY",
28
+ "16": "I-FACILITY",
29
+ "17": "B-GPE",
30
+ "18": "I-GPE",
31
+ "19": "B-LANGUAGE",
32
+ "20": "I-LANGUAGE",
33
+ "21": "B-LAW",
34
+ "22": "I-LAW",
35
+ "23": "B-LOCATION",
36
+ "24": "I-LOCATION",
37
+ "25": "B-MISCELLANEOUS",
38
+ "26": "I-MISCELLANEOUS",
39
+ "27": "B-MONEY",
40
+ "28": "I-MONEY",
41
+ "29": "B-NON_HUMAN",
42
+ "30": "I-NON_HUMAN",
43
+ "31": "B-NORP",
44
+ "32": "I-NORP",
45
+ "33": "B-ORDINAL",
46
+ "34": "I-ORDINAL",
47
+ "35": "B-ORGANISATION",
48
+ "36": "I-ORGANISATION",
49
+ "37": "B-PERSON",
50
+ "38": "I-PERSON",
51
+ "39": "B-PERCENTAGE",
52
+ "40": "I-PERCENTAGE",
53
+ "41": "B-POSITION",
54
+ "42": "I-POSITION",
55
+ "43": "B-PRODUCT",
56
+ "44": "I-PRODUCT",
57
+ "45": "B-PROJECT",
58
+ "46": "I-PROJECT",
59
+ "47": "B-QUANTITY",
60
+ "48": "I-QUANTITY",
61
+ "49": "B-TIME",
62
+ "50": "I-TIME"
63
  },
64
  "initializer_range": 0.02,
65
  "intermediate_size": 3072,
66
  "label2id": {
67
+ "B-ADAGE": 1,
68
+ "B-ART": 3,
69
+ "B-CARDINAL": 5,
70
+ "B-CONTACT": 7,
71
+ "B-DATE": 9,
72
+ "B-DISEASE": 11,
73
+ "B-EVENT": 13,
74
+ "B-FACILITY": 15,
75
+ "B-GPE": 17,
76
+ "B-LANGUAGE": 19,
77
+ "B-LAW": 21,
78
+ "B-LOCATION": 23,
79
+ "B-MISCELLANEOUS": 25,
80
+ "B-MONEY": 27,
81
+ "B-NON_HUMAN": 29,
82
+ "B-NORP": 31,
83
+ "B-ORDINAL": 33,
84
+ "B-ORGANISATION": 35,
85
+ "B-PERCENTAGE": 39,
86
+ "B-PERSON": 37,
87
+ "B-POSITION": 41,
88
+ "B-PRODUCT": 43,
89
+ "B-PROJECT": 45,
90
+ "B-QUANTITY": 47,
91
+ "B-TIME": 49,
92
+ "I-ADAGE": 2,
93
+ "I-ART": 4,
94
+ "I-CARDINAL": 6,
95
+ "I-CONTACT": 8,
96
+ "I-DATE": 10,
97
+ "I-DISEASE": 12,
98
+ "I-EVENT": 14,
99
+ "I-FACILITY": 16,
100
+ "I-GPE": 18,
101
+ "I-LANGUAGE": 20,
102
+ "I-LAW": 22,
103
+ "I-LOCATION": 24,
104
+ "I-MISCELLANEOUS": 26,
105
+ "I-MONEY": 28,
106
+ "I-NON_HUMAN": 30,
107
+ "I-NORP": 32,
108
+ "I-ORDINAL": 34,
109
+ "I-ORGANISATION": 36,
110
+ "I-PERCENTAGE": 40,
111
+ "I-PERSON": 38,
112
+ "I-POSITION": 42,
113
+ "I-PRODUCT": 44,
114
+ "I-PROJECT": 46,
115
+ "I-QUANTITY": 48,
116
+ "I-TIME": 50,
117
+ "O": 0
118
  },
119
  "layer_norm_eps": 1e-12,
120
  "max_position_embeddings": 512,
 
124
  "pad_token_id": 0,
125
  "position_embedding_type": "absolute",
126
  "torch_dtype": "float32",
127
+ "transformers_version": "4.53.3",
128
  "type_vocab_size": 2,
129
  "use_cache": true,
130
  "vocab_size": 32000