emrecengdev committed on
Commit
b81d34a
·
verified ·
1 Parent(s): 27a7f5b

Upload folder using huggingface_hub

Browse files
Files changed (5) hide show
  1. README.md +21 -0
  2. class_map.json +216 -0
  3. eval_onnx_fp32.json +25 -0
  4. model_fp32.onnx +3 -0
  5. training_metrics.json +506 -0
README.md ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Image Classification Model Card
2
+
3
+ ## Overview
4
+ This repository contains an ONNX image classification model for recognizing visual symbol classes in small cropped images.
5
+
6
+ ## Files
7
+ - `model_fp32.onnx`: FP32 ONNX model artifact
8
+ - `eval_onnx_fp32.json`: Evaluation summary for the ONNX model
9
+ - `training_metrics.json`: Training/validation metric logs
10
+ - `class_map.json`: Label mapping used by the model
11
+
12
+ ## Intended Use
13
+ - Inference in OCR or visual classification pipelines
14
+ - Benchmarking and experimentation
15
+
16
+ ## Limitations
17
+ - Performance depends on image quality and class balance
18
+ - May underperform on out-of-domain samples
19
+
20
+ ## Ethical Considerations
21
+ Use this model in lawful and privacy-respecting contexts.
class_map.json ADDED
@@ -0,0 +1,216 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "class_mode": "observed",
3
+ "class_to_idx": {
4
+ "0": 0,
5
+ "2": 1,
6
+ "4": 2,
7
+ "6": 3,
8
+ "7": 4,
9
+ "8": 5,
10
+ "9": 6,
11
+ "10": 7,
12
+ "11": 8,
13
+ "12": 9,
14
+ "13": 10,
15
+ "14": 11,
16
+ "15": 12,
17
+ "16": 13,
18
+ "17": 14,
19
+ "18": 15,
20
+ "19": 16,
21
+ "20": 17,
22
+ "21": 18,
23
+ "22": 19,
24
+ "23": 20,
25
+ "24": 21,
26
+ "25": 22,
27
+ "26": 23,
28
+ "27": 24,
29
+ "28": 25,
30
+ "29": 26,
31
+ "30": 27,
32
+ "31": 28,
33
+ "32": 29,
34
+ "33": 30,
35
+ "34": 31,
36
+ "35": 32,
37
+ "36": 33,
38
+ "37": 34,
39
+ "38": 35,
40
+ "39": 36,
41
+ "40": 37,
42
+ "41": 38,
43
+ "42": 39,
44
+ "43": 40,
45
+ "44": 41,
46
+ "45": 42,
47
+ "46": 43,
48
+ "47": 44,
49
+ "48": 45,
50
+ "49": 46,
51
+ "50": 47,
52
+ "51": 48,
53
+ "52": 49,
54
+ "53": 50,
55
+ "54": 51,
56
+ "55": 52,
57
+ "56": 53,
58
+ "57": 54,
59
+ "58": 55,
60
+ "59": 56,
61
+ "60": 57,
62
+ "61": 58,
63
+ "62": 59,
64
+ "63": 60,
65
+ "64": 61,
66
+ "65": 62,
67
+ "66": 63,
68
+ "67": 64,
69
+ "68": 65,
70
+ "69": 66,
71
+ "70": 67,
72
+ "71": 68,
73
+ "72": 69,
74
+ "73": 70,
75
+ "74": 71,
76
+ "75": 72,
77
+ "76": 73,
78
+ "77": 74,
79
+ "78": 75,
80
+ "79": 76,
81
+ "80": 77,
82
+ "81": 78,
83
+ "82": 79,
84
+ "83": 80,
85
+ "84": 81,
86
+ "85": 82,
87
+ "86": 83,
88
+ "87": 84,
89
+ "88": 85,
90
+ "89": 86,
91
+ "90": 87,
92
+ "91": 88,
93
+ "92": 89,
94
+ "93": 90,
95
+ "94": 91,
96
+ "95": 92,
97
+ "96": 93,
98
+ "97": 94,
99
+ "98": 95,
100
+ "99": 96,
101
+ "100": 97,
102
+ "101": 98,
103
+ "102": 99,
104
+ "103": 100,
105
+ "104": 101,
106
+ "105": 102
107
+ },
108
+ "idx_to_class": {
109
+ "0": 0,
110
+ "1": 2,
111
+ "2": 4,
112
+ "3": 6,
113
+ "4": 7,
114
+ "5": 8,
115
+ "6": 9,
116
+ "7": 10,
117
+ "8": 11,
118
+ "9": 12,
119
+ "10": 13,
120
+ "11": 14,
121
+ "12": 15,
122
+ "13": 16,
123
+ "14": 17,
124
+ "15": 18,
125
+ "16": 19,
126
+ "17": 20,
127
+ "18": 21,
128
+ "19": 22,
129
+ "20": 23,
130
+ "21": 24,
131
+ "22": 25,
132
+ "23": 26,
133
+ "24": 27,
134
+ "25": 28,
135
+ "26": 29,
136
+ "27": 30,
137
+ "28": 31,
138
+ "29": 32,
139
+ "30": 33,
140
+ "31": 34,
141
+ "32": 35,
142
+ "33": 36,
143
+ "34": 37,
144
+ "35": 38,
145
+ "36": 39,
146
+ "37": 40,
147
+ "38": 41,
148
+ "39": 42,
149
+ "40": 43,
150
+ "41": 44,
151
+ "42": 45,
152
+ "43": 46,
153
+ "44": 47,
154
+ "45": 48,
155
+ "46": 49,
156
+ "47": 50,
157
+ "48": 51,
158
+ "49": 52,
159
+ "50": 53,
160
+ "51": 54,
161
+ "52": 55,
162
+ "53": 56,
163
+ "54": 57,
164
+ "55": 58,
165
+ "56": 59,
166
+ "57": 60,
167
+ "58": 61,
168
+ "59": 62,
169
+ "60": 63,
170
+ "61": 64,
171
+ "62": 65,
172
+ "63": 66,
173
+ "64": 67,
174
+ "65": 68,
175
+ "66": 69,
176
+ "67": 70,
177
+ "68": 71,
178
+ "69": 72,
179
+ "70": 73,
180
+ "71": 74,
181
+ "72": 75,
182
+ "73": 76,
183
+ "74": 77,
184
+ "75": 78,
185
+ "76": 79,
186
+ "77": 80,
187
+ "78": 81,
188
+ "79": 82,
189
+ "80": 83,
190
+ "81": 84,
191
+ "82": 85,
192
+ "83": 86,
193
+ "84": 87,
194
+ "85": 88,
195
+ "86": 89,
196
+ "87": 90,
197
+ "88": 91,
198
+ "89": 92,
199
+ "90": 93,
200
+ "91": 94,
201
+ "92": 95,
202
+ "93": 96,
203
+ "94": 97,
204
+ "95": 98,
205
+ "96": 99,
206
+ "97": 100,
207
+ "98": 101,
208
+ "99": 102,
209
+ "100": 103,
210
+ "101": 104,
211
+ "102": 105
212
+ },
213
+ "num_classes": 103,
214
+ "label_min": 0,
215
+ "label_max": 105
216
+ }
eval_onnx_fp32.json ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model": "/home/kkaisrvr/projects/live/captchaslayer/artifacts/models/math_mnv3_17k_v1/model_fp32.onnx",
3
+ "split_csv": "/home/kkaisrvr/projects/live/captchaslayer/artifacts/math_dataset_17k_v1/test.csv",
4
+ "num_samples": 2045,
5
+ "batch_size": 1,
6
+ "threads": 64,
7
+ "accuracy": 0.9613691931540342,
8
+ "macro_f1": 0.9320283953570693,
9
+ "latency_ms_per_sample": {
10
+ "mean": 4.756918244575169,
11
+ "p50": 4.349756985902786,
12
+ "p95": 6.074900273233652
13
+ },
14
+ "throughput_samples_per_sec": 210.22013593368118,
15
+ "acceptance_policy": {
16
+ "min_confidence": 0.6,
17
+ "min_margin": 0.1
18
+ },
19
+ "acceptance_metrics": {
20
+ "coverage": 0.9682151589242054,
21
+ "accepted_samples": 1980,
22
+ "accepted_accuracy": 0.9732323232323232,
23
+ "rejected_samples": 65
24
+ }
25
+ }
model_fp32.onnx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c6ca8c9e0d68f6a95a2d47f3fee1b378391fb6722598941db6f5efe1c4dbed49
3
+ size 6503358
training_metrics.json ADDED
@@ -0,0 +1,506 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_epoch": 33,
3
+ "best_stage": "stage1",
4
+ "best_val_acc": 0.9545232273838631,
5
+ "test_loss": 0.646228389306502,
6
+ "test_acc": 0.9613691931540342,
7
+ "test_macro_f1": 0.9320283953570693,
8
+ "hard_mining": {
9
+ "enabled": true,
10
+ "executed": true,
11
+ "num_samples": 16334,
12
+ "confidence_threshold": 0.85,
13
+ "misclassified": 545,
14
+ "low_confidence": 1700,
15
+ "both": 451,
16
+ "weight_min": 1.0,
17
+ "weight_max": 4.0,
18
+ "weight_mean": 1.176564221868495
19
+ },
20
+ "history": [
21
+ {
22
+ "stage": "stage1",
23
+ "epoch": 1,
24
+ "train_loss": 4.313515394232994,
25
+ "train_acc": 0.032019101261173015,
26
+ "train_macro_f1": 0.02712417475898117,
27
+ "val_loss": 7.769173968922008,
28
+ "val_acc": 0.014180929095354523,
29
+ "val_macro_f1": 0.003789197040298735,
30
+ "duration_seconds": 41.54798349970952
31
+ },
32
+ {
33
+ "stage": "stage1",
34
+ "epoch": 2,
35
+ "train_loss": 3.444204147471938,
36
+ "train_acc": 0.09850618342108486,
37
+ "train_macro_f1": 0.105352354413626,
38
+ "val_loss": 3.6144719557328657,
39
+ "val_acc": 0.10073349633251834,
40
+ "val_macro_f1": 0.07000853003986085,
41
+ "duration_seconds": 38.47417518682778
42
+ },
43
+ {
44
+ "stage": "stage1",
45
+ "epoch": 3,
46
+ "train_loss": 2.8109747060509616,
47
+ "train_acc": 0.20154279417166646,
48
+ "train_macro_f1": 0.21854176612220216,
49
+ "val_loss": 2.98252272605896,
50
+ "val_acc": 0.17946210268948656,
51
+ "val_macro_f1": 0.14989537030978706,
52
+ "duration_seconds": 38.17656293977052
53
+ },
54
+ {
55
+ "stage": "stage1",
56
+ "epoch": 4,
57
+ "train_loss": 2.2996954086214996,
58
+ "train_acc": 0.34761846455246725,
59
+ "train_macro_f1": 0.37294280895416104,
60
+ "val_loss": 2.3156512650576504,
61
+ "val_acc": 0.37457212713936433,
62
+ "val_macro_f1": 0.36651348862376204,
63
+ "duration_seconds": 37.970143370795995
64
+ },
65
+ {
66
+ "stage": "stage1",
67
+ "epoch": 5,
68
+ "train_loss": 1.6524738683256992,
69
+ "train_acc": 0.6124035753642708,
70
+ "train_macro_f1": 0.6176766827413694,
71
+ "val_loss": 1.4232586188749834,
72
+ "val_acc": 0.7530562347188264,
73
+ "val_macro_f1": 0.7361482815159818,
74
+ "duration_seconds": 37.93143594684079
75
+ },
76
+ {
77
+ "stage": "stage1",
78
+ "epoch": 6,
79
+ "train_loss": 1.208171897156294,
80
+ "train_acc": 0.8022529692665605,
81
+ "train_macro_f1": 0.791375137567527,
82
+ "val_loss": 1.096138444813815,
83
+ "val_acc": 0.8557457212713936,
84
+ "val_macro_f1": 0.8341570413748813,
85
+ "duration_seconds": 38.05841494211927
86
+ },
87
+ {
88
+ "stage": "stage1",
89
+ "epoch": 7,
90
+ "train_loss": 1.0467550879301026,
91
+ "train_acc": 0.8506183421084854,
92
+ "train_macro_f1": 0.8464827124195602,
93
+ "val_loss": 0.912368427623402,
94
+ "val_acc": 0.9080684596577017,
95
+ "val_macro_f1": 0.8785396246910446,
96
+ "duration_seconds": 37.41684066830203
97
+ },
98
+ {
99
+ "stage": "stage1",
100
+ "epoch": 8,
101
+ "train_loss": 0.9626071349132893,
102
+ "train_acc": 0.8759030243663524,
103
+ "train_macro_f1": 0.8737804943842077,
104
+ "val_loss": 0.9386219707402316,
105
+ "val_acc": 0.8948655256723717,
106
+ "val_macro_f1": 0.8682503489532023,
107
+ "duration_seconds": 38.04319545486942
108
+ },
109
+ {
110
+ "stage": "stage1",
111
+ "epoch": 9,
112
+ "train_loss": 0.8973523711049279,
113
+ "train_acc": 0.8907799681645647,
114
+ "train_macro_f1": 0.8875551854478719,
115
+ "val_loss": 0.8646871664307334,
116
+ "val_acc": 0.9198044009779951,
117
+ "val_macro_f1": 0.8949053659799362,
118
+ "duration_seconds": 37.64396993210539
119
+ },
120
+ {
121
+ "stage": "stage1",
122
+ "epoch": 10,
123
+ "train_loss": 0.8764715506586918,
124
+ "train_acc": 0.8992286029141667,
125
+ "train_macro_f1": 0.8913087240589926,
126
+ "val_loss": 0.8934382850473578,
127
+ "val_acc": 0.908557457212714,
128
+ "val_macro_f1": 0.8811464151370096,
129
+ "duration_seconds": 38.161499897018075
130
+ },
131
+ {
132
+ "stage": "stage1",
133
+ "epoch": 11,
134
+ "train_loss": 0.8411068756913029,
135
+ "train_acc": 0.9013101506060978,
136
+ "train_macro_f1": 0.8970095282070171,
137
+ "val_loss": 0.8103193749081005,
138
+ "val_acc": 0.9315403422982885,
139
+ "val_macro_f1": 0.900308708225663,
140
+ "duration_seconds": 38.78951836982742
141
+ },
142
+ {
143
+ "stage": "stage1",
144
+ "epoch": 12,
145
+ "train_loss": 0.801855732535207,
146
+ "train_acc": 0.9101261173013346,
147
+ "train_macro_f1": 0.9095695021002316,
148
+ "val_loss": 0.7941674590110779,
149
+ "val_acc": 0.9339853300733496,
150
+ "val_macro_f1": 0.8975888047900716,
151
+ "duration_seconds": 37.50118718901649
152
+ },
153
+ {
154
+ "stage": "stage1",
155
+ "epoch": 13,
156
+ "train_loss": 0.7851105207620666,
157
+ "train_acc": 0.9166768703318232,
158
+ "train_macro_f1": 0.9156568805546416,
159
+ "val_loss": 0.7914232449098066,
160
+ "val_acc": 0.9364303178484108,
161
+ "val_macro_f1": 0.9084976516741216,
162
+ "duration_seconds": 37.85148463025689
163
+ },
164
+ {
165
+ "stage": "stage1",
166
+ "epoch": 14,
167
+ "train_loss": 0.7669343338456265,
168
+ "train_acc": 0.9204726337700502,
169
+ "train_macro_f1": 0.9229367514640775,
170
+ "val_loss": 0.7820644216103987,
171
+ "val_acc": 0.9354523227383863,
172
+ "val_macro_f1": 0.9072646736307985,
173
+ "duration_seconds": 38.37839019019157
174
+ },
175
+ {
176
+ "stage": "stage1",
177
+ "epoch": 15,
178
+ "train_loss": 0.7542976293452951,
179
+ "train_acc": 0.925003061099547,
180
+ "train_macro_f1": 0.9252287661475155,
181
+ "val_loss": 0.7722757241942666,
182
+ "val_acc": 0.9378973105134474,
183
+ "val_macro_f1": 0.9088258270013261,
184
+ "duration_seconds": 38.02175585925579
185
+ },
186
+ {
187
+ "stage": "stage1",
188
+ "epoch": 16,
189
+ "train_loss": 0.724993243466976,
190
+ "train_acc": 0.9310640382025224,
191
+ "train_macro_f1": 0.9306218869346541,
192
+ "val_loss": 0.7765102765776895,
193
+ "val_acc": 0.9325183374083129,
194
+ "val_macro_f1": 0.8957598016047307,
195
+ "duration_seconds": 38.236370380967855
196
+ },
197
+ {
198
+ "stage": "stage1",
199
+ "epoch": 17,
200
+ "train_loss": 0.7193509769994159,
201
+ "train_acc": 0.9331455858944533,
202
+ "train_macro_f1": 0.9339855859569205,
203
+ "val_loss": 0.7548327012495561,
204
+ "val_acc": 0.9422982885085575,
205
+ "val_macro_f1": 0.9146078158676976,
206
+ "duration_seconds": 37.599709355738014
207
+ },
208
+ {
209
+ "stage": "stage1",
210
+ "epoch": 18,
211
+ "train_loss": 0.7072599779727847,
212
+ "train_acc": 0.933880249785723,
213
+ "train_macro_f1": 0.93310010660679,
214
+ "val_loss": 0.7916218855164268,
215
+ "val_acc": 0.9330073349633252,
216
+ "val_macro_f1": 0.8968142015045353,
217
+ "duration_seconds": 38.33785875607282
218
+ },
219
+ {
220
+ "stage": "stage1",
221
+ "epoch": 19,
222
+ "train_loss": 0.7052893208902936,
223
+ "train_acc": 0.9335741398310273,
224
+ "train_macro_f1": 0.9343232005341593,
225
+ "val_loss": 0.7364852699366483,
226
+ "val_acc": 0.9452322738386308,
227
+ "val_macro_f1": 0.9182284675084373,
228
+ "duration_seconds": 37.837123352102935
229
+ },
230
+ {
231
+ "stage": "stage1",
232
+ "epoch": 20,
233
+ "train_loss": 0.6814761432104333,
234
+ "train_acc": 0.9384106771152197,
235
+ "train_macro_f1": 0.9398322150903461,
236
+ "val_loss": 0.7354252338409424,
237
+ "val_acc": 0.943276283618582,
238
+ "val_macro_f1": 0.9160987470807599,
239
+ "duration_seconds": 38.50653554406017
240
+ },
241
+ {
242
+ "stage": "stage1",
243
+ "epoch": 21,
244
+ "train_loss": 0.6830035545105158,
245
+ "train_acc": 0.9377984572058283,
246
+ "train_macro_f1": 0.9395218264544717,
247
+ "val_loss": 0.7638225717978044,
248
+ "val_acc": 0.9354523227383863,
249
+ "val_macro_f1": 0.8995650263429223,
250
+ "duration_seconds": 37.889779151417315
251
+ },
252
+ {
253
+ "stage": "stage1",
254
+ "epoch": 22,
255
+ "train_loss": 0.6549201711665752,
256
+ "train_acc": 0.9458185380188564,
257
+ "train_macro_f1": 0.9479852966726539,
258
+ "val_loss": 0.748274640603499,
259
+ "val_acc": 0.9408312958435208,
260
+ "val_macro_f1": 0.913678433990217,
261
+ "duration_seconds": 37.69011677801609
262
+ },
263
+ {
264
+ "stage": "stage1",
265
+ "epoch": 23,
266
+ "train_loss": 0.6476058356983717,
267
+ "train_acc": 0.9478388637198482,
268
+ "train_macro_f1": 0.9474735112676054,
269
+ "val_loss": 0.7422939701513811,
270
+ "val_acc": 0.9393643031784841,
271
+ "val_macro_f1": 0.9118126326501514,
272
+ "duration_seconds": 37.879671565722674
273
+ },
274
+ {
275
+ "stage": "stage1",
276
+ "epoch": 24,
277
+ "train_loss": 0.6383854310179866,
278
+ "train_acc": 0.9452063181094649,
279
+ "train_macro_f1": 0.9456692832105631,
280
+ "val_loss": 0.70200622623617,
281
+ "val_acc": 0.952078239608802,
282
+ "val_macro_f1": 0.9221311577849888,
283
+ "duration_seconds": 39.13599858107045
284
+ },
285
+ {
286
+ "stage": "stage1",
287
+ "epoch": 25,
288
+ "train_loss": 0.6240727319273838,
289
+ "train_acc": 0.9534100648953104,
290
+ "train_macro_f1": 0.9560553119679458,
291
+ "val_loss": 0.7065791866996072,
292
+ "val_acc": 0.9525672371638142,
293
+ "val_macro_f1": 0.9233286511154518,
294
+ "duration_seconds": 37.369449513964355
295
+ },
296
+ {
297
+ "stage": "stage1",
298
+ "epoch": 26,
299
+ "train_loss": 0.6299820653227872,
300
+ "train_acc": 0.9505938533121097,
301
+ "train_macro_f1": 0.9530535174633679,
302
+ "val_loss": 0.7115304686806418,
303
+ "val_acc": 0.9476772616136919,
304
+ "val_macro_f1": 0.9190713459538334,
305
+ "duration_seconds": 38.107163506094366
306
+ },
307
+ {
308
+ "stage": "stage1",
309
+ "epoch": 27,
310
+ "train_loss": 0.620378524758095,
311
+ "train_acc": 0.9531039549406146,
312
+ "train_macro_f1": 0.9549947894496518,
313
+ "val_loss": 0.6997524770823392,
314
+ "val_acc": 0.9486552567237164,
315
+ "val_macro_f1": 0.9201905369641149,
316
+ "duration_seconds": 37.054033300839365
317
+ },
318
+ {
319
+ "stage": "stage1",
320
+ "epoch": 28,
321
+ "train_loss": 0.6110506050808485,
322
+ "train_acc": 0.9556140565691196,
323
+ "train_macro_f1": 0.958136157178548,
324
+ "val_loss": 0.6986935463818637,
325
+ "val_acc": 0.9491442542787286,
326
+ "val_macro_f1": 0.9200785518230558,
327
+ "duration_seconds": 38.087362293154
328
+ },
329
+ {
330
+ "stage": "stage1",
331
+ "epoch": 29,
332
+ "train_loss": 0.598165498916493,
333
+ "train_acc": 0.959532263989225,
334
+ "train_macro_f1": 0.9600834433079148,
335
+ "val_loss": 0.7002380436116998,
336
+ "val_acc": 0.9506112469437653,
337
+ "val_macro_f1": 0.9219051963307497,
338
+ "duration_seconds": 37.679548679850996
339
+ },
340
+ {
341
+ "stage": "stage1",
342
+ "epoch": 30,
343
+ "train_loss": 0.5900751698848813,
344
+ "train_acc": 0.959593485980164,
345
+ "train_macro_f1": 0.9605770687335475,
346
+ "val_loss": 0.695245005867698,
347
+ "val_acc": 0.9511002444987775,
348
+ "val_macro_f1": 0.9216505884105199,
349
+ "duration_seconds": 38.43997255899012
350
+ },
351
+ {
352
+ "stage": "stage1",
353
+ "epoch": 31,
354
+ "train_loss": 0.5878149402696032,
355
+ "train_acc": 0.9606342598261296,
356
+ "train_macro_f1": 0.9621756383388788,
357
+ "val_loss": 0.6914988647807728,
358
+ "val_acc": 0.9525672371638142,
359
+ "val_macro_f1": 0.9230561654384433,
360
+ "duration_seconds": 37.00720879295841
361
+ },
362
+ {
363
+ "stage": "stage1",
364
+ "epoch": 32,
365
+ "train_loss": 0.5783637138300164,
366
+ "train_acc": 0.9626545855271214,
367
+ "train_macro_f1": 0.9650474377206499,
368
+ "val_loss": 0.6921304247596047,
369
+ "val_acc": 0.9525672371638142,
370
+ "val_macro_f1": 0.9242248262820537,
371
+ "duration_seconds": 38.078143508173525
372
+ },
373
+ {
374
+ "stage": "stage1",
375
+ "epoch": 33,
376
+ "train_loss": 0.5720760066841923,
377
+ "train_acc": 0.9652871311375046,
378
+ "train_macro_f1": 0.9668956502827502,
379
+ "val_loss": 0.6832193461331454,
380
+ "val_acc": 0.9545232273838631,
381
+ "val_macro_f1": 0.9254622457557515,
382
+ "duration_seconds": 37.823320649098605
383
+ },
384
+ {
385
+ "stage": "stage1",
386
+ "epoch": 34,
387
+ "train_loss": 0.5669168850710226,
388
+ "train_acc": 0.9660217950287744,
389
+ "train_macro_f1": 0.9679376980718958,
390
+ "val_loss": 0.6903422366489064,
391
+ "val_acc": 0.952078239608802,
392
+ "val_macro_f1": 0.9141290090107064,
393
+ "duration_seconds": 37.19663280015811
394
+ },
395
+ {
396
+ "stage": "stage1",
397
+ "epoch": 35,
398
+ "train_loss": 0.5721360119276269,
399
+ "train_acc": 0.9652259091465655,
400
+ "train_macro_f1": 0.9672558713264995,
401
+ "val_loss": 0.6860878196629611,
402
+ "val_acc": 0.9535452322738386,
403
+ "val_macro_f1": 0.9246890611512996,
404
+ "duration_seconds": 37.865042545832694
405
+ },
406
+ {
407
+ "stage": "stage1",
408
+ "epoch": 36,
409
+ "train_loss": 0.5751806615396987,
410
+ "train_acc": 0.9634504714093303,
411
+ "train_macro_f1": 0.9650922342936058,
412
+ "val_loss": 0.6858713843605735,
413
+ "val_acc": 0.9506112469437653,
414
+ "val_macro_f1": 0.9215963057574214,
415
+ "duration_seconds": 38.06451524980366
416
+ },
417
+ {
418
+ "stage": "stage1",
419
+ "epoch": 37,
420
+ "train_loss": 0.5691629336323849,
421
+ "train_acc": 0.963756581364026,
422
+ "train_macro_f1": 0.9659098190263928,
423
+ "val_loss": 0.6852591091936285,
424
+ "val_acc": 0.9511002444987775,
425
+ "val_macro_f1": 0.9221256983893021,
426
+ "duration_seconds": 37.62006014632061
427
+ },
428
+ {
429
+ "stage": "stage1",
430
+ "epoch": 38,
431
+ "train_loss": 0.563821169526078,
432
+ "train_acc": 0.9661442390106526,
433
+ "train_macro_f1": 0.9676408629024923,
434
+ "val_loss": 0.6863496682860635,
435
+ "val_acc": 0.9515892420537897,
436
+ "val_macro_f1": 0.9228874257523916,
437
+ "duration_seconds": 37.83984856121242
438
+ },
439
+ {
440
+ "stage": "stage1",
441
+ "epoch": 39,
442
+ "train_loss": 0.5663612668597421,
443
+ "train_acc": 0.9646136892371739,
444
+ "train_macro_f1": 0.966505548310987,
445
+ "val_loss": 0.685564474626021,
446
+ "val_acc": 0.952078239608802,
447
+ "val_macro_f1": 0.9230060614495823,
448
+ "duration_seconds": 38.010506270918995
449
+ },
450
+ {
451
+ "stage": "stage1",
452
+ "epoch": 40,
453
+ "train_loss": 0.5743493382320848,
454
+ "train_acc": 0.9646749112281131,
455
+ "train_macro_f1": 0.9661682215935604,
456
+ "val_loss": 0.685387134552002,
457
+ "val_acc": 0.9515892420537897,
458
+ "val_macro_f1": 0.9225555286063664,
459
+ "duration_seconds": 37.642080747988075
460
+ },
461
+ {
462
+ "stage": "stage2",
463
+ "epoch": 1,
464
+ "train_loss": 0.6768135927444281,
465
+ "train_acc": 0.9281253826374434,
466
+ "train_macro_f1": 0.9207764039512437,
467
+ "val_loss": 0.7352005947719921,
468
+ "val_acc": 0.9408312958435208,
469
+ "val_macro_f1": 0.908830505411882,
470
+ "duration_seconds": 40.97610312793404
471
+ },
472
+ {
473
+ "stage": "stage2",
474
+ "epoch": 2,
475
+ "train_loss": 0.6523073114628015,
476
+ "train_acc": 0.9372474592873761,
477
+ "train_macro_f1": 0.9393616328475114,
478
+ "val_loss": 0.7405287569219415,
479
+ "val_acc": 0.943276283618582,
480
+ "val_macro_f1": 0.9145481739672323,
481
+ "duration_seconds": 40.34182390011847
482
+ },
483
+ {
484
+ "stage": "stage2",
485
+ "epoch": 3,
486
+ "train_loss": 0.6370004353135131,
487
+ "train_acc": 0.9430635484265948,
488
+ "train_macro_f1": 0.9449550202236057,
489
+ "val_loss": 0.7183141654187982,
490
+ "val_acc": 0.9418092909535453,
491
+ "val_macro_f1": 0.9110062617729418,
492
+ "duration_seconds": 39.924425372853875
493
+ },
494
+ {
495
+ "stage": "stage2",
496
+ "epoch": 4,
497
+ "train_loss": 0.6195670698964318,
498
+ "train_acc": 0.9518182931308926,
499
+ "train_macro_f1": 0.9531957634054529,
500
+ "val_loss": 0.722674245184118,
501
+ "val_acc": 0.9427872860635697,
502
+ "val_macro_f1": 0.9122724208628346,
503
+ "duration_seconds": 39.511029839050025
504
+ }
505
+ ]
506
+ }