reubk committed
Commit e5d3e43 · verified · 1 Parent(s): 18de95d

Upload 8 files

adapter_config.json ADDED
@@ -0,0 +1,118 @@
+ {
+   "alora_invocation_tokens": null,
+   "alpha_pattern": {},
+   "arrow_config": null,
+   "auto_mapping": null,
+   "base_model_name_or_path": "Molmo2-4B",
+   "bias": "none",
+   "corda_config": null,
+   "ensure_weight_tying": false,
+   "eva_config": null,
+   "exclude_modules": null,
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 32,
+   "lora_bias": false,
+   "lora_dropout": 0.05,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": null,
+   "peft_type": "LORA",
+   "peft_version": "0.18.0",
+   "qalora_group_size": 16,
+   "r": 16,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "transformer.blocks.29.att_proj",
+     "transformer.blocks.24.att_proj",
+     "transformer.blocks.15.att_proj",
+     "transformer.blocks.7.att_proj",
+     "transformer.blocks.0.attn_out",
+     "transformer.blocks.8.att_proj",
+     "vision_backbone.image_pooling_2d.wv",
+     "transformer.blocks.24.attn_out",
+     "transformer.blocks.35.att_proj",
+     "transformer.blocks.22.attn_out",
+     "transformer.blocks.34.attn_out",
+     "transformer.blocks.3.attn_out",
+     "transformer.blocks.26.att_proj",
+     "transformer.blocks.6.att_proj",
+     "vision_backbone.image_projector.w1",
+     "transformer.blocks.10.att_proj",
+     "transformer.blocks.19.attn_out",
+     "transformer.blocks.13.att_proj",
+     "vision_backbone.image_pooling_2d.wk",
+     "transformer.blocks.25.att_proj",
+     "vision_backbone.image_projector.w2",
+     "transformer.blocks.21.att_proj",
+     "transformer.blocks.8.attn_out",
+     "transformer.blocks.27.attn_out",
+     "transformer.blocks.6.attn_out",
+     "transformer.blocks.35.attn_out",
+     "vision_backbone.image_pooling_2d.wq",
+     "transformer.blocks.9.attn_out",
+     "transformer.blocks.12.att_proj",
+     "transformer.blocks.22.att_proj",
+     "vision_backbone.image_pooling_2d.wo",
+     "transformer.blocks.10.attn_out",
+     "transformer.blocks.31.att_proj",
+     "transformer.blocks.25.attn_out",
+     "transformer.blocks.5.attn_out",
+     "transformer.blocks.7.attn_out",
+     "transformer.blocks.16.att_proj",
+     "transformer.blocks.14.attn_out",
+     "transformer.blocks.29.attn_out",
+     "transformer.blocks.34.att_proj",
+     "transformer.blocks.17.attn_out",
+     "transformer.blocks.5.att_proj",
+     "transformer.blocks.1.att_proj",
+     "transformer.blocks.20.att_proj",
+     "transformer.blocks.33.att_proj",
+     "transformer.blocks.1.attn_out",
+     "transformer.blocks.3.att_proj",
+     "transformer.blocks.17.att_proj",
+     "transformer.blocks.26.attn_out",
+     "transformer.blocks.33.attn_out",
+     "transformer.blocks.14.att_proj",
+     "transformer.blocks.31.attn_out",
+     "transformer.blocks.30.att_proj",
+     "transformer.blocks.32.att_proj",
+     "transformer.blocks.11.att_proj",
+     "transformer.blocks.32.attn_out",
+     "transformer.blocks.16.attn_out",
+     "transformer.blocks.4.att_proj",
+     "transformer.blocks.4.attn_out",
+     "transformer.blocks.0.att_proj",
+     "transformer.blocks.23.attn_out",
+     "transformer.blocks.20.attn_out",
+     "transformer.blocks.27.att_proj",
+     "transformer.blocks.9.att_proj",
+     "transformer.blocks.28.att_proj",
+     "transformer.blocks.19.att_proj",
+     "transformer.blocks.18.attn_out",
+     "transformer.blocks.2.att_proj",
+     "transformer.blocks.30.attn_out",
+     "transformer.blocks.23.att_proj",
+     "transformer.blocks.28.attn_out",
+     "transformer.blocks.12.attn_out",
+     "transformer.blocks.2.attn_out",
+     "transformer.blocks.21.attn_out",
+     "transformer.blocks.18.att_proj",
+     "transformer.blocks.15.attn_out",
+     "vision_backbone.image_projector.w3",
+     "transformer.blocks.13.attn_out",
+     "transformer.blocks.11.attn_out"
+   ],
+   "target_parameters": null,
+   "task_type": "CAUSAL_LM",
+   "trainable_token_indices": null,
+   "use_dora": false,
+   "use_qalora": false,
+   "use_rslora": false
+ }
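
For context on how a config like this is consumed: below is a minimal sketch of attaching the adapter to its base model with PEFT. It assumes that "Molmo2-4B" (the `base_model_name_or_path` recorded above) resolves to an actual base checkpoint, that the adapter files sit in the current directory, and that the model loads via `trust_remote_code` — none of which is part of this commit.

```python
# Minimal sketch (assumptions noted above): load the base model, then wrap it
# with this LoRA adapter. r=16, lora_alpha=32, and the att_proj / attn_out
# target modules are read from adapter_config.json automatically.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained(
    "Molmo2-4B",             # base_model_name_or_path from the config above
    trust_remote_code=True,  # assumed: Molmo-style models ship custom code
)
model = PeftModel.from_pretrained(base, ".")  # directory with the adapter files

# "inference_mode": true means the adapter loads frozen; merging bakes the
# low-rank updates into the base weights for deployment:
model = model.merge_and_unload()
```

Note that the config targets the attention projections (`att_proj`, `attn_out`) of all 36 transformer blocks plus the vision backbone's pooling and projector layers, so the merged model differs from the base only in those weights.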
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a7eff1a7f925fbb629d54f0c8281fb3d62cae0df9f32a2ebfad873eb0659483
+ size 2992080
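
This three-line stub, like the ones for the `.pt` files below, is a Git LFS pointer: the repo stores only the spec version, a sha256 object id, and the byte size, while the payload itself lives in LFS storage. A minimal sketch of verifying a downloaded object against its pointer (the file names are placeholders, not from this commit):

```python
# Sketch: check a downloaded LFS object against its pointer file.
# Pointer format is the git-lfs spec v1 shown above:
#   version <url> / oid sha256:<hex> / size <bytes>
import hashlib
from pathlib import Path

def check_lfs_object(pointer_path: str, object_path: str) -> bool:
    # Parse "key value" lines of the pointer into a dict.
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if " " in line
    )
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    data = Path(object_path).read_bytes()
    return (
        len(data) == expected_size
        and hashlib.sha256(data).hexdigest() == expected_oid
    )
```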
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1fee794f339f36ef1996c4e56f6502eba59cdf8c2070eef674041c7e3dfd7a23
+ size 1538037
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:363c5df1543d2c82b2f13164f35bdd0367ceb32e7fa1b2f67c19df073a08b17b
+ size 14645
scaler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:419e9121be762e4368fecb31eef4f8ce6f2ce4d52aa3adc3c6e5a569f9a1f099
+ size 1383
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:09de904d0ccc8857cebd09674e133c4f831bdbf6f060f9b5a76febbc1b782279
+ size 1465
trainer_state.json ADDED
@@ -0,0 +1,454 @@
+ {
+   "best_global_step": null,
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 8.956651718983558,
+   "eval_steps": 500,
+   "global_step": 3000,
+   "is_hyper_param_search": false,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 0.14947683109118087,
+       "grad_norm": 0.3962731659412384,
+       "learning_rate": 1.9706176961602673e-05,
+       "loss": 1.902,
+       "step": 50
+     },
+     {
+       "epoch": 0.29895366218236175,
+       "grad_norm": 0.3334282338619232,
+       "learning_rate": 1.9372287145242073e-05,
+       "loss": 1.8032,
+       "step": 100
+     },
+     {
+       "epoch": 0.4484304932735426,
+       "grad_norm": 0.8322853446006775,
+       "learning_rate": 1.903839732888147e-05,
+       "loss": 1.6039,
+       "step": 150
+     },
+     {
+       "epoch": 0.5979073243647235,
+       "grad_norm": 2.1256463527679443,
+       "learning_rate": 1.870450751252087e-05,
+       "loss": 1.354,
+       "step": 200
+     },
+     {
+       "epoch": 0.7473841554559043,
+       "grad_norm": 3.58968186378479,
+       "learning_rate": 1.837061769616027e-05,
+       "loss": 1.0853,
+       "step": 250
+     },
+     {
+       "epoch": 0.8968609865470852,
+       "grad_norm": 11.132929801940918,
+       "learning_rate": 1.8036727879799668e-05,
+       "loss": 0.8252,
+       "step": 300
+     },
+     {
+       "epoch": 1.0448430493273542,
+       "grad_norm": 4.746887683868408,
+       "learning_rate": 1.7702838063439068e-05,
+       "loss": 0.6159,
+       "step": 350
+     },
+     {
+       "epoch": 1.194319880418535,
+       "grad_norm": 7.573383808135986,
+       "learning_rate": 1.7368948247078464e-05,
+       "loss": 0.452,
+       "step": 400
+     },
+     {
+       "epoch": 1.343796711509716,
+       "grad_norm": 4.613552093505859,
+       "learning_rate": 1.7035058430717864e-05,
+       "loss": 0.3191,
+       "step": 450
+     },
+     {
+       "epoch": 1.493273542600897,
+       "grad_norm": 18.23060417175293,
+       "learning_rate": 1.6701168614357263e-05,
+       "loss": 0.2341,
+       "step": 500
+     },
+     {
+       "epoch": 1.6427503736920777,
+       "grad_norm": 4.62631368637085,
+       "learning_rate": 1.6367278797996663e-05,
+       "loss": 0.1614,
+       "step": 550
+     },
+     {
+       "epoch": 1.7922272047832586,
+       "grad_norm": 1.3041956424713135,
+       "learning_rate": 1.6033388981636063e-05,
+       "loss": 0.1584,
+       "step": 600
+     },
+     {
+       "epoch": 1.9417040358744395,
+       "grad_norm": 2.2017955780029297,
+       "learning_rate": 1.569949916527546e-05,
+       "loss": 0.1428,
+       "step": 650
+     },
+     {
+       "epoch": 2.0896860986547083,
+       "grad_norm": 1.256799578666687,
+       "learning_rate": 1.536560934891486e-05,
+       "loss": 0.1266,
+       "step": 700
+     },
+     {
+       "epoch": 2.2391629297458895,
+       "grad_norm": 4.344259262084961,
+       "learning_rate": 1.5031719532554258e-05,
+       "loss": 0.1112,
+       "step": 750
+     },
+     {
+       "epoch": 2.38863976083707,
+       "grad_norm": 2.4069414138793945,
+       "learning_rate": 1.4697829716193656e-05,
+       "loss": 0.1158,
+       "step": 800
+     },
+     {
+       "epoch": 2.538116591928251,
+       "grad_norm": 1.6278817653656006,
+       "learning_rate": 1.4363939899833058e-05,
+       "loss": 0.0949,
+       "step": 850
+     },
+     {
+       "epoch": 2.687593423019432,
+       "grad_norm": 2.6387939453125,
+       "learning_rate": 1.4030050083472456e-05,
+       "loss": 0.095,
+       "step": 900
+     },
+     {
+       "epoch": 2.8370702541106128,
+       "grad_norm": 3.1809768676757812,
+       "learning_rate": 1.3696160267111853e-05,
+       "loss": 0.0918,
+       "step": 950
+     },
+     {
+       "epoch": 2.986547085201794,
+       "grad_norm": 2.9219062328338623,
+       "learning_rate": 1.3362270450751253e-05,
+       "loss": 0.087,
+       "step": 1000
+     },
+     {
+       "epoch": 3.1345291479820627,
+       "grad_norm": 3.087089776992798,
+       "learning_rate": 1.3028380634390651e-05,
+       "loss": 0.0792,
+       "step": 1050
+     },
+     {
+       "epoch": 3.2840059790732434,
+       "grad_norm": 1.1930086612701416,
+       "learning_rate": 1.2694490818030052e-05,
+       "loss": 0.0769,
+       "step": 1100
+     },
+     {
+       "epoch": 3.4334828101644246,
+       "grad_norm": 3.4931893348693848,
+       "learning_rate": 1.236060100166945e-05,
+       "loss": 0.0848,
+       "step": 1150
+     },
+     {
+       "epoch": 3.5829596412556053,
+       "grad_norm": 2.244807004928589,
+       "learning_rate": 1.2026711185308848e-05,
+       "loss": 0.0765,
+       "step": 1200
+     },
+     {
+       "epoch": 3.7324364723467864,
+       "grad_norm": 2.385439872741699,
+       "learning_rate": 1.1692821368948248e-05,
+       "loss": 0.0689,
+       "step": 1250
+     },
+     {
+       "epoch": 3.881913303437967,
+       "grad_norm": 4.636480808258057,
+       "learning_rate": 1.1358931552587646e-05,
+       "loss": 0.063,
+       "step": 1300
+     },
+     {
+       "epoch": 4.029895366218236,
+       "grad_norm": 1.3066422939300537,
+       "learning_rate": 1.1031719532554258e-05,
+       "loss": 0.074,
+       "step": 1350
+     },
+     {
+       "epoch": 4.179372197309417,
+       "grad_norm": 1.9610888957977295,
+       "learning_rate": 1.0697829716193657e-05,
+       "loss": 0.0638,
+       "step": 1400
+     },
+     {
+       "epoch": 4.328849028400598,
+       "grad_norm": 2.659454822540283,
+       "learning_rate": 1.0363939899833055e-05,
+       "loss": 0.0591,
+       "step": 1450
+     },
+     {
+       "epoch": 4.478325859491779,
+       "grad_norm": 1.8156203031539917,
+       "learning_rate": 1.0030050083472455e-05,
+       "loss": 0.0548,
+       "step": 1500
+     },
+     {
+       "epoch": 4.62780269058296,
+       "grad_norm": 3.0744662284851074,
+       "learning_rate": 9.696160267111854e-06,
+       "loss": 0.0586,
+       "step": 1550
+     },
+     {
+       "epoch": 4.77727952167414,
+       "grad_norm": 1.5125874280929565,
+       "learning_rate": 9.362270450751252e-06,
+       "loss": 0.0643,
+       "step": 1600
+     },
+     {
+       "epoch": 4.926756352765321,
+       "grad_norm": 1.3466798067092896,
+       "learning_rate": 9.028380634390652e-06,
+       "loss": 0.0614,
+       "step": 1650
+     },
+     {
+       "epoch": 5.074738415545591,
+       "grad_norm": 2.761390447616577,
+       "learning_rate": 8.694490818030052e-06,
+       "loss": 0.0568,
+       "step": 1700
+     },
+     {
+       "epoch": 5.2242152466367715,
+       "grad_norm": 1.61431086063385,
+       "learning_rate": 8.36060100166945e-06,
+       "loss": 0.0505,
+       "step": 1750
+     },
+     {
+       "epoch": 5.373692077727952,
+       "grad_norm": 0.6185945272445679,
+       "learning_rate": 8.02671118530885e-06,
+       "loss": 0.0539,
+       "step": 1800
+     },
+     {
+       "epoch": 5.523168908819133,
+       "grad_norm": 1.7833508253097534,
+       "learning_rate": 7.692821368948247e-06,
+       "loss": 0.049,
+       "step": 1850
+     },
+     {
+       "epoch": 5.672645739910314,
+       "grad_norm": 2.104482412338257,
+       "learning_rate": 7.358931552587647e-06,
+       "loss": 0.0486,
+       "step": 1900
+     },
+     {
+       "epoch": 5.822122571001495,
+       "grad_norm": 2.144746780395508,
+       "learning_rate": 7.025041736227045e-06,
+       "loss": 0.0487,
+       "step": 1950
+     },
+     {
+       "epoch": 5.971599402092676,
+       "grad_norm": 0.9488668441772461,
+       "learning_rate": 6.6911519198664446e-06,
+       "loss": 0.0533,
+       "step": 2000
+     },
+     {
+       "epoch": 6.119581464872945,
+       "grad_norm": 1.840523600578308,
+       "learning_rate": 6.357262103505843e-06,
+       "loss": 0.0426,
+       "step": 2050
+     },
+     {
+       "epoch": 6.2690582959641254,
+       "grad_norm": 3.228400707244873,
+       "learning_rate": 6.023372287145243e-06,
+       "loss": 0.0519,
+       "step": 2100
+     },
+     {
+       "epoch": 6.418535127055306,
+       "grad_norm": 3.9569454193115234,
+       "learning_rate": 5.689482470784642e-06,
+       "loss": 0.0426,
+       "step": 2150
+     },
+     {
+       "epoch": 6.568011958146487,
+       "grad_norm": 1.098770260810852,
+       "learning_rate": 5.35559265442404e-06,
+       "loss": 0.0402,
+       "step": 2200
+     },
+     {
+       "epoch": 6.7174887892376685,
+       "grad_norm": 1.297438621520996,
+       "learning_rate": 5.0217028380634394e-06,
+       "loss": 0.0461,
+       "step": 2250
+     },
+     {
+       "epoch": 6.866965620328849,
+       "grad_norm": 0.9027300477027893,
+       "learning_rate": 4.687813021702838e-06,
+       "loss": 0.0464,
+       "step": 2300
+     },
+     {
+       "epoch": 7.014947683109118,
+       "grad_norm": 1.7319854497909546,
+       "learning_rate": 4.353923205342238e-06,
+       "loss": 0.0669,
+       "step": 2350
+     },
+     {
+       "epoch": 7.164424514200299,
+       "grad_norm": 1.1381218433380127,
+       "learning_rate": 4.020033388981636e-06,
+       "loss": 0.0403,
+       "step": 2400
+     },
+     {
+       "epoch": 7.31390134529148,
+       "grad_norm": 1.5444351434707642,
+       "learning_rate": 3.6861435726210355e-06,
+       "loss": 0.043,
+       "step": 2450
+     },
+     {
+       "epoch": 7.463378176382661,
+       "grad_norm": 2.456782817840576,
+       "learning_rate": 3.3589315525876463e-06,
+       "loss": 0.0469,
+       "step": 2500
+     },
+     {
+       "epoch": 7.612855007473842,
+       "grad_norm": 1.1411938667297363,
+       "learning_rate": 3.025041736227045e-06,
+       "loss": 0.054,
+       "step": 2550
+     },
+     {
+       "epoch": 7.762331838565022,
+       "grad_norm": 1.1636459827423096,
+       "learning_rate": 2.6911519198664443e-06,
+       "loss": 0.0442,
+       "step": 2600
+     },
+     {
+       "epoch": 7.911808669656203,
+       "grad_norm": 1.4487160444259644,
+       "learning_rate": 2.357262103505843e-06,
+       "loss": 0.0415,
+       "step": 2650
+     },
+     {
+       "epoch": 8.059790732436472,
+       "grad_norm": 1.9948341846466064,
+       "learning_rate": 2.023372287145242e-06,
+       "loss": 0.0468,
+       "step": 2700
+     },
+     {
+       "epoch": 8.209267563527654,
+       "grad_norm": 1.8844599723815918,
+       "learning_rate": 1.6894824707846414e-06,
+       "loss": 0.0407,
+       "step": 2750
+     },
+     {
+       "epoch": 8.358744394618833,
+       "grad_norm": 1.19595205783844,
+       "learning_rate": 1.3555926544240402e-06,
+       "loss": 0.0531,
+       "step": 2800
+     },
+     {
+       "epoch": 8.508221225710015,
+       "grad_norm": 2.4211935997009277,
+       "learning_rate": 1.0217028380634392e-06,
+       "loss": 0.0421,
+       "step": 2850
+     },
+     {
+       "epoch": 8.657698056801197,
+       "grad_norm": 1.1242051124572754,
+       "learning_rate": 6.878130217028381e-07,
+       "loss": 0.0472,
+       "step": 2900
+     },
+     {
+       "epoch": 8.807174887892376,
+       "grad_norm": 2.1767444610595703,
+       "learning_rate": 3.5392320534223706e-07,
+       "loss": 0.042,
+       "step": 2950
+     },
+     {
+       "epoch": 8.956651718983558,
+       "grad_norm": 3.1764280796051025,
+       "learning_rate": 2.003338898163606e-08,
+       "loss": 0.0422,
+       "step": 3000
+     }
+   ],
+   "logging_steps": 50,
+   "max_steps": 3000,
+   "num_input_tokens_seen": 0,
+   "num_train_epochs": 9,
+   "save_steps": 600,
+   "stateful_callbacks": {
+     "TrainerControl": {
+       "args": {
+         "should_epoch_stop": false,
+         "should_evaluate": false,
+         "should_log": false,
+         "should_save": true,
+         "should_training_stop": true
+       },
+       "attributes": {}
+     }
+   },
+   "total_flos": 4.497547090227081e+17,
+   "train_batch_size": 1,
+   "trial_name": null,
+   "trial_params": null
+ }
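
The `log_history` array above is plain JSON, so the training curve is easy to recover. A minimal sketch, assuming the file is saved locally as `trainer_state.json`:

```python
# Sketch: extract the loss curve from trainer_state.json's log_history.
# Keys (step / loss / learning_rate) match the entries above.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

points = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
steps, losses = zip(*points)
print(f"{len(points)} logged points; loss {losses[0]} -> {losses[-1]}")
# With the data above: 60 logged points; loss 1.902 -> 0.0422
```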
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0afed9a30bc0f9f2754235eb9897d853f9c98584b20363945fc053d2b9c1af1a
+ size 5905
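
Together with `trainer_state.json`, the optimizer/scheduler/scaler/RNG states above are what `transformers.Trainer` writes per checkpoint, and `training_args.bin` is the pickled `TrainingArguments` object for the run. A minimal sketch of inspecting it (assumes `transformers` is installed so the object can be unpickled; this is a full pickle, so only load files you trust):

```python
# Sketch: recover the run's hyperparameters from training_args.bin.
# weights_only=False is required because this is an arbitrary pickled
# object, not a plain tensor file — unpickle trusted checkpoints only.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```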