{
  "best_global_step": 2000,
  "best_metric": 0.3400862027176887,
  "best_model_checkpoint": "models/test-finetuned-sw-en/checkpoint-2000",
  "epoch": 0.350385423966363,
  "eval_steps": 500,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008759635599159075,
      "grad_norm": 100.36164855957031,
      "learning_rate": 2.5744308231173378e-06,
      "loss": 14.1521,
      "step": 50
    },
    {
      "epoch": 0.01751927119831815,
      "grad_norm": 11.487061500549316,
      "learning_rate": 5.201401050788091e-06,
      "loss": 5.8065,
      "step": 100
    },
    {
      "epoch": 0.026278906797477224,
      "grad_norm": 10.166085243225098,
      "learning_rate": 7.828371278458844e-06,
      "loss": 4.411,
      "step": 150
    },
    {
      "epoch": 0.0350385423966363,
      "grad_norm": 9.804460525512695,
      "learning_rate": 1.0455341506129598e-05,
      "loss": 4.1303,
      "step": 200
    },
    {
      "epoch": 0.04379817799579538,
      "grad_norm": 8.944438934326172,
      "learning_rate": 1.308231173380035e-05,
      "loss": 4.0012,
      "step": 250
    },
    {
      "epoch": 0.05255781359495445,
      "grad_norm": 9.135123252868652,
      "learning_rate": 1.5709281961471103e-05,
      "loss": 3.8622,
      "step": 300
    },
    {
      "epoch": 0.06131744919411353,
      "grad_norm": 8.474159240722656,
      "learning_rate": 1.8336252189141855e-05,
      "loss": 3.749,
      "step": 350
    },
    {
      "epoch": 0.0700770847932726,
      "grad_norm": 8.502720832824707,
      "learning_rate": 2.096322241681261e-05,
      "loss": 3.6698,
      "step": 400
    },
    {
      "epoch": 0.07883672039243167,
      "grad_norm": 8.237638473510742,
      "learning_rate": 2.3590192644483363e-05,
      "loss": 3.5605,
      "step": 450
    },
    {
      "epoch": 0.08759635599159075,
      "grad_norm": 8.184097290039062,
      "learning_rate": 2.6217162872154118e-05,
      "loss": 3.4528,
      "step": 500
    },
    {
      "epoch": 0.08759635599159075,
      "eval_bleu": 0.18452195716495454,
      "eval_chrf": 37.579073815894574,
      "eval_loss": 3.3051514625549316,
      "eval_model_preparation_time": 0.0024,
      "eval_runtime": 312.3723,
      "eval_samples_per_second": 68.79,
      "eval_steps_per_second": 2.151,
      "step": 500
    },
    {
      "epoch": 0.09635599159074983,
      "grad_norm": 8.083911895751953,
      "learning_rate": 2.884413309982487e-05,
      "loss": 3.4183,
      "step": 550
    },
    {
      "epoch": 0.1051156271899089,
      "grad_norm": 7.500244617462158,
      "learning_rate": 2.9997800888103602e-05,
      "loss": 3.3458,
      "step": 600
    },
    {
      "epoch": 0.11387526278906797,
      "grad_norm": 8.195356369018555,
      "learning_rate": 2.9982937261717248e-05,
      "loss": 3.3022,
      "step": 650
    },
    {
      "epoch": 0.12263489838822705,
      "grad_norm": 7.267704010009766,
      "learning_rate": 2.9954065389845778e-05,
      "loss": 3.2882,
      "step": 700
    },
    {
      "epoch": 0.13139453398738613,
      "grad_norm": 8.064687728881836,
      "learning_rate": 2.9911212266146163e-05,
      "loss": 3.1957,
      "step": 750
    },
    {
      "epoch": 0.1401541695865452,
      "grad_norm": 7.6160688400268555,
      "learning_rate": 2.985441795599852e-05,
      "loss": 3.2081,
      "step": 800
    },
    {
      "epoch": 0.14891380518570427,
      "grad_norm": 8.32299518585205,
      "learning_rate": 2.978373555904712e-05,
      "loss": 3.117,
      "step": 850
    },
    {
      "epoch": 0.15767344078486334,
      "grad_norm": 7.199745178222656,
      "learning_rate": 2.9699231159555054e-05,
      "loss": 3.1089,
      "step": 900
    },
    {
      "epoch": 0.16643307638402244,
      "grad_norm": 7.270870685577393,
      "learning_rate": 2.9600983764618996e-05,
      "loss": 3.0805,
      "step": 950
    },
    {
      "epoch": 0.1751927119831815,
      "grad_norm": 7.554959297180176,
      "learning_rate": 2.9489085230301778e-05,
      "loss": 3.0778,
      "step": 1000
    },
    {
      "epoch": 0.1751927119831815,
      "eval_bleu": 0.2956498402636781,
      "eval_chrf": 48.85756661559643,
      "eval_loss": 2.9274492263793945,
      "eval_model_preparation_time": 0.0024,
      "eval_runtime": 292.5539,
      "eval_samples_per_second": 73.45,
      "eval_steps_per_second": 2.297,
      "step": 1000
    },
    {
      "epoch": 0.18395234758234058,
      "grad_norm": 8.069275856018066,
      "learning_rate": 2.9363640175751887e-05,
      "loss": 3.0401,
      "step": 1050
    },
    {
      "epoch": 0.19271198318149965,
      "grad_norm": 7.83376932144165,
      "learning_rate": 2.922476588539015e-05,
      "loss": 3.0312,
      "step": 1100
    },
    {
      "epoch": 0.20147161878065872,
      "grad_norm": 7.48977518081665,
      "learning_rate": 2.9072592199255066e-05,
      "loss": 2.993,
      "step": 1150
    },
    {
      "epoch": 0.2102312543798178,
      "grad_norm": 7.3794779777526855,
      "learning_rate": 2.8907261391609325e-05,
      "loss": 3.0023,
      "step": 1200
    },
    {
      "epoch": 0.21899088997897687,
      "grad_norm": 7.881261825561523,
      "learning_rate": 2.8728928037920966e-05,
      "loss": 2.9784,
      "step": 1250
    },
    {
      "epoch": 0.22775052557813594,
      "grad_norm": 6.710996150970459,
      "learning_rate": 2.853775887034356e-05,
      "loss": 2.9662,
      "step": 1300
    },
    {
      "epoch": 0.23651016117729504,
      "grad_norm": 7.729732990264893,
      "learning_rate": 2.8333932621830594e-05,
      "loss": 2.972,
      "step": 1350
    },
    {
      "epoch": 0.2452697967764541,
      "grad_norm": 6.742512226104736,
      "learning_rate": 2.8117639859029685e-05,
      "loss": 2.9373,
      "step": 1400
    },
    {
      "epoch": 0.25402943237561315,
      "grad_norm": 6.744399547576904,
      "learning_rate": 2.7889082804112972e-05,
      "loss": 2.9438,
      "step": 1450
    },
    {
      "epoch": 0.26278906797477225,
      "grad_norm": 6.606257438659668,
      "learning_rate": 2.764847514571017e-05,
      "loss": 2.9562,
      "step": 1500
    },
    {
      "epoch": 0.26278906797477225,
      "eval_bleu": 0.32395940047060856,
      "eval_chrf": 51.97366756294741,
      "eval_loss": 2.8271327018737793,
      "eval_model_preparation_time": 0.0024,
      "eval_runtime": 299.5193,
      "eval_samples_per_second": 71.742,
      "eval_steps_per_second": 2.244,
      "step": 1500
    },
    {
      "epoch": 0.27154870357393135,
      "grad_norm": 7.149145603179932,
      "learning_rate": 2.7396041839121136e-05,
      "loss": 2.9199,
      "step": 1550
    },
    {
      "epoch": 0.2803083391730904,
      "grad_norm": 7.462961673736572,
      "learning_rate": 2.7132018895994697e-05,
      "loss": 2.9294,
      "step": 1600
    },
    {
      "epoch": 0.2890679747722495,
      "grad_norm": 6.644341468811035,
      "learning_rate": 2.685665316367035e-05,
      "loss": 2.9246,
      "step": 1650
    },
    {
      "epoch": 0.29782761037140854,
      "grad_norm": 6.9484429359436035,
      "learning_rate": 2.6570202094389226e-05,
      "loss": 2.9154,
      "step": 1700
    },
    {
      "epoch": 0.30658724597056763,
      "grad_norm": 6.633726119995117,
      "learning_rate": 2.6272933504589965e-05,
      "loss": 2.8688,
      "step": 1750
    },
    {
      "epoch": 0.3153468815697267,
      "grad_norm": 7.676279544830322,
      "learning_rate": 2.5965125324514702e-05,
      "loss": 2.9137,
      "step": 1800
    },
    {
      "epoch": 0.3241065171688858,
      "grad_norm": 6.953280448913574,
      "learning_rate": 2.564706533835911e-05,
      "loss": 2.8974,
      "step": 1850
    },
    {
      "epoch": 0.3328661527680449,
      "grad_norm": 7.126858711242676,
      "learning_rate": 2.5319050915209592e-05,
      "loss": 2.9095,
      "step": 1900
    },
    {
      "epoch": 0.3416257883672039,
      "grad_norm": 6.376485824584961,
      "learning_rate": 2.498138873101906e-05,
      "loss": 2.8332,
      "step": 1950
    },
    {
      "epoch": 0.350385423966363,
      "grad_norm": 7.114526748657227,
      "learning_rate": 2.4634394481881312e-05,
      "loss": 2.8781,
      "step": 2000
    },
    {
      "epoch": 0.350385423966363,
      "eval_bleu": 0.3400862027176887,
      "eval_chrf": 53.63135686349859,
      "eval_loss": 2.7728657722473145,
      "eval_model_preparation_time": 0.0024,
      "eval_runtime": 459.505,
      "eval_samples_per_second": 46.763,
      "eval_steps_per_second": 1.462,
      "step": 2000
    }
  ],
  "logging_steps": 50,
  "max_steps": 5708,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 3,
        "early_stopping_threshold": 0.001
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1521367092559872.0,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}