{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5e-05,
"eval_steps": 500,
"global_step": 500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 1e-07,
"grad_norm": 13.864434242248535,
"learning_rate": 0.0,
"loss": 73.5625,
"step": 1
},
{
"epoch": 2e-07,
"grad_norm": 13.48910903930664,
"learning_rate": 1e-08,
"loss": 73.625,
"step": 2
},
{
"epoch": 3e-07,
"grad_norm": 14.2167387008667,
"learning_rate": 2e-08,
"loss": 73.625,
"step": 3
},
{
"epoch": 4e-07,
"grad_norm": 13.33409595489502,
"learning_rate": 3.0000000000000004e-08,
"loss": 73.625,
"step": 4
},
{
"epoch": 5e-07,
"grad_norm": 13.449651718139648,
"learning_rate": 4e-08,
"loss": 73.625,
"step": 5
},
{
"epoch": 6e-07,
"grad_norm": 13.80512809753418,
"learning_rate": 5.0000000000000004e-08,
"loss": 73.6875,
"step": 6
},
{
"epoch": 7e-07,
"grad_norm": 13.694576263427734,
"learning_rate": 6.000000000000001e-08,
"loss": 73.5,
"step": 7
},
{
"epoch": 8e-07,
"grad_norm": 13.790714263916016,
"learning_rate": 7e-08,
"loss": 73.625,
"step": 8
},
{
"epoch": 9e-07,
"grad_norm": 13.956605911254883,
"learning_rate": 8e-08,
"loss": 73.6875,
"step": 9
},
{
"epoch": 1e-06,
"grad_norm": 13.796408653259277,
"learning_rate": 9e-08,
"loss": 73.6875,
"step": 10
},
{
"epoch": 1.1e-06,
"grad_norm": 13.963505744934082,
"learning_rate": 1.0000000000000001e-07,
"loss": 73.5625,
"step": 11
},
{
"epoch": 1.2e-06,
"grad_norm": 13.888561248779297,
"learning_rate": 1.1e-07,
"loss": 73.5625,
"step": 12
},
{
"epoch": 1.3e-06,
"grad_norm": 13.5466890335083,
"learning_rate": 1.2000000000000002e-07,
"loss": 73.625,
"step": 13
},
{
"epoch": 1.4e-06,
"grad_norm": 13.878573417663574,
"learning_rate": 1.3e-07,
"loss": 73.5,
"step": 14
},
{
"epoch": 1.5e-06,
"grad_norm": 13.623698234558105,
"learning_rate": 1.4e-07,
"loss": 73.5625,
"step": 15
},
{
"epoch": 1.6e-06,
"grad_norm": 13.813441276550293,
"learning_rate": 1.5000000000000002e-07,
"loss": 73.5625,
"step": 16
},
{
"epoch": 1.7e-06,
"grad_norm": 13.076991081237793,
"learning_rate": 1.6e-07,
"loss": 73.5625,
"step": 17
},
{
"epoch": 1.8e-06,
"grad_norm": 13.786111831665039,
"learning_rate": 1.7e-07,
"loss": 73.625,
"step": 18
},
{
"epoch": 1.9e-06,
"grad_norm": 13.549301147460938,
"learning_rate": 1.8e-07,
"loss": 73.6875,
"step": 19
},
{
"epoch": 2e-06,
"grad_norm": 13.712065696716309,
"learning_rate": 1.9e-07,
"loss": 73.5,
"step": 20
},
{
"epoch": 2.1e-06,
"grad_norm": 13.899330139160156,
"learning_rate": 2.0000000000000002e-07,
"loss": 73.625,
"step": 21
},
{
"epoch": 2.2e-06,
"grad_norm": 13.786901473999023,
"learning_rate": 2.0999999999999997e-07,
"loss": 73.5625,
"step": 22
},
{
"epoch": 2.3e-06,
"grad_norm": 13.647011756896973,
"learning_rate": 2.2e-07,
"loss": 73.5625,
"step": 23
},
{
"epoch": 2.4e-06,
"grad_norm": 13.713528633117676,
"learning_rate": 2.3000000000000002e-07,
"loss": 73.5625,
"step": 24
},
{
"epoch": 2.5e-06,
"grad_norm": 13.645662307739258,
"learning_rate": 2.4000000000000003e-07,
"loss": 73.625,
"step": 25
},
{
"epoch": 2.6e-06,
"grad_norm": 14.283074378967285,
"learning_rate": 2.5e-07,
"loss": 73.625,
"step": 26
},
{
"epoch": 2.7e-06,
"grad_norm": 13.786810874938965,
"learning_rate": 2.6e-07,
"loss": 73.6875,
"step": 27
},
{
"epoch": 2.8e-06,
"grad_norm": 13.583864212036133,
"learning_rate": 2.7e-07,
"loss": 73.625,
"step": 28
},
{
"epoch": 2.9e-06,
"grad_norm": 13.53901481628418,
"learning_rate": 2.8e-07,
"loss": 73.625,
"step": 29
},
{
"epoch": 3e-06,
"grad_norm": 13.56877613067627,
"learning_rate": 2.9000000000000003e-07,
"loss": 73.5625,
"step": 30
},
{
"epoch": 3.1e-06,
"grad_norm": 13.632527351379395,
"learning_rate": 3.0000000000000004e-07,
"loss": 73.6875,
"step": 31
},
{
"epoch": 3.2e-06,
"grad_norm": 13.880523681640625,
"learning_rate": 3.1e-07,
"loss": 73.5625,
"step": 32
},
{
"epoch": 3.3e-06,
"grad_norm": 13.455181121826172,
"learning_rate": 3.2e-07,
"loss": 73.6875,
"step": 33
},
{
"epoch": 3.4e-06,
"grad_norm": 13.400262832641602,
"learning_rate": 3.3e-07,
"loss": 73.5625,
"step": 34
},
{
"epoch": 3.5e-06,
"grad_norm": 13.204479217529297,
"learning_rate": 3.4e-07,
"loss": 73.5625,
"step": 35
},
{
"epoch": 3.6e-06,
"grad_norm": 13.526925086975098,
"learning_rate": 3.5e-07,
"loss": 73.5625,
"step": 36
},
{
"epoch": 3.7e-06,
"grad_norm": 13.56838321685791,
"learning_rate": 3.6e-07,
"loss": 73.5625,
"step": 37
},
{
"epoch": 3.8e-06,
"grad_norm": 13.788740158081055,
"learning_rate": 3.7e-07,
"loss": 73.625,
"step": 38
},
{
"epoch": 3.9e-06,
"grad_norm": 13.793792724609375,
"learning_rate": 3.8e-07,
"loss": 73.5625,
"step": 39
},
{
"epoch": 4e-06,
"grad_norm": 14.137533187866211,
"learning_rate": 3.8999999999999997e-07,
"loss": 73.6875,
"step": 40
},
{
"epoch": 4.1e-06,
"grad_norm": 13.904379844665527,
"learning_rate": 4.0000000000000003e-07,
"loss": 73.625,
"step": 41
},
{
"epoch": 4.2e-06,
"grad_norm": 13.895687103271484,
"learning_rate": 4.1e-07,
"loss": 73.625,
"step": 42
},
{
"epoch": 4.3e-06,
"grad_norm": 14.146393775939941,
"learning_rate": 4.1999999999999995e-07,
"loss": 73.5625,
"step": 43
},
{
"epoch": 4.4e-06,
"grad_norm": 13.730606079101562,
"learning_rate": 4.3e-07,
"loss": 73.625,
"step": 44
},
{
"epoch": 4.5e-06,
"grad_norm": 13.854805946350098,
"learning_rate": 4.4e-07,
"loss": 73.5625,
"step": 45
},
{
"epoch": 4.6e-06,
"grad_norm": 13.894522666931152,
"learning_rate": 4.5e-07,
"loss": 73.5625,
"step": 46
},
{
"epoch": 4.7e-06,
"grad_norm": 13.2392578125,
"learning_rate": 4.6000000000000004e-07,
"loss": 73.6875,
"step": 47
},
{
"epoch": 4.8e-06,
"grad_norm": 13.556278228759766,
"learning_rate": 4.7e-07,
"loss": 73.75,
"step": 48
},
{
"epoch": 4.9e-06,
"grad_norm": 13.716812133789062,
"learning_rate": 4.800000000000001e-07,
"loss": 73.625,
"step": 49
},
{
"epoch": 5e-06,
"grad_norm": 13.964197158813477,
"learning_rate": 4.900000000000001e-07,
"loss": 73.5625,
"step": 50
},
{
"epoch": 5.1e-06,
"grad_norm": 13.632363319396973,
"learning_rate": 5e-07,
"loss": 73.6875,
"step": 51
},
{
"epoch": 5.2e-06,
"grad_norm": 13.38712215423584,
"learning_rate": 5.100000000000001e-07,
"loss": 73.5625,
"step": 52
},
{
"epoch": 5.3e-06,
"grad_norm": 13.229913711547852,
"learning_rate": 5.2e-07,
"loss": 73.5,
"step": 53
},
{
"epoch": 5.4e-06,
"grad_norm": 13.719653129577637,
"learning_rate": 5.3e-07,
"loss": 73.625,
"step": 54
},
{
"epoch": 5.5e-06,
"grad_norm": 12.99260425567627,
"learning_rate": 5.4e-07,
"loss": 73.5625,
"step": 55
},
{
"epoch": 5.6e-06,
"grad_norm": 13.961441040039062,
"learning_rate": 5.5e-07,
"loss": 73.5625,
"step": 56
},
{
"epoch": 5.7e-06,
"grad_norm": 13.631799697875977,
"learning_rate": 5.6e-07,
"loss": 73.5625,
"step": 57
},
{
"epoch": 5.8e-06,
"grad_norm": 13.392004013061523,
"learning_rate": 5.7e-07,
"loss": 73.75,
"step": 58
},
{
"epoch": 5.9e-06,
"grad_norm": 13.643326759338379,
"learning_rate": 5.800000000000001e-07,
"loss": 73.625,
"step": 59
},
{
"epoch": 6e-06,
"grad_norm": 13.313652992248535,
"learning_rate": 5.9e-07,
"loss": 73.5,
"step": 60
},
{
"epoch": 6.1e-06,
"grad_norm": 13.296246528625488,
"learning_rate": 6.000000000000001e-07,
"loss": 73.5625,
"step": 61
},
{
"epoch": 6.2e-06,
"grad_norm": 12.835416793823242,
"learning_rate": 6.1e-07,
"loss": 73.5625,
"step": 62
},
{
"epoch": 6.3e-06,
"grad_norm": 13.500370025634766,
"learning_rate": 6.2e-07,
"loss": 73.5,
"step": 63
},
{
"epoch": 6.4e-06,
"grad_norm": 14.06345272064209,
"learning_rate": 6.3e-07,
"loss": 73.5625,
"step": 64
},
{
"epoch": 6.5e-06,
"grad_norm": 13.467910766601562,
"learning_rate": 6.4e-07,
"loss": 73.5625,
"step": 65
},
{
"epoch": 6.6e-06,
"grad_norm": 13.793481826782227,
"learning_rate": 6.5e-07,
"loss": 73.8125,
"step": 66
},
{
"epoch": 6.7e-06,
"grad_norm": 13.709341049194336,
"learning_rate": 6.6e-07,
"loss": 73.75,
"step": 67
},
{
"epoch": 6.8e-06,
"grad_norm": 13.779867172241211,
"learning_rate": 6.7e-07,
"loss": 73.5625,
"step": 68
},
{
"epoch": 6.9e-06,
"grad_norm": 13.970793724060059,
"learning_rate": 6.8e-07,
"loss": 73.625,
"step": 69
},
{
"epoch": 7e-06,
"grad_norm": 13.877311706542969,
"learning_rate": 6.900000000000001e-07,
"loss": 73.625,
"step": 70
},
{
"epoch": 7.1e-06,
"grad_norm": 13.815545082092285,
"learning_rate": 7e-07,
"loss": 73.6875,
"step": 71
},
{
"epoch": 7.2e-06,
"grad_norm": 13.491872787475586,
"learning_rate": 7.1e-07,
"loss": 73.625,
"step": 72
},
{
"epoch": 7.3e-06,
"grad_norm": 13.690380096435547,
"learning_rate": 7.2e-07,
"loss": 73.5625,
"step": 73
},
{
"epoch": 7.4e-06,
"grad_norm": 13.530372619628906,
"learning_rate": 7.3e-07,
"loss": 73.5625,
"step": 74
},
{
"epoch": 7.5e-06,
"grad_norm": 13.24216079711914,
"learning_rate": 7.4e-07,
"loss": 73.5625,
"step": 75
},
{
"epoch": 7.6e-06,
"grad_norm": 13.833526611328125,
"learning_rate": 7.5e-07,
"loss": 73.5625,
"step": 76
},
{
"epoch": 7.7e-06,
"grad_norm": 13.154213905334473,
"learning_rate": 7.6e-07,
"loss": 73.5625,
"step": 77
},
{
"epoch": 7.8e-06,
"grad_norm": 13.55800724029541,
"learning_rate": 7.699999999999999e-07,
"loss": 73.5625,
"step": 78
},
{
"epoch": 7.9e-06,
"grad_norm": 13.533388137817383,
"learning_rate": 7.799999999999999e-07,
"loss": 73.5625,
"step": 79
},
{
"epoch": 8e-06,
"grad_norm": 12.90389633178711,
"learning_rate": 7.900000000000001e-07,
"loss": 73.4375,
"step": 80
},
{
"epoch": 8.1e-06,
"grad_norm": 13.699131965637207,
"learning_rate": 8.000000000000001e-07,
"loss": 73.625,
"step": 81
},
{
"epoch": 8.2e-06,
"grad_norm": 14.151235580444336,
"learning_rate": 8.100000000000001e-07,
"loss": 73.5625,
"step": 82
},
{
"epoch": 8.3e-06,
"grad_norm": 13.401898384094238,
"learning_rate": 8.2e-07,
"loss": 73.625,
"step": 83
},
{
"epoch": 8.4e-06,
"grad_norm": 12.971834182739258,
"learning_rate": 8.3e-07,
"loss": 73.5625,
"step": 84
},
{
"epoch": 8.5e-06,
"grad_norm": 13.448446273803711,
"learning_rate": 8.399999999999999e-07,
"loss": 73.5,
"step": 85
},
{
"epoch": 8.6e-06,
"grad_norm": 13.639290809631348,
"learning_rate": 8.500000000000001e-07,
"loss": 73.6875,
"step": 86
},
{
"epoch": 8.7e-06,
"grad_norm": 13.647253036499023,
"learning_rate": 8.6e-07,
"loss": 73.5,
"step": 87
},
{
"epoch": 8.8e-06,
"grad_norm": 13.423108100891113,
"learning_rate": 8.7e-07,
"loss": 73.625,
"step": 88
},
{
"epoch": 8.9e-06,
"grad_norm": 13.234737396240234,
"learning_rate": 8.8e-07,
"loss": 73.5625,
"step": 89
},
{
"epoch": 9e-06,
"grad_norm": 13.644418716430664,
"learning_rate": 8.9e-07,
"loss": 73.6875,
"step": 90
},
{
"epoch": 9.1e-06,
"grad_norm": 13.497346878051758,
"learning_rate": 9e-07,
"loss": 73.6875,
"step": 91
},
{
"epoch": 9.2e-06,
"grad_norm": 13.384448051452637,
"learning_rate": 9.100000000000001e-07,
"loss": 73.625,
"step": 92
},
{
"epoch": 9.3e-06,
"grad_norm": 13.391413688659668,
"learning_rate": 9.200000000000001e-07,
"loss": 73.625,
"step": 93
},
{
"epoch": 9.4e-06,
"grad_norm": 13.153121948242188,
"learning_rate": 9.3e-07,
"loss": 73.625,
"step": 94
},
{
"epoch": 9.5e-06,
"grad_norm": 13.874184608459473,
"learning_rate": 9.4e-07,
"loss": 73.5,
"step": 95
},
{
"epoch": 9.6e-06,
"grad_norm": 13.332117080688477,
"learning_rate": 9.5e-07,
"loss": 73.625,
"step": 96
},
{
"epoch": 9.7e-06,
"grad_norm": 13.225434303283691,
"learning_rate": 9.600000000000001e-07,
"loss": 73.5625,
"step": 97
},
{
"epoch": 9.8e-06,
"grad_norm": 13.648529052734375,
"learning_rate": 9.7e-07,
"loss": 73.5,
"step": 98
},
{
"epoch": 9.9e-06,
"grad_norm": 14.294005393981934,
"learning_rate": 9.800000000000001e-07,
"loss": 73.625,
"step": 99
},
{
"epoch": 1e-05,
"grad_norm": 13.486104965209961,
"learning_rate": 9.9e-07,
"loss": 73.5625,
"step": 100
},
{
"epoch": 1.01e-05,
"grad_norm": 13.966443061828613,
"learning_rate": 1e-06,
"loss": 73.6875,
"step": 101
},
{
"epoch": 1.02e-05,
"grad_norm": 13.408844947814941,
"learning_rate": 1.0099999999999999e-06,
"loss": 73.625,
"step": 102
},
{
"epoch": 1.03e-05,
"grad_norm": 13.945192337036133,
"learning_rate": 1.0200000000000002e-06,
"loss": 73.5625,
"step": 103
},
{
"epoch": 1.04e-05,
"grad_norm": 13.554696083068848,
"learning_rate": 1.03e-06,
"loss": 73.5,
"step": 104
},
{
"epoch": 1.05e-05,
"grad_norm": 13.722262382507324,
"learning_rate": 1.04e-06,
"loss": 73.5,
"step": 105
},
{
"epoch": 1.06e-05,
"grad_norm": 14.362868309020996,
"learning_rate": 1.0500000000000001e-06,
"loss": 73.8125,
"step": 106
},
{
"epoch": 1.07e-05,
"grad_norm": 12.963215827941895,
"learning_rate": 1.06e-06,
"loss": 73.5625,
"step": 107
},
{
"epoch": 1.08e-05,
"grad_norm": 13.040894508361816,
"learning_rate": 1.07e-06,
"loss": 73.5625,
"step": 108
},
{
"epoch": 1.09e-05,
"grad_norm": 13.496689796447754,
"learning_rate": 1.08e-06,
"loss": 73.625,
"step": 109
},
{
"epoch": 1.1e-05,
"grad_norm": 12.973114013671875,
"learning_rate": 1.0900000000000002e-06,
"loss": 73.625,
"step": 110
},
{
"epoch": 1.11e-05,
"grad_norm": 13.376940727233887,
"learning_rate": 1.1e-06,
"loss": 73.625,
"step": 111
},
{
"epoch": 1.12e-05,
"grad_norm": 13.423301696777344,
"learning_rate": 1.11e-06,
"loss": 73.5,
"step": 112
},
{
"epoch": 1.13e-05,
"grad_norm": 14.047898292541504,
"learning_rate": 1.12e-06,
"loss": 73.6875,
"step": 113
},
{
"epoch": 1.14e-05,
"grad_norm": 13.22636890411377,
"learning_rate": 1.13e-06,
"loss": 73.5625,
"step": 114
},
{
"epoch": 1.15e-05,
"grad_norm": 13.150336265563965,
"learning_rate": 1.14e-06,
"loss": 73.5625,
"step": 115
},
{
"epoch": 1.16e-05,
"grad_norm": 13.312559127807617,
"learning_rate": 1.15e-06,
"loss": 73.5,
"step": 116
},
{
"epoch": 1.17e-05,
"grad_norm": 13.54313850402832,
"learning_rate": 1.1600000000000001e-06,
"loss": 73.5,
"step": 117
},
{
"epoch": 1.18e-05,
"grad_norm": 13.342724800109863,
"learning_rate": 1.17e-06,
"loss": 73.6875,
"step": 118
},
{
"epoch": 1.19e-05,
"grad_norm": 13.630057334899902,
"learning_rate": 1.18e-06,
"loss": 73.5625,
"step": 119
},
{
"epoch": 1.2e-05,
"grad_norm": 13.717558860778809,
"learning_rate": 1.19e-06,
"loss": 73.5,
"step": 120
},
{
"epoch": 1.21e-05,
"grad_norm": 14.034920692443848,
"learning_rate": 1.2000000000000002e-06,
"loss": 73.5625,
"step": 121
},
{
"epoch": 1.22e-05,
"grad_norm": 12.964286804199219,
"learning_rate": 1.21e-06,
"loss": 73.5,
"step": 122
},
{
"epoch": 1.23e-05,
"grad_norm": 13.391942024230957,
"learning_rate": 1.22e-06,
"loss": 73.625,
"step": 123
},
{
"epoch": 1.24e-05,
"grad_norm": 13.302276611328125,
"learning_rate": 1.23e-06,
"loss": 73.5,
"step": 124
},
{
"epoch": 1.25e-05,
"grad_norm": 13.081567764282227,
"learning_rate": 1.24e-06,
"loss": 73.625,
"step": 125
},
{
"epoch": 1.26e-05,
"grad_norm": 13.869545936584473,
"learning_rate": 1.25e-06,
"loss": 73.5625,
"step": 126
},
{
"epoch": 1.27e-05,
"grad_norm": 13.455565452575684,
"learning_rate": 1.26e-06,
"loss": 73.5,
"step": 127
},
{
"epoch": 1.28e-05,
"grad_norm": 13.791558265686035,
"learning_rate": 1.2700000000000001e-06,
"loss": 73.625,
"step": 128
},
{
"epoch": 1.29e-05,
"grad_norm": 13.645962715148926,
"learning_rate": 1.28e-06,
"loss": 73.625,
"step": 129
},
{
"epoch": 1.3e-05,
"grad_norm": 13.296918869018555,
"learning_rate": 1.29e-06,
"loss": 73.5625,
"step": 130
},
{
"epoch": 1.31e-05,
"grad_norm": 13.747169494628906,
"learning_rate": 1.3e-06,
"loss": 73.5625,
"step": 131
},
{
"epoch": 1.32e-05,
"grad_norm": 13.228446006774902,
"learning_rate": 1.3100000000000002e-06,
"loss": 73.5,
"step": 132
},
{
"epoch": 1.33e-05,
"grad_norm": 13.962552070617676,
"learning_rate": 1.32e-06,
"loss": 73.5,
"step": 133
},
{
"epoch": 1.34e-05,
"grad_norm": 13.579384803771973,
"learning_rate": 1.33e-06,
"loss": 73.5625,
"step": 134
},
{
"epoch": 1.35e-05,
"grad_norm": 13.230710983276367,
"learning_rate": 1.34e-06,
"loss": 73.5,
"step": 135
},
{
"epoch": 1.36e-05,
"grad_norm": 13.567021369934082,
"learning_rate": 1.35e-06,
"loss": 73.5,
"step": 136
},
{
"epoch": 1.37e-05,
"grad_norm": 12.811543464660645,
"learning_rate": 1.36e-06,
"loss": 73.5,
"step": 137
},
{
"epoch": 1.38e-05,
"grad_norm": 13.733097076416016,
"learning_rate": 1.37e-06,
"loss": 73.625,
"step": 138
},
{
"epoch": 1.39e-05,
"grad_norm": 13.566929817199707,
"learning_rate": 1.3800000000000001e-06,
"loss": 73.5625,
"step": 139
},
{
"epoch": 1.4e-05,
"grad_norm": 13.628262519836426,
"learning_rate": 1.39e-06,
"loss": 73.6875,
"step": 140
},
{
"epoch": 1.41e-05,
"grad_norm": 13.57292652130127,
"learning_rate": 1.4e-06,
"loss": 73.5625,
"step": 141
},
{
"epoch": 1.42e-05,
"grad_norm": 13.723799705505371,
"learning_rate": 1.41e-06,
"loss": 73.625,
"step": 142
},
{
"epoch": 1.43e-05,
"grad_norm": 14.213798522949219,
"learning_rate": 1.42e-06,
"loss": 73.625,
"step": 143
},
{
"epoch": 1.44e-05,
"grad_norm": 13.248021125793457,
"learning_rate": 1.43e-06,
"loss": 73.5625,
"step": 144
},
{
"epoch": 1.45e-05,
"grad_norm": 13.645977020263672,
"learning_rate": 1.44e-06,
"loss": 73.5625,
"step": 145
},
{
"epoch": 1.46e-05,
"grad_norm": 13.61572551727295,
"learning_rate": 1.45e-06,
"loss": 73.5,
"step": 146
},
{
"epoch": 1.47e-05,
"grad_norm": 13.234169960021973,
"learning_rate": 1.46e-06,
"loss": 73.5625,
"step": 147
},
{
"epoch": 1.48e-05,
"grad_norm": 13.495967864990234,
"learning_rate": 1.47e-06,
"loss": 73.6875,
"step": 148
},
{
"epoch": 1.49e-05,
"grad_norm": 13.479613304138184,
"learning_rate": 1.48e-06,
"loss": 73.5,
"step": 149
},
{
"epoch": 1.5e-05,
"grad_norm": 12.886178016662598,
"learning_rate": 1.49e-06,
"loss": 73.5,
"step": 150
},
{
"epoch": 1.51e-05,
"grad_norm": 13.639323234558105,
"learning_rate": 1.5e-06,
"loss": 73.5625,
"step": 151
},
{
"epoch": 1.52e-05,
"grad_norm": 13.812483787536621,
"learning_rate": 1.51e-06,
"loss": 73.5,
"step": 152
},
{
"epoch": 1.53e-05,
"grad_norm": 13.071154594421387,
"learning_rate": 1.52e-06,
"loss": 73.5,
"step": 153
},
{
"epoch": 1.54e-05,
"grad_norm": 13.37407112121582,
"learning_rate": 1.5300000000000002e-06,
"loss": 73.5625,
"step": 154
},
{
"epoch": 1.55e-05,
"grad_norm": 13.398472785949707,
"learning_rate": 1.5399999999999999e-06,
"loss": 73.5625,
"step": 155
},
{
"epoch": 1.56e-05,
"grad_norm": 13.664745330810547,
"learning_rate": 1.55e-06,
"loss": 73.625,
"step": 156
},
{
"epoch": 1.57e-05,
"grad_norm": 13.450276374816895,
"learning_rate": 1.5599999999999999e-06,
"loss": 73.5,
"step": 157
},
{
"epoch": 1.58e-05,
"grad_norm": 13.383707046508789,
"learning_rate": 1.57e-06,
"loss": 73.5,
"step": 158
},
{
"epoch": 1.59e-05,
"grad_norm": 13.524046897888184,
"learning_rate": 1.5800000000000001e-06,
"loss": 73.4375,
"step": 159
},
{
"epoch": 1.6e-05,
"grad_norm": 13.396870613098145,
"learning_rate": 1.59e-06,
"loss": 73.5625,
"step": 160
},
{
"epoch": 1.61e-05,
"grad_norm": 13.882829666137695,
"learning_rate": 1.6000000000000001e-06,
"loss": 73.5,
"step": 161
},
{
"epoch": 1.62e-05,
"grad_norm": 13.542072296142578,
"learning_rate": 1.6099999999999998e-06,
"loss": 73.5625,
"step": 162
},
{
"epoch": 1.63e-05,
"grad_norm": 12.320090293884277,
"learning_rate": 1.6200000000000002e-06,
"loss": 73.5,
"step": 163
},
{
"epoch": 1.64e-05,
"grad_norm": 13.312244415283203,
"learning_rate": 1.6300000000000003e-06,
"loss": 73.5625,
"step": 164
},
{
"epoch": 1.65e-05,
"grad_norm": 12.99962043762207,
"learning_rate": 1.64e-06,
"loss": 73.5,
"step": 165
},
{
"epoch": 1.66e-05,
"grad_norm": 12.987727165222168,
"learning_rate": 1.65e-06,
"loss": 73.4375,
"step": 166
},
{
"epoch": 1.67e-05,
"grad_norm": 13.491228103637695,
"learning_rate": 1.66e-06,
"loss": 73.5,
"step": 167
},
{
"epoch": 1.68e-05,
"grad_norm": 13.407848358154297,
"learning_rate": 1.67e-06,
"loss": 73.5625,
"step": 168
},
{
"epoch": 1.69e-05,
"grad_norm": 13.65124797821045,
"learning_rate": 1.6799999999999998e-06,
"loss": 73.5,
"step": 169
},
{
"epoch": 1.7e-05,
"grad_norm": 13.30240249633789,
"learning_rate": 1.6900000000000001e-06,
"loss": 73.5625,
"step": 170
},
{
"epoch": 1.71e-05,
"grad_norm": 13.703505516052246,
"learning_rate": 1.7000000000000002e-06,
"loss": 73.625,
"step": 171
},
{
"epoch": 1.72e-05,
"grad_norm": 13.372552871704102,
"learning_rate": 1.71e-06,
"loss": 73.5625,
"step": 172
},
{
"epoch": 1.73e-05,
"grad_norm": 13.747980117797852,
"learning_rate": 1.72e-06,
"loss": 73.5,
"step": 173
},
{
"epoch": 1.74e-05,
"grad_norm": 13.47564697265625,
"learning_rate": 1.73e-06,
"loss": 73.5625,
"step": 174
},
{
"epoch": 1.75e-05,
"grad_norm": 13.97725772857666,
"learning_rate": 1.74e-06,
"loss": 73.5,
"step": 175
},
{
"epoch": 1.76e-05,
"grad_norm": 13.73171615600586,
"learning_rate": 1.7500000000000002e-06,
"loss": 73.5,
"step": 176
},
{
"epoch": 1.77e-05,
"grad_norm": 13.646438598632812,
"learning_rate": 1.76e-06,
"loss": 73.5625,
"step": 177
},
{
"epoch": 1.78e-05,
"grad_norm": 12.986227989196777,
"learning_rate": 1.7700000000000002e-06,
"loss": 73.625,
"step": 178
},
{
"epoch": 1.79e-05,
"grad_norm": 13.07104778289795,
"learning_rate": 1.78e-06,
"loss": 73.5,
"step": 179
},
{
"epoch": 1.8e-05,
"grad_norm": 13.796416282653809,
"learning_rate": 1.79e-06,
"loss": 73.5,
"step": 180
},
{
"epoch": 1.81e-05,
"grad_norm": 13.549040794372559,
"learning_rate": 1.8e-06,
"loss": 73.5625,
"step": 181
},
{
"epoch": 1.82e-05,
"grad_norm": 13.482290267944336,
"learning_rate": 1.81e-06,
"loss": 73.5,
"step": 182
},
{
"epoch": 1.83e-05,
"grad_norm": 13.399819374084473,
"learning_rate": 1.8200000000000002e-06,
"loss": 73.5,
"step": 183
},
{
"epoch": 1.84e-05,
"grad_norm": 13.660837173461914,
"learning_rate": 1.83e-06,
"loss": 73.5625,
"step": 184
},
{
"epoch": 1.85e-05,
"grad_norm": 13.732992172241211,
"learning_rate": 1.8400000000000002e-06,
"loss": 73.5625,
"step": 185
},
{
"epoch": 1.86e-05,
"grad_norm": 13.626218795776367,
"learning_rate": 1.8499999999999999e-06,
"loss": 73.5625,
"step": 186
},
{
"epoch": 1.87e-05,
"grad_norm": 13.294013977050781,
"learning_rate": 1.86e-06,
"loss": 73.5,
"step": 187
},
{
"epoch": 1.88e-05,
"grad_norm": 12.994363784790039,
"learning_rate": 1.87e-06,
"loss": 73.5625,
"step": 188
},
{
"epoch": 1.89e-05,
"grad_norm": 13.95550537109375,
"learning_rate": 1.88e-06,
"loss": 73.5625,
"step": 189
},
{
"epoch": 1.9e-05,
"grad_norm": 13.144396781921387,
"learning_rate": 1.8900000000000001e-06,
"loss": 73.5,
"step": 190
},
{
"epoch": 1.91e-05,
"grad_norm": 12.912415504455566,
"learning_rate": 1.9e-06,
"loss": 73.5,
"step": 191
},
{
"epoch": 1.92e-05,
"grad_norm": 13.303735733032227,
"learning_rate": 1.91e-06,
"loss": 73.5625,
"step": 192
},
{
"epoch": 1.93e-05,
"grad_norm": 13.176830291748047,
"learning_rate": 1.9200000000000003e-06,
"loss": 73.5,
"step": 193
},
{
"epoch": 1.94e-05,
"grad_norm": 13.223044395446777,
"learning_rate": 1.93e-06,
"loss": 73.4375,
"step": 194
},
{
"epoch": 1.95e-05,
"grad_norm": 13.411727905273438,
"learning_rate": 1.94e-06,
"loss": 73.5,
"step": 195
},
{
"epoch": 1.96e-05,
"grad_norm": 13.083850860595703,
"learning_rate": 1.95e-06,
"loss": 73.5625,
"step": 196
},
{
"epoch": 1.97e-05,
"grad_norm": 13.732102394104004,
"learning_rate": 1.9600000000000003e-06,
"loss": 73.5625,
"step": 197
},
{
"epoch": 1.98e-05,
"grad_norm": 13.143880844116211,
"learning_rate": 1.9699999999999998e-06,
"loss": 73.4375,
"step": 198
},
{
"epoch": 1.99e-05,
"grad_norm": 13.069990158081055,
"learning_rate": 1.98e-06,
"loss": 73.4375,
"step": 199
},
{
"epoch": 2e-05,
"grad_norm": 13.723267555236816,
"learning_rate": 1.99e-06,
"loss": 73.5,
"step": 200
},
{
"epoch": 2.01e-05,
"grad_norm": 13.735518455505371,
"learning_rate": 2e-06,
"loss": 73.6875,
"step": 201
},
{
"epoch": 2.02e-05,
"grad_norm": 13.416829109191895,
"learning_rate": 2.0100000000000002e-06,
"loss": 73.5,
"step": 202
},
{
"epoch": 2.03e-05,
"grad_norm": 13.453069686889648,
"learning_rate": 2.0199999999999997e-06,
"loss": 73.5625,
"step": 203
},
{
"epoch": 2.04e-05,
"grad_norm": 13.582813262939453,
"learning_rate": 2.03e-06,
"loss": 73.5625,
"step": 204
},
{
"epoch": 2.05e-05,
"grad_norm": 13.080293655395508,
"learning_rate": 2.0400000000000004e-06,
"loss": 73.5,
"step": 205
},
{
"epoch": 2.06e-05,
"grad_norm": 13.556493759155273,
"learning_rate": 2.05e-06,
"loss": 73.5,
"step": 206
},
{
"epoch": 2.07e-05,
"grad_norm": 12.98852825164795,
"learning_rate": 2.06e-06,
"loss": 73.5625,
"step": 207
},
{
"epoch": 2.08e-05,
"grad_norm": 13.484770774841309,
"learning_rate": 2.07e-06,
"loss": 73.5,
"step": 208
},
{
"epoch": 2.09e-05,
"grad_norm": 13.057182312011719,
"learning_rate": 2.08e-06,
"loss": 73.5625,
"step": 209
},
{
"epoch": 2.1e-05,
"grad_norm": 13.500996589660645,
"learning_rate": 2.09e-06,
"loss": 73.5,
"step": 210
},
{
"epoch": 2.11e-05,
"grad_norm": 13.239738464355469,
"learning_rate": 2.1000000000000002e-06,
"loss": 73.5625,
"step": 211
},
{
"epoch": 2.12e-05,
"grad_norm": 13.07319450378418,
"learning_rate": 2.11e-06,
"loss": 73.5625,
"step": 212
},
{
"epoch": 2.13e-05,
"grad_norm": 13.956418991088867,
"learning_rate": 2.12e-06,
"loss": 73.5625,
"step": 213
},
{
"epoch": 2.14e-05,
"grad_norm": 13.399481773376465,
"learning_rate": 2.13e-06,
"loss": 73.4375,
"step": 214
},
{
"epoch": 2.15e-05,
"grad_norm": 13.294981002807617,
"learning_rate": 2.14e-06,
"loss": 73.5,
"step": 215
},
{
"epoch": 2.16e-05,
"grad_norm": 13.160533905029297,
"learning_rate": 2.15e-06,
"loss": 73.5,
"step": 216
},
{
"epoch": 2.17e-05,
"grad_norm": 13.458478927612305,
"learning_rate": 2.16e-06,
"loss": 73.4375,
"step": 217
},
{
"epoch": 2.18e-05,
"grad_norm": 13.246068000793457,
"learning_rate": 2.17e-06,
"loss": 73.5,
"step": 218
},
{
"epoch": 2.19e-05,
"grad_norm": 13.33403491973877,
"learning_rate": 2.1800000000000003e-06,
"loss": 73.5,
"step": 219
},
{
"epoch": 2.2e-05,
"grad_norm": 13.391315460205078,
"learning_rate": 2.1899999999999998e-06,
"loss": 73.5,
"step": 220
},
{
"epoch": 2.21e-05,
"grad_norm": 13.164493560791016,
"learning_rate": 2.2e-06,
"loss": 73.5,
"step": 221
},
{
"epoch": 2.22e-05,
"grad_norm": 13.39545726776123,
"learning_rate": 2.21e-06,
"loss": 73.5,
"step": 222
},
{
"epoch": 2.23e-05,
"grad_norm": 13.700584411621094,
"learning_rate": 2.22e-06,
"loss": 73.5,
"step": 223
},
{
"epoch": 2.24e-05,
"grad_norm": 12.7564115524292,
"learning_rate": 2.2300000000000002e-06,
"loss": 73.5,
"step": 224
},
{
"epoch": 2.25e-05,
"grad_norm": 13.187485694885254,
"learning_rate": 2.24e-06,
"loss": 73.5,
"step": 225
},
{
"epoch": 2.26e-05,
"grad_norm": 12.73024845123291,
"learning_rate": 2.25e-06,
"loss": 73.5,
"step": 226
},
{
"epoch": 2.27e-05,
"grad_norm": 13.395713806152344,
"learning_rate": 2.26e-06,
"loss": 73.4375,
"step": 227
},
{
"epoch": 2.28e-05,
"grad_norm": 13.149252891540527,
"learning_rate": 2.27e-06,
"loss": 73.5,
"step": 228
},
{
"epoch": 2.29e-05,
"grad_norm": 12.85241413116455,
"learning_rate": 2.28e-06,
"loss": 73.4375,
"step": 229
},
{
"epoch": 2.3e-05,
"grad_norm": 13.827969551086426,
"learning_rate": 2.29e-06,
"loss": 73.5,
"step": 230
},
{
"epoch": 2.31e-05,
"grad_norm": 13.54524040222168,
"learning_rate": 2.3e-06,
"loss": 73.5625,
"step": 231
},
{
"epoch": 2.32e-05,
"grad_norm": 12.822538375854492,
"learning_rate": 2.31e-06,
"loss": 73.375,
"step": 232
},
{
"epoch": 2.33e-05,
"grad_norm": 13.30932903289795,
"learning_rate": 2.3200000000000002e-06,
"loss": 73.5,
"step": 233
},
{
"epoch": 2.34e-05,
"grad_norm": 13.388375282287598,
"learning_rate": 2.33e-06,
"loss": 73.4375,
"step": 234
},
{
"epoch": 2.35e-05,
"grad_norm": 12.996811866760254,
"learning_rate": 2.34e-06,
"loss": 73.5,
"step": 235
},
{
"epoch": 2.36e-05,
"grad_norm": 12.992647171020508,
"learning_rate": 2.35e-06,
"loss": 73.5,
"step": 236
},
{
"epoch": 2.37e-05,
"grad_norm": 13.552807807922363,
"learning_rate": 2.36e-06,
"loss": 73.4375,
"step": 237
},
{
"epoch": 2.38e-05,
"grad_norm": 13.842035293579102,
"learning_rate": 2.37e-06,
"loss": 73.4375,
"step": 238
},
{
"epoch": 2.39e-05,
"grad_norm": 13.160901069641113,
"learning_rate": 2.38e-06,
"loss": 73.375,
"step": 239
},
{
"epoch": 2.4e-05,
"grad_norm": 13.619793891906738,
"learning_rate": 2.39e-06,
"loss": 73.4375,
"step": 240
},
{
"epoch": 2.41e-05,
"grad_norm": 13.451335906982422,
"learning_rate": 2.4000000000000003e-06,
"loss": 73.5,
"step": 241
},
{
"epoch": 2.42e-05,
"grad_norm": 13.234610557556152,
"learning_rate": 2.41e-06,
"loss": 73.5,
"step": 242
},
{
"epoch": 2.43e-05,
"grad_norm": 13.145569801330566,
"learning_rate": 2.42e-06,
"loss": 73.375,
"step": 243
},
{
"epoch": 2.44e-05,
"grad_norm": 13.658345222473145,
"learning_rate": 2.43e-06,
"loss": 73.4375,
"step": 244
},
{
"epoch": 2.45e-05,
"grad_norm": 13.420488357543945,
"learning_rate": 2.44e-06,
"loss": 73.5,
"step": 245
},
{
"epoch": 2.46e-05,
"grad_norm": 13.162190437316895,
"learning_rate": 2.4500000000000003e-06,
"loss": 73.4375,
"step": 246
},
{
"epoch": 2.47e-05,
"grad_norm": 13.59197998046875,
"learning_rate": 2.46e-06,
"loss": 73.5,
"step": 247
},
{
"epoch": 2.48e-05,
"grad_norm": 13.829290390014648,
"learning_rate": 2.47e-06,
"loss": 73.5625,
"step": 248
},
{
"epoch": 2.49e-05,
"grad_norm": 13.297616004943848,
"learning_rate": 2.48e-06,
"loss": 73.5,
"step": 249
},
{
"epoch": 2.5e-05,
"grad_norm": 13.626100540161133,
"learning_rate": 2.49e-06,
"loss": 73.4375,
"step": 250
},
{
"epoch": 2.51e-05,
"grad_norm": 13.326560020446777,
"learning_rate": 2.5e-06,
"loss": 73.5,
"step": 251
},
{
"epoch": 2.52e-05,
"grad_norm": 13.659133911132812,
"learning_rate": 2.51e-06,
"loss": 73.4375,
"step": 252
},
{
"epoch": 2.53e-05,
"grad_norm": 13.558892250061035,
"learning_rate": 2.52e-06,
"loss": 73.4375,
"step": 253
},
{
"epoch": 2.54e-05,
"grad_norm": 12.769811630249023,
"learning_rate": 2.53e-06,
"loss": 73.5,
"step": 254
},
{
"epoch": 2.55e-05,
"grad_norm": 13.625076293945312,
"learning_rate": 2.5400000000000002e-06,
"loss": 73.5,
"step": 255
},
{
"epoch": 2.56e-05,
"grad_norm": 13.309321403503418,
"learning_rate": 2.5499999999999997e-06,
"loss": 73.4375,
"step": 256
},
{
"epoch": 2.57e-05,
"grad_norm": 13.672002792358398,
"learning_rate": 2.56e-06,
"loss": 73.4375,
"step": 257
},
{
"epoch": 2.58e-05,
"grad_norm": 13.397356033325195,
"learning_rate": 2.5700000000000004e-06,
"loss": 73.5,
"step": 258
},
{
"epoch": 2.59e-05,
"grad_norm": 13.15006160736084,
"learning_rate": 2.58e-06,
"loss": 73.375,
"step": 259
},
{
"epoch": 2.6e-05,
"grad_norm": 12.989713668823242,
"learning_rate": 2.59e-06,
"loss": 73.5,
"step": 260
},
{
"epoch": 2.61e-05,
"grad_norm": 13.889228820800781,
"learning_rate": 2.6e-06,
"loss": 73.5,
"step": 261
},
{
"epoch": 2.62e-05,
"grad_norm": 13.123197555541992,
"learning_rate": 2.61e-06,
"loss": 73.375,
"step": 262
},
{
"epoch": 2.63e-05,
"grad_norm": 13.125618934631348,
"learning_rate": 2.6200000000000003e-06,
"loss": 73.4375,
"step": 263
},
{
"epoch": 2.64e-05,
"grad_norm": 13.075492858886719,
"learning_rate": 2.63e-06,
"loss": 73.4375,
"step": 264
},
{
"epoch": 2.65e-05,
"grad_norm": 13.195477485656738,
"learning_rate": 2.64e-06,
"loss": 73.25,
"step": 265
},
{
"epoch": 2.66e-05,
"grad_norm": 13.082707405090332,
"learning_rate": 2.65e-06,
"loss": 73.25,
"step": 266
},
{
"epoch": 2.67e-05,
"grad_norm": 12.753524780273438,
"learning_rate": 2.66e-06,
"loss": 73.4375,
"step": 267
},
{
"epoch": 2.68e-05,
"grad_norm": 13.304618835449219,
"learning_rate": 2.67e-06,
"loss": 73.4375,
"step": 268
},
{
"epoch": 2.69e-05,
"grad_norm": 13.050411224365234,
"learning_rate": 2.68e-06,
"loss": 73.4375,
"step": 269
},
{
"epoch": 2.7e-05,
"grad_norm": 13.13429069519043,
"learning_rate": 2.69e-06,
"loss": 73.375,
"step": 270
},
{
"epoch": 2.71e-05,
"grad_norm": 13.40479850769043,
"learning_rate": 2.7e-06,
"loss": 73.375,
"step": 271
},
{
"epoch": 2.72e-05,
"grad_norm": 13.249833106994629,
"learning_rate": 2.7100000000000003e-06,
"loss": 73.4375,
"step": 272
},
{
"epoch": 2.73e-05,
"grad_norm": 13.233908653259277,
"learning_rate": 2.72e-06,
"loss": 73.4375,
"step": 273
},
{
"epoch": 2.74e-05,
"grad_norm": 13.229077339172363,
"learning_rate": 2.73e-06,
"loss": 73.125,
"step": 274
},
{
"epoch": 2.75e-05,
"grad_norm": 13.314671516418457,
"learning_rate": 2.74e-06,
"loss": 73.3125,
"step": 275
},
{
"epoch": 2.76e-05,
"grad_norm": 13.741567611694336,
"learning_rate": 2.75e-06,
"loss": 73.5,
"step": 276
},
{
"epoch": 2.77e-05,
"grad_norm": 12.917357444763184,
"learning_rate": 2.7600000000000003e-06,
"loss": 73.4375,
"step": 277
},
{
"epoch": 2.78e-05,
"grad_norm": 13.566869735717773,
"learning_rate": 2.77e-06,
"loss": 73.5,
"step": 278
},
{
"epoch": 2.79e-05,
"grad_norm": 13.380074501037598,
"learning_rate": 2.78e-06,
"loss": 73.4375,
"step": 279
},
{
"epoch": 2.8e-05,
"grad_norm": 13.790977478027344,
"learning_rate": 2.7900000000000004e-06,
"loss": 73.3125,
"step": 280
},
{
"epoch": 2.81e-05,
"grad_norm": 13.409905433654785,
"learning_rate": 2.8e-06,
"loss": 73.375,
"step": 281
},
{
"epoch": 2.82e-05,
"grad_norm": 13.063041687011719,
"learning_rate": 2.81e-06,
"loss": 73.375,
"step": 282
},
{
"epoch": 2.83e-05,
"grad_norm": 13.631767272949219,
"learning_rate": 2.82e-06,
"loss": 73.375,
"step": 283
},
{
"epoch": 2.84e-05,
"grad_norm": 13.662729263305664,
"learning_rate": 2.83e-06,
"loss": 73.375,
"step": 284
},
{
"epoch": 2.85e-05,
"grad_norm": 13.42518424987793,
"learning_rate": 2.84e-06,
"loss": 73.4375,
"step": 285
},
{
"epoch": 2.86e-05,
"grad_norm": 13.8087739944458,
"learning_rate": 2.8500000000000002e-06,
"loss": 73.5,
"step": 286
},
{
"epoch": 2.87e-05,
"grad_norm": 13.71200180053711,
"learning_rate": 2.86e-06,
"loss": 73.3125,
"step": 287
},
{
"epoch": 2.88e-05,
"grad_norm": 13.234236717224121,
"learning_rate": 2.87e-06,
"loss": 73.4375,
"step": 288
},
{
"epoch": 2.89e-05,
"grad_norm": 13.711640357971191,
"learning_rate": 2.88e-06,
"loss": 73.375,
"step": 289
},
{
"epoch": 2.9e-05,
"grad_norm": 13.486573219299316,
"learning_rate": 2.89e-06,
"loss": 73.375,
"step": 290
},
{
"epoch": 2.91e-05,
"grad_norm": 13.30084228515625,
"learning_rate": 2.9e-06,
"loss": 73.4375,
"step": 291
},
{
"epoch": 2.92e-05,
"grad_norm": 13.337886810302734,
"learning_rate": 2.91e-06,
"loss": 73.375,
"step": 292
},
{
"epoch": 2.93e-05,
"grad_norm": 13.75003719329834,
"learning_rate": 2.92e-06,
"loss": 73.375,
"step": 293
},
{
"epoch": 2.94e-05,
"grad_norm": 14.156579971313477,
"learning_rate": 2.9300000000000003e-06,
"loss": 73.5,
"step": 294
},
{
"epoch": 2.95e-05,
"grad_norm": 13.412623405456543,
"learning_rate": 2.94e-06,
"loss": 73.125,
"step": 295
},
{
"epoch": 2.96e-05,
"grad_norm": 12.932133674621582,
"learning_rate": 2.9499999999999997e-06,
"loss": 73.25,
"step": 296
},
{
"epoch": 2.97e-05,
"grad_norm": 13.164972305297852,
"learning_rate": 2.96e-06,
"loss": 73.1875,
"step": 297
},
{
"epoch": 2.98e-05,
"grad_norm": 13.32223129272461,
"learning_rate": 2.97e-06,
"loss": 73.3125,
"step": 298
},
{
"epoch": 2.99e-05,
"grad_norm": 13.821455001831055,
"learning_rate": 2.98e-06,
"loss": 73.4375,
"step": 299
},
{
"epoch": 3e-05,
"grad_norm": 13.473308563232422,
"learning_rate": 2.99e-06,
"loss": 73.5,
"step": 300
},
{
"epoch": 3.01e-05,
"grad_norm": 13.666665077209473,
"learning_rate": 3e-06,
"loss": 73.3125,
"step": 301
},
{
"epoch": 3.02e-05,
"grad_norm": 13.532960891723633,
"learning_rate": 3.01e-06,
"loss": 73.1875,
"step": 302
},
{
"epoch": 3.03e-05,
"grad_norm": 13.009042739868164,
"learning_rate": 3.02e-06,
"loss": 73.375,
"step": 303
},
{
"epoch": 3.04e-05,
"grad_norm": 13.482487678527832,
"learning_rate": 3.0300000000000002e-06,
"loss": 73.3125,
"step": 304
},
{
"epoch": 3.05e-05,
"grad_norm": 13.215324401855469,
"learning_rate": 3.04e-06,
"loss": 73.1875,
"step": 305
},
{
"epoch": 3.06e-05,
"grad_norm": 13.712039947509766,
"learning_rate": 3.05e-06,
"loss": 73.3125,
"step": 306
},
{
"epoch": 3.07e-05,
"grad_norm": 13.075980186462402,
"learning_rate": 3.0600000000000003e-06,
"loss": 73.25,
"step": 307
},
{
"epoch": 3.08e-05,
"grad_norm": 13.958653450012207,
"learning_rate": 3.0700000000000003e-06,
"loss": 73.3125,
"step": 308
},
{
"epoch": 3.09e-05,
"grad_norm": 12.661245346069336,
"learning_rate": 3.0799999999999997e-06,
"loss": 73.25,
"step": 309
},
{
"epoch": 3.1e-05,
"grad_norm": 13.312789916992188,
"learning_rate": 3.0900000000000005e-06,
"loss": 73.125,
"step": 310
},
{
"epoch": 3.11e-05,
"grad_norm": 13.056986808776855,
"learning_rate": 3.1e-06,
"loss": 73.375,
"step": 311
},
{
"epoch": 3.12e-05,
"grad_norm": 13.239110946655273,
"learning_rate": 3.11e-06,
"loss": 73.25,
"step": 312
},
{
"epoch": 3.13e-05,
"grad_norm": 13.4395170211792,
"learning_rate": 3.1199999999999998e-06,
"loss": 73.375,
"step": 313
},
{
"epoch": 3.14e-05,
"grad_norm": 13.566024780273438,
"learning_rate": 3.13e-06,
"loss": 73.3125,
"step": 314
},
{
"epoch": 3.15e-05,
"grad_norm": 13.323553085327148,
"learning_rate": 3.14e-06,
"loss": 73.1875,
"step": 315
},
{
"epoch": 3.16e-05,
"grad_norm": 13.510903358459473,
"learning_rate": 3.15e-06,
"loss": 73.375,
"step": 316
},
{
"epoch": 3.17e-05,
"grad_norm": 13.496997833251953,
"learning_rate": 3.1600000000000002e-06,
"loss": 73.375,
"step": 317
},
{
"epoch": 3.18e-05,
"grad_norm": 12.994120597839355,
"learning_rate": 3.17e-06,
"loss": 73.1875,
"step": 318
},
{
"epoch": 3.19e-05,
"grad_norm": 13.158904075622559,
"learning_rate": 3.18e-06,
"loss": 73.1875,
"step": 319
},
{
"epoch": 3.2e-05,
"grad_norm": 12.74785327911377,
"learning_rate": 3.19e-06,
"loss": 73.25,
"step": 320
},
{
"epoch": 3.21e-05,
"grad_norm": 13.40980339050293,
"learning_rate": 3.2000000000000003e-06,
"loss": 73.25,
"step": 321
},
{
"epoch": 3.22e-05,
"grad_norm": 13.221853256225586,
"learning_rate": 3.21e-06,
"loss": 73.0625,
"step": 322
},
{
"epoch": 3.23e-05,
"grad_norm": 13.401768684387207,
"learning_rate": 3.2199999999999997e-06,
"loss": 73.1875,
"step": 323
},
{
"epoch": 3.24e-05,
"grad_norm": 12.98742961883545,
"learning_rate": 3.2300000000000004e-06,
"loss": 73.25,
"step": 324
},
{
"epoch": 3.25e-05,
"grad_norm": 13.389715194702148,
"learning_rate": 3.2400000000000003e-06,
"loss": 73.25,
"step": 325
},
{
"epoch": 3.26e-05,
"grad_norm": 13.654870986938477,
"learning_rate": 3.25e-06,
"loss": 73.125,
"step": 326
},
{
"epoch": 3.27e-05,
"grad_norm": 12.968194961547852,
"learning_rate": 3.2600000000000006e-06,
"loss": 73.25,
"step": 327
},
{
"epoch": 3.28e-05,
"grad_norm": 13.770217895507812,
"learning_rate": 3.27e-06,
"loss": 73.125,
"step": 328
},
{
"epoch": 3.29e-05,
"grad_norm": 13.335156440734863,
"learning_rate": 3.28e-06,
"loss": 73.25,
"step": 329
},
{
"epoch": 3.3e-05,
"grad_norm": 12.833738327026367,
"learning_rate": 3.29e-06,
"loss": 73.1875,
"step": 330
},
{
"epoch": 3.31e-05,
"grad_norm": 13.088351249694824,
"learning_rate": 3.3e-06,
"loss": 73.1875,
"step": 331
},
{
"epoch": 3.32e-05,
"grad_norm": 13.253767967224121,
"learning_rate": 3.31e-06,
"loss": 73.125,
"step": 332
},
{
"epoch": 3.33e-05,
"grad_norm": 13.729750633239746,
"learning_rate": 3.32e-06,
"loss": 73.0625,
"step": 333
},
{
"epoch": 3.34e-05,
"grad_norm": 13.646214485168457,
"learning_rate": 3.3300000000000003e-06,
"loss": 73.1875,
"step": 334
},
{
"epoch": 3.35e-05,
"grad_norm": 13.651313781738281,
"learning_rate": 3.34e-06,
"loss": 73.3125,
"step": 335
},
{
"epoch": 3.36e-05,
"grad_norm": 13.795608520507812,
"learning_rate": 3.35e-06,
"loss": 73.375,
"step": 336
},
{
"epoch": 3.37e-05,
"grad_norm": 13.318341255187988,
"learning_rate": 3.3599999999999996e-06,
"loss": 73.125,
"step": 337
},
{
"epoch": 3.38e-05,
"grad_norm": 13.053764343261719,
"learning_rate": 3.3700000000000003e-06,
"loss": 73.1875,
"step": 338
},
{
"epoch": 3.39e-05,
"grad_norm": 13.565455436706543,
"learning_rate": 3.3800000000000002e-06,
"loss": 73.1875,
"step": 339
},
{
"epoch": 3.4e-05,
"grad_norm": 13.372631072998047,
"learning_rate": 3.3899999999999997e-06,
"loss": 73.125,
"step": 340
},
{
"epoch": 3.41e-05,
"grad_norm": 13.144970893859863,
"learning_rate": 3.4000000000000005e-06,
"loss": 73.125,
"step": 341
},
{
"epoch": 3.42e-05,
"grad_norm": 13.336371421813965,
"learning_rate": 3.41e-06,
"loss": 73.1875,
"step": 342
},
{
"epoch": 3.43e-05,
"grad_norm": 13.389857292175293,
"learning_rate": 3.42e-06,
"loss": 73.0625,
"step": 343
},
{
"epoch": 3.44e-05,
"grad_norm": 13.658300399780273,
"learning_rate": 3.43e-06,
"loss": 73.1875,
"step": 344
},
{
"epoch": 3.45e-05,
"grad_norm": 13.079704284667969,
"learning_rate": 3.44e-06,
"loss": 73.125,
"step": 345
},
{
"epoch": 3.46e-05,
"grad_norm": 12.891420364379883,
"learning_rate": 3.45e-06,
"loss": 73.0625,
"step": 346
},
{
"epoch": 3.47e-05,
"grad_norm": 13.24589729309082,
"learning_rate": 3.46e-06,
"loss": 73.3125,
"step": 347
},
{
"epoch": 3.48e-05,
"grad_norm": 12.827505111694336,
"learning_rate": 3.4700000000000002e-06,
"loss": 73.1875,
"step": 348
},
{
"epoch": 3.49e-05,
"grad_norm": 13.241964340209961,
"learning_rate": 3.48e-06,
"loss": 73.125,
"step": 349
},
{
"epoch": 3.5e-05,
"grad_norm": 12.945211410522461,
"learning_rate": 3.49e-06,
"loss": 73.25,
"step": 350
},
{
"epoch": 3.51e-05,
"grad_norm": 13.305326461791992,
"learning_rate": 3.5000000000000004e-06,
"loss": 73.125,
"step": 351
},
{
"epoch": 3.52e-05,
"grad_norm": 13.148869514465332,
"learning_rate": 3.5100000000000003e-06,
"loss": 73.1875,
"step": 352
},
{
"epoch": 3.53e-05,
"grad_norm": 13.825087547302246,
"learning_rate": 3.52e-06,
"loss": 73.0625,
"step": 353
},
{
"epoch": 3.54e-05,
"grad_norm": 13.412226676940918,
"learning_rate": 3.5299999999999997e-06,
"loss": 73.1875,
"step": 354
},
{
"epoch": 3.55e-05,
"grad_norm": 12.815329551696777,
"learning_rate": 3.5400000000000004e-06,
"loss": 73.125,
"step": 355
},
{
"epoch": 3.56e-05,
"grad_norm": 13.162130355834961,
"learning_rate": 3.55e-06,
"loss": 73.125,
"step": 356
},
{
"epoch": 3.57e-05,
"grad_norm": 13.487090110778809,
"learning_rate": 3.56e-06,
"loss": 73.0625,
"step": 357
},
{
"epoch": 3.58e-05,
"grad_norm": 13.002684593200684,
"learning_rate": 3.57e-06,
"loss": 73.125,
"step": 358
},
{
"epoch": 3.59e-05,
"grad_norm": 13.55034065246582,
"learning_rate": 3.58e-06,
"loss": 73.125,
"step": 359
},
{
"epoch": 3.6e-05,
"grad_norm": 13.577582359313965,
"learning_rate": 3.59e-06,
"loss": 73.0625,
"step": 360
},
{
"epoch": 3.61e-05,
"grad_norm": 13.844265937805176,
"learning_rate": 3.6e-06,
"loss": 73.3125,
"step": 361
},
{
"epoch": 3.62e-05,
"grad_norm": 13.737598419189453,
"learning_rate": 3.61e-06,
"loss": 73.0,
"step": 362
},
{
"epoch": 3.63e-05,
"grad_norm": 12.993484497070312,
"learning_rate": 3.62e-06,
"loss": 73.0625,
"step": 363
},
{
"epoch": 3.64e-05,
"grad_norm": 13.497552871704102,
"learning_rate": 3.63e-06,
"loss": 73.1875,
"step": 364
},
{
"epoch": 3.65e-05,
"grad_norm": 13.50912857055664,
"learning_rate": 3.6400000000000003e-06,
"loss": 73.25,
"step": 365
},
{
"epoch": 3.66e-05,
"grad_norm": 13.057828903198242,
"learning_rate": 3.65e-06,
"loss": 73.0625,
"step": 366
},
{
"epoch": 3.67e-05,
"grad_norm": 13.317353248596191,
"learning_rate": 3.66e-06,
"loss": 73.125,
"step": 367
},
{
"epoch": 3.68e-05,
"grad_norm": 13.579947471618652,
"learning_rate": 3.6700000000000004e-06,
"loss": 73.125,
"step": 368
},
{
"epoch": 3.69e-05,
"grad_norm": 13.080233573913574,
"learning_rate": 3.6800000000000003e-06,
"loss": 73.125,
"step": 369
},
{
"epoch": 3.7e-05,
"grad_norm": 13.080486297607422,
"learning_rate": 3.69e-06,
"loss": 73.125,
"step": 370
},
{
"epoch": 3.71e-05,
"grad_norm": 13.311331748962402,
"learning_rate": 3.6999999999999997e-06,
"loss": 73.125,
"step": 371
},
{
"epoch": 3.72e-05,
"grad_norm": 13.139957427978516,
"learning_rate": 3.7100000000000005e-06,
"loss": 73.125,
"step": 372
},
{
"epoch": 3.73e-05,
"grad_norm": 13.586795806884766,
"learning_rate": 3.72e-06,
"loss": 73.25,
"step": 373
},
{
"epoch": 3.74e-05,
"grad_norm": 13.488346099853516,
"learning_rate": 3.73e-06,
"loss": 73.125,
"step": 374
},
{
"epoch": 3.75e-05,
"grad_norm": 12.536556243896484,
"learning_rate": 3.74e-06,
"loss": 73.0,
"step": 375
},
{
"epoch": 3.76e-05,
"grad_norm": 13.239697456359863,
"learning_rate": 3.75e-06,
"loss": 73.125,
"step": 376
},
{
"epoch": 3.77e-05,
"grad_norm": 13.392301559448242,
"learning_rate": 3.76e-06,
"loss": 73.0625,
"step": 377
},
{
"epoch": 3.78e-05,
"grad_norm": 13.62486743927002,
"learning_rate": 3.77e-06,
"loss": 73.125,
"step": 378
},
{
"epoch": 3.79e-05,
"grad_norm": 13.354044914245605,
"learning_rate": 3.7800000000000002e-06,
"loss": 73.125,
"step": 379
},
{
"epoch": 3.8e-05,
"grad_norm": 13.179764747619629,
"learning_rate": 3.79e-06,
"loss": 73.0625,
"step": 380
},
{
"epoch": 3.81e-05,
"grad_norm": 12.997714042663574,
"learning_rate": 3.8e-06,
"loss": 73.0,
"step": 381
},
{
"epoch": 3.82e-05,
"grad_norm": 13.98970890045166,
"learning_rate": 3.8100000000000004e-06,
"loss": 73.0,
"step": 382
},
{
"epoch": 3.83e-05,
"grad_norm": 12.927799224853516,
"learning_rate": 3.82e-06,
"loss": 73.125,
"step": 383
},
{
"epoch": 3.84e-05,
"grad_norm": 13.921652793884277,
"learning_rate": 3.83e-06,
"loss": 73.0,
"step": 384
},
{
"epoch": 3.85e-05,
"grad_norm": 13.006847381591797,
"learning_rate": 3.8400000000000005e-06,
"loss": 73.0,
"step": 385
},
{
"epoch": 3.86e-05,
"grad_norm": 13.084640502929688,
"learning_rate": 3.85e-06,
"loss": 73.0625,
"step": 386
},
{
"epoch": 3.87e-05,
"grad_norm": 12.917254447937012,
"learning_rate": 3.86e-06,
"loss": 73.0625,
"step": 387
},
{
"epoch": 3.88e-05,
"grad_norm": 13.406126022338867,
"learning_rate": 3.87e-06,
"loss": 73.0625,
"step": 388
},
{
"epoch": 3.89e-05,
"grad_norm": 13.393180847167969,
"learning_rate": 3.88e-06,
"loss": 73.0,
"step": 389
},
{
"epoch": 3.9e-05,
"grad_norm": 13.257850646972656,
"learning_rate": 3.89e-06,
"loss": 73.0,
"step": 390
},
{
"epoch": 3.91e-05,
"grad_norm": 13.004161834716797,
"learning_rate": 3.9e-06,
"loss": 73.0,
"step": 391
},
{
"epoch": 3.92e-05,
"grad_norm": 13.656088829040527,
"learning_rate": 3.910000000000001e-06,
"loss": 73.0625,
"step": 392
},
{
"epoch": 3.93e-05,
"grad_norm": 13.50597095489502,
"learning_rate": 3.920000000000001e-06,
"loss": 73.0625,
"step": 393
},
{
"epoch": 3.94e-05,
"grad_norm": 13.430299758911133,
"learning_rate": 3.93e-06,
"loss": 73.0,
"step": 394
},
{
"epoch": 3.95e-05,
"grad_norm": 13.662679672241211,
"learning_rate": 3.9399999999999995e-06,
"loss": 73.125,
"step": 395
},
{
"epoch": 3.96e-05,
"grad_norm": 13.804597854614258,
"learning_rate": 3.95e-06,
"loss": 73.0625,
"step": 396
},
{
"epoch": 3.97e-05,
"grad_norm": 13.883934020996094,
"learning_rate": 3.96e-06,
"loss": 73.0625,
"step": 397
},
{
"epoch": 3.98e-05,
"grad_norm": 14.236750602722168,
"learning_rate": 3.97e-06,
"loss": 73.0,
"step": 398
},
{
"epoch": 3.99e-05,
"grad_norm": 13.254412651062012,
"learning_rate": 3.98e-06,
"loss": 73.0,
"step": 399
},
{
"epoch": 4e-05,
"grad_norm": 12.98829460144043,
"learning_rate": 3.99e-06,
"loss": 72.9375,
"step": 400
},
{
"epoch": 4.01e-05,
"grad_norm": 12.85839557647705,
"learning_rate": 4e-06,
"loss": 73.0625,
"step": 401
},
{
"epoch": 4.02e-05,
"grad_norm": 13.245451927185059,
"learning_rate": 4.01e-06,
"loss": 73.1875,
"step": 402
},
{
"epoch": 4.03e-05,
"grad_norm": 13.241745948791504,
"learning_rate": 4.0200000000000005e-06,
"loss": 73.0,
"step": 403
},
{
"epoch": 4.04e-05,
"grad_norm": 13.75320053100586,
"learning_rate": 4.03e-06,
"loss": 73.0,
"step": 404
},
{
"epoch": 4.05e-05,
"grad_norm": 13.509017944335938,
"learning_rate": 4.0399999999999994e-06,
"loss": 73.0,
"step": 405
},
{
"epoch": 4.06e-05,
"grad_norm": 13.018754005432129,
"learning_rate": 4.05e-06,
"loss": 73.0,
"step": 406
},
{
"epoch": 4.07e-05,
"grad_norm": 13.013233184814453,
"learning_rate": 4.06e-06,
"loss": 73.0,
"step": 407
},
{
"epoch": 4.08e-05,
"grad_norm": 13.415353775024414,
"learning_rate": 4.07e-06,
"loss": 73.0,
"step": 408
},
{
"epoch": 4.09e-05,
"grad_norm": 13.008545875549316,
"learning_rate": 4.080000000000001e-06,
"loss": 73.0,
"step": 409
},
{
"epoch": 4.1e-05,
"grad_norm": 13.839768409729004,
"learning_rate": 4.09e-06,
"loss": 73.0,
"step": 410
},
{
"epoch": 4.11e-05,
"grad_norm": 12.9841947555542,
"learning_rate": 4.1e-06,
"loss": 73.0,
"step": 411
},
{
"epoch": 4.12e-05,
"grad_norm": 13.155670166015625,
"learning_rate": 4.11e-06,
"loss": 73.0,
"step": 412
},
{
"epoch": 4.13e-05,
"grad_norm": 13.276348114013672,
"learning_rate": 4.12e-06,
"loss": 72.9375,
"step": 413
},
{
"epoch": 4.14e-05,
"grad_norm": 13.74177360534668,
"learning_rate": 4.13e-06,
"loss": 73.0,
"step": 414
},
{
"epoch": 4.15e-05,
"grad_norm": 13.342119216918945,
"learning_rate": 4.14e-06,
"loss": 73.0,
"step": 415
},
{
"epoch": 4.16e-05,
"grad_norm": 13.325584411621094,
"learning_rate": 4.15e-06,
"loss": 73.0,
"step": 416
},
{
"epoch": 4.17e-05,
"grad_norm": 13.430407524108887,
"learning_rate": 4.16e-06,
"loss": 73.0,
"step": 417
},
{
"epoch": 4.18e-05,
"grad_norm": 12.990144729614258,
"learning_rate": 4.17e-06,
"loss": 73.0,
"step": 418
},
{
"epoch": 4.19e-05,
"grad_norm": 13.573437690734863,
"learning_rate": 4.18e-06,
"loss": 73.0,
"step": 419
},
{
"epoch": 4.2e-05,
"grad_norm": 13.521942138671875,
"learning_rate": 4.1900000000000005e-06,
"loss": 73.0625,
"step": 420
},
{
"epoch": 4.21e-05,
"grad_norm": 13.434293746948242,
"learning_rate": 4.2000000000000004e-06,
"loss": 73.0,
"step": 421
},
{
"epoch": 4.22e-05,
"grad_norm": 13.26038932800293,
"learning_rate": 4.2099999999999995e-06,
"loss": 73.0,
"step": 422
},
{
"epoch": 4.23e-05,
"grad_norm": 13.418600082397461,
"learning_rate": 4.22e-06,
"loss": 73.0,
"step": 423
},
{
"epoch": 4.24e-05,
"grad_norm": 13.086088180541992,
"learning_rate": 4.23e-06,
"loss": 72.9375,
"step": 424
},
{
"epoch": 4.25e-05,
"grad_norm": 13.842316627502441,
"learning_rate": 4.24e-06,
"loss": 73.0,
"step": 425
},
{
"epoch": 4.26e-05,
"grad_norm": 13.62370777130127,
"learning_rate": 4.250000000000001e-06,
"loss": 72.9375,
"step": 426
},
{
"epoch": 4.27e-05,
"grad_norm": 13.237730026245117,
"learning_rate": 4.26e-06,
"loss": 72.875,
"step": 427
},
{
"epoch": 4.28e-05,
"grad_norm": 13.425525665283203,
"learning_rate": 4.27e-06,
"loss": 72.9375,
"step": 428
},
{
"epoch": 4.29e-05,
"grad_norm": 13.190109252929688,
"learning_rate": 4.28e-06,
"loss": 73.0,
"step": 429
},
{
"epoch": 4.3e-05,
"grad_norm": 12.757128715515137,
"learning_rate": 4.2900000000000004e-06,
"loss": 73.0,
"step": 430
},
{
"epoch": 4.31e-05,
"grad_norm": 13.181131362915039,
"learning_rate": 4.3e-06,
"loss": 72.9375,
"step": 431
},
{
"epoch": 4.32e-05,
"grad_norm": 13.78049087524414,
"learning_rate": 4.31e-06,
"loss": 72.8125,
"step": 432
},
{
"epoch": 4.33e-05,
"grad_norm": 13.295443534851074,
"learning_rate": 4.32e-06,
"loss": 72.875,
"step": 433
},
{
"epoch": 4.34e-05,
"grad_norm": 13.687934875488281,
"learning_rate": 4.33e-06,
"loss": 72.875,
"step": 434
},
{
"epoch": 4.35e-05,
"grad_norm": 13.669114112854004,
"learning_rate": 4.34e-06,
"loss": 72.75,
"step": 435
},
{
"epoch": 4.36e-05,
"grad_norm": 13.830716133117676,
"learning_rate": 4.35e-06,
"loss": 72.9375,
"step": 436
},
{
"epoch": 4.37e-05,
"grad_norm": 13.447793960571289,
"learning_rate": 4.360000000000001e-06,
"loss": 72.6875,
"step": 437
},
{
"epoch": 4.38e-05,
"grad_norm": 13.497492790222168,
"learning_rate": 4.37e-06,
"loss": 72.8125,
"step": 438
},
{
"epoch": 4.39e-05,
"grad_norm": 13.606331825256348,
"learning_rate": 4.3799999999999996e-06,
"loss": 73.0,
"step": 439
},
{
"epoch": 4.4e-05,
"grad_norm": 13.782898902893066,
"learning_rate": 4.39e-06,
"loss": 72.875,
"step": 440
},
{
"epoch": 4.41e-05,
"grad_norm": 13.709412574768066,
"learning_rate": 4.4e-06,
"loss": 72.875,
"step": 441
},
{
"epoch": 4.42e-05,
"grad_norm": 13.186978340148926,
"learning_rate": 4.41e-06,
"loss": 72.6875,
"step": 442
},
{
"epoch": 4.43e-05,
"grad_norm": 13.370944023132324,
"learning_rate": 4.42e-06,
"loss": 72.875,
"step": 443
},
{
"epoch": 4.44e-05,
"grad_norm": 13.442479133605957,
"learning_rate": 4.43e-06,
"loss": 72.9375,
"step": 444
},
{
"epoch": 4.45e-05,
"grad_norm": 13.615199089050293,
"learning_rate": 4.44e-06,
"loss": 72.75,
"step": 445
},
{
"epoch": 4.46e-05,
"grad_norm": 13.638628005981445,
"learning_rate": 4.45e-06,
"loss": 72.6875,
"step": 446
},
{
"epoch": 4.47e-05,
"grad_norm": 13.779143333435059,
"learning_rate": 4.4600000000000005e-06,
"loss": 72.625,
"step": 447
},
{
"epoch": 4.48e-05,
"grad_norm": 13.44798469543457,
"learning_rate": 4.47e-06,
"loss": 72.625,
"step": 448
},
{
"epoch": 4.49e-05,
"grad_norm": 13.578337669372559,
"learning_rate": 4.48e-06,
"loss": 72.8125,
"step": 449
},
{
"epoch": 4.5e-05,
"grad_norm": 14.065568923950195,
"learning_rate": 4.49e-06,
"loss": 72.75,
"step": 450
},
{
"epoch": 4.51e-05,
"grad_norm": 13.366068840026855,
"learning_rate": 4.5e-06,
"loss": 72.625,
"step": 451
},
{
"epoch": 4.52e-05,
"grad_norm": 13.885690689086914,
"learning_rate": 4.51e-06,
"loss": 72.5625,
"step": 452
},
{
"epoch": 4.53e-05,
"grad_norm": 13.551187515258789,
"learning_rate": 4.52e-06,
"loss": 72.6875,
"step": 453
},
{
"epoch": 4.54e-05,
"grad_norm": 13.381160736083984,
"learning_rate": 4.530000000000001e-06,
"loss": 72.625,
"step": 454
},
{
"epoch": 4.55e-05,
"grad_norm": 13.608617782592773,
"learning_rate": 4.54e-06,
"loss": 72.6875,
"step": 455
},
{
"epoch": 4.56e-05,
"grad_norm": 13.639422416687012,
"learning_rate": 4.55e-06,
"loss": 72.625,
"step": 456
},
{
"epoch": 4.57e-05,
"grad_norm": 13.869024276733398,
"learning_rate": 4.56e-06,
"loss": 72.5,
"step": 457
},
{
"epoch": 4.58e-05,
"grad_norm": 13.874001502990723,
"learning_rate": 4.57e-06,
"loss": 72.625,
"step": 458
},
{
"epoch": 4.59e-05,
"grad_norm": 13.535733222961426,
"learning_rate": 4.58e-06,
"loss": 72.5,
"step": 459
},
{
"epoch": 4.6e-05,
"grad_norm": 13.238990783691406,
"learning_rate": 4.59e-06,
"loss": 72.5,
"step": 460
},
{
"epoch": 4.61e-05,
"grad_norm": 13.706893920898438,
"learning_rate": 4.6e-06,
"loss": 72.5,
"step": 461
},
{
"epoch": 4.62e-05,
"grad_norm": 13.469103813171387,
"learning_rate": 4.61e-06,
"loss": 72.5625,
"step": 462
},
{
"epoch": 4.63e-05,
"grad_norm": 13.195649147033691,
"learning_rate": 4.62e-06,
"loss": 72.5,
"step": 463
},
{
"epoch": 4.64e-05,
"grad_norm": 13.530961990356445,
"learning_rate": 4.6300000000000006e-06,
"loss": 72.5625,
"step": 464
},
{
"epoch": 4.65e-05,
"grad_norm": 13.53665542602539,
"learning_rate": 4.6400000000000005e-06,
"loss": 72.5,
"step": 465
},
{
"epoch": 4.66e-05,
"grad_norm": 12.728484153747559,
"learning_rate": 4.6499999999999995e-06,
"loss": 72.5625,
"step": 466
},
{
"epoch": 4.67e-05,
"grad_norm": 13.709749221801758,
"learning_rate": 4.66e-06,
"loss": 72.5,
"step": 467
},
{
"epoch": 4.68e-05,
"grad_norm": 13.372309684753418,
"learning_rate": 4.67e-06,
"loss": 72.5625,
"step": 468
},
{
"epoch": 4.69e-05,
"grad_norm": 13.057953834533691,
"learning_rate": 4.68e-06,
"loss": 72.5,
"step": 469
},
{
"epoch": 4.7e-05,
"grad_norm": 13.628046989440918,
"learning_rate": 4.69e-06,
"loss": 72.4375,
"step": 470
},
{
"epoch": 4.71e-05,
"grad_norm": 13.151557922363281,
"learning_rate": 4.7e-06,
"loss": 72.5,
"step": 471
},
{
"epoch": 4.72e-05,
"grad_norm": 13.463614463806152,
"learning_rate": 4.71e-06,
"loss": 72.5625,
"step": 472
},
{
"epoch": 4.73e-05,
"grad_norm": 13.199589729309082,
"learning_rate": 4.72e-06,
"loss": 72.5,
"step": 473
},
{
"epoch": 4.74e-05,
"grad_norm": 13.053608894348145,
"learning_rate": 4.7300000000000005e-06,
"loss": 72.5,
"step": 474
},
{
"epoch": 4.75e-05,
"grad_norm": 13.356788635253906,
"learning_rate": 4.74e-06,
"loss": 72.5,
"step": 475
},
{
"epoch": 4.76e-05,
"grad_norm": 13.800188064575195,
"learning_rate": 4.75e-06,
"loss": 72.5,
"step": 476
},
{
"epoch": 4.77e-05,
"grad_norm": 13.535609245300293,
"learning_rate": 4.76e-06,
"loss": 72.4375,
"step": 477
},
{
"epoch": 4.78e-05,
"grad_norm": 12.961105346679688,
"learning_rate": 4.77e-06,
"loss": 72.5,
"step": 478
},
{
"epoch": 4.79e-05,
"grad_norm": 13.4580659866333,
"learning_rate": 4.78e-06,
"loss": 72.5,
"step": 479
},
{
"epoch": 4.8e-05,
"grad_norm": 13.220132827758789,
"learning_rate": 4.79e-06,
"loss": 72.5,
"step": 480
},
{
"epoch": 4.81e-05,
"grad_norm": 13.80113697052002,
"learning_rate": 4.800000000000001e-06,
"loss": 72.375,
"step": 481
},
{
"epoch": 4.82e-05,
"grad_norm": 13.622671127319336,
"learning_rate": 4.8100000000000005e-06,
"loss": 72.5,
"step": 482
},
{
"epoch": 4.83e-05,
"grad_norm": 13.397513389587402,
"learning_rate": 4.82e-06,
"loss": 72.5,
"step": 483
},
{
"epoch": 4.84e-05,
"grad_norm": 14.030313491821289,
"learning_rate": 4.83e-06,
"loss": 72.4375,
"step": 484
},
{
"epoch": 4.85e-05,
"grad_norm": 13.639060020446777,
"learning_rate": 4.84e-06,
"loss": 72.4375,
"step": 485
},
{
"epoch": 4.86e-05,
"grad_norm": 13.650350570678711,
"learning_rate": 4.85e-06,
"loss": 72.3125,
"step": 486
},
{
"epoch": 4.87e-05,
"grad_norm": 13.899084091186523,
"learning_rate": 4.86e-06,
"loss": 72.3125,
"step": 487
},
{
"epoch": 4.88e-05,
"grad_norm": 13.809762001037598,
"learning_rate": 4.87e-06,
"loss": 72.375,
"step": 488
},
{
"epoch": 4.89e-05,
"grad_norm": 13.373944282531738,
"learning_rate": 4.88e-06,
"loss": 72.25,
"step": 489
},
{
"epoch": 4.9e-05,
"grad_norm": 13.971879005432129,
"learning_rate": 4.89e-06,
"loss": 72.4375,
"step": 490
},
{
"epoch": 4.91e-05,
"grad_norm": 13.291943550109863,
"learning_rate": 4.9000000000000005e-06,
"loss": 72.4375,
"step": 491
},
{
"epoch": 4.92e-05,
"grad_norm": 13.483219146728516,
"learning_rate": 4.9100000000000004e-06,
"loss": 72.375,
"step": 492
},
{
"epoch": 4.93e-05,
"grad_norm": 13.614572525024414,
"learning_rate": 4.92e-06,
"loss": 72.375,
"step": 493
},
{
"epoch": 4.94e-05,
"grad_norm": 13.22291374206543,
"learning_rate": 4.929999999999999e-06,
"loss": 72.25,
"step": 494
},
{
"epoch": 4.95e-05,
"grad_norm": 13.640573501586914,
"learning_rate": 4.94e-06,
"loss": 72.375,
"step": 495
},
{
"epoch": 4.96e-05,
"grad_norm": 12.912464141845703,
"learning_rate": 4.95e-06,
"loss": 72.1875,
"step": 496
},
{
"epoch": 4.97e-05,
"grad_norm": 13.510415077209473,
"learning_rate": 4.96e-06,
"loss": 72.25,
"step": 497
},
{
"epoch": 4.98e-05,
"grad_norm": 13.389180183410645,
"learning_rate": 4.970000000000001e-06,
"loss": 72.125,
"step": 498
},
{
"epoch": 4.99e-05,
"grad_norm": 13.56809139251709,
"learning_rate": 4.98e-06,
"loss": 72.25,
"step": 499
},
{
"epoch": 5e-05,
"grad_norm": 13.981008529663086,
"learning_rate": 4.99e-06,
"loss": 72.0625,
"step": 500
},
{
"epoch": 5e-05,
"eval_loss": 9.004996299743652,
"eval_runtime": 364.2712,
"eval_samples_per_second": 27.452,
"eval_steps_per_second": 1.716,
"step": 500
}
],
"logging_steps": 1,
"max_steps": 10000000,
"num_input_tokens_seen": 0,
"num_train_epochs": 9223372036854775807,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}