{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 915,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.003278688524590164,
"grad_norm": 58.999969482421875,
"learning_rate": 0.0,
"loss": 3.8125,
"step": 1
},
{
"epoch": 0.006557377049180328,
"grad_norm": 56.5999870300293,
"learning_rate": 3.5714285714285716e-07,
"loss": 3.8984,
"step": 2
},
{
"epoch": 0.009836065573770493,
"grad_norm": 58.442317962646484,
"learning_rate": 7.142857142857143e-07,
"loss": 3.8359,
"step": 3
},
{
"epoch": 0.013114754098360656,
"grad_norm": 59.13441467285156,
"learning_rate": 1.0714285714285714e-06,
"loss": 3.7422,
"step": 4
},
{
"epoch": 0.01639344262295082,
"grad_norm": 59.55280685424805,
"learning_rate": 1.4285714285714286e-06,
"loss": 3.7383,
"step": 5
},
{
"epoch": 0.019672131147540985,
"grad_norm": 57.113956451416016,
"learning_rate": 1.7857142857142859e-06,
"loss": 3.8281,
"step": 6
},
{
"epoch": 0.022950819672131147,
"grad_norm": 44.69753646850586,
"learning_rate": 2.1428571428571427e-06,
"loss": 3.6562,
"step": 7
},
{
"epoch": 0.02622950819672131,
"grad_norm": 42.74599075317383,
"learning_rate": 2.5e-06,
"loss": 3.668,
"step": 8
},
{
"epoch": 0.029508196721311476,
"grad_norm": 27.557815551757812,
"learning_rate": 2.8571428571428573e-06,
"loss": 3.3438,
"step": 9
},
{
"epoch": 0.03278688524590164,
"grad_norm": 25.87570571899414,
"learning_rate": 3.2142857142857147e-06,
"loss": 3.3633,
"step": 10
},
{
"epoch": 0.036065573770491806,
"grad_norm": 25.204158782958984,
"learning_rate": 3.5714285714285718e-06,
"loss": 3.2852,
"step": 11
},
{
"epoch": 0.03934426229508197,
"grad_norm": 25.082277297973633,
"learning_rate": 3.928571428571429e-06,
"loss": 3.2188,
"step": 12
},
{
"epoch": 0.04262295081967213,
"grad_norm": 21.52399253845215,
"learning_rate": 4.2857142857142855e-06,
"loss": 3.1016,
"step": 13
},
{
"epoch": 0.04590163934426229,
"grad_norm": 21.302213668823242,
"learning_rate": 4.642857142857144e-06,
"loss": 3.0938,
"step": 14
},
{
"epoch": 0.04918032786885246,
"grad_norm": 17.48920249938965,
"learning_rate": 5e-06,
"loss": 3.0703,
"step": 15
},
{
"epoch": 0.05245901639344262,
"grad_norm": 14.723689079284668,
"learning_rate": 5.357142857142857e-06,
"loss": 2.7812,
"step": 16
},
{
"epoch": 0.05573770491803279,
"grad_norm": 13.425204277038574,
"learning_rate": 5.7142857142857145e-06,
"loss": 2.8398,
"step": 17
},
{
"epoch": 0.05901639344262295,
"grad_norm": 12.121846199035645,
"learning_rate": 6.071428571428571e-06,
"loss": 2.7539,
"step": 18
},
{
"epoch": 0.06229508196721312,
"grad_norm": 12.97847843170166,
"learning_rate": 6.4285714285714295e-06,
"loss": 2.6719,
"step": 19
},
{
"epoch": 0.06557377049180328,
"grad_norm": 12.588919639587402,
"learning_rate": 6.785714285714287e-06,
"loss": 2.6758,
"step": 20
},
{
"epoch": 0.06885245901639345,
"grad_norm": 10.492659568786621,
"learning_rate": 7.1428571428571436e-06,
"loss": 2.6562,
"step": 21
},
{
"epoch": 0.07213114754098361,
"grad_norm": 9.676924705505371,
"learning_rate": 7.500000000000001e-06,
"loss": 2.6094,
"step": 22
},
{
"epoch": 0.07540983606557378,
"grad_norm": 10.222829818725586,
"learning_rate": 7.857142857142858e-06,
"loss": 2.6211,
"step": 23
},
{
"epoch": 0.07868852459016394,
"grad_norm": 10.020594596862793,
"learning_rate": 8.214285714285714e-06,
"loss": 2.5273,
"step": 24
},
{
"epoch": 0.08196721311475409,
"grad_norm": 9.404995918273926,
"learning_rate": 8.571428571428571e-06,
"loss": 2.5352,
"step": 25
},
{
"epoch": 0.08524590163934426,
"grad_norm": 10.194731712341309,
"learning_rate": 8.92857142857143e-06,
"loss": 2.543,
"step": 26
},
{
"epoch": 0.08852459016393442,
"grad_norm": 9.045639991760254,
"learning_rate": 9.285714285714288e-06,
"loss": 2.4961,
"step": 27
},
{
"epoch": 0.09180327868852459,
"grad_norm": 9.108716011047363,
"learning_rate": 9.642857142857144e-06,
"loss": 2.4336,
"step": 28
},
{
"epoch": 0.09508196721311475,
"grad_norm": 9.336297988891602,
"learning_rate": 1e-05,
"loss": 2.457,
"step": 29
},
{
"epoch": 0.09836065573770492,
"grad_norm": 8.170738220214844,
"learning_rate": 9.999968638844173e-06,
"loss": 2.4688,
"step": 30
},
{
"epoch": 0.10163934426229508,
"grad_norm": 8.507989883422852,
"learning_rate": 9.999874555770099e-06,
"loss": 2.4062,
"step": 31
},
{
"epoch": 0.10491803278688525,
"grad_norm": 8.38835334777832,
"learning_rate": 9.999717751958002e-06,
"loss": 2.3711,
"step": 32
},
{
"epoch": 0.10819672131147541,
"grad_norm": 8.36460018157959,
"learning_rate": 9.9994982293749e-06,
"loss": 2.4805,
"step": 33
},
{
"epoch": 0.11147540983606558,
"grad_norm": 8.547989845275879,
"learning_rate": 9.999215990774584e-06,
"loss": 2.3906,
"step": 34
},
{
"epoch": 0.11475409836065574,
"grad_norm": 8.938812255859375,
"learning_rate": 9.998871039697589e-06,
"loss": 2.3906,
"step": 35
},
{
"epoch": 0.1180327868852459,
"grad_norm": 8.230469703674316,
"learning_rate": 9.998463380471138e-06,
"loss": 2.3164,
"step": 36
},
{
"epoch": 0.12131147540983607,
"grad_norm": 7.883755683898926,
"learning_rate": 9.997993018209098e-06,
"loss": 2.3633,
"step": 37
},
{
"epoch": 0.12459016393442623,
"grad_norm": 8.220748901367188,
"learning_rate": 9.997459958811911e-06,
"loss": 2.3516,
"step": 38
},
{
"epoch": 0.12786885245901639,
"grad_norm": 8.738327980041504,
"learning_rate": 9.996864208966522e-06,
"loss": 2.3398,
"step": 39
},
{
"epoch": 0.13114754098360656,
"grad_norm": 7.944454669952393,
"learning_rate": 9.996205776146288e-06,
"loss": 2.3867,
"step": 40
},
{
"epoch": 0.13442622950819672,
"grad_norm": 7.631208419799805,
"learning_rate": 9.995484668610897e-06,
"loss": 2.3242,
"step": 41
},
{
"epoch": 0.1377049180327869,
"grad_norm": 9.301526069641113,
"learning_rate": 9.994700895406258e-06,
"loss": 2.3203,
"step": 42
},
{
"epoch": 0.14098360655737704,
"grad_norm": 7.625834941864014,
"learning_rate": 9.99385446636438e-06,
"loss": 2.3398,
"step": 43
},
{
"epoch": 0.14426229508196722,
"grad_norm": 7.908705234527588,
"learning_rate": 9.992945392103264e-06,
"loss": 2.3477,
"step": 44
},
{
"epoch": 0.14754098360655737,
"grad_norm": 8.002193450927734,
"learning_rate": 9.991973684026755e-06,
"loss": 2.2852,
"step": 45
},
{
"epoch": 0.15081967213114755,
"grad_norm": 8.058524131774902,
"learning_rate": 9.99093935432441e-06,
"loss": 2.3711,
"step": 46
},
{
"epoch": 0.1540983606557377,
"grad_norm": 8.803773880004883,
"learning_rate": 9.989842415971338e-06,
"loss": 2.2344,
"step": 47
},
{
"epoch": 0.15737704918032788,
"grad_norm": 7.851852893829346,
"learning_rate": 9.988682882728043e-06,
"loss": 2.3047,
"step": 48
},
{
"epoch": 0.16065573770491803,
"grad_norm": 8.29660415649414,
"learning_rate": 9.987460769140242e-06,
"loss": 2.3047,
"step": 49
},
{
"epoch": 0.16393442622950818,
"grad_norm": 7.779457092285156,
"learning_rate": 9.986176090538697e-06,
"loss": 2.1992,
"step": 50
},
{
"epoch": 0.16721311475409836,
"grad_norm": 8.602660179138184,
"learning_rate": 9.984828863039008e-06,
"loss": 2.25,
"step": 51
},
{
"epoch": 0.17049180327868851,
"grad_norm": 8.079516410827637,
"learning_rate": 9.983419103541421e-06,
"loss": 2.3203,
"step": 52
},
{
"epoch": 0.1737704918032787,
"grad_norm": 8.443635940551758,
"learning_rate": 9.981946829730611e-06,
"loss": 2.3242,
"step": 53
},
{
"epoch": 0.17704918032786884,
"grad_norm": 7.873276710510254,
"learning_rate": 9.980412060075459e-06,
"loss": 2.2656,
"step": 54
},
{
"epoch": 0.18032786885245902,
"grad_norm": 8.633429527282715,
"learning_rate": 9.978814813828827e-06,
"loss": 2.2227,
"step": 55
},
{
"epoch": 0.18360655737704917,
"grad_norm": 9.135279655456543,
"learning_rate": 9.97715511102731e-06,
"loss": 2.2148,
"step": 56
},
{
"epoch": 0.18688524590163935,
"grad_norm": 7.584745407104492,
"learning_rate": 9.975432972490985e-06,
"loss": 2.2773,
"step": 57
},
{
"epoch": 0.1901639344262295,
"grad_norm": 9.086216926574707,
"learning_rate": 9.973648419823161e-06,
"loss": 2.2656,
"step": 58
},
{
"epoch": 0.19344262295081968,
"grad_norm": 8.223559379577637,
"learning_rate": 9.971801475410084e-06,
"loss": 2.2773,
"step": 59
},
{
"epoch": 0.19672131147540983,
"grad_norm": 8.726387023925781,
"learning_rate": 9.969892162420682e-06,
"loss": 2.3125,
"step": 60
},
{
"epoch": 0.2,
"grad_norm": 8.304938316345215,
"learning_rate": 9.96792050480626e-06,
"loss": 2.168,
"step": 61
},
{
"epoch": 0.20327868852459016,
"grad_norm": 8.20266342163086,
"learning_rate": 9.965886527300201e-06,
"loss": 2.25,
"step": 62
},
{
"epoch": 0.20655737704918034,
"grad_norm": 7.833676815032959,
"learning_rate": 9.963790255417663e-06,
"loss": 2.2422,
"step": 63
},
{
"epoch": 0.2098360655737705,
"grad_norm": 7.891449928283691,
"learning_rate": 9.961631715455245e-06,
"loss": 2.1758,
"step": 64
},
{
"epoch": 0.21311475409836064,
"grad_norm": 8.999512672424316,
"learning_rate": 9.959410934490673e-06,
"loss": 2.1484,
"step": 65
},
{
"epoch": 0.21639344262295082,
"grad_norm": 7.718410015106201,
"learning_rate": 9.95712794038245e-06,
"loss": 2.1172,
"step": 66
},
{
"epoch": 0.21967213114754097,
"grad_norm": 8.272541999816895,
"learning_rate": 9.954782761769509e-06,
"loss": 2.25,
"step": 67
},
{
"epoch": 0.22295081967213115,
"grad_norm": 9.607975006103516,
"learning_rate": 9.952375428070853e-06,
"loss": 2.2812,
"step": 68
},
{
"epoch": 0.2262295081967213,
"grad_norm": 8.297865867614746,
"learning_rate": 9.949905969485192e-06,
"loss": 2.1406,
"step": 69
},
{
"epoch": 0.22950819672131148,
"grad_norm": 8.192915916442871,
"learning_rate": 9.947374416990554e-06,
"loss": 2.2109,
"step": 70
},
{
"epoch": 0.23278688524590163,
"grad_norm": 8.489333152770996,
"learning_rate": 9.944780802343906e-06,
"loss": 2.1914,
"step": 71
},
{
"epoch": 0.2360655737704918,
"grad_norm": 8.244175910949707,
"learning_rate": 9.942125158080747e-06,
"loss": 2.1797,
"step": 72
},
{
"epoch": 0.23934426229508196,
"grad_norm": 8.009477615356445,
"learning_rate": 9.939407517514709e-06,
"loss": 2.2578,
"step": 73
},
{
"epoch": 0.24262295081967214,
"grad_norm": 8.284704208374023,
"learning_rate": 9.936627914737129e-06,
"loss": 2.2305,
"step": 74
},
{
"epoch": 0.2459016393442623,
"grad_norm": 9.479103088378906,
"learning_rate": 9.933786384616631e-06,
"loss": 2.1406,
"step": 75
},
{
"epoch": 0.24918032786885247,
"grad_norm": 8.558723449707031,
"learning_rate": 9.930882962798683e-06,
"loss": 2.2812,
"step": 76
},
{
"epoch": 0.25245901639344265,
"grad_norm": 8.608718872070312,
"learning_rate": 9.927917685705148e-06,
"loss": 2.2461,
"step": 77
},
{
"epoch": 0.25573770491803277,
"grad_norm": 7.863804817199707,
"learning_rate": 9.924890590533837e-06,
"loss": 2.1914,
"step": 78
},
{
"epoch": 0.25901639344262295,
"grad_norm": 8.842366218566895,
"learning_rate": 9.921801715258027e-06,
"loss": 2.0996,
"step": 79
},
{
"epoch": 0.26229508196721313,
"grad_norm": 8.273311614990234,
"learning_rate": 9.918651098626e-06,
"loss": 2.1641,
"step": 80
},
{
"epoch": 0.26557377049180325,
"grad_norm": 8.011392593383789,
"learning_rate": 9.915438780160547e-06,
"loss": 2.168,
"step": 81
},
{
"epoch": 0.26885245901639343,
"grad_norm": 7.946706771850586,
"learning_rate": 9.912164800158474e-06,
"loss": 2.2227,
"step": 82
},
{
"epoch": 0.2721311475409836,
"grad_norm": 9.032291412353516,
"learning_rate": 9.908829199690106e-06,
"loss": 2.2891,
"step": 83
},
{
"epoch": 0.2754098360655738,
"grad_norm": 8.33333969116211,
"learning_rate": 9.905432020598751e-06,
"loss": 2.2344,
"step": 84
},
{
"epoch": 0.2786885245901639,
"grad_norm": 7.967901229858398,
"learning_rate": 9.901973305500197e-06,
"loss": 2.1172,
"step": 85
},
{
"epoch": 0.2819672131147541,
"grad_norm": 8.24918270111084,
"learning_rate": 9.898453097782164e-06,
"loss": 2.1602,
"step": 86
},
{
"epoch": 0.28524590163934427,
"grad_norm": 8.877111434936523,
"learning_rate": 9.894871441603766e-06,
"loss": 2.2383,
"step": 87
},
{
"epoch": 0.28852459016393445,
"grad_norm": 8.32304573059082,
"learning_rate": 9.891228381894954e-06,
"loss": 2.1719,
"step": 88
},
{
"epoch": 0.29180327868852457,
"grad_norm": 9.091806411743164,
"learning_rate": 9.887523964355953e-06,
"loss": 2.168,
"step": 89
},
{
"epoch": 0.29508196721311475,
"grad_norm": 8.54736614227295,
"learning_rate": 9.88375823545669e-06,
"loss": 2.1211,
"step": 90
},
{
"epoch": 0.2983606557377049,
"grad_norm": 9.3049955368042,
"learning_rate": 9.879931242436208e-06,
"loss": 2.1719,
"step": 91
},
{
"epoch": 0.3016393442622951,
"grad_norm": 8.523479461669922,
"learning_rate": 9.876043033302079e-06,
"loss": 2.1719,
"step": 92
},
{
"epoch": 0.30491803278688523,
"grad_norm": 7.741846561431885,
"learning_rate": 9.872093656829792e-06,
"loss": 2.0391,
"step": 93
},
{
"epoch": 0.3081967213114754,
"grad_norm": 8.36119270324707,
"learning_rate": 9.868083162562155e-06,
"loss": 2.0938,
"step": 94
},
{
"epoch": 0.3114754098360656,
"grad_norm": 8.163023948669434,
"learning_rate": 9.864011600808663e-06,
"loss": 2.0938,
"step": 95
},
{
"epoch": 0.31475409836065577,
"grad_norm": 8.827863693237305,
"learning_rate": 9.859879022644865e-06,
"loss": 2.1406,
"step": 96
},
{
"epoch": 0.3180327868852459,
"grad_norm": 8.434041023254395,
"learning_rate": 9.855685479911736e-06,
"loss": 2.168,
"step": 97
},
{
"epoch": 0.32131147540983607,
"grad_norm": 8.144036293029785,
"learning_rate": 9.851431025215012e-06,
"loss": 2.0742,
"step": 98
},
{
"epoch": 0.32459016393442625,
"grad_norm": 8.843683242797852,
"learning_rate": 9.847115711924542e-06,
"loss": 2.1016,
"step": 99
},
{
"epoch": 0.32786885245901637,
"grad_norm": 8.300407409667969,
"learning_rate": 9.842739594173607e-06,
"loss": 2.1953,
"step": 100
},
{
"epoch": 0.33114754098360655,
"grad_norm": 8.825713157653809,
"learning_rate": 9.838302726858258e-06,
"loss": 2.1406,
"step": 101
},
{
"epoch": 0.3344262295081967,
"grad_norm": 8.763128280639648,
"learning_rate": 9.833805165636603e-06,
"loss": 2.1523,
"step": 102
},
{
"epoch": 0.3377049180327869,
"grad_norm": 8.53940486907959,
"learning_rate": 9.829246966928135e-06,
"loss": 2.0703,
"step": 103
},
{
"epoch": 0.34098360655737703,
"grad_norm": 8.830872535705566,
"learning_rate": 9.824628187913001e-06,
"loss": 2.1602,
"step": 104
},
{
"epoch": 0.3442622950819672,
"grad_norm": 8.062509536743164,
"learning_rate": 9.819948886531305e-06,
"loss": 2.1211,
"step": 105
},
{
"epoch": 0.3475409836065574,
"grad_norm": 8.8229398727417,
"learning_rate": 9.815209121482363e-06,
"loss": 2.1172,
"step": 106
},
{
"epoch": 0.35081967213114756,
"grad_norm": 9.78320598602295,
"learning_rate": 9.810408952223982e-06,
"loss": 2.0586,
"step": 107
},
{
"epoch": 0.3540983606557377,
"grad_norm": 8.267208099365234,
"learning_rate": 9.805548438971702e-06,
"loss": 2.1406,
"step": 108
},
{
"epoch": 0.35737704918032787,
"grad_norm": 10.9806489944458,
"learning_rate": 9.80062764269805e-06,
"loss": 2.1035,
"step": 109
},
{
"epoch": 0.36065573770491804,
"grad_norm": 9.699831008911133,
"learning_rate": 9.795646625131771e-06,
"loss": 2.1797,
"step": 110
},
{
"epoch": 0.3639344262295082,
"grad_norm": 10.373594284057617,
"learning_rate": 9.790605448757049e-06,
"loss": 2.1445,
"step": 111
},
{
"epoch": 0.36721311475409835,
"grad_norm": 8.51201343536377,
"learning_rate": 9.785504176812733e-06,
"loss": 2.1875,
"step": 112
},
{
"epoch": 0.3704918032786885,
"grad_norm": 10.090012550354004,
"learning_rate": 9.780342873291535e-06,
"loss": 2.2109,
"step": 113
},
{
"epoch": 0.3737704918032787,
"grad_norm": 8.939981460571289,
"learning_rate": 9.775121602939234e-06,
"loss": 2.0273,
"step": 114
},
{
"epoch": 0.3770491803278688,
"grad_norm": 8.556815147399902,
"learning_rate": 9.76984043125386e-06,
"loss": 2.002,
"step": 115
},
{
"epoch": 0.380327868852459,
"grad_norm": 9.271822929382324,
"learning_rate": 9.764499424484873e-06,
"loss": 2.0742,
"step": 116
},
{
"epoch": 0.3836065573770492,
"grad_norm": 9.027301788330078,
"learning_rate": 9.759098649632326e-06,
"loss": 2.1133,
"step": 117
},
{
"epoch": 0.38688524590163936,
"grad_norm": 9.81689453125,
"learning_rate": 9.753638174446042e-06,
"loss": 2.0645,
"step": 118
},
{
"epoch": 0.3901639344262295,
"grad_norm": 9.42857551574707,
"learning_rate": 9.748118067424742e-06,
"loss": 2.1172,
"step": 119
},
{
"epoch": 0.39344262295081966,
"grad_norm": 8.636916160583496,
"learning_rate": 9.742538397815203e-06,
"loss": 2.0859,
"step": 120
},
{
"epoch": 0.39672131147540984,
"grad_norm": 9.01211166381836,
"learning_rate": 9.73689923561138e-06,
"loss": 2.0605,
"step": 121
},
{
"epoch": 0.4,
"grad_norm": 8.320965766906738,
"learning_rate": 9.73120065155353e-06,
"loss": 2.0352,
"step": 122
},
{
"epoch": 0.40327868852459015,
"grad_norm": 8.969964027404785,
"learning_rate": 9.725442717127324e-06,
"loss": 2.1055,
"step": 123
},
{
"epoch": 0.4065573770491803,
"grad_norm": 7.825037002563477,
"learning_rate": 9.719625504562959e-06,
"loss": 2.1133,
"step": 124
},
{
"epoch": 0.4098360655737705,
"grad_norm": 9.594252586364746,
"learning_rate": 9.713749086834234e-06,
"loss": 2.1367,
"step": 125
},
{
"epoch": 0.4131147540983607,
"grad_norm": 9.286437034606934,
"learning_rate": 9.707813537657652e-06,
"loss": 2.0293,
"step": 126
},
{
"epoch": 0.4163934426229508,
"grad_norm": 11.219290733337402,
"learning_rate": 9.701818931491485e-06,
"loss": 2.1992,
"step": 127
},
{
"epoch": 0.419672131147541,
"grad_norm": 8.125706672668457,
"learning_rate": 9.695765343534846e-06,
"loss": 2.0273,
"step": 128
},
{
"epoch": 0.42295081967213116,
"grad_norm": 9.294842720031738,
"learning_rate": 9.689652849726739e-06,
"loss": 2.082,
"step": 129
},
{
"epoch": 0.4262295081967213,
"grad_norm": 8.297966957092285,
"learning_rate": 9.683481526745114e-06,
"loss": 2.084,
"step": 130
},
{
"epoch": 0.42950819672131146,
"grad_norm": 9.68375015258789,
"learning_rate": 9.677251452005897e-06,
"loss": 2.0996,
"step": 131
},
{
"epoch": 0.43278688524590164,
"grad_norm": 8.27182388305664,
"learning_rate": 9.670962703662032e-06,
"loss": 2.0312,
"step": 132
},
{
"epoch": 0.4360655737704918,
"grad_norm": 9.681833267211914,
"learning_rate": 9.664615360602478e-06,
"loss": 2.0547,
"step": 133
},
{
"epoch": 0.43934426229508194,
"grad_norm": 8.213553428649902,
"learning_rate": 9.658209502451245e-06,
"loss": 1.9746,
"step": 134
},
{
"epoch": 0.4426229508196721,
"grad_norm": 9.513401985168457,
"learning_rate": 9.651745209566379e-06,
"loss": 2.0625,
"step": 135
},
{
"epoch": 0.4459016393442623,
"grad_norm": 9.364225387573242,
"learning_rate": 9.645222563038959e-06,
"loss": 2.0078,
"step": 136
},
{
"epoch": 0.4491803278688525,
"grad_norm": 8.659600257873535,
"learning_rate": 9.638641644692077e-06,
"loss": 2.0469,
"step": 137
},
{
"epoch": 0.4524590163934426,
"grad_norm": 9.638226509094238,
"learning_rate": 9.632002537079816e-06,
"loss": 2.0391,
"step": 138
},
{
"epoch": 0.4557377049180328,
"grad_norm": 9.299017906188965,
"learning_rate": 9.625305323486214e-06,
"loss": 2.0508,
"step": 139
},
{
"epoch": 0.45901639344262296,
"grad_norm": 9.940561294555664,
"learning_rate": 9.61855008792421e-06,
"loss": 2.082,
"step": 140
},
{
"epoch": 0.46229508196721314,
"grad_norm": 9.618109703063965,
"learning_rate": 9.611736915134605e-06,
"loss": 1.998,
"step": 141
},
{
"epoch": 0.46557377049180326,
"grad_norm": 8.827125549316406,
"learning_rate": 9.604865890584987e-06,
"loss": 2.1055,
"step": 142
},
{
"epoch": 0.46885245901639344,
"grad_norm": 10.079780578613281,
"learning_rate": 9.597937100468668e-06,
"loss": 2.0859,
"step": 143
},
{
"epoch": 0.4721311475409836,
"grad_norm": 9.007691383361816,
"learning_rate": 9.590950631703589e-06,
"loss": 2.125,
"step": 144
},
{
"epoch": 0.47540983606557374,
"grad_norm": 9.498886108398438,
"learning_rate": 9.583906571931248e-06,
"loss": 2.1172,
"step": 145
},
{
"epoch": 0.4786885245901639,
"grad_norm": 10.918264389038086,
"learning_rate": 9.576805009515588e-06,
"loss": 2.0508,
"step": 146
},
{
"epoch": 0.4819672131147541,
"grad_norm": 8.792062759399414,
"learning_rate": 9.569646033541888e-06,
"loss": 2.1484,
"step": 147
},
{
"epoch": 0.4852459016393443,
"grad_norm": 9.490158081054688,
"learning_rate": 9.562429733815655e-06,
"loss": 2.1133,
"step": 148
},
{
"epoch": 0.4885245901639344,
"grad_norm": 9.344221115112305,
"learning_rate": 9.55515620086149e-06,
"loss": 2.0684,
"step": 149
},
{
"epoch": 0.4918032786885246,
"grad_norm": 10.885297775268555,
"learning_rate": 9.54782552592195e-06,
"loss": 2.0957,
"step": 150
},
{
"epoch": 0.49508196721311476,
"grad_norm": 8.593646049499512,
"learning_rate": 9.540437800956412e-06,
"loss": 2.0625,
"step": 151
},
{
"epoch": 0.49836065573770494,
"grad_norm": 8.966227531433105,
"learning_rate": 9.532993118639915e-06,
"loss": 2.0,
"step": 152
},
{
"epoch": 0.5016393442622951,
"grad_norm": 10.044122695922852,
"learning_rate": 9.525491572361995e-06,
"loss": 2.0176,
"step": 153
},
{
"epoch": 0.5049180327868853,
"grad_norm": 8.53621768951416,
"learning_rate": 9.517933256225516e-06,
"loss": 2.1094,
"step": 154
},
{
"epoch": 0.5081967213114754,
"grad_norm": 10.106084823608398,
"learning_rate": 9.510318265045488e-06,
"loss": 2.0195,
"step": 155
},
{
"epoch": 0.5114754098360655,
"grad_norm": 9.872212409973145,
"learning_rate": 9.502646694347886e-06,
"loss": 2.1055,
"step": 156
},
{
"epoch": 0.5147540983606558,
"grad_norm": 8.695504188537598,
"learning_rate": 9.494918640368435e-06,
"loss": 1.9961,
"step": 157
},
{
"epoch": 0.5180327868852459,
"grad_norm": 9.583698272705078,
"learning_rate": 9.487134200051422e-06,
"loss": 2.0801,
"step": 158
},
{
"epoch": 0.521311475409836,
"grad_norm": 9.81774616241455,
"learning_rate": 9.479293471048461e-06,
"loss": 2.043,
"step": 159
},
{
"epoch": 0.5245901639344263,
"grad_norm": 8.542972564697266,
"learning_rate": 9.471396551717283e-06,
"loss": 1.9922,
"step": 160
},
{
"epoch": 0.5278688524590164,
"grad_norm": 9.894759178161621,
"learning_rate": 9.463443541120496e-06,
"loss": 2.0469,
"step": 161
},
{
"epoch": 0.5311475409836065,
"grad_norm": 9.396486282348633,
"learning_rate": 9.45543453902434e-06,
"loss": 1.9688,
"step": 162
},
{
"epoch": 0.5344262295081967,
"grad_norm": 9.620316505432129,
"learning_rate": 9.447369645897441e-06,
"loss": 2.0391,
"step": 163
},
{
"epoch": 0.5377049180327869,
"grad_norm": 10.236593246459961,
"learning_rate": 9.439248962909547e-06,
"loss": 1.957,
"step": 164
},
{
"epoch": 0.5409836065573771,
"grad_norm": 11.980149269104004,
"learning_rate": 9.431072591930261e-06,
"loss": 2.0293,
"step": 165
},
{
"epoch": 0.5442622950819672,
"grad_norm": 14.74476146697998,
"learning_rate": 9.42284063552776e-06,
"loss": 1.9844,
"step": 166
},
{
"epoch": 0.5475409836065573,
"grad_norm": 9.34021282196045,
"learning_rate": 9.414553196967508e-06,
"loss": 2.0703,
"step": 167
},
{
"epoch": 0.5508196721311476,
"grad_norm": 12.805119514465332,
"learning_rate": 9.406210380210971e-06,
"loss": 2.0117,
"step": 168
},
{
"epoch": 0.5540983606557377,
"grad_norm": 9.839883804321289,
"learning_rate": 9.397812289914297e-06,
"loss": 1.9727,
"step": 169
},
{
"epoch": 0.5573770491803278,
"grad_norm": 14.596024513244629,
"learning_rate": 9.389359031427013e-06,
"loss": 2.0742,
"step": 170
},
{
"epoch": 0.5606557377049181,
"grad_norm": 13.544656753540039,
"learning_rate": 9.380850710790701e-06,
"loss": 2.0742,
"step": 171
},
{
"epoch": 0.5639344262295082,
"grad_norm": 9.45289421081543,
"learning_rate": 9.372287434737673e-06,
"loss": 2.0586,
"step": 172
},
{
"epoch": 0.5672131147540984,
"grad_norm": 10.174077987670898,
"learning_rate": 9.363669310689617e-06,
"loss": 2.1367,
"step": 173
},
{
"epoch": 0.5704918032786885,
"grad_norm": 12.690667152404785,
"learning_rate": 9.354996446756268e-06,
"loss": 1.9395,
"step": 174
},
{
"epoch": 0.5737704918032787,
"grad_norm": 10.625876426696777,
"learning_rate": 9.346268951734042e-06,
"loss": 2.0215,
"step": 175
},
{
"epoch": 0.5770491803278689,
"grad_norm": 11.856694221496582,
"learning_rate": 9.337486935104672e-06,
"loss": 1.9766,
"step": 176
},
{
"epoch": 0.580327868852459,
"grad_norm": 10.2452974319458,
"learning_rate": 9.328650507033832e-06,
"loss": 2.0586,
"step": 177
},
{
"epoch": 0.5836065573770491,
"grad_norm": 12.020367622375488,
"learning_rate": 9.319759778369764e-06,
"loss": 2.0352,
"step": 178
},
{
"epoch": 0.5868852459016394,
"grad_norm": 11.364824295043945,
"learning_rate": 9.310814860641874e-06,
"loss": 2.0352,
"step": 179
},
{
"epoch": 0.5901639344262295,
"grad_norm": 12.439066886901855,
"learning_rate": 9.301815866059353e-06,
"loss": 2.0703,
"step": 180
},
{
"epoch": 0.5934426229508196,
"grad_norm": 12.66345500946045,
"learning_rate": 9.292762907509742e-06,
"loss": 1.9922,
"step": 181
},
{
"epoch": 0.5967213114754099,
"grad_norm": 11.868645668029785,
"learning_rate": 9.283656098557545e-06,
"loss": 1.9863,
"step": 182
},
{
"epoch": 0.6,
"grad_norm": 11.235689163208008,
"learning_rate": 9.27449555344278e-06,
"loss": 2.0195,
"step": 183
},
{
"epoch": 0.6032786885245902,
"grad_norm": 10.719315528869629,
"learning_rate": 9.26528138707956e-06,
"loss": 1.9883,
"step": 184
},
{
"epoch": 0.6065573770491803,
"grad_norm": 11.960482597351074,
"learning_rate": 9.256013715054648e-06,
"loss": 1.9082,
"step": 185
},
{
"epoch": 0.6098360655737705,
"grad_norm": 9.54014778137207,
"learning_rate": 9.24669265362601e-06,
"loss": 2.0156,
"step": 186
},
{
"epoch": 0.6131147540983607,
"grad_norm": 10.753348350524902,
"learning_rate": 9.237318319721346e-06,
"loss": 1.9746,
"step": 187
},
{
"epoch": 0.6163934426229508,
"grad_norm": 9.75783634185791,
"learning_rate": 9.227890830936634e-06,
"loss": 2.0195,
"step": 188
},
{
"epoch": 0.6196721311475409,
"grad_norm": 15.201295852661133,
"learning_rate": 9.218410305534657e-06,
"loss": 2.0117,
"step": 189
},
{
"epoch": 0.6229508196721312,
"grad_norm": 10.927349090576172,
"learning_rate": 9.208876862443504e-06,
"loss": 1.9316,
"step": 190
},
{
"epoch": 0.6262295081967213,
"grad_norm": 13.822660446166992,
"learning_rate": 9.199290621255095e-06,
"loss": 2.0371,
"step": 191
},
{
"epoch": 0.6295081967213115,
"grad_norm": 13.426775932312012,
"learning_rate": 9.189651702223672e-06,
"loss": 2.0273,
"step": 192
},
{
"epoch": 0.6327868852459017,
"grad_norm": 9.90354061126709,
"learning_rate": 9.179960226264288e-06,
"loss": 1.957,
"step": 193
},
{
"epoch": 0.6360655737704918,
"grad_norm": 20.340970993041992,
"learning_rate": 9.170216314951301e-06,
"loss": 2.0488,
"step": 194
},
{
"epoch": 0.639344262295082,
"grad_norm": 9.366877555847168,
"learning_rate": 9.16042009051684e-06,
"loss": 1.9688,
"step": 195
},
{
"epoch": 0.6426229508196721,
"grad_norm": 14.084966659545898,
"learning_rate": 9.150571675849273e-06,
"loss": 2.0938,
"step": 196
},
{
"epoch": 0.6459016393442623,
"grad_norm": 16.520889282226562,
"learning_rate": 9.140671194491666e-06,
"loss": 2.0508,
"step": 197
},
{
"epoch": 0.6491803278688525,
"grad_norm": 13.539576530456543,
"learning_rate": 9.130718770640235e-06,
"loss": 1.9922,
"step": 198
},
{
"epoch": 0.6524590163934426,
"grad_norm": 14.922486305236816,
"learning_rate": 9.120714529142784e-06,
"loss": 2.0371,
"step": 199
},
{
"epoch": 0.6557377049180327,
"grad_norm": 12.108205795288086,
"learning_rate": 9.110658595497148e-06,
"loss": 1.9902,
"step": 200
},
{
"epoch": 0.659016393442623,
"grad_norm": 10.225913047790527,
"learning_rate": 9.100551095849603e-06,
"loss": 1.8574,
"step": 201
},
{
"epoch": 0.6622950819672131,
"grad_norm": 10.973288536071777,
"learning_rate": 9.090392156993301e-06,
"loss": 1.9902,
"step": 202
},
{
"epoch": 0.6655737704918033,
"grad_norm": 12.73608684539795,
"learning_rate": 9.080181906366668e-06,
"loss": 1.9551,
"step": 203
},
{
"epoch": 0.6688524590163935,
"grad_norm": 11.886429786682129,
"learning_rate": 9.069920472051804e-06,
"loss": 1.957,
"step": 204
},
{
"epoch": 0.6721311475409836,
"grad_norm": 13.453667640686035,
"learning_rate": 9.05960798277289e-06,
"loss": 1.9512,
"step": 205
},
{
"epoch": 0.6754098360655738,
"grad_norm": 19.135602951049805,
"learning_rate": 9.04924456789456e-06,
"loss": 1.9688,
"step": 206
},
{
"epoch": 0.6786885245901639,
"grad_norm": 16.149311065673828,
"learning_rate": 9.038830357420276e-06,
"loss": 1.9688,
"step": 207
},
{
"epoch": 0.6819672131147541,
"grad_norm": 14.444845199584961,
"learning_rate": 9.028365481990713e-06,
"loss": 2.0625,
"step": 208
},
{
"epoch": 0.6852459016393443,
"grad_norm": 12.08676528930664,
"learning_rate": 9.017850072882107e-06,
"loss": 1.9121,
"step": 209
},
{
"epoch": 0.6885245901639344,
"grad_norm": 11.756418228149414,
"learning_rate": 9.007284262004608e-06,
"loss": 1.9102,
"step": 210
},
{
"epoch": 0.6918032786885245,
"grad_norm": 12.56296443939209,
"learning_rate": 8.996668181900636e-06,
"loss": 2.0312,
"step": 211
},
{
"epoch": 0.6950819672131148,
"grad_norm": 10.218338966369629,
"learning_rate": 8.986001965743205e-06,
"loss": 1.9629,
"step": 212
},
{
"epoch": 0.6983606557377049,
"grad_norm": 9.9441556930542,
"learning_rate": 8.975285747334267e-06,
"loss": 1.9844,
"step": 213
},
{
"epoch": 0.7016393442622951,
"grad_norm": 10.612693786621094,
"learning_rate": 8.964519661103014e-06,
"loss": 1.9746,
"step": 214
},
{
"epoch": 0.7049180327868853,
"grad_norm": 10.07176399230957,
"learning_rate": 8.953703842104212e-06,
"loss": 1.9531,
"step": 215
},
{
"epoch": 0.7081967213114754,
"grad_norm": 9.502864837646484,
"learning_rate": 8.942838426016496e-06,
"loss": 1.9492,
"step": 216
},
{
"epoch": 0.7114754098360656,
"grad_norm": 9.91391372680664,
"learning_rate": 8.931923549140668e-06,
"loss": 1.9844,
"step": 217
},
{
"epoch": 0.7147540983606557,
"grad_norm": 9.780139923095703,
"learning_rate": 8.920959348397988e-06,
"loss": 1.9473,
"step": 218
},
{
"epoch": 0.7180327868852459,
"grad_norm": 15.030618667602539,
"learning_rate": 8.909945961328462e-06,
"loss": 1.9922,
"step": 219
},
{
"epoch": 0.7213114754098361,
"grad_norm": 10.924345016479492,
"learning_rate": 8.898883526089108e-06,
"loss": 2.0195,
"step": 220
},
{
"epoch": 0.7245901639344262,
"grad_norm": 10.839616775512695,
"learning_rate": 8.887772181452228e-06,
"loss": 2.0234,
"step": 221
},
{
"epoch": 0.7278688524590164,
"grad_norm": 11.212786674499512,
"learning_rate": 8.876612066803666e-06,
"loss": 1.9727,
"step": 222
},
{
"epoch": 0.7311475409836066,
"grad_norm": 9.742143630981445,
"learning_rate": 8.86540332214106e-06,
"loss": 1.9844,
"step": 223
},
{
"epoch": 0.7344262295081967,
"grad_norm": 12.224595069885254,
"learning_rate": 8.854146088072087e-06,
"loss": 2.0566,
"step": 224
},
{
"epoch": 0.7377049180327869,
"grad_norm": 9.907976150512695,
"learning_rate": 8.842840505812691e-06,
"loss": 2.0215,
"step": 225
},
{
"epoch": 0.740983606557377,
"grad_norm": 10.605637550354004,
"learning_rate": 8.83148671718533e-06,
"loss": 1.959,
"step": 226
},
{
"epoch": 0.7442622950819672,
"grad_norm": 10.371489524841309,
"learning_rate": 8.82008486461717e-06,
"loss": 1.9492,
"step": 227
},
{
"epoch": 0.7475409836065574,
"grad_norm": 13.147061347961426,
"learning_rate": 8.808635091138327e-06,
"loss": 1.9375,
"step": 228
},
{
"epoch": 0.7508196721311475,
"grad_norm": 13.442245483398438,
"learning_rate": 8.79713754038005e-06,
"loss": 1.9219,
"step": 229
},
{
"epoch": 0.7540983606557377,
"grad_norm": 10.87454891204834,
"learning_rate": 8.785592356572931e-06,
"loss": 1.9609,
"step": 230
},
{
"epoch": 0.7573770491803279,
"grad_norm": 12.752360343933105,
"learning_rate": 8.773999684545096e-06,
"loss": 1.9414,
"step": 231
},
{
"epoch": 0.760655737704918,
"grad_norm": 10.660215377807617,
"learning_rate": 8.76235966972038e-06,
"loss": 1.9395,
"step": 232
},
{
"epoch": 0.7639344262295082,
"grad_norm": 13.825894355773926,
"learning_rate": 8.750672458116513e-06,
"loss": 1.9102,
"step": 233
},
{
"epoch": 0.7672131147540984,
"grad_norm": 11.399506568908691,
"learning_rate": 8.738938196343278e-06,
"loss": 2.002,
"step": 234
},
{
"epoch": 0.7704918032786885,
"grad_norm": 10.732869148254395,
"learning_rate": 8.72715703160068e-06,
"loss": 1.8887,
"step": 235
},
{
"epoch": 0.7737704918032787,
"grad_norm": 13.854918479919434,
"learning_rate": 8.715329111677102e-06,
"loss": 1.9922,
"step": 236
},
{
"epoch": 0.7770491803278688,
"grad_norm": 12.330432891845703,
"learning_rate": 8.703454584947433e-06,
"loss": 1.9395,
"step": 237
},
{
"epoch": 0.780327868852459,
"grad_norm": 9.50846004486084,
"learning_rate": 8.691533600371227e-06,
"loss": 1.8965,
"step": 238
},
{
"epoch": 0.7836065573770492,
"grad_norm": 10.240988731384277,
"learning_rate": 8.67956630749083e-06,
"loss": 1.9121,
"step": 239
},
{
"epoch": 0.7868852459016393,
"grad_norm": 10.392232894897461,
"learning_rate": 8.667552856429494e-06,
"loss": 1.8809,
"step": 240
},
{
"epoch": 0.7901639344262295,
"grad_norm": 10.917051315307617,
"learning_rate": 8.655493397889503e-06,
"loss": 1.9805,
"step": 241
},
{
"epoch": 0.7934426229508197,
"grad_norm": 10.163880348205566,
"learning_rate": 8.64338808315028e-06,
"loss": 1.8711,
"step": 242
},
{
"epoch": 0.7967213114754098,
"grad_norm": 11.161616325378418,
"learning_rate": 8.631237064066491e-06,
"loss": 1.9102,
"step": 243
},
{
"epoch": 0.8,
"grad_norm": 11.414627075195312,
"learning_rate": 8.61904049306614e-06,
"loss": 1.9727,
"step": 244
},
{
"epoch": 0.8032786885245902,
"grad_norm": 10.282430648803711,
"learning_rate": 8.606798523148648e-06,
"loss": 1.9414,
"step": 245
},
{
"epoch": 0.8065573770491803,
"grad_norm": 12.212276458740234,
"learning_rate": 8.594511307882945e-06,
"loss": 1.8496,
"step": 246
},
{
"epoch": 0.8098360655737705,
"grad_norm": 14.458395957946777,
"learning_rate": 8.582179001405544e-06,
"loss": 1.9414,
"step": 247
},
{
"epoch": 0.8131147540983606,
"grad_norm": 10.135568618774414,
"learning_rate": 8.569801758418596e-06,
"loss": 1.8691,
"step": 248
},
{
"epoch": 0.8163934426229508,
"grad_norm": 14.193105697631836,
"learning_rate": 8.55737973418796e-06,
"loss": 1.834,
"step": 249
},
{
"epoch": 0.819672131147541,
"grad_norm": 10.561907768249512,
"learning_rate": 8.544913084541253e-06,
"loss": 2.043,
"step": 250
},
{
"epoch": 0.8229508196721311,
"grad_norm": 12.753805160522461,
"learning_rate": 8.532401965865889e-06,
"loss": 1.9141,
"step": 251
},
{
"epoch": 0.8262295081967214,
"grad_norm": 11.008598327636719,
"learning_rate": 8.519846535107129e-06,
"loss": 1.9258,
"step": 252
},
{
"epoch": 0.8295081967213115,
"grad_norm": 10.186516761779785,
"learning_rate": 8.507246949766095e-06,
"loss": 1.9062,
"step": 253
},
{
"epoch": 0.8327868852459016,
"grad_norm": 12.843867301940918,
"learning_rate": 8.494603367897817e-06,
"loss": 1.8516,
"step": 254
},
{
"epoch": 0.8360655737704918,
"grad_norm": 11.588765144348145,
"learning_rate": 8.481915948109226e-06,
"loss": 1.9121,
"step": 255
},
{
"epoch": 0.839344262295082,
"grad_norm": 12.169210433959961,
"learning_rate": 8.469184849557188e-06,
"loss": 1.9238,
"step": 256
},
{
"epoch": 0.8426229508196721,
"grad_norm": 17.143991470336914,
"learning_rate": 8.456410231946484e-06,
"loss": 1.8887,
"step": 257
},
{
"epoch": 0.8459016393442623,
"grad_norm": 9.951960563659668,
"learning_rate": 8.443592255527824e-06,
"loss": 1.9355,
"step": 258
},
{
"epoch": 0.8491803278688524,
"grad_norm": 11.594623565673828,
"learning_rate": 8.430731081095832e-06,
"loss": 1.9512,
"step": 259
},
{
"epoch": 0.8524590163934426,
"grad_norm": 10.37341022491455,
"learning_rate": 8.417826869987026e-06,
"loss": 1.9883,
"step": 260
},
{
"epoch": 0.8557377049180328,
"grad_norm": 13.815895080566406,
"learning_rate": 8.404879784077793e-06,
"loss": 1.9746,
"step": 261
},
{
"epoch": 0.8590163934426229,
"grad_norm": 10.756362915039062,
"learning_rate": 8.39188998578237e-06,
"loss": 1.9023,
"step": 262
},
{
"epoch": 0.8622950819672132,
"grad_norm": 11.125969886779785,
"learning_rate": 8.37885763805079e-06,
"loss": 1.873,
"step": 263
},
{
"epoch": 0.8655737704918033,
"grad_norm": 10.486440658569336,
"learning_rate": 8.365782904366845e-06,
"loss": 1.8789,
"step": 264
},
{
"epoch": 0.8688524590163934,
"grad_norm": 10.452739715576172,
"learning_rate": 8.352665948746044e-06,
"loss": 1.9453,
"step": 265
},
{
"epoch": 0.8721311475409836,
"grad_norm": 10.094730377197266,
"learning_rate": 8.33950693573354e-06,
"loss": 1.8516,
"step": 266
},
{
"epoch": 0.8754098360655738,
"grad_norm": 11.559701919555664,
"learning_rate": 8.326306030402076e-06,
"loss": 1.9844,
"step": 267
},
{
"epoch": 0.8786885245901639,
"grad_norm": 11.006271362304688,
"learning_rate": 8.313063398349914e-06,
"loss": 1.8887,
"step": 268
},
{
"epoch": 0.8819672131147541,
"grad_norm": 12.529583930969238,
"learning_rate": 8.29977920569875e-06,
"loss": 1.877,
"step": 269
},
{
"epoch": 0.8852459016393442,
"grad_norm": 10.77204704284668,
"learning_rate": 8.28645361909164e-06,
"loss": 1.998,
"step": 270
},
{
"epoch": 0.8885245901639345,
"grad_norm": 13.109503746032715,
"learning_rate": 8.273086805690903e-06,
"loss": 1.8711,
"step": 271
},
{
"epoch": 0.8918032786885246,
"grad_norm": 15.495031356811523,
"learning_rate": 8.259678933176025e-06,
"loss": 1.916,
"step": 272
},
{
"epoch": 0.8950819672131147,
"grad_norm": 11.081769943237305,
"learning_rate": 8.24623016974156e-06,
"loss": 1.8809,
"step": 273
},
{
"epoch": 0.898360655737705,
"grad_norm": 10.445962905883789,
"learning_rate": 8.232740684095012e-06,
"loss": 1.8301,
"step": 274
},
{
"epoch": 0.9016393442622951,
"grad_norm": 11.152101516723633,
"learning_rate": 8.219210645454727e-06,
"loss": 1.9258,
"step": 275
},
{
"epoch": 0.9049180327868852,
"grad_norm": 10.69294261932373,
"learning_rate": 8.205640223547766e-06,
"loss": 1.8242,
"step": 276
},
{
"epoch": 0.9081967213114754,
"grad_norm": 13.176959991455078,
"learning_rate": 8.192029588607772e-06,
"loss": 1.9453,
"step": 277
},
{
"epoch": 0.9114754098360656,
"grad_norm": 12.394086837768555,
"learning_rate": 8.178378911372846e-06,
"loss": 1.9434,
"step": 278
},
{
"epoch": 0.9147540983606557,
"grad_norm": 13.449362754821777,
"learning_rate": 8.16468836308339e-06,
"loss": 1.9961,
"step": 279
},
{
"epoch": 0.9180327868852459,
"grad_norm": 10.922250747680664,
"learning_rate": 8.150958115479976e-06,
"loss": 1.9531,
"step": 280
},
{
"epoch": 0.921311475409836,
"grad_norm": 14.1903657913208,
"learning_rate": 8.137188340801177e-06,
"loss": 1.9336,
"step": 281
},
{
"epoch": 0.9245901639344263,
"grad_norm": 13.266737937927246,
"learning_rate": 8.12337921178141e-06,
"loss": 1.9688,
"step": 282
},
{
"epoch": 0.9278688524590164,
"grad_norm": 12.7962064743042,
"learning_rate": 8.109530901648776e-06,
"loss": 1.875,
"step": 283
},
{
"epoch": 0.9311475409836065,
"grad_norm": 12.458824157714844,
"learning_rate": 8.095643584122879e-06,
"loss": 1.9141,
"step": 284
},
{
"epoch": 0.9344262295081968,
"grad_norm": 10.26793384552002,
"learning_rate": 8.08171743341265e-06,
"loss": 1.8633,
"step": 285
},
{
"epoch": 0.9377049180327869,
"grad_norm": 12.272446632385254,
"learning_rate": 8.067752624214164e-06,
"loss": 1.8945,
"step": 286
},
{
"epoch": 0.940983606557377,
"grad_norm": 11.974457740783691,
"learning_rate": 8.053749331708444e-06,
"loss": 1.877,
"step": 287
},
{
"epoch": 0.9442622950819672,
"grad_norm": 10.955528259277344,
"learning_rate": 8.039707731559262e-06,
"loss": 1.8867,
"step": 288
},
{
"epoch": 0.9475409836065574,
"grad_norm": 12.32547378540039,
"learning_rate": 8.025627999910946e-06,
"loss": 1.9629,
"step": 289
},
{
"epoch": 0.9508196721311475,
"grad_norm": 12.567078590393066,
"learning_rate": 8.011510313386156e-06,
"loss": 1.877,
"step": 290
},
{
"epoch": 0.9540983606557377,
"grad_norm": 10.757590293884277,
"learning_rate": 7.997354849083682e-06,
"loss": 1.8926,
"step": 291
},
{
"epoch": 0.9573770491803278,
"grad_norm": 12.629182815551758,
"learning_rate": 7.98316178457621e-06,
"loss": 1.9199,
"step": 292
},
{
"epoch": 0.9606557377049181,
"grad_norm": 11.572930335998535,
"learning_rate": 7.968931297908105e-06,
"loss": 1.9492,
"step": 293
},
{
"epoch": 0.9639344262295082,
"grad_norm": 13.813660621643066,
"learning_rate": 7.954663567593169e-06,
"loss": 1.793,
"step": 294
},
{
"epoch": 0.9672131147540983,
"grad_norm": 10.30826473236084,
"learning_rate": 7.940358772612408e-06,
"loss": 1.8809,
"step": 295
},
{
"epoch": 0.9704918032786886,
"grad_norm": 11.040604591369629,
"learning_rate": 7.926017092411786e-06,
"loss": 1.8438,
"step": 296
},
{
"epoch": 0.9737704918032787,
"grad_norm": 11.625853538513184,
"learning_rate": 7.911638706899968e-06,
"loss": 1.8652,
"step": 297
},
{
"epoch": 0.9770491803278688,
"grad_norm": 12.791728973388672,
"learning_rate": 7.89722379644607e-06,
"loss": 1.8555,
"step": 298
},
{
"epoch": 0.980327868852459,
"grad_norm": 15.870569229125977,
"learning_rate": 7.882772541877392e-06,
"loss": 1.9043,
"step": 299
},
{
"epoch": 0.9836065573770492,
"grad_norm": 11.703765869140625,
"learning_rate": 7.868285124477156e-06,
"loss": 1.8613,
"step": 300
},
{
"epoch": 0.9868852459016394,
"grad_norm": 11.708532333374023,
"learning_rate": 7.853761725982221e-06,
"loss": 1.8574,
"step": 301
},
{
"epoch": 0.9901639344262295,
"grad_norm": 10.677489280700684,
"learning_rate": 7.839202528580812e-06,
"loss": 1.9004,
"step": 302
},
{
"epoch": 0.9934426229508196,
"grad_norm": 11.307282447814941,
"learning_rate": 7.824607714910233e-06,
"loss": 1.7969,
"step": 303
},
{
"epoch": 0.9967213114754099,
"grad_norm": 14.72847843170166,
"learning_rate": 7.809977468054574e-06,
"loss": 1.8672,
"step": 304
},
{
"epoch": 1.0,
"grad_norm": 14.625303268432617,
"learning_rate": 7.795311971542418e-06,
"loss": 1.8945,
"step": 305
},
{
"epoch": 1.0032786885245901,
"grad_norm": 10.49122142791748,
"learning_rate": 7.78061140934453e-06,
"loss": 1.709,
"step": 306
},
{
"epoch": 1.0065573770491802,
"grad_norm": 11.381379127502441,
"learning_rate": 7.765875965871561e-06,
"loss": 1.6719,
"step": 307
},
{
"epoch": 1.0098360655737706,
"grad_norm": 13.590991973876953,
"learning_rate": 7.751105825971727e-06,
"loss": 1.7441,
"step": 308
},
{
"epoch": 1.0131147540983607,
"grad_norm": 10.184123992919922,
"learning_rate": 7.736301174928489e-06,
"loss": 1.7246,
"step": 309
},
{
"epoch": 1.0163934426229508,
"grad_norm": 11.995527267456055,
"learning_rate": 7.721462198458235e-06,
"loss": 1.6797,
"step": 310
},
{
"epoch": 1.019672131147541,
"grad_norm": 17.523670196533203,
"learning_rate": 7.706589082707951e-06,
"loss": 1.6953,
"step": 311
},
{
"epoch": 1.022950819672131,
"grad_norm": 11.37983226776123,
"learning_rate": 7.69168201425287e-06,
"loss": 1.6387,
"step": 312
},
{
"epoch": 1.0262295081967212,
"grad_norm": 16.715530395507812,
"learning_rate": 7.676741180094156e-06,
"loss": 1.6797,
"step": 313
},
{
"epoch": 1.0295081967213116,
"grad_norm": 13.554855346679688,
"learning_rate": 7.661766767656538e-06,
"loss": 1.6738,
"step": 314
},
{
"epoch": 1.0327868852459017,
"grad_norm": 20.694581985473633,
"learning_rate": 7.64675896478597e-06,
"loss": 1.6992,
"step": 315
},
{
"epoch": 1.0360655737704918,
"grad_norm": 16.622581481933594,
"learning_rate": 7.631717959747269e-06,
"loss": 1.7305,
"step": 316
},
{
"epoch": 1.039344262295082,
"grad_norm": 19.643783569335938,
"learning_rate": 7.616643941221755e-06,
"loss": 1.752,
"step": 317
},
{
"epoch": 1.042622950819672,
"grad_norm": 14.100711822509766,
"learning_rate": 7.6015370983048875e-06,
"loss": 1.7285,
"step": 318
},
{
"epoch": 1.0459016393442624,
"grad_norm": 19.153236389160156,
"learning_rate": 7.586397620503887e-06,
"loss": 1.6953,
"step": 319
},
{
"epoch": 1.0491803278688525,
"grad_norm": 11.576682090759277,
"learning_rate": 7.571225697735365e-06,
"loss": 1.668,
"step": 320
},
{
"epoch": 1.0524590163934426,
"grad_norm": 11.654809951782227,
"learning_rate": 7.556021520322932e-06,
"loss": 1.7539,
"step": 321
},
{
"epoch": 1.0557377049180328,
"grad_norm": 14.915346145629883,
"learning_rate": 7.54078527899482e-06,
"loss": 1.6797,
"step": 322
},
{
"epoch": 1.0590163934426229,
"grad_norm": 16.05927276611328,
"learning_rate": 7.525517164881486e-06,
"loss": 1.5918,
"step": 323
},
{
"epoch": 1.0622950819672132,
"grad_norm": 12.948592185974121,
"learning_rate": 7.51021736951321e-06,
"loss": 1.5879,
"step": 324
},
{
"epoch": 1.0655737704918034,
"grad_norm": 14.317193031311035,
"learning_rate": 7.4948860848176984e-06,
"loss": 1.709,
"step": 325
},
{
"epoch": 1.0688524590163935,
"grad_norm": 14.903904914855957,
"learning_rate": 7.479523503117676e-06,
"loss": 1.6738,
"step": 326
},
{
"epoch": 1.0721311475409836,
"grad_norm": 19.48590087890625,
"learning_rate": 7.4641298171284715e-06,
"loss": 1.6777,
"step": 327
},
{
"epoch": 1.0754098360655737,
"grad_norm": 12.375781059265137,
"learning_rate": 7.448705219955596e-06,
"loss": 1.6719,
"step": 328
},
{
"epoch": 1.0786885245901638,
"grad_norm": 13.3538179397583,
"learning_rate": 7.433249905092331e-06,
"loss": 1.707,
"step": 329
},
{
"epoch": 1.0819672131147542,
"grad_norm": 14.281739234924316,
"learning_rate": 7.417764066417289e-06,
"loss": 1.7148,
"step": 330
},
{
"epoch": 1.0852459016393443,
"grad_norm": 12.570676803588867,
"learning_rate": 7.4022478981919884e-06,
"loss": 1.6445,
"step": 331
},
{
"epoch": 1.0885245901639344,
"grad_norm": 14.846644401550293,
"learning_rate": 7.386701595058422e-06,
"loss": 1.6309,
"step": 332
},
{
"epoch": 1.0918032786885246,
"grad_norm": 12.106386184692383,
"learning_rate": 7.3711253520365995e-06,
"loss": 1.6641,
"step": 333
},
{
"epoch": 1.0950819672131147,
"grad_norm": 14.234429359436035,
"learning_rate": 7.355519364522117e-06,
"loss": 1.6738,
"step": 334
},
{
"epoch": 1.098360655737705,
"grad_norm": 13.406672477722168,
"learning_rate": 7.339883828283696e-06,
"loss": 1.6035,
"step": 335
},
{
"epoch": 1.1016393442622952,
"grad_norm": 12.718591690063477,
"learning_rate": 7.324218939460733e-06,
"loss": 1.6172,
"step": 336
},
{
"epoch": 1.1049180327868853,
"grad_norm": 12.047405242919922,
"learning_rate": 7.308524894560835e-06,
"loss": 1.6328,
"step": 337
},
{
"epoch": 1.1081967213114754,
"grad_norm": 10.482436180114746,
"learning_rate": 7.292801890457357e-06,
"loss": 1.5742,
"step": 338
},
{
"epoch": 1.1114754098360655,
"grad_norm": 13.475817680358887,
"learning_rate": 7.2770501243869315e-06,
"loss": 1.7402,
"step": 339
},
{
"epoch": 1.1147540983606556,
"grad_norm": 12.393983840942383,
"learning_rate": 7.261269793946994e-06,
"loss": 1.6562,
"step": 340
},
{
"epoch": 1.118032786885246,
"grad_norm": 12.331396102905273,
"learning_rate": 7.245461097093307e-06,
"loss": 1.6758,
"step": 341
},
{
"epoch": 1.1213114754098361,
"grad_norm": 14.81454086303711,
"learning_rate": 7.229624232137472e-06,
"loss": 1.6152,
"step": 342
},
{
"epoch": 1.1245901639344262,
"grad_norm": 15.760224342346191,
"learning_rate": 7.213759397744446e-06,
"loss": 1.6465,
"step": 343
},
{
"epoch": 1.1278688524590164,
"grad_norm": 12.658391952514648,
"learning_rate": 7.197866792930044e-06,
"loss": 1.6309,
"step": 344
},
{
"epoch": 1.1311475409836065,
"grad_norm": 13.215618133544922,
"learning_rate": 7.1819466170584485e-06,
"loss": 1.582,
"step": 345
},
{
"epoch": 1.1344262295081968,
"grad_norm": 17.812692642211914,
"learning_rate": 7.165999069839708e-06,
"loss": 1.6035,
"step": 346
},
{
"epoch": 1.137704918032787,
"grad_norm": 12.584436416625977,
"learning_rate": 7.1500243513272275e-06,
"loss": 1.6875,
"step": 347
},
{
"epoch": 1.140983606557377,
"grad_norm": 18.275434494018555,
"learning_rate": 7.1340226619152595e-06,
"loss": 1.6914,
"step": 348
},
{
"epoch": 1.1442622950819672,
"grad_norm": 13.600272178649902,
"learning_rate": 7.117994202336398e-06,
"loss": 1.6367,
"step": 349
},
{
"epoch": 1.1475409836065573,
"grad_norm": 16.630714416503906,
"learning_rate": 7.101939173659045e-06,
"loss": 1.6191,
"step": 350
},
{
"epoch": 1.1508196721311474,
"grad_norm": 15.959205627441406,
"learning_rate": 7.085857777284908e-06,
"loss": 1.7305,
"step": 351
},
{
"epoch": 1.1540983606557378,
"grad_norm": 13.133319854736328,
"learning_rate": 7.069750214946458e-06,
"loss": 1.6641,
"step": 352
},
{
"epoch": 1.157377049180328,
"grad_norm": 20.936429977416992,
"learning_rate": 7.053616688704399e-06,
"loss": 1.6738,
"step": 353
},
{
"epoch": 1.160655737704918,
"grad_norm": 12.605098724365234,
"learning_rate": 7.037457400945147e-06,
"loss": 1.6953,
"step": 354
},
{
"epoch": 1.1639344262295082,
"grad_norm": 14.746296882629395,
"learning_rate": 7.021272554378279e-06,
"loss": 1.75,
"step": 355
},
{
"epoch": 1.1672131147540983,
"grad_norm": 14.727060317993164,
"learning_rate": 7.005062352033991e-06,
"loss": 1.6406,
"step": 356
},
{
"epoch": 1.1704918032786886,
"grad_norm": 11.865753173828125,
"learning_rate": 6.988826997260556e-06,
"loss": 1.584,
"step": 357
},
{
"epoch": 1.1737704918032787,
"grad_norm": 12.896044731140137,
"learning_rate": 6.972566693721772e-06,
"loss": 1.6797,
"step": 358
},
{
"epoch": 1.1770491803278689,
"grad_norm": 12.144120216369629,
"learning_rate": 6.956281645394404e-06,
"loss": 1.6055,
"step": 359
},
{
"epoch": 1.180327868852459,
"grad_norm": 12.519413948059082,
"learning_rate": 6.939972056565626e-06,
"loss": 1.6895,
"step": 360
},
{
"epoch": 1.1836065573770491,
"grad_norm": 14.610289573669434,
"learning_rate": 6.923638131830461e-06,
"loss": 1.6582,
"step": 361
},
{
"epoch": 1.1868852459016392,
"grad_norm": 11.816085815429688,
"learning_rate": 6.907280076089213e-06,
"loss": 1.5996,
"step": 362
},
{
"epoch": 1.1901639344262296,
"grad_norm": 13.615991592407227,
"learning_rate": 6.890898094544894e-06,
"loss": 1.6504,
"step": 363
},
{
"epoch": 1.1934426229508197,
"grad_norm": 16.203214645385742,
"learning_rate": 6.874492392700659e-06,
"loss": 1.6758,
"step": 364
},
{
"epoch": 1.1967213114754098,
"grad_norm": 12.530990600585938,
"learning_rate": 6.858063176357214e-06,
"loss": 1.6738,
"step": 365
},
{
"epoch": 1.2,
"grad_norm": 13.96877384185791,
"learning_rate": 6.841610651610243e-06,
"loss": 1.6953,
"step": 366
},
{
"epoch": 1.20327868852459,
"grad_norm": 13.824159622192383,
"learning_rate": 6.825135024847825e-06,
"loss": 1.6543,
"step": 367
},
{
"epoch": 1.2065573770491804,
"grad_norm": 12.192745208740234,
"learning_rate": 6.808636502747841e-06,
"loss": 1.6445,
"step": 368
},
{
"epoch": 1.2098360655737705,
"grad_norm": 12.605790138244629,
"learning_rate": 6.792115292275374e-06,
"loss": 1.668,
"step": 369
},
{
"epoch": 1.2131147540983607,
"grad_norm": 15.496692657470703,
"learning_rate": 6.775571600680133e-06,
"loss": 1.6426,
"step": 370
},
{
"epoch": 1.2163934426229508,
"grad_norm": 13.692412376403809,
"learning_rate": 6.759005635493831e-06,
"loss": 1.7207,
"step": 371
},
{
"epoch": 1.219672131147541,
"grad_norm": 17.969615936279297,
"learning_rate": 6.742417604527594e-06,
"loss": 1.5938,
"step": 372
},
{
"epoch": 1.222950819672131,
"grad_norm": 18.373065948486328,
"learning_rate": 6.725807715869352e-06,
"loss": 1.6875,
"step": 373
},
{
"epoch": 1.2262295081967214,
"grad_norm": 13.26612663269043,
"learning_rate": 6.709176177881228e-06,
"loss": 1.6602,
"step": 374
},
{
"epoch": 1.2295081967213115,
"grad_norm": 21.26793098449707,
"learning_rate": 6.692523199196926e-06,
"loss": 1.7227,
"step": 375
},
{
"epoch": 1.2327868852459016,
"grad_norm": 12.17033576965332,
"learning_rate": 6.675848988719102e-06,
"loss": 1.6465,
"step": 376
},
{
"epoch": 1.2360655737704918,
"grad_norm": 21.347976684570312,
"learning_rate": 6.659153755616771e-06,
"loss": 1.6289,
"step": 377
},
{
"epoch": 1.2393442622950819,
"grad_norm": 19.131080627441406,
"learning_rate": 6.64243770932265e-06,
"loss": 1.6406,
"step": 378
},
{
"epoch": 1.2426229508196722,
"grad_norm": 15.474773406982422,
"learning_rate": 6.625701059530552e-06,
"loss": 1.668,
"step": 379
},
{
"epoch": 1.2459016393442623,
"grad_norm": 12.6391019821167,
"learning_rate": 6.608944016192754e-06,
"loss": 1.623,
"step": 380
},
{
"epoch": 1.2491803278688525,
"grad_norm": 12.770636558532715,
"learning_rate": 6.5921667895173515e-06,
"loss": 1.6523,
"step": 381
},
{
"epoch": 1.2524590163934426,
"grad_norm": 18.565126419067383,
"learning_rate": 6.575369589965632e-06,
"loss": 1.6641,
"step": 382
},
{
"epoch": 1.2557377049180327,
"grad_norm": 13.343095779418945,
"learning_rate": 6.558552628249435e-06,
"loss": 1.6426,
"step": 383
},
{
"epoch": 1.2590163934426228,
"grad_norm": 12.588839530944824,
"learning_rate": 6.541716115328503e-06,
"loss": 1.6172,
"step": 384
},
{
"epoch": 1.2622950819672132,
"grad_norm": 13.01188850402832,
"learning_rate": 6.524860262407835e-06,
"loss": 1.5957,
"step": 385
},
{
"epoch": 1.2655737704918033,
"grad_norm": 11.886181831359863,
"learning_rate": 6.507985280935047e-06,
"loss": 1.6445,
"step": 386
},
{
"epoch": 1.2688524590163934,
"grad_norm": 18.22747039794922,
"learning_rate": 6.491091382597705e-06,
"loss": 1.5781,
"step": 387
},
{
"epoch": 1.2721311475409836,
"grad_norm": 11.928472518920898,
"learning_rate": 6.4741787793206824e-06,
"loss": 1.5645,
"step": 388
},
{
"epoch": 1.275409836065574,
"grad_norm": 15.313958168029785,
"learning_rate": 6.457247683263494e-06,
"loss": 1.6289,
"step": 389
},
{
"epoch": 1.278688524590164,
"grad_norm": 14.239816665649414,
"learning_rate": 6.440298306817637e-06,
"loss": 1.6094,
"step": 390
},
{
"epoch": 1.2819672131147541,
"grad_norm": 21.43814468383789,
"learning_rate": 6.423330862603924e-06,
"loss": 1.6113,
"step": 391
},
{
"epoch": 1.2852459016393443,
"grad_norm": 14.304780006408691,
"learning_rate": 6.406345563469819e-06,
"loss": 1.625,
"step": 392
},
{
"epoch": 1.2885245901639344,
"grad_norm": 15.105829238891602,
"learning_rate": 6.38934262248677e-06,
"loss": 1.5078,
"step": 393
},
{
"epoch": 1.2918032786885245,
"grad_norm": 13.64173412322998,
"learning_rate": 6.372322252947529e-06,
"loss": 1.6094,
"step": 394
},
{
"epoch": 1.2950819672131146,
"grad_norm": 12.446194648742676,
"learning_rate": 6.355284668363479e-06,
"loss": 1.6172,
"step": 395
},
{
"epoch": 1.298360655737705,
"grad_norm": 12.818573951721191,
"learning_rate": 6.3382300824619584e-06,
"loss": 1.623,
"step": 396
},
{
"epoch": 1.301639344262295,
"grad_norm": 16.565000534057617,
"learning_rate": 6.321158709183579e-06,
"loss": 1.6016,
"step": 397
},
{
"epoch": 1.3049180327868852,
"grad_norm": 12.29269027709961,
"learning_rate": 6.30407076267954e-06,
"loss": 1.666,
"step": 398
},
{
"epoch": 1.3081967213114754,
"grad_norm": 13.712056159973145,
"learning_rate": 6.286966457308941e-06,
"loss": 1.6289,
"step": 399
},
{
"epoch": 1.3114754098360657,
"grad_norm": 12.996943473815918,
"learning_rate": 6.2698460076360966e-06,
"loss": 1.6035,
"step": 400
},
{
"epoch": 1.3147540983606558,
"grad_norm": 12.406416893005371,
"learning_rate": 6.252709628427842e-06,
"loss": 1.6953,
"step": 401
},
{
"epoch": 1.318032786885246,
"grad_norm": 14.069875717163086,
"learning_rate": 6.235557534650844e-06,
"loss": 1.6367,
"step": 402
},
{
"epoch": 1.321311475409836,
"grad_norm": 12.035994529724121,
"learning_rate": 6.218389941468894e-06,
"loss": 1.7168,
"step": 403
},
{
"epoch": 1.3245901639344262,
"grad_norm": 13.915684700012207,
"learning_rate": 6.20120706424022e-06,
"loss": 1.6953,
"step": 404
},
{
"epoch": 1.3278688524590163,
"grad_norm": 13.748454093933105,
"learning_rate": 6.184009118514776e-06,
"loss": 1.7012,
"step": 405
},
{
"epoch": 1.3311475409836064,
"grad_norm": 12.660347938537598,
"learning_rate": 6.166796320031545e-06,
"loss": 1.5762,
"step": 406
},
{
"epoch": 1.3344262295081968,
"grad_norm": 13.897893905639648,
"learning_rate": 6.149568884715831e-06,
"loss": 1.6816,
"step": 407
},
{
"epoch": 1.337704918032787,
"grad_norm": 15.359755516052246,
"learning_rate": 6.132327028676544e-06,
"loss": 1.5742,
"step": 408
},
{
"epoch": 1.340983606557377,
"grad_norm": 15.530616760253906,
"learning_rate": 6.1150709682034995e-06,
"loss": 1.6094,
"step": 409
},
{
"epoch": 1.3442622950819672,
"grad_norm": 13.77632999420166,
"learning_rate": 6.097800919764698e-06,
"loss": 1.6895,
"step": 410
},
{
"epoch": 1.3475409836065575,
"grad_norm": 14.026497840881348,
"learning_rate": 6.080517100003611e-06,
"loss": 1.6523,
"step": 411
},
{
"epoch": 1.3508196721311476,
"grad_norm": 11.663541793823242,
"learning_rate": 6.063219725736468e-06,
"loss": 1.6328,
"step": 412
},
{
"epoch": 1.3540983606557377,
"grad_norm": 13.371345520019531,
"learning_rate": 6.0459090139495235e-06,
"loss": 1.7012,
"step": 413
},
{
"epoch": 1.3573770491803279,
"grad_norm": 14.87950325012207,
"learning_rate": 6.028585181796351e-06,
"loss": 1.6211,
"step": 414
},
{
"epoch": 1.360655737704918,
"grad_norm": 13.860067367553711,
"learning_rate": 6.011248446595113e-06,
"loss": 1.6816,
"step": 415
},
{
"epoch": 1.3639344262295081,
"grad_norm": 20.906126022338867,
"learning_rate": 5.99389902582583e-06,
"loss": 1.6582,
"step": 416
},
{
"epoch": 1.3672131147540982,
"grad_norm": 13.586402893066406,
"learning_rate": 5.9765371371276544e-06,
"loss": 1.625,
"step": 417
},
{
"epoch": 1.3704918032786886,
"grad_norm": 19.022512435913086,
"learning_rate": 5.959162998296149e-06,
"loss": 1.6875,
"step": 418
},
{
"epoch": 1.3737704918032787,
"grad_norm": 15.78264045715332,
"learning_rate": 5.941776827280544e-06,
"loss": 1.5957,
"step": 419
},
{
"epoch": 1.3770491803278688,
"grad_norm": 13.40628433227539,
"learning_rate": 5.924378842181002e-06,
"loss": 1.6582,
"step": 420
},
{
"epoch": 1.380327868852459,
"grad_norm": 16.419567108154297,
"learning_rate": 5.906969261245896e-06,
"loss": 1.6055,
"step": 421
},
{
"epoch": 1.3836065573770493,
"grad_norm": 14.798409461975098,
"learning_rate": 5.889548302869057e-06,
"loss": 1.5508,
"step": 422
},
{
"epoch": 1.3868852459016394,
"grad_norm": 11.23767375946045,
"learning_rate": 5.87211618558704e-06,
"loss": 1.5664,
"step": 423
},
{
"epoch": 1.3901639344262295,
"grad_norm": 14.773558616638184,
"learning_rate": 5.8546731280763846e-06,
"loss": 1.6973,
"step": 424
},
{
"epoch": 1.3934426229508197,
"grad_norm": 12.42011833190918,
"learning_rate": 5.83721934915087e-06,
"loss": 1.6016,
"step": 425
},
{
"epoch": 1.3967213114754098,
"grad_norm": 13.847085952758789,
"learning_rate": 5.819755067758767e-06,
"loss": 1.7383,
"step": 426
},
{
"epoch": 1.4,
"grad_norm": 14.425787925720215,
"learning_rate": 5.802280502980095e-06,
"loss": 1.6406,
"step": 427
},
{
"epoch": 1.40327868852459,
"grad_norm": 17.782608032226562,
"learning_rate": 5.784795874023876e-06,
"loss": 1.6113,
"step": 428
},
{
"epoch": 1.4065573770491804,
"grad_norm": 12.806195259094238,
"learning_rate": 5.767301400225378e-06,
"loss": 1.6465,
"step": 429
},
{
"epoch": 1.4098360655737705,
"grad_norm": 12.222467422485352,
"learning_rate": 5.749797301043366e-06,
"loss": 1.6016,
"step": 430
},
{
"epoch": 1.4131147540983606,
"grad_norm": 23.587997436523438,
"learning_rate": 5.732283796057356e-06,
"loss": 1.6426,
"step": 431
},
{
"epoch": 1.4163934426229507,
"grad_norm": 15.535636901855469,
"learning_rate": 5.714761104964852e-06,
"loss": 1.6348,
"step": 432
},
{
"epoch": 1.419672131147541,
"grad_norm": 24.00181770324707,
"learning_rate": 5.69722944757859e-06,
"loss": 1.6445,
"step": 433
},
{
"epoch": 1.4229508196721312,
"grad_norm": 22.185400009155273,
"learning_rate": 5.679689043823787e-06,
"loss": 1.6016,
"step": 434
},
{
"epoch": 1.4262295081967213,
"grad_norm": 15.497023582458496,
"learning_rate": 5.6621401137353784e-06,
"loss": 1.6172,
"step": 435
},
{
"epoch": 1.4295081967213115,
"grad_norm": 19.018062591552734,
"learning_rate": 5.644582877455254e-06,
"loss": 1.5918,
"step": 436
},
{
"epoch": 1.4327868852459016,
"grad_norm": 16.216062545776367,
"learning_rate": 5.6270175552295035e-06,
"loss": 1.6445,
"step": 437
},
{
"epoch": 1.4360655737704917,
"grad_norm": 17.771896362304688,
"learning_rate": 5.609444367405652e-06,
"loss": 1.6133,
"step": 438
},
{
"epoch": 1.4393442622950818,
"grad_norm": 15.940698623657227,
"learning_rate": 5.5918635344298885e-06,
"loss": 1.6914,
"step": 439
},
{
"epoch": 1.4426229508196722,
"grad_norm": 13.144735336303711,
"learning_rate": 5.574275276844315e-06,
"loss": 1.6055,
"step": 440
},
{
"epoch": 1.4459016393442623,
"grad_norm": 22.014144897460938,
"learning_rate": 5.556679815284162e-06,
"loss": 1.5879,
"step": 441
},
{
"epoch": 1.4491803278688524,
"grad_norm": 15.573799133300781,
"learning_rate": 5.5390773704750355e-06,
"loss": 1.6191,
"step": 442
},
{
"epoch": 1.4524590163934425,
"grad_norm": 14.217083930969238,
"learning_rate": 5.5214681632301425e-06,
"loss": 1.5469,
"step": 443
},
{
"epoch": 1.455737704918033,
"grad_norm": 15.9507417678833,
"learning_rate": 5.503852414447519e-06,
"loss": 1.6602,
"step": 444
},
{
"epoch": 1.459016393442623,
"grad_norm": 12.370546340942383,
"learning_rate": 5.486230345107262e-06,
"loss": 1.6348,
"step": 445
},
{
"epoch": 1.4622950819672131,
"grad_norm": 18.62546730041504,
"learning_rate": 5.468602176268756e-06,
"loss": 1.5859,
"step": 446
},
{
"epoch": 1.4655737704918033,
"grad_norm": 13.223871231079102,
"learning_rate": 5.4509681290679014e-06,
"loss": 1.5605,
"step": 447
},
{
"epoch": 1.4688524590163934,
"grad_norm": 14.363565444946289,
"learning_rate": 5.43332842471434e-06,
"loss": 1.6504,
"step": 448
},
{
"epoch": 1.4721311475409835,
"grad_norm": 13.501849174499512,
"learning_rate": 5.4156832844886775e-06,
"loss": 1.6016,
"step": 449
},
{
"epoch": 1.4754098360655736,
"grad_norm": 13.610998153686523,
"learning_rate": 5.398032929739712e-06,
"loss": 1.6133,
"step": 450
},
{
"epoch": 1.478688524590164,
"grad_norm": 18.596263885498047,
"learning_rate": 5.380377581881653e-06,
"loss": 1.5664,
"step": 451
},
{
"epoch": 1.481967213114754,
"grad_norm": 15.4996919631958,
"learning_rate": 5.362717462391345e-06,
"loss": 1.5176,
"step": 452
},
{
"epoch": 1.4852459016393442,
"grad_norm": 11.770246505737305,
"learning_rate": 5.345052792805494e-06,
"loss": 1.4805,
"step": 453
},
{
"epoch": 1.4885245901639343,
"grad_norm": 18.300512313842773,
"learning_rate": 5.327383794717881e-06,
"loss": 1.6074,
"step": 454
},
{
"epoch": 1.4918032786885247,
"grad_norm": 18.962173461914062,
"learning_rate": 5.309710689776587e-06,
"loss": 1.5898,
"step": 455
},
{
"epoch": 1.4950819672131148,
"grad_norm": 12.80952262878418,
"learning_rate": 5.2920336996812125e-06,
"loss": 1.5762,
"step": 456
},
{
"epoch": 1.498360655737705,
"grad_norm": 22.957740783691406,
"learning_rate": 5.274353046180093e-06,
"loss": 1.5918,
"step": 457
},
{
"epoch": 1.501639344262295,
"grad_norm": 13.40424633026123,
"learning_rate": 5.256668951067521e-06,
"loss": 1.6406,
"step": 458
},
{
"epoch": 1.5049180327868852,
"grad_norm": 13.17849349975586,
"learning_rate": 5.23898163618096e-06,
"loss": 1.7012,
"step": 459
},
{
"epoch": 1.5081967213114753,
"grad_norm": 17.233837127685547,
"learning_rate": 5.221291323398267e-06,
"loss": 1.6543,
"step": 460
},
{
"epoch": 1.5114754098360654,
"grad_norm": 12.22879695892334,
"learning_rate": 5.203598234634903e-06,
"loss": 1.6133,
"step": 461
},
{
"epoch": 1.5147540983606558,
"grad_norm": 22.00145721435547,
"learning_rate": 5.185902591841152e-06,
"loss": 1.6191,
"step": 462
},
{
"epoch": 1.518032786885246,
"grad_norm": 15.629786491394043,
"learning_rate": 5.168204616999342e-06,
"loss": 1.5605,
"step": 463
},
{
"epoch": 1.521311475409836,
"grad_norm": 14.949792861938477,
"learning_rate": 5.15050453212105e-06,
"loss": 1.6094,
"step": 464
},
{
"epoch": 1.5245901639344264,
"grad_norm": 18.31121826171875,
"learning_rate": 5.132802559244326e-06,
"loss": 1.5098,
"step": 465
},
{
"epoch": 1.5278688524590165,
"grad_norm": 12.61305046081543,
"learning_rate": 5.115098920430897e-06,
"loss": 1.5137,
"step": 466
},
{
"epoch": 1.5311475409836066,
"grad_norm": 15.578408241271973,
"learning_rate": 5.097393837763398e-06,
"loss": 1.6055,
"step": 467
},
{
"epoch": 1.5344262295081967,
"grad_norm": 18.71062469482422,
"learning_rate": 5.079687533342569e-06,
"loss": 1.6172,
"step": 468
},
{
"epoch": 1.5377049180327869,
"grad_norm": 13.096431732177734,
"learning_rate": 5.061980229284481e-06,
"loss": 1.6094,
"step": 469
},
{
"epoch": 1.540983606557377,
"grad_norm": 18.373483657836914,
"learning_rate": 5.04427214771774e-06,
"loss": 1.6484,
"step": 470
},
{
"epoch": 1.544262295081967,
"grad_norm": 13.364453315734863,
"learning_rate": 5.02656351078071e-06,
"loss": 1.6035,
"step": 471
},
{
"epoch": 1.5475409836065572,
"grad_norm": 17.02009391784668,
"learning_rate": 5.008854540618721e-06,
"loss": 1.6191,
"step": 472
},
{
"epoch": 1.5508196721311476,
"grad_norm": 18.287324905395508,
"learning_rate": 4.991145459381279e-06,
"loss": 1.6445,
"step": 473
},
{
"epoch": 1.5540983606557377,
"grad_norm": 13.834267616271973,
"learning_rate": 4.97343648921929e-06,
"loss": 1.6582,
"step": 474
},
{
"epoch": 1.5573770491803278,
"grad_norm": 15.57867431640625,
"learning_rate": 4.955727852282261e-06,
"loss": 1.6699,
"step": 475
},
{
"epoch": 1.5606557377049182,
"grad_norm": 15.531145095825195,
"learning_rate": 4.938019770715521e-06,
"loss": 1.4805,
"step": 476
},
{
"epoch": 1.5639344262295083,
"grad_norm": 20.70842742919922,
"learning_rate": 4.9203124666574325e-06,
"loss": 1.5957,
"step": 477
},
{
"epoch": 1.5672131147540984,
"grad_norm": 16.70767593383789,
"learning_rate": 4.902606162236605e-06,
"loss": 1.5566,
"step": 478
},
{
"epoch": 1.5704918032786885,
"grad_norm": 12.975936889648438,
"learning_rate": 4.8849010795691035e-06,
"loss": 1.6016,
"step": 479
},
{
"epoch": 1.5737704918032787,
"grad_norm": 16.035783767700195,
"learning_rate": 4.867197440755677e-06,
"loss": 1.5273,
"step": 480
},
{
"epoch": 1.5770491803278688,
"grad_norm": 25.241321563720703,
"learning_rate": 4.849495467878951e-06,
"loss": 1.6172,
"step": 481
},
{
"epoch": 1.580327868852459,
"grad_norm": 13.46953296661377,
"learning_rate": 4.831795383000659e-06,
"loss": 1.5547,
"step": 482
},
{
"epoch": 1.583606557377049,
"grad_norm": 24.528270721435547,
"learning_rate": 4.81409740815885e-06,
"loss": 1.5859,
"step": 483
},
{
"epoch": 1.5868852459016394,
"grad_norm": 24.17436981201172,
"learning_rate": 4.796401765365101e-06,
"loss": 1.5664,
"step": 484
},
{
"epoch": 1.5901639344262295,
"grad_norm": 13.636819839477539,
"learning_rate": 4.778708676601736e-06,
"loss": 1.6641,
"step": 485
},
{
"epoch": 1.5934426229508196,
"grad_norm": 17.118165969848633,
"learning_rate": 4.761018363819041e-06,
"loss": 1.6348,
"step": 486
},
{
"epoch": 1.59672131147541,
"grad_norm": 21.050155639648438,
"learning_rate": 4.743331048932481e-06,
"loss": 1.5957,
"step": 487
},
{
"epoch": 1.6,
"grad_norm": 14.285764694213867,
"learning_rate": 4.725646953819908e-06,
"loss": 1.6621,
"step": 488
},
{
"epoch": 1.6032786885245902,
"grad_norm": 19.979021072387695,
"learning_rate": 4.707966300318788e-06,
"loss": 1.6328,
"step": 489
},
{
"epoch": 1.6065573770491803,
"grad_norm": 24.40378189086914,
"learning_rate": 4.6902893102234145e-06,
"loss": 1.6113,
"step": 490
},
{
"epoch": 1.6098360655737705,
"grad_norm": 17.49124526977539,
"learning_rate": 4.6726162052821214e-06,
"loss": 1.6816,
"step": 491
},
{
"epoch": 1.6131147540983606,
"grad_norm": 18.845081329345703,
"learning_rate": 4.654947207194507e-06,
"loss": 1.6562,
"step": 492
},
{
"epoch": 1.6163934426229507,
"grad_norm": 24.073442459106445,
"learning_rate": 4.637282537608656e-06,
"loss": 1.6934,
"step": 493
},
{
"epoch": 1.6196721311475408,
"grad_norm": 13.014028549194336,
"learning_rate": 4.619622418118349e-06,
"loss": 1.5996,
"step": 494
},
{
"epoch": 1.6229508196721312,
"grad_norm": 15.70720100402832,
"learning_rate": 4.601967070260289e-06,
"loss": 1.5918,
"step": 495
},
{
"epoch": 1.6262295081967213,
"grad_norm": 20.249507904052734,
"learning_rate": 4.584316715511324e-06,
"loss": 1.6152,
"step": 496
},
{
"epoch": 1.6295081967213116,
"grad_norm": 13.594206809997559,
"learning_rate": 4.5666715752856624e-06,
"loss": 1.582,
"step": 497
},
{
"epoch": 1.6327868852459018,
"grad_norm": 14.592798233032227,
"learning_rate": 4.549031870932101e-06,
"loss": 1.5449,
"step": 498
},
{
"epoch": 1.6360655737704919,
"grad_norm": 16.72430419921875,
"learning_rate": 4.531397823731245e-06,
"loss": 1.5566,
"step": 499
},
{
"epoch": 1.639344262295082,
"grad_norm": 13.189786911010742,
"learning_rate": 4.51376965489274e-06,
"loss": 1.5977,
"step": 500
},
{
"epoch": 1.6426229508196721,
"grad_norm": 17.594057083129883,
"learning_rate": 4.4961475855524825e-06,
"loss": 1.6211,
"step": 501
},
{
"epoch": 1.6459016393442623,
"grad_norm": 14.489039421081543,
"learning_rate": 4.478531836769859e-06,
"loss": 1.5957,
"step": 502
},
{
"epoch": 1.6491803278688524,
"grad_norm": 13.191361427307129,
"learning_rate": 4.460922629524966e-06,
"loss": 1.5879,
"step": 503
},
{
"epoch": 1.6524590163934425,
"grad_norm": 17.483108520507812,
"learning_rate": 4.44332018471584e-06,
"loss": 1.5723,
"step": 504
},
{
"epoch": 1.6557377049180326,
"grad_norm": 14.9766206741333,
"learning_rate": 4.425724723155686e-06,
"loss": 1.6211,
"step": 505
},
{
"epoch": 1.659016393442623,
"grad_norm": 18.884735107421875,
"learning_rate": 4.408136465570112e-06,
"loss": 1.5801,
"step": 506
},
{
"epoch": 1.662295081967213,
"grad_norm": 18.028215408325195,
"learning_rate": 4.39055563259435e-06,
"loss": 1.6094,
"step": 507
},
{
"epoch": 1.6655737704918034,
"grad_norm": 12.109418869018555,
"learning_rate": 4.372982444770498e-06,
"loss": 1.6016,
"step": 508
},
{
"epoch": 1.6688524590163936,
"grad_norm": 15.456534385681152,
"learning_rate": 4.355417122544748e-06,
"loss": 1.5996,
"step": 509
},
{
"epoch": 1.6721311475409837,
"grad_norm": 17.789567947387695,
"learning_rate": 4.337859886264624e-06,
"loss": 1.6367,
"step": 510
},
{
"epoch": 1.6754098360655738,
"grad_norm": 12.670686721801758,
"learning_rate": 4.320310956176214e-06,
"loss": 1.5723,
"step": 511
},
{
"epoch": 1.678688524590164,
"grad_norm": 20.38010597229004,
"learning_rate": 4.302770552421411e-06,
"loss": 1.5449,
"step": 512
},
{
"epoch": 1.681967213114754,
"grad_norm": 16.769620895385742,
"learning_rate": 4.2852388950351496e-06,
"loss": 1.6328,
"step": 513
},
{
"epoch": 1.6852459016393442,
"grad_norm": 12.062372207641602,
"learning_rate": 4.2677162039426454e-06,
"loss": 1.6055,
"step": 514
},
{
"epoch": 1.6885245901639343,
"grad_norm": 16.456920623779297,
"learning_rate": 4.2502026989566354e-06,
"loss": 1.5801,
"step": 515
},
{
"epoch": 1.6918032786885244,
"grad_norm": 13.964550018310547,
"learning_rate": 4.232698599774625e-06,
"loss": 1.5762,
"step": 516
},
{
"epoch": 1.6950819672131148,
"grad_norm": 13.01318645477295,
"learning_rate": 4.215204125976126e-06,
"loss": 1.5859,
"step": 517
},
{
"epoch": 1.698360655737705,
"grad_norm": 16.137088775634766,
"learning_rate": 4.197719497019906e-06,
"loss": 1.6953,
"step": 518
},
{
"epoch": 1.7016393442622952,
"grad_norm": 13.629316329956055,
"learning_rate": 4.180244932241234e-06,
"loss": 1.582,
"step": 519
},
{
"epoch": 1.7049180327868854,
"grad_norm": 12.793038368225098,
"learning_rate": 4.162780650849131e-06,
"loss": 1.5332,
"step": 520
},
{
"epoch": 1.7081967213114755,
"grad_norm": 12.091232299804688,
"learning_rate": 4.145326871923616e-06,
"loss": 1.5898,
"step": 521
},
{
"epoch": 1.7114754098360656,
"grad_norm": 14.179364204406738,
"learning_rate": 4.1278838144129614e-06,
"loss": 1.623,
"step": 522
},
{
"epoch": 1.7147540983606557,
"grad_norm": 13.386200904846191,
"learning_rate": 4.110451697130946e-06,
"loss": 1.541,
"step": 523
},
{
"epoch": 1.7180327868852459,
"grad_norm": 13.438315391540527,
"learning_rate": 4.093030738754105e-06,
"loss": 1.5527,
"step": 524
},
{
"epoch": 1.721311475409836,
"grad_norm": 14.931065559387207,
"learning_rate": 4.0756211578189995e-06,
"loss": 1.6602,
"step": 525
},
{
"epoch": 1.724590163934426,
"grad_norm": 12.362815856933594,
"learning_rate": 4.058223172719459e-06,
"loss": 1.5801,
"step": 526
},
{
"epoch": 1.7278688524590164,
"grad_norm": 14.543976783752441,
"learning_rate": 4.0408370017038515e-06,
"loss": 1.5,
"step": 527
},
{
"epoch": 1.7311475409836066,
"grad_norm": 16.341663360595703,
"learning_rate": 4.023462862872346e-06,
"loss": 1.5957,
"step": 528
},
{
"epoch": 1.7344262295081967,
"grad_norm": 13.109537124633789,
"learning_rate": 4.006100974174173e-06,
"loss": 1.6426,
"step": 529
},
{
"epoch": 1.737704918032787,
"grad_norm": 18.410266876220703,
"learning_rate": 3.9887515534048866e-06,
"loss": 1.5898,
"step": 530
},
{
"epoch": 1.7409836065573772,
"grad_norm": 12.593286514282227,
"learning_rate": 3.971414818203648e-06,
"loss": 1.625,
"step": 531
},
{
"epoch": 1.7442622950819673,
"grad_norm": 14.592744827270508,
"learning_rate": 3.954090986050477e-06,
"loss": 1.5918,
"step": 532
},
{
"epoch": 1.7475409836065574,
"grad_norm": 13.23892593383789,
"learning_rate": 3.9367802742635335e-06,
"loss": 1.6504,
"step": 533
},
{
"epoch": 1.7508196721311475,
"grad_norm": 13.610597610473633,
"learning_rate": 3.9194828999963905e-06,
"loss": 1.5742,
"step": 534
},
{
"epoch": 1.7540983606557377,
"grad_norm": 12.243042945861816,
"learning_rate": 3.902199080235305e-06,
"loss": 1.5625,
"step": 535
},
{
"epoch": 1.7573770491803278,
"grad_norm": 13.913399696350098,
"learning_rate": 3.884929031796504e-06,
"loss": 1.5547,
"step": 536
},
{
"epoch": 1.760655737704918,
"grad_norm": 12.08027458190918,
"learning_rate": 3.867672971323457e-06,
"loss": 1.543,
"step": 537
},
{
"epoch": 1.7639344262295082,
"grad_norm": 13.890872955322266,
"learning_rate": 3.850431115284171e-06,
"loss": 1.5391,
"step": 538
},
{
"epoch": 1.7672131147540984,
"grad_norm": 13.448533058166504,
"learning_rate": 3.8332036799684554e-06,
"loss": 1.5547,
"step": 539
},
{
"epoch": 1.7704918032786885,
"grad_norm": 12.864797592163086,
"learning_rate": 3.815990881485225e-06,
"loss": 1.5625,
"step": 540
},
{
"epoch": 1.7737704918032788,
"grad_norm": 13.268163681030273,
"learning_rate": 3.798792935759782e-06,
"loss": 1.5684,
"step": 541
},
{
"epoch": 1.777049180327869,
"grad_norm": 12.976286888122559,
"learning_rate": 3.781610058531107e-06,
"loss": 1.5996,
"step": 542
},
{
"epoch": 1.780327868852459,
"grad_norm": 14.119148254394531,
"learning_rate": 3.7644424653491564e-06,
"loss": 1.5918,
"step": 543
},
{
"epoch": 1.7836065573770492,
"grad_norm": 14.292675018310547,
"learning_rate": 3.7472903715721576e-06,
"loss": 1.5098,
"step": 544
},
{
"epoch": 1.7868852459016393,
"grad_norm": 13.508952140808105,
"learning_rate": 3.730153992363905e-06,
"loss": 1.5176,
"step": 545
},
{
"epoch": 1.7901639344262295,
"grad_norm": 13.02145767211914,
"learning_rate": 3.713033542691061e-06,
"loss": 1.6113,
"step": 546
},
{
"epoch": 1.7934426229508196,
"grad_norm": 12.737781524658203,
"learning_rate": 3.6959292373204615e-06,
"loss": 1.623,
"step": 547
},
{
"epoch": 1.7967213114754097,
"grad_norm": 12.331480026245117,
"learning_rate": 3.678841290816422e-06,
"loss": 1.4766,
"step": 548
},
{
"epoch": 1.8,
"grad_norm": 12.874061584472656,
"learning_rate": 3.661769917538043e-06,
"loss": 1.6211,
"step": 549
},
{
"epoch": 1.8032786885245902,
"grad_norm": 12.349608421325684,
"learning_rate": 3.6447153316365226e-06,
"loss": 1.5898,
"step": 550
},
{
"epoch": 1.8065573770491803,
"grad_norm": 13.023985862731934,
"learning_rate": 3.6276777470524727e-06,
"loss": 1.5469,
"step": 551
},
{
"epoch": 1.8098360655737706,
"grad_norm": 15.1737699508667,
"learning_rate": 3.6106573775132315e-06,
"loss": 1.5918,
"step": 552
},
{
"epoch": 1.8131147540983608,
"grad_norm": 14.11626148223877,
"learning_rate": 3.5936544365301824e-06,
"loss": 1.5684,
"step": 553
},
{
"epoch": 1.8163934426229509,
"grad_norm": 15.703447341918945,
"learning_rate": 3.5766691373960787e-06,
"loss": 1.6035,
"step": 554
},
{
"epoch": 1.819672131147541,
"grad_norm": 13.144828796386719,
"learning_rate": 3.559701693182366e-06,
"loss": 1.5723,
"step": 555
},
{
"epoch": 1.8229508196721311,
"grad_norm": 16.379074096679688,
"learning_rate": 3.542752316736506e-06,
"loss": 1.6133,
"step": 556
},
{
"epoch": 1.8262295081967213,
"grad_norm": 18.03300666809082,
"learning_rate": 3.5258212206793184e-06,
"loss": 1.582,
"step": 557
},
{
"epoch": 1.8295081967213114,
"grad_norm": 12.72996997833252,
"learning_rate": 3.5089086174022964e-06,
"loss": 1.6328,
"step": 558
},
{
"epoch": 1.8327868852459015,
"grad_norm": 17.85942840576172,
"learning_rate": 3.4920147190649555e-06,
"loss": 1.6523,
"step": 559
},
{
"epoch": 1.8360655737704918,
"grad_norm": 21.671913146972656,
"learning_rate": 3.475139737592167e-06,
"loss": 1.5801,
"step": 560
},
{
"epoch": 1.839344262295082,
"grad_norm": 13.127819061279297,
"learning_rate": 3.4582838846715e-06,
"loss": 1.5469,
"step": 561
},
{
"epoch": 1.842622950819672,
"grad_norm": 18.55519676208496,
"learning_rate": 3.441447371750566e-06,
"loss": 1.5176,
"step": 562
},
{
"epoch": 1.8459016393442624,
"grad_norm": 21.33066177368164,
"learning_rate": 3.4246304100343686e-06,
"loss": 1.5566,
"step": 563
},
{
"epoch": 1.8491803278688526,
"grad_norm": 16.495615005493164,
"learning_rate": 3.4078332104826506e-06,
"loss": 1.543,
"step": 564
},
{
"epoch": 1.8524590163934427,
"grad_norm": 13.102204322814941,
"learning_rate": 3.3910559838072475e-06,
"loss": 1.6289,
"step": 565
},
{
"epoch": 1.8557377049180328,
"grad_norm": 21.321346282958984,
"learning_rate": 3.374298940469448e-06,
"loss": 1.6191,
"step": 566
},
{
"epoch": 1.859016393442623,
"grad_norm": 21.263696670532227,
"learning_rate": 3.357562290677352e-06,
"loss": 1.5312,
"step": 567
},
{
"epoch": 1.862295081967213,
"grad_norm": 14.131853103637695,
"learning_rate": 3.340846244383231e-06,
"loss": 1.5762,
"step": 568
},
{
"epoch": 1.8655737704918032,
"grad_norm": 17.478233337402344,
"learning_rate": 3.324151011280897e-06,
"loss": 1.543,
"step": 569
},
{
"epoch": 1.8688524590163933,
"grad_norm": 19.541519165039062,
"learning_rate": 3.307476800803077e-06,
"loss": 1.5312,
"step": 570
},
{
"epoch": 1.8721311475409836,
"grad_norm": 18.075885772705078,
"learning_rate": 3.2908238221187736e-06,
"loss": 1.6348,
"step": 571
},
{
"epoch": 1.8754098360655738,
"grad_norm": 12.494379043579102,
"learning_rate": 3.27419228413065e-06,
"loss": 1.5527,
"step": 572
},
{
"epoch": 1.8786885245901639,
"grad_norm": 20.536605834960938,
"learning_rate": 3.257582395472408e-06,
"loss": 1.5508,
"step": 573
},
{
"epoch": 1.8819672131147542,
"grad_norm": 16.939430236816406,
"learning_rate": 3.240994364506172e-06,
"loss": 1.5449,
"step": 574
},
{
"epoch": 1.8852459016393444,
"grad_norm": 14.016660690307617,
"learning_rate": 3.224428399319868e-06,
"loss": 1.5352,
"step": 575
},
{
"epoch": 1.8885245901639345,
"grad_norm": 13.903589248657227,
"learning_rate": 3.207884707724626e-06,
"loss": 1.4844,
"step": 576
},
{
"epoch": 1.8918032786885246,
"grad_norm": 20.456378936767578,
"learning_rate": 3.1913634972521614e-06,
"loss": 1.6328,
"step": 577
},
{
"epoch": 1.8950819672131147,
"grad_norm": 22.15937042236328,
"learning_rate": 3.1748649751521754e-06,
"loss": 1.5977,
"step": 578
},
{
"epoch": 1.8983606557377048,
"grad_norm": 13.084799766540527,
"learning_rate": 3.158389348389758e-06,
"loss": 1.5273,
"step": 579
},
{
"epoch": 1.901639344262295,
"grad_norm": 21.793209075927734,
"learning_rate": 3.1419368236427894e-06,
"loss": 1.6094,
"step": 580
},
{
"epoch": 1.904918032786885,
"grad_norm": 24.526473999023438,
"learning_rate": 3.125507607299341e-06,
"loss": 1.5508,
"step": 581
},
{
"epoch": 1.9081967213114754,
"grad_norm": 18.045026779174805,
"learning_rate": 3.1091019054551053e-06,
"loss": 1.5508,
"step": 582
},
{
"epoch": 1.9114754098360656,
"grad_norm": 16.46772575378418,
"learning_rate": 3.0927199239107887e-06,
"loss": 1.5957,
"step": 583
},
{
"epoch": 1.9147540983606557,
"grad_norm": 19.109159469604492,
"learning_rate": 3.0763618681695405e-06,
"loss": 1.5059,
"step": 584
},
{
"epoch": 1.918032786885246,
"grad_norm": 18.3557186126709,
"learning_rate": 3.060027943434376e-06,
"loss": 1.5645,
"step": 585
},
{
"epoch": 1.9213114754098362,
"grad_norm": 13.355094909667969,
"learning_rate": 3.0437183546055977e-06,
"loss": 1.5508,
"step": 586
},
{
"epoch": 1.9245901639344263,
"grad_norm": 20.06283950805664,
"learning_rate": 3.027433306278229e-06,
"loss": 1.6016,
"step": 587
},
{
"epoch": 1.9278688524590164,
"grad_norm": 24.55561065673828,
"learning_rate": 3.0111730027394443e-06,
"loss": 1.5703,
"step": 588
},
{
"epoch": 1.9311475409836065,
"grad_norm": 16.579423904418945,
"learning_rate": 2.9949376479660104e-06,
"loss": 1.4688,
"step": 589
},
{
"epoch": 1.9344262295081966,
"grad_norm": 12.7395658493042,
"learning_rate": 2.9787274456217225e-06,
"loss": 1.5918,
"step": 590
},
{
"epoch": 1.9377049180327868,
"grad_norm": 13.82606029510498,
"learning_rate": 2.962542599054854e-06,
"loss": 1.5332,
"step": 591
},
{
"epoch": 1.940983606557377,
"grad_norm": 23.461950302124023,
"learning_rate": 2.9463833112956026e-06,
"loss": 1.5234,
"step": 592
},
{
"epoch": 1.9442622950819672,
"grad_norm": 14.912766456604004,
"learning_rate": 2.9302497850535447e-06,
"loss": 1.6074,
"step": 593
},
{
"epoch": 1.9475409836065574,
"grad_norm": 12.350762367248535,
"learning_rate": 2.9141422227150917e-06,
"loss": 1.541,
"step": 594
},
{
"epoch": 1.9508196721311475,
"grad_norm": 21.709842681884766,
"learning_rate": 2.898060826340954e-06,
"loss": 1.5742,
"step": 595
},
{
"epoch": 1.9540983606557378,
"grad_norm": 21.633665084838867,
"learning_rate": 2.882005797663604e-06,
"loss": 1.5781,
"step": 596
},
{
"epoch": 1.957377049180328,
"grad_norm": 17.24868392944336,
"learning_rate": 2.86597733808474e-06,
"loss": 1.5273,
"step": 597
},
{
"epoch": 1.960655737704918,
"grad_norm": 16.522829055786133,
"learning_rate": 2.849975648672775e-06,
"loss": 1.5645,
"step": 598
},
{
"epoch": 1.9639344262295082,
"grad_norm": 20.824695587158203,
"learning_rate": 2.8340009301602943e-06,
"loss": 1.5938,
"step": 599
},
{
"epoch": 1.9672131147540983,
"grad_norm": 24.554397583007812,
"learning_rate": 2.8180533829415536e-06,
"loss": 1.5195,
"step": 600
},
{
"epoch": 1.9704918032786884,
"grad_norm": 15.339919090270996,
"learning_rate": 2.8021332070699582e-06,
"loss": 1.5918,
"step": 601
},
{
"epoch": 1.9737704918032786,
"grad_norm": 16.299833297729492,
"learning_rate": 2.7862406022555547e-06,
"loss": 1.5469,
"step": 602
},
{
"epoch": 1.9770491803278687,
"grad_norm": 21.7740535736084,
"learning_rate": 2.7703757678625286e-06,
"loss": 1.6309,
"step": 603
},
{
"epoch": 1.980327868852459,
"grad_norm": 15.119240760803223,
"learning_rate": 2.754538902906693e-06,
"loss": 1.4824,
"step": 604
},
{
"epoch": 1.9836065573770492,
"grad_norm": 12.31226921081543,
"learning_rate": 2.7387302060530076e-06,
"loss": 1.5566,
"step": 605
},
{
"epoch": 1.9868852459016395,
"grad_norm": 14.131988525390625,
"learning_rate": 2.72294987561307e-06,
"loss": 1.6523,
"step": 606
},
{
"epoch": 1.9901639344262296,
"grad_norm": 16.699298858642578,
"learning_rate": 2.7071981095426435e-06,
"loss": 1.5371,
"step": 607
},
{
"epoch": 1.9934426229508198,
"grad_norm": 12.761926651000977,
"learning_rate": 2.6914751054391665e-06,
"loss": 1.584,
"step": 608
},
{
"epoch": 1.9967213114754099,
"grad_norm": 12.398103713989258,
"learning_rate": 2.6757810605392675e-06,
"loss": 1.543,
"step": 609
},
{
"epoch": 2.0,
"grad_norm": 14.358308792114258,
"learning_rate": 2.660116171716305e-06,
"loss": 1.5586,
"step": 610
},
{
"epoch": 2.00327868852459,
"grad_norm": 13.359660148620605,
"learning_rate": 2.6444806354778833e-06,
"loss": 1.4277,
"step": 611
},
{
"epoch": 2.0065573770491802,
"grad_norm": 12.226639747619629,
"learning_rate": 2.628874647963402e-06,
"loss": 1.418,
"step": 612
},
{
"epoch": 2.0098360655737704,
"grad_norm": 14.138394355773926,
"learning_rate": 2.6132984049415798e-06,
"loss": 1.4336,
"step": 613
},
{
"epoch": 2.0131147540983605,
"grad_norm": 13.725980758666992,
"learning_rate": 2.5977521018080115e-06,
"loss": 1.4082,
"step": 614
},
{
"epoch": 2.0163934426229506,
"grad_norm": 14.25224494934082,
"learning_rate": 2.582235933582714e-06,
"loss": 1.3633,
"step": 615
},
{
"epoch": 2.019672131147541,
"grad_norm": 13.436676979064941,
"learning_rate": 2.5667500949076705e-06,
"loss": 1.3887,
"step": 616
},
{
"epoch": 2.0229508196721313,
"grad_norm": 12.966423034667969,
"learning_rate": 2.5512947800444054e-06,
"loss": 1.3633,
"step": 617
},
{
"epoch": 2.0262295081967214,
"grad_norm": 14.51209545135498,
"learning_rate": 2.5358701828715298e-06,
"loss": 1.4883,
"step": 618
},
{
"epoch": 2.0295081967213116,
"grad_norm": 13.7439546585083,
"learning_rate": 2.520476496882326e-06,
"loss": 1.4238,
"step": 619
},
{
"epoch": 2.0327868852459017,
"grad_norm": 14.126566886901855,
"learning_rate": 2.5051139151823032e-06,
"loss": 1.4219,
"step": 620
},
{
"epoch": 2.036065573770492,
"grad_norm": 13.732039451599121,
"learning_rate": 2.4897826304867917e-06,
"loss": 1.4238,
"step": 621
},
{
"epoch": 2.039344262295082,
"grad_norm": 13.921870231628418,
"learning_rate": 2.4744828351185167e-06,
"loss": 1.4434,
"step": 622
},
{
"epoch": 2.042622950819672,
"grad_norm": 17.297060012817383,
"learning_rate": 2.459214721005181e-06,
"loss": 1.3398,
"step": 623
},
{
"epoch": 2.045901639344262,
"grad_norm": 13.58006763458252,
"learning_rate": 2.4439784796770705e-06,
"loss": 1.4238,
"step": 624
},
{
"epoch": 2.0491803278688523,
"grad_norm": 14.021245956420898,
"learning_rate": 2.428774302264637e-06,
"loss": 1.3906,
"step": 625
},
{
"epoch": 2.0524590163934424,
"grad_norm": 14.446585655212402,
"learning_rate": 2.4136023794961126e-06,
"loss": 1.4121,
"step": 626
},
{
"epoch": 2.055737704918033,
"grad_norm": 16.137575149536133,
"learning_rate": 2.3984629016951146e-06,
"loss": 1.457,
"step": 627
},
{
"epoch": 2.059016393442623,
"grad_norm": 13.775222778320312,
"learning_rate": 2.3833560587782462e-06,
"loss": 1.3477,
"step": 628
},
{
"epoch": 2.0622950819672132,
"grad_norm": 13.22794246673584,
"learning_rate": 2.3682820402527342e-06,
"loss": 1.4043,
"step": 629
},
{
"epoch": 2.0655737704918034,
"grad_norm": 14.258461952209473,
"learning_rate": 2.3532410352140317e-06,
"loss": 1.4277,
"step": 630
},
{
"epoch": 2.0688524590163935,
"grad_norm": 14.627102851867676,
"learning_rate": 2.3382332323434644e-06,
"loss": 1.4727,
"step": 631
},
{
"epoch": 2.0721311475409836,
"grad_norm": 13.571576118469238,
"learning_rate": 2.3232588199058455e-06,
"loss": 1.4453,
"step": 632
},
{
"epoch": 2.0754098360655737,
"grad_norm": 13.201940536499023,
"learning_rate": 2.3083179857471304e-06,
"loss": 1.4805,
"step": 633
},
{
"epoch": 2.078688524590164,
"grad_norm": 15.000838279724121,
"learning_rate": 2.29341091729205e-06,
"loss": 1.3867,
"step": 634
},
{
"epoch": 2.081967213114754,
"grad_norm": 15.08394718170166,
"learning_rate": 2.278537801541765e-06,
"loss": 1.3535,
"step": 635
},
{
"epoch": 2.085245901639344,
"grad_norm": 17.611570358276367,
"learning_rate": 2.2636988250715136e-06,
"loss": 1.4238,
"step": 636
},
{
"epoch": 2.088524590163934,
"grad_norm": 13.167271614074707,
"learning_rate": 2.2488941740282757e-06,
"loss": 1.377,
"step": 637
},
{
"epoch": 2.091803278688525,
"grad_norm": 16.23381996154785,
"learning_rate": 2.2341240341284416e-06,
"loss": 1.4219,
"step": 638
},
{
"epoch": 2.095081967213115,
"grad_norm": 12.979382514953613,
"learning_rate": 2.2193885906554694e-06,
"loss": 1.3652,
"step": 639
},
{
"epoch": 2.098360655737705,
"grad_norm": 13.941397666931152,
"learning_rate": 2.2046880284575833e-06,
"loss": 1.3711,
"step": 640
},
{
"epoch": 2.101639344262295,
"grad_norm": 13.71773624420166,
"learning_rate": 2.1900225319454254e-06,
"loss": 1.4043,
"step": 641
},
{
"epoch": 2.1049180327868853,
"grad_norm": 12.830635070800781,
"learning_rate": 2.1753922850897684e-06,
"loss": 1.3809,
"step": 642
},
{
"epoch": 2.1081967213114754,
"grad_norm": 15.367294311523438,
"learning_rate": 2.1607974714191894e-06,
"loss": 1.4609,
"step": 643
},
{
"epoch": 2.1114754098360655,
"grad_norm": 12.984966278076172,
"learning_rate": 2.1462382740177814e-06,
"loss": 1.4219,
"step": 644
},
{
"epoch": 2.1147540983606556,
"grad_norm": 14.171280860900879,
"learning_rate": 2.131714875522845e-06,
"loss": 1.3906,
"step": 645
},
{
"epoch": 2.1180327868852458,
"grad_norm": 13.716917037963867,
"learning_rate": 2.1172274581226064e-06,
"loss": 1.3574,
"step": 646
},
{
"epoch": 2.121311475409836,
"grad_norm": 13.324223518371582,
"learning_rate": 2.1027762035539314e-06,
"loss": 1.3457,
"step": 647
},
{
"epoch": 2.1245901639344265,
"grad_norm": 14.234905242919922,
"learning_rate": 2.0883612931000323e-06,
"loss": 1.3887,
"step": 648
},
{
"epoch": 2.1278688524590166,
"grad_norm": 14.057714462280273,
"learning_rate": 2.073982907588215e-06,
"loss": 1.4199,
"step": 649
},
{
"epoch": 2.1311475409836067,
"grad_norm": 13.39286994934082,
"learning_rate": 2.059641227387592e-06,
"loss": 1.4238,
"step": 650
},
{
"epoch": 2.134426229508197,
"grad_norm": 13.911747932434082,
"learning_rate": 2.0453364324068335e-06,
"loss": 1.4434,
"step": 651
},
{
"epoch": 2.137704918032787,
"grad_norm": 14.115652084350586,
"learning_rate": 2.0310687020918972e-06,
"loss": 1.3516,
"step": 652
},
{
"epoch": 2.140983606557377,
"grad_norm": 13.558361053466797,
"learning_rate": 2.016838215423791e-06,
"loss": 1.4375,
"step": 653
},
{
"epoch": 2.144262295081967,
"grad_norm": 13.805608749389648,
"learning_rate": 2.0026451509163204e-06,
"loss": 1.4199,
"step": 654
},
{
"epoch": 2.1475409836065573,
"grad_norm": 14.977523803710938,
"learning_rate": 1.988489686613845e-06,
"loss": 1.4492,
"step": 655
},
{
"epoch": 2.1508196721311474,
"grad_norm": 14.626441955566406,
"learning_rate": 1.9743720000890564e-06,
"loss": 1.3691,
"step": 656
},
{
"epoch": 2.1540983606557376,
"grad_norm": 14.720403671264648,
"learning_rate": 1.9602922684407388e-06,
"loss": 1.4023,
"step": 657
},
{
"epoch": 2.1573770491803277,
"grad_norm": 14.904003143310547,
"learning_rate": 1.9462506682915573e-06,
"loss": 1.4004,
"step": 658
},
{
"epoch": 2.160655737704918,
"grad_norm": 14.525262832641602,
"learning_rate": 1.932247375785837e-06,
"loss": 1.3984,
"step": 659
},
{
"epoch": 2.1639344262295084,
"grad_norm": 13.78215503692627,
"learning_rate": 1.91828256658735e-06,
"loss": 1.4082,
"step": 660
},
{
"epoch": 2.1672131147540985,
"grad_norm": 14.337031364440918,
"learning_rate": 1.9043564158771226e-06,
"loss": 1.4062,
"step": 661
},
{
"epoch": 2.1704918032786886,
"grad_norm": 14.619343757629395,
"learning_rate": 1.8904690983512253e-06,
"loss": 1.3965,
"step": 662
},
{
"epoch": 2.1737704918032787,
"grad_norm": 14.823307991027832,
"learning_rate": 1.876620788218592e-06,
"loss": 1.3223,
"step": 663
},
{
"epoch": 2.177049180327869,
"grad_norm": 14.898371696472168,
"learning_rate": 1.8628116591988232e-06,
"loss": 1.4004,
"step": 664
},
{
"epoch": 2.180327868852459,
"grad_norm": 17.55912208557129,
"learning_rate": 1.8490418845200241e-06,
"loss": 1.373,
"step": 665
},
{
"epoch": 2.183606557377049,
"grad_norm": 16.01456069946289,
"learning_rate": 1.8353116369166107e-06,
"loss": 1.4727,
"step": 666
},
{
"epoch": 2.1868852459016392,
"grad_norm": 13.750204086303711,
"learning_rate": 1.8216210886271562e-06,
"loss": 1.3711,
"step": 667
},
{
"epoch": 2.1901639344262294,
"grad_norm": 15.304807662963867,
"learning_rate": 1.8079704113922303e-06,
"loss": 1.4102,
"step": 668
},
{
"epoch": 2.1934426229508195,
"grad_norm": 14.734195709228516,
"learning_rate": 1.7943597764522363e-06,
"loss": 1.4199,
"step": 669
},
{
"epoch": 2.19672131147541,
"grad_norm": 13.935043334960938,
"learning_rate": 1.7807893545452748e-06,
"loss": 1.4258,
"step": 670
},
{
"epoch": 2.2,
"grad_norm": 13.949748039245605,
"learning_rate": 1.7672593159049877e-06,
"loss": 1.4141,
"step": 671
},
{
"epoch": 2.2032786885245903,
"grad_norm": 13.482948303222656,
"learning_rate": 1.753769830258441e-06,
"loss": 1.4199,
"step": 672
},
{
"epoch": 2.2065573770491804,
"grad_norm": 19.490720748901367,
"learning_rate": 1.740321066823975e-06,
"loss": 1.3574,
"step": 673
},
{
"epoch": 2.2098360655737705,
"grad_norm": 14.87624740600586,
"learning_rate": 1.7269131943090984e-06,
"loss": 1.4414,
"step": 674
},
{
"epoch": 2.2131147540983607,
"grad_norm": 14.58738899230957,
"learning_rate": 1.7135463809083625e-06,
"loss": 1.3867,
"step": 675
},
{
"epoch": 2.216393442622951,
"grad_norm": 14.250307083129883,
"learning_rate": 1.7002207943012516e-06,
"loss": 1.4277,
"step": 676
},
{
"epoch": 2.219672131147541,
"grad_norm": 14.0363187789917,
"learning_rate": 1.6869366016500872e-06,
"loss": 1.4062,
"step": 677
},
{
"epoch": 2.222950819672131,
"grad_norm": 13.977777481079102,
"learning_rate": 1.6736939695979237e-06,
"loss": 1.4102,
"step": 678
},
{
"epoch": 2.226229508196721,
"grad_norm": 13.99720573425293,
"learning_rate": 1.6604930642664614e-06,
"loss": 1.3828,
"step": 679
},
{
"epoch": 2.2295081967213113,
"grad_norm": 14.72916030883789,
"learning_rate": 1.6473340512539564e-06,
"loss": 1.3418,
"step": 680
},
{
"epoch": 2.2327868852459014,
"grad_norm": 14.218852043151855,
"learning_rate": 1.634217095633156e-06,
"loss": 1.4043,
"step": 681
},
{
"epoch": 2.236065573770492,
"grad_norm": 14.683760643005371,
"learning_rate": 1.6211423619492117e-06,
"loss": 1.3711,
"step": 682
},
{
"epoch": 2.239344262295082,
"grad_norm": 14.292176246643066,
"learning_rate": 1.6081100142176314e-06,
"loss": 1.416,
"step": 683
},
{
"epoch": 2.2426229508196722,
"grad_norm": 14.603547096252441,
"learning_rate": 1.5951202159222073e-06,
"loss": 1.3789,
"step": 684
},
{
"epoch": 2.2459016393442623,
"grad_norm": 14.563721656799316,
"learning_rate": 1.5821731300129755e-06,
"loss": 1.4062,
"step": 685
},
{
"epoch": 2.2491803278688525,
"grad_norm": 14.297794342041016,
"learning_rate": 1.5692689189041698e-06,
"loss": 1.4434,
"step": 686
},
{
"epoch": 2.2524590163934426,
"grad_norm": 16.67906379699707,
"learning_rate": 1.556407744472177e-06,
"loss": 1.4043,
"step": 687
},
{
"epoch": 2.2557377049180327,
"grad_norm": 14.280274391174316,
"learning_rate": 1.5435897680535184e-06,
"loss": 1.4023,
"step": 688
},
{
"epoch": 2.259016393442623,
"grad_norm": 14.600772857666016,
"learning_rate": 1.530815150442813e-06,
"loss": 1.416,
"step": 689
},
{
"epoch": 2.262295081967213,
"grad_norm": 13.637638092041016,
"learning_rate": 1.518084051890773e-06,
"loss": 1.4238,
"step": 690
},
{
"epoch": 2.265573770491803,
"grad_norm": 15.59545612335205,
"learning_rate": 1.5053966321021847e-06,
"loss": 1.3535,
"step": 691
},
{
"epoch": 2.2688524590163937,
"grad_norm": 15.58894157409668,
"learning_rate": 1.4927530502339048e-06,
"loss": 1.4199,
"step": 692
},
{
"epoch": 2.2721311475409838,
"grad_norm": 13.924785614013672,
"learning_rate": 1.4801534648928733e-06,
"loss": 1.2949,
"step": 693
},
{
"epoch": 2.275409836065574,
"grad_norm": 13.374207496643066,
"learning_rate": 1.4675980341341112e-06,
"loss": 1.4219,
"step": 694
},
{
"epoch": 2.278688524590164,
"grad_norm": 13.448588371276855,
"learning_rate": 1.4550869154587487e-06,
"loss": 1.3379,
"step": 695
},
{
"epoch": 2.281967213114754,
"grad_norm": 15.436904907226562,
"learning_rate": 1.4426202658120408e-06,
"loss": 1.3945,
"step": 696
},
{
"epoch": 2.2852459016393443,
"grad_norm": 13.138762474060059,
"learning_rate": 1.4301982415814046e-06,
"loss": 1.4316,
"step": 697
},
{
"epoch": 2.2885245901639344,
"grad_norm": 13.476716995239258,
"learning_rate": 1.417820998594458e-06,
"loss": 1.3887,
"step": 698
},
{
"epoch": 2.2918032786885245,
"grad_norm": 14.63200569152832,
"learning_rate": 1.4054886921170557e-06,
"loss": 1.4043,
"step": 699
},
{
"epoch": 2.2950819672131146,
"grad_norm": 14.927804946899414,
"learning_rate": 1.3932014768513553e-06,
"loss": 1.4395,
"step": 700
},
{
"epoch": 2.2983606557377048,
"grad_norm": 14.905073165893555,
"learning_rate": 1.3809595069338621e-06,
"loss": 1.4277,
"step": 701
},
{
"epoch": 2.301639344262295,
"grad_norm": 14.02908706665039,
"learning_rate": 1.3687629359335098e-06,
"loss": 1.3887,
"step": 702
},
{
"epoch": 2.304918032786885,
"grad_norm": 14.156634330749512,
"learning_rate": 1.356611916849721e-06,
"loss": 1.4453,
"step": 703
},
{
"epoch": 2.3081967213114756,
"grad_norm": 15.097304344177246,
"learning_rate": 1.3445066021104986e-06,
"loss": 1.4082,
"step": 704
},
{
"epoch": 2.3114754098360657,
"grad_norm": 14.943670272827148,
"learning_rate": 1.3324471435705088e-06,
"loss": 1.4082,
"step": 705
},
{
"epoch": 2.314754098360656,
"grad_norm": 14.368483543395996,
"learning_rate": 1.3204336925091715e-06,
"loss": 1.3711,
"step": 706
},
{
"epoch": 2.318032786885246,
"grad_norm": 14.06216049194336,
"learning_rate": 1.3084663996287744e-06,
"loss": 1.3848,
"step": 707
},
{
"epoch": 2.321311475409836,
"grad_norm": 14.391357421875,
"learning_rate": 1.296545415052569e-06,
"loss": 1.4043,
"step": 708
},
{
"epoch": 2.324590163934426,
"grad_norm": 13.395355224609375,
"learning_rate": 1.2846708883228991e-06,
"loss": 1.4688,
"step": 709
},
{
"epoch": 2.3278688524590163,
"grad_norm": 13.983238220214844,
"learning_rate": 1.2728429683993181e-06,
"loss": 1.4023,
"step": 710
},
{
"epoch": 2.3311475409836064,
"grad_norm": 14.64704704284668,
"learning_rate": 1.261061803656723e-06,
"loss": 1.3867,
"step": 711
},
{
"epoch": 2.3344262295081966,
"grad_norm": 14.733027458190918,
"learning_rate": 1.2493275418834894e-06,
"loss": 1.3008,
"step": 712
},
{
"epoch": 2.337704918032787,
"grad_norm": 14.756686210632324,
"learning_rate": 1.2376403302796214e-06,
"loss": 1.377,
"step": 713
},
{
"epoch": 2.3409836065573773,
"grad_norm": 13.700766563415527,
"learning_rate": 1.2260003154549066e-06,
"loss": 1.3965,
"step": 714
},
{
"epoch": 2.3442622950819674,
"grad_norm": 13.78193187713623,
"learning_rate": 1.214407643427069e-06,
"loss": 1.3633,
"step": 715
},
{
"epoch": 2.3475409836065575,
"grad_norm": 14.956125259399414,
"learning_rate": 1.2028624596199523e-06,
"loss": 1.4238,
"step": 716
},
{
"epoch": 2.3508196721311476,
"grad_norm": 13.948843955993652,
"learning_rate": 1.1913649088616747e-06,
"loss": 1.375,
"step": 717
},
{
"epoch": 2.3540983606557377,
"grad_norm": 14.496105194091797,
"learning_rate": 1.1799151353828314e-06,
"loss": 1.4121,
"step": 718
},
{
"epoch": 2.357377049180328,
"grad_norm": 14.629096031188965,
"learning_rate": 1.1685132828146723e-06,
"loss": 1.4512,
"step": 719
},
{
"epoch": 2.360655737704918,
"grad_norm": 13.506555557250977,
"learning_rate": 1.1571594941873098e-06,
"loss": 1.4492,
"step": 720
},
{
"epoch": 2.363934426229508,
"grad_norm": 14.089963912963867,
"learning_rate": 1.1458539119279167e-06,
"loss": 1.4102,
"step": 721
},
{
"epoch": 2.3672131147540982,
"grad_norm": 15.402302742004395,
"learning_rate": 1.1345966778589401e-06,
"loss": 1.3555,
"step": 722
},
{
"epoch": 2.3704918032786884,
"grad_norm": 14.145624160766602,
"learning_rate": 1.1233879331963355e-06,
"loss": 1.4297,
"step": 723
},
{
"epoch": 2.3737704918032785,
"grad_norm": 13.872859954833984,
"learning_rate": 1.112227818547773e-06,
"loss": 1.3789,
"step": 724
},
{
"epoch": 2.3770491803278686,
"grad_norm": 16.524686813354492,
"learning_rate": 1.1011164739108938e-06,
"loss": 1.4277,
"step": 725
},
{
"epoch": 2.380327868852459,
"grad_norm": 15.741491317749023,
"learning_rate": 1.0900540386715386e-06,
"loss": 1.3574,
"step": 726
},
{
"epoch": 2.3836065573770493,
"grad_norm": 13.59623908996582,
"learning_rate": 1.079040651602013e-06,
"loss": 1.4043,
"step": 727
},
{
"epoch": 2.3868852459016394,
"grad_norm": 14.150300979614258,
"learning_rate": 1.0680764508593332e-06,
"loss": 1.4414,
"step": 728
},
{
"epoch": 2.3901639344262295,
"grad_norm": 15.002487182617188,
"learning_rate": 1.0571615739835044e-06,
"loss": 1.4512,
"step": 729
},
{
"epoch": 2.3934426229508197,
"grad_norm": 15.407000541687012,
"learning_rate": 1.0462961578957886e-06,
"loss": 1.4102,
"step": 730
},
{
"epoch": 2.39672131147541,
"grad_norm": 13.538361549377441,
"learning_rate": 1.035480338896987e-06,
"loss": 1.4297,
"step": 731
},
{
"epoch": 2.4,
"grad_norm": 14.728962898254395,
"learning_rate": 1.0247142526657356e-06,
"loss": 1.3906,
"step": 732
},
{
"epoch": 2.40327868852459,
"grad_norm": 15.064108848571777,
"learning_rate": 1.0139980342567952e-06,
"loss": 1.4219,
"step": 733
},
{
"epoch": 2.40655737704918,
"grad_norm": 14.572790145874023,
"learning_rate": 1.0033318180993667e-06,
"loss": 1.4453,
"step": 734
},
{
"epoch": 2.4098360655737707,
"grad_norm": 15.974475860595703,
"learning_rate": 9.927157379953938e-07,
"loss": 1.3652,
"step": 735
},
{
"epoch": 2.413114754098361,
"grad_norm": 17.50275993347168,
"learning_rate": 9.821499271178952e-07,
"loss": 1.375,
"step": 736
},
{
"epoch": 2.416393442622951,
"grad_norm": 13.482318878173828,
"learning_rate": 9.71634518009289e-07,
"loss": 1.377,
"step": 737
},
{
"epoch": 2.419672131147541,
"grad_norm": 13.551551818847656,
"learning_rate": 9.61169642579725e-07,
"loss": 1.4121,
"step": 738
},
{
"epoch": 2.422950819672131,
"grad_norm": 13.170391082763672,
"learning_rate": 9.50755432105443e-07,
"loss": 1.3379,
"step": 739
},
{
"epoch": 2.4262295081967213,
"grad_norm": 13.548935890197754,
"learning_rate": 9.403920172271103e-07,
"loss": 1.4023,
"step": 740
},
{
"epoch": 2.4295081967213115,
"grad_norm": 13.065211296081543,
"learning_rate": 9.300795279481955e-07,
"loss": 1.3555,
"step": 741
},
{
"epoch": 2.4327868852459016,
"grad_norm": 14.714930534362793,
"learning_rate": 9.19818093633334e-07,
"loss": 1.3477,
"step": 742
},
{
"epoch": 2.4360655737704917,
"grad_norm": 14.191553115844727,
"learning_rate": 9.096078430066996e-07,
"loss": 1.4824,
"step": 743
},
{
"epoch": 2.439344262295082,
"grad_norm": 13.253253936767578,
"learning_rate": 8.994489041503979e-07,
"loss": 1.373,
"step": 744
},
{
"epoch": 2.442622950819672,
"grad_norm": 15.373652458190918,
"learning_rate": 8.893414045028537e-07,
"loss": 1.4258,
"step": 745
},
{
"epoch": 2.445901639344262,
"grad_norm": 14.864066123962402,
"learning_rate": 8.792854708572174e-07,
"loss": 1.4141,
"step": 746
},
{
"epoch": 2.4491803278688526,
"grad_norm": 14.104143142700195,
"learning_rate": 8.692812293597658e-07,
"loss": 1.3477,
"step": 747
},
{
"epoch": 2.4524590163934428,
"grad_norm": 12.742308616638184,
"learning_rate": 8.593288055083348e-07,
"loss": 1.3926,
"step": 748
},
{
"epoch": 2.455737704918033,
"grad_norm": 14.611797332763672,
"learning_rate": 8.494283241507284e-07,
"loss": 1.4121,
"step": 749
},
{
"epoch": 2.459016393442623,
"grad_norm": 13.654319763183594,
"learning_rate": 8.395799094831602e-07,
"loss": 1.3809,
"step": 750
},
{
"epoch": 2.462295081967213,
"grad_norm": 14.529156684875488,
"learning_rate": 8.297836850487001e-07,
"loss": 1.4199,
"step": 751
},
{
"epoch": 2.4655737704918033,
"grad_norm": 15.091279029846191,
"learning_rate": 8.200397737357135e-07,
"loss": 1.3496,
"step": 752
},
{
"epoch": 2.4688524590163934,
"grad_norm": 14.46184253692627,
"learning_rate": 8.103482977763311e-07,
"loss": 1.4082,
"step": 753
},
{
"epoch": 2.4721311475409835,
"grad_norm": 14.449289321899414,
"learning_rate": 8.00709378744905e-07,
"loss": 1.3809,
"step": 754
},
{
"epoch": 2.4754098360655736,
"grad_norm": 13.937576293945312,
"learning_rate": 7.911231375564965e-07,
"loss": 1.3652,
"step": 755
},
{
"epoch": 2.4786885245901638,
"grad_norm": 14.12602424621582,
"learning_rate": 7.815896944653433e-07,
"loss": 1.3457,
"step": 756
},
{
"epoch": 2.4819672131147543,
"grad_norm": 14.014487266540527,
"learning_rate": 7.721091690633659e-07,
"loss": 1.4199,
"step": 757
},
{
"epoch": 2.4852459016393444,
"grad_norm": 14.530862808227539,
"learning_rate": 7.626816802786563e-07,
"loss": 1.4453,
"step": 758
},
{
"epoch": 2.4885245901639346,
"grad_norm": 14.348193168640137,
"learning_rate": 7.533073463739921e-07,
"loss": 1.5156,
"step": 759
},
{
"epoch": 2.4918032786885247,
"grad_norm": 14.085508346557617,
"learning_rate": 7.439862849453522e-07,
"loss": 1.3223,
"step": 760
},
{
"epoch": 2.495081967213115,
"grad_norm": 15.197397232055664,
"learning_rate": 7.347186129204408e-07,
"loss": 1.3613,
"step": 761
},
{
"epoch": 2.498360655737705,
"grad_norm": 14.015279769897461,
"learning_rate": 7.25504446557222e-07,
"loss": 1.4375,
"step": 762
},
{
"epoch": 2.501639344262295,
"grad_norm": 14.460212707519531,
"learning_rate": 7.163439014424555e-07,
"loss": 1.3789,
"step": 763
},
{
"epoch": 2.504918032786885,
"grad_norm": 14.248664855957031,
"learning_rate": 7.072370924902583e-07,
"loss": 1.4512,
"step": 764
},
{
"epoch": 2.5081967213114753,
"grad_norm": 13.521627426147461,
"learning_rate": 6.981841339406481e-07,
"loss": 1.3613,
"step": 765
},
{
"epoch": 2.5114754098360654,
"grad_norm": 14.41736888885498,
"learning_rate": 6.891851393581262e-07,
"loss": 1.4102,
"step": 766
},
{
"epoch": 2.5147540983606556,
"grad_norm": 13.729426383972168,
"learning_rate": 6.802402216302384e-07,
"loss": 1.4238,
"step": 767
},
{
"epoch": 2.5180327868852457,
"grad_norm": 14.271988868713379,
"learning_rate": 6.713494929661684e-07,
"loss": 1.3926,
"step": 768
},
{
"epoch": 2.521311475409836,
"grad_norm": 14.867559432983398,
"learning_rate": 6.625130648953299e-07,
"loss": 1.457,
"step": 769
},
{
"epoch": 2.5245901639344264,
"grad_norm": 14.078306198120117,
"learning_rate": 6.537310482659581e-07,
"loss": 1.377,
"step": 770
},
{
"epoch": 2.5278688524590165,
"grad_norm": 13.979537010192871,
"learning_rate": 6.450035532437332e-07,
"loss": 1.4414,
"step": 771
},
{
"epoch": 2.5311475409836066,
"grad_norm": 14.6762113571167,
"learning_rate": 6.363306893103843e-07,
"loss": 1.3398,
"step": 772
},
{
"epoch": 2.5344262295081967,
"grad_norm": 14.324873924255371,
"learning_rate": 6.27712565262329e-07,
"loss": 1.4102,
"step": 773
},
{
"epoch": 2.537704918032787,
"grad_norm": 14.86697006225586,
"learning_rate": 6.191492892092998e-07,
"loss": 1.4492,
"step": 774
},
{
"epoch": 2.540983606557377,
"grad_norm": 13.636669158935547,
"learning_rate": 6.106409685729886e-07,
"loss": 1.5176,
"step": 775
},
{
"epoch": 2.544262295081967,
"grad_norm": 13.217398643493652,
"learning_rate": 6.021877100857048e-07,
"loss": 1.416,
"step": 776
},
{
"epoch": 2.5475409836065572,
"grad_norm": 13.749228477478027,
"learning_rate": 5.937896197890303e-07,
"loss": 1.3477,
"step": 777
},
{
"epoch": 2.550819672131148,
"grad_norm": 13.94660758972168,
"learning_rate": 5.854468030324933e-07,
"loss": 1.4238,
"step": 778
},
{
"epoch": 2.554098360655738,
"grad_norm": 12.767383575439453,
"learning_rate": 5.771593644722428e-07,
"loss": 1.3594,
"step": 779
},
{
"epoch": 2.557377049180328,
"grad_norm": 14.669730186462402,
"learning_rate": 5.689274080697404e-07,
"loss": 1.3789,
"step": 780
},
{
"epoch": 2.560655737704918,
"grad_norm": 13.398807525634766,
"learning_rate": 5.607510370904545e-07,
"loss": 1.4336,
"step": 781
},
{
"epoch": 2.5639344262295083,
"grad_norm": 13.599152565002441,
"learning_rate": 5.526303541025601e-07,
"loss": 1.4316,
"step": 782
},
{
"epoch": 2.5672131147540984,
"grad_norm": 13.629499435424805,
"learning_rate": 5.445654609756623e-07,
"loss": 1.4082,
"step": 783
},
{
"epoch": 2.5704918032786885,
"grad_norm": 13.0151948928833,
"learning_rate": 5.365564588795058e-07,
"loss": 1.4043,
"step": 784
},
{
"epoch": 2.5737704918032787,
"grad_norm": 13.3595609664917,
"learning_rate": 5.286034482827184e-07,
"loss": 1.4062,
"step": 785
},
{
"epoch": 2.577049180327869,
"grad_norm": 13.040207862854004,
"learning_rate": 5.207065289515406e-07,
"loss": 1.4434,
"step": 786
},
{
"epoch": 2.580327868852459,
"grad_norm": 13.416333198547363,
"learning_rate": 5.128657999485787e-07,
"loss": 1.3262,
"step": 787
},
{
"epoch": 2.583606557377049,
"grad_norm": 13.94367790222168,
"learning_rate": 5.050813596315651e-07,
"loss": 1.4277,
"step": 788
},
{
"epoch": 2.586885245901639,
"grad_norm": 13.634977340698242,
"learning_rate": 4.973533056521151e-07,
"loss": 1.3418,
"step": 789
},
{
"epoch": 2.5901639344262293,
"grad_norm": 13.323789596557617,
"learning_rate": 4.89681734954513e-07,
"loss": 1.3848,
"step": 790
},
{
"epoch": 2.5934426229508194,
"grad_norm": 14.824501991271973,
"learning_rate": 4.820667437744863e-07,
"loss": 1.3906,
"step": 791
},
{
"epoch": 2.59672131147541,
"grad_norm": 14.00196361541748,
"learning_rate": 4.7450842763800633e-07,
"loss": 1.3984,
"step": 792
},
{
"epoch": 2.6,
"grad_norm": 13.563268661499023,
"learning_rate": 4.670068813600853e-07,
"loss": 1.3848,
"step": 793
},
{
"epoch": 2.60327868852459,
"grad_norm": 14.38114070892334,
"learning_rate": 4.595621990435883e-07,
"loss": 1.4043,
"step": 794
},
{
"epoch": 2.6065573770491803,
"grad_norm": 13.415763854980469,
"learning_rate": 4.521744740780515e-07,
"loss": 1.4121,
"step": 795
},
{
"epoch": 2.6098360655737705,
"grad_norm": 14.09338665008545,
"learning_rate": 4.448437991385113e-07,
"loss": 1.4141,
"step": 796
},
{
"epoch": 2.6131147540983606,
"grad_norm": 14.792821884155273,
"learning_rate": 4.3757026618434564e-07,
"loss": 1.3711,
"step": 797
},
{
"epoch": 2.6163934426229507,
"grad_norm": 13.891531944274902,
"learning_rate": 4.303539664581119e-07,
"loss": 1.4023,
"step": 798
},
{
"epoch": 2.619672131147541,
"grad_norm": 15.367050170898438,
"learning_rate": 4.231949904844135e-07,
"loss": 1.3848,
"step": 799
},
{
"epoch": 2.6229508196721314,
"grad_norm": 14.552631378173828,
"learning_rate": 4.160934280687523e-07,
"loss": 1.4238,
"step": 800
},
{
"epoch": 2.6262295081967215,
"grad_norm": 14.381885528564453,
"learning_rate": 4.0904936829641194e-07,
"loss": 1.4648,
"step": 801
},
{
"epoch": 2.6295081967213116,
"grad_norm": 14.29288387298584,
"learning_rate": 4.0206289953133357e-07,
"loss": 1.3574,
"step": 802
},
{
"epoch": 2.6327868852459018,
"grad_norm": 14.015763282775879,
"learning_rate": 3.9513410941501353e-07,
"loss": 1.4043,
"step": 803
},
{
"epoch": 2.636065573770492,
"grad_norm": 13.356470108032227,
"learning_rate": 3.8826308486539675e-07,
"loss": 1.4062,
"step": 804
},
{
"epoch": 2.639344262295082,
"grad_norm": 14.03546142578125,
"learning_rate": 3.814499120757903e-07,
"loss": 1.4141,
"step": 805
},
{
"epoch": 2.642622950819672,
"grad_norm": 13.209156036376953,
"learning_rate": 3.746946765137877e-07,
"loss": 1.3125,
"step": 806
},
{
"epoch": 2.6459016393442623,
"grad_norm": 13.753551483154297,
"learning_rate": 3.6799746292018326e-07,
"loss": 1.3789,
"step": 807
},
{
"epoch": 2.6491803278688524,
"grad_norm": 14.690096855163574,
"learning_rate": 3.6135835530792365e-07,
"loss": 1.3906,
"step": 808
},
{
"epoch": 2.6524590163934425,
"grad_norm": 14.63331413269043,
"learning_rate": 3.5477743696104186e-07,
"loss": 1.3516,
"step": 809
},
{
"epoch": 2.6557377049180326,
"grad_norm": 15.335869789123535,
"learning_rate": 3.4825479043362197e-07,
"loss": 1.3691,
"step": 810
},
{
"epoch": 2.6590163934426227,
"grad_norm": 13.936873435974121,
"learning_rate": 3.417904975487557e-07,
"loss": 1.4688,
"step": 811
},
{
"epoch": 2.662295081967213,
"grad_norm": 13.005012512207031,
"learning_rate": 3.3538463939752243e-07,
"loss": 1.375,
"step": 812
},
{
"epoch": 2.6655737704918034,
"grad_norm": 13.84536361694336,
"learning_rate": 3.290372963379701e-07,
"loss": 1.3672,
"step": 813
},
{
"epoch": 2.6688524590163936,
"grad_norm": 13.590289115905762,
"learning_rate": 3.2274854799410224e-07,
"loss": 1.418,
"step": 814
},
{
"epoch": 2.6721311475409837,
"grad_norm": 14.040593147277832,
"learning_rate": 3.1651847325488703e-07,
"loss": 1.3789,
"step": 815
},
{
"epoch": 2.675409836065574,
"grad_norm": 13.69813060760498,
"learning_rate": 3.1034715027326136e-07,
"loss": 1.3457,
"step": 816
},
{
"epoch": 2.678688524590164,
"grad_norm": 13.852890014648438,
"learning_rate": 3.0423465646515547e-07,
"loss": 1.4043,
"step": 817
},
{
"epoch": 2.681967213114754,
"grad_norm": 13.40916633605957,
"learning_rate": 2.981810685085157e-07,
"loss": 1.4219,
"step": 818
},
{
"epoch": 2.685245901639344,
"grad_norm": 14.204110145568848,
"learning_rate": 2.921864623423487e-07,
"loss": 1.3906,
"step": 819
},
{
"epoch": 2.6885245901639343,
"grad_norm": 14.494122505187988,
"learning_rate": 2.8625091316576704e-07,
"loss": 1.3535,
"step": 820
},
{
"epoch": 2.6918032786885244,
"grad_norm": 14.431355476379395,
"learning_rate": 2.8037449543704244e-07,
"loss": 1.3984,
"step": 821
},
{
"epoch": 2.695081967213115,
"grad_norm": 14.423640251159668,
"learning_rate": 2.745572828726767e-07,
"loss": 1.4395,
"step": 822
},
{
"epoch": 2.698360655737705,
"grad_norm": 13.368934631347656,
"learning_rate": 2.687993484464718e-07,
"loss": 1.3359,
"step": 823
},
{
"epoch": 2.7016393442622952,
"grad_norm": 13.653352737426758,
"learning_rate": 2.631007643886213e-07,
"loss": 1.3613,
"step": 824
},
{
"epoch": 2.7049180327868854,
"grad_norm": 13.941828727722168,
"learning_rate": 2.574616021847981e-07,
"loss": 1.3418,
"step": 825
},
{
"epoch": 2.7081967213114755,
"grad_norm": 14.337931632995605,
"learning_rate": 2.5188193257525864e-07,
"loss": 1.4043,
"step": 826
},
{
"epoch": 2.7114754098360656,
"grad_norm": 14.350947380065918,
"learning_rate": 2.463618255539596e-07,
"loss": 1.3418,
"step": 827
},
{
"epoch": 2.7147540983606557,
"grad_norm": 14.535887718200684,
"learning_rate": 2.4090135036767436e-07,
"loss": 1.3848,
"step": 828
},
{
"epoch": 2.718032786885246,
"grad_norm": 15.234739303588867,
"learning_rate": 2.355005755151296e-07,
"loss": 1.4238,
"step": 829
},
{
"epoch": 2.721311475409836,
"grad_norm": 14.099320411682129,
"learning_rate": 2.3015956874613998e-07,
"loss": 1.3477,
"step": 830
},
{
"epoch": 2.724590163934426,
"grad_norm": 12.997334480285645,
"learning_rate": 2.2487839706076598e-07,
"loss": 1.3125,
"step": 831
},
{
"epoch": 2.7278688524590162,
"grad_norm": 14.754070281982422,
"learning_rate": 2.1965712670846519e-07,
"loss": 1.4648,
"step": 832
},
{
"epoch": 2.7311475409836063,
"grad_norm": 14.203169822692871,
"learning_rate": 2.1449582318726846e-07,
"loss": 1.4395,
"step": 833
},
{
"epoch": 2.7344262295081965,
"grad_norm": 14.142770767211914,
"learning_rate": 2.0939455124295238e-07,
"loss": 1.4141,
"step": 834
},
{
"epoch": 2.737704918032787,
"grad_norm": 13.850031852722168,
"learning_rate": 2.0435337486823025e-07,
"loss": 1.3867,
"step": 835
},
{
"epoch": 2.740983606557377,
"grad_norm": 14.479482650756836,
"learning_rate": 1.9937235730195014e-07,
"loss": 1.3965,
"step": 836
},
{
"epoch": 2.7442622950819673,
"grad_norm": 14.6297607421875,
"learning_rate": 1.9445156102829765e-07,
"loss": 1.4883,
"step": 837
},
{
"epoch": 2.7475409836065574,
"grad_norm": 14.299484252929688,
"learning_rate": 1.8959104777601877e-07,
"loss": 1.4375,
"step": 838
},
{
"epoch": 2.7508196721311475,
"grad_norm": 14.430228233337402,
"learning_rate": 1.8479087851763722e-07,
"loss": 1.3711,
"step": 839
},
{
"epoch": 2.7540983606557377,
"grad_norm": 13.631085395812988,
"learning_rate": 1.800511134686961e-07,
"loss": 1.4141,
"step": 840
},
{
"epoch": 2.7573770491803278,
"grad_norm": 13.788529396057129,
"learning_rate": 1.753718120869996e-07,
"loss": 1.3965,
"step": 841
},
{
"epoch": 2.760655737704918,
"grad_norm": 13.72615909576416,
"learning_rate": 1.7075303307186652e-07,
"loss": 1.3301,
"step": 842
},
{
"epoch": 2.7639344262295085,
"grad_norm": 13.844966888427734,
"learning_rate": 1.6619483436339735e-07,
"loss": 1.3867,
"step": 843
},
{
"epoch": 2.7672131147540986,
"grad_norm": 14.308938980102539,
"learning_rate": 1.6169727314174377e-07,
"loss": 1.3906,
"step": 844
},
{
"epoch": 2.7704918032786887,
"grad_norm": 15.179529190063477,
"learning_rate": 1.5726040582639325e-07,
"loss": 1.4492,
"step": 845
},
{
"epoch": 2.773770491803279,
"grad_norm": 16.270008087158203,
"learning_rate": 1.5288428807545996e-07,
"loss": 1.4023,
"step": 846
},
{
"epoch": 2.777049180327869,
"grad_norm": 15.23530101776123,
"learning_rate": 1.4856897478498945e-07,
"loss": 1.4023,
"step": 847
},
{
"epoch": 2.780327868852459,
"grad_norm": 16.05985450744629,
"learning_rate": 1.4431452008826564e-07,
"loss": 1.4023,
"step": 848
},
{
"epoch": 2.783606557377049,
"grad_norm": 15.568984031677246,
"learning_rate": 1.4012097735513596e-07,
"loss": 1.4102,
"step": 849
},
{
"epoch": 2.7868852459016393,
"grad_norm": 14.330516815185547,
"learning_rate": 1.359883991913391e-07,
"loss": 1.4023,
"step": 850
},
{
"epoch": 2.7901639344262295,
"grad_norm": 14.142221450805664,
"learning_rate": 1.3191683743784546e-07,
"loss": 1.3477,
"step": 851
},
{
"epoch": 2.7934426229508196,
"grad_norm": 14.495235443115234,
"learning_rate": 1.279063431702088e-07,
"loss": 1.4258,
"step": 852
},
{
"epoch": 2.7967213114754097,
"grad_norm": 14.223053932189941,
"learning_rate": 1.239569666979229e-07,
"loss": 1.418,
"step": 853
},
{
"epoch": 2.8,
"grad_norm": 15.072879791259766,
"learning_rate": 1.200687575637932e-07,
"loss": 1.3398,
"step": 854
},
{
"epoch": 2.80327868852459,
"grad_norm": 13.775506019592285,
"learning_rate": 1.1624176454331116e-07,
"loss": 1.4199,
"step": 855
},
{
"epoch": 2.80655737704918,
"grad_norm": 14.002941131591797,
"learning_rate": 1.1247603564404796e-07,
"loss": 1.4062,
"step": 856
},
{
"epoch": 2.8098360655737706,
"grad_norm": 15.763291358947754,
"learning_rate": 1.0877161810504744e-07,
"loss": 1.4316,
"step": 857
},
{
"epoch": 2.8131147540983608,
"grad_norm": 14.995899200439453,
"learning_rate": 1.0512855839623526e-07,
"loss": 1.4629,
"step": 858
},
{
"epoch": 2.816393442622951,
"grad_norm": 15.499988555908203,
"learning_rate": 1.0154690221783781e-07,
"loss": 1.4746,
"step": 859
},
{
"epoch": 2.819672131147541,
"grad_norm": 15.275351524353027,
"learning_rate": 9.802669449980484e-08,
"loss": 1.3457,
"step": 860
},
{
"epoch": 2.822950819672131,
"grad_norm": 14.729512214660645,
"learning_rate": 9.456797940125051e-08,
"loss": 1.4238,
"step": 861
},
{
"epoch": 2.8262295081967213,
"grad_norm": 14.843419075012207,
"learning_rate": 9.117080030989545e-08,
"loss": 1.4102,
"step": 862
},
{
"epoch": 2.8295081967213114,
"grad_norm": 13.580461502075195,
"learning_rate": 8.783519984152555e-08,
"loss": 1.377,
"step": 863
},
{
"epoch": 2.8327868852459015,
"grad_norm": 14.83128833770752,
"learning_rate": 8.456121983945465e-08,
"loss": 1.3203,
"step": 864
},
{
"epoch": 2.836065573770492,
"grad_norm": 14.989274024963379,
"learning_rate": 8.134890137400153e-08,
"loss": 1.4316,
"step": 865
},
{
"epoch": 2.839344262295082,
"grad_norm": 14.32812213897705,
"learning_rate": 7.819828474197433e-08,
"loss": 1.4141,
"step": 866
},
{
"epoch": 2.8426229508196723,
"grad_norm": 14.380533218383789,
"learning_rate": 7.510940946616474e-08,
"loss": 1.3828,
"step": 867
},
{
"epoch": 2.8459016393442624,
"grad_norm": 16.36128044128418,
"learning_rate": 7.208231429485235e-08,
"loss": 1.4023,
"step": 868
},
{
"epoch": 2.8491803278688526,
"grad_norm": 14.605631828308105,
"learning_rate": 6.91170372013178e-08,
"loss": 1.3984,
"step": 869
},
{
"epoch": 2.8524590163934427,
"grad_norm": 14.52630615234375,
"learning_rate": 6.621361538336924e-08,
"loss": 1.3906,
"step": 870
},
{
"epoch": 2.855737704918033,
"grad_norm": 13.681370735168457,
"learning_rate": 6.337208526287109e-08,
"loss": 1.3418,
"step": 871
},
{
"epoch": 2.859016393442623,
"grad_norm": 14.512091636657715,
"learning_rate": 6.05924824852916e-08,
"loss": 1.3359,
"step": 872
},
{
"epoch": 2.862295081967213,
"grad_norm": 14.500898361206055,
"learning_rate": 5.7874841919253186e-08,
"loss": 1.4277,
"step": 873
},
{
"epoch": 2.865573770491803,
"grad_norm": 13.123601913452148,
"learning_rate": 5.521919765609507e-08,
"loss": 1.375,
"step": 874
},
{
"epoch": 2.8688524590163933,
"grad_norm": 13.465143203735352,
"learning_rate": 5.262558300944631e-08,
"loss": 1.3809,
"step": 875
},
{
"epoch": 2.8721311475409834,
"grad_norm": 14.465463638305664,
"learning_rate": 5.009403051480899e-08,
"loss": 1.4258,
"step": 876
},
{
"epoch": 2.8754098360655735,
"grad_norm": 14.458550453186035,
"learning_rate": 4.762457192914849e-08,
"loss": 1.3789,
"step": 877
},
{
"epoch": 2.8786885245901637,
"grad_norm": 13.62592887878418,
"learning_rate": 4.5217238230493286e-08,
"loss": 1.416,
"step": 878
},
{
"epoch": 2.8819672131147542,
"grad_norm": 15.585021018981934,
"learning_rate": 4.287205961755192e-08,
"loss": 1.375,
"step": 879
},
{
"epoch": 2.8852459016393444,
"grad_norm": 14.603592872619629,
"learning_rate": 4.058906550932829e-08,
"loss": 1.4316,
"step": 880
},
{
"epoch": 2.8885245901639345,
"grad_norm": 14.276565551757812,
"learning_rate": 3.836828454475583e-08,
"loss": 1.4043,
"step": 881
},
{
"epoch": 2.8918032786885246,
"grad_norm": 15.078627586364746,
"learning_rate": 3.620974458233839e-08,
"loss": 1.3867,
"step": 882
},
{
"epoch": 2.8950819672131147,
"grad_norm": 14.924882888793945,
"learning_rate": 3.411347269979881e-08,
"loss": 1.4238,
"step": 883
},
{
"epoch": 2.898360655737705,
"grad_norm": 14.461564064025879,
"learning_rate": 3.2079495193740874e-08,
"loss": 1.4375,
"step": 884
},
{
"epoch": 2.901639344262295,
"grad_norm": 13.775385856628418,
"learning_rate": 3.0107837579318455e-08,
"loss": 1.3789,
"step": 885
},
{
"epoch": 2.904918032786885,
"grad_norm": 13.404571533203125,
"learning_rate": 2.8198524589916897e-08,
"loss": 1.3867,
"step": 886
},
{
"epoch": 2.9081967213114757,
"grad_norm": 14.355637550354004,
"learning_rate": 2.6351580176840473e-08,
"loss": 1.4238,
"step": 887
},
{
"epoch": 2.911475409836066,
"grad_norm": 14.208024024963379,
"learning_rate": 2.4567027509013742e-08,
"loss": 1.3945,
"step": 888
},
{
"epoch": 2.914754098360656,
"grad_norm": 13.640280723571777,
"learning_rate": 2.2844888972691216e-08,
"loss": 1.4102,
"step": 889
},
{
"epoch": 2.918032786885246,
"grad_norm": 13.365429878234863,
"learning_rate": 2.1185186171174265e-08,
"loss": 1.3398,
"step": 890
},
{
"epoch": 2.921311475409836,
"grad_norm": 14.118301391601562,
"learning_rate": 1.9587939924542444e-08,
"loss": 1.3223,
"step": 891
},
{
"epoch": 2.9245901639344263,
"grad_norm": 14.460697174072266,
"learning_rate": 1.8053170269390353e-08,
"loss": 1.3828,
"step": 892
},
{
"epoch": 2.9278688524590164,
"grad_norm": 13.865715980529785,
"learning_rate": 1.658089645857952e-08,
"loss": 1.3984,
"step": 893
},
{
"epoch": 2.9311475409836065,
"grad_norm": 13.84589958190918,
"learning_rate": 1.5171136960991927e-08,
"loss": 1.4629,
"step": 894
},
{
"epoch": 2.9344262295081966,
"grad_norm": 14.238113403320312,
"learning_rate": 1.3823909461302965e-08,
"loss": 1.4492,
"step": 895
},
{
"epoch": 2.9377049180327868,
"grad_norm": 13.380562782287598,
"learning_rate": 1.2539230859757723e-08,
"loss": 1.3828,
"step": 896
},
{
"epoch": 2.940983606557377,
"grad_norm": 14.529212951660156,
"learning_rate": 1.1317117271957833e-08,
"loss": 1.418,
"step": 897
},
{
"epoch": 2.944262295081967,
"grad_norm": 14.324894905090332,
"learning_rate": 1.0157584028662183e-08,
"loss": 1.4336,
"step": 898
},
{
"epoch": 2.947540983606557,
"grad_norm": 12.702259063720703,
"learning_rate": 9.060645675590395e-09,
"loss": 1.3652,
"step": 899
},
{
"epoch": 2.9508196721311473,
"grad_norm": 14.72761344909668,
"learning_rate": 8.026315973245768e-09,
"loss": 1.373,
"step": 900
},
{
"epoch": 2.954098360655738,
"grad_norm": 13.49645709991455,
"learning_rate": 7.0546078967370645e-09,
"loss": 1.377,
"step": 901
},
{
"epoch": 2.957377049180328,
"grad_norm": 13.39814281463623,
"learning_rate": 6.145533635620316e-09,
"loss": 1.4395,
"step": 902
},
{
"epoch": 2.960655737704918,
"grad_norm": 14.366438865661621,
"learning_rate": 5.299104593743387e-09,
"loss": 1.3301,
"step": 903
},
{
"epoch": 2.963934426229508,
"grad_norm": 13.086654663085938,
"learning_rate": 4.5153313891027615e-09,
"loss": 1.3594,
"step": 904
},
{
"epoch": 2.9672131147540983,
"grad_norm": 14.331830978393555,
"learning_rate": 3.794223853713086e-09,
"loss": 1.3848,
"step": 905
},
{
"epoch": 2.9704918032786884,
"grad_norm": 13.504541397094727,
"learning_rate": 3.1357910334800512e-09,
"loss": 1.3652,
"step": 906
},
{
"epoch": 2.9737704918032786,
"grad_norm": 13.3035249710083,
"learning_rate": 2.5400411880893703e-09,
"loss": 1.4297,
"step": 907
},
{
"epoch": 2.9770491803278687,
"grad_norm": 14.15910816192627,
"learning_rate": 2.006981790902418e-09,
"loss": 1.416,
"step": 908
},
{
"epoch": 2.9803278688524593,
"grad_norm": 13.784579277038574,
"learning_rate": 1.5366195288629703e-09,
"loss": 1.3301,
"step": 909
},
{
"epoch": 2.9836065573770494,
"grad_norm": 13.555578231811523,
"learning_rate": 1.1289603024122742e-09,
"loss": 1.332,
"step": 910
},
{
"epoch": 2.9868852459016395,
"grad_norm": 14.873602867126465,
"learning_rate": 7.840092254163267e-10,
"loss": 1.4062,
"step": 911
},
{
"epoch": 2.9901639344262296,
"grad_norm": 18.19393539428711,
"learning_rate": 5.017706251014831e-10,
"loss": 1.4395,
"step": 912
},
{
"epoch": 2.9934426229508198,
"grad_norm": 13.874499320983887,
"learning_rate": 2.8224804199894486e-10,
"loss": 1.3477,
"step": 913
},
{
"epoch": 2.99672131147541,
"grad_norm": 13.585966110229492,
"learning_rate": 1.254442299009062e-10,
"loss": 1.3262,
"step": 914
},
{
"epoch": 3.0,
"grad_norm": 13.874456405639648,
"learning_rate": 3.136115582724753e-11,
"loss": 1.3672,
"step": 915
},
{
"epoch": 3.0,
"step": 915,
"total_flos": 1.0734914748022784e+17,
"train_loss": 1.7213306864754099,
"train_runtime": 1313.019,
"train_samples_per_second": 1427.111,
"train_steps_per_second": 0.697
}
],
"logging_steps": 1,
"max_steps": 915,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.0734914748022784e+17,
"train_batch_size": 512,
"trial_name": null,
"trial_params": null
}