{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.6557377049180327,
"eval_steps": 500,
"global_step": 200,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.003278688524590164,
"grad_norm": 58.999969482421875,
"learning_rate": 0.0,
"loss": 3.8125,
"step": 1
},
{
"epoch": 0.006557377049180328,
"grad_norm": 56.5999870300293,
"learning_rate": 3.5714285714285716e-07,
"loss": 3.8984,
"step": 2
},
{
"epoch": 0.009836065573770493,
"grad_norm": 58.442317962646484,
"learning_rate": 7.142857142857143e-07,
"loss": 3.8359,
"step": 3
},
{
"epoch": 0.013114754098360656,
"grad_norm": 59.13441467285156,
"learning_rate": 1.0714285714285714e-06,
"loss": 3.7422,
"step": 4
},
{
"epoch": 0.01639344262295082,
"grad_norm": 59.55280685424805,
"learning_rate": 1.4285714285714286e-06,
"loss": 3.7383,
"step": 5
},
{
"epoch": 0.019672131147540985,
"grad_norm": 57.113956451416016,
"learning_rate": 1.7857142857142859e-06,
"loss": 3.8281,
"step": 6
},
{
"epoch": 0.022950819672131147,
"grad_norm": 44.69753646850586,
"learning_rate": 2.1428571428571427e-06,
"loss": 3.6562,
"step": 7
},
{
"epoch": 0.02622950819672131,
"grad_norm": 42.74599075317383,
"learning_rate": 2.5e-06,
"loss": 3.668,
"step": 8
},
{
"epoch": 0.029508196721311476,
"grad_norm": 27.557815551757812,
"learning_rate": 2.8571428571428573e-06,
"loss": 3.3438,
"step": 9
},
{
"epoch": 0.03278688524590164,
"grad_norm": 25.87570571899414,
"learning_rate": 3.2142857142857147e-06,
"loss": 3.3633,
"step": 10
},
{
"epoch": 0.036065573770491806,
"grad_norm": 25.204158782958984,
"learning_rate": 3.5714285714285718e-06,
"loss": 3.2852,
"step": 11
},
{
"epoch": 0.03934426229508197,
"grad_norm": 25.082277297973633,
"learning_rate": 3.928571428571429e-06,
"loss": 3.2188,
"step": 12
},
{
"epoch": 0.04262295081967213,
"grad_norm": 21.52399253845215,
"learning_rate": 4.2857142857142855e-06,
"loss": 3.1016,
"step": 13
},
{
"epoch": 0.04590163934426229,
"grad_norm": 21.302213668823242,
"learning_rate": 4.642857142857144e-06,
"loss": 3.0938,
"step": 14
},
{
"epoch": 0.04918032786885246,
"grad_norm": 17.48920249938965,
"learning_rate": 5e-06,
"loss": 3.0703,
"step": 15
},
{
"epoch": 0.05245901639344262,
"grad_norm": 14.723689079284668,
"learning_rate": 5.357142857142857e-06,
"loss": 2.7812,
"step": 16
},
{
"epoch": 0.05573770491803279,
"grad_norm": 13.425204277038574,
"learning_rate": 5.7142857142857145e-06,
"loss": 2.8398,
"step": 17
},
{
"epoch": 0.05901639344262295,
"grad_norm": 12.121846199035645,
"learning_rate": 6.071428571428571e-06,
"loss": 2.7539,
"step": 18
},
{
"epoch": 0.06229508196721312,
"grad_norm": 12.97847843170166,
"learning_rate": 6.4285714285714295e-06,
"loss": 2.6719,
"step": 19
},
{
"epoch": 0.06557377049180328,
"grad_norm": 12.588919639587402,
"learning_rate": 6.785714285714287e-06,
"loss": 2.6758,
"step": 20
},
{
"epoch": 0.06885245901639345,
"grad_norm": 10.492659568786621,
"learning_rate": 7.1428571428571436e-06,
"loss": 2.6562,
"step": 21
},
{
"epoch": 0.07213114754098361,
"grad_norm": 9.676924705505371,
"learning_rate": 7.500000000000001e-06,
"loss": 2.6094,
"step": 22
},
{
"epoch": 0.07540983606557378,
"grad_norm": 10.222829818725586,
"learning_rate": 7.857142857142858e-06,
"loss": 2.6211,
"step": 23
},
{
"epoch": 0.07868852459016394,
"grad_norm": 10.020594596862793,
"learning_rate": 8.214285714285714e-06,
"loss": 2.5273,
"step": 24
},
{
"epoch": 0.08196721311475409,
"grad_norm": 9.404995918273926,
"learning_rate": 8.571428571428571e-06,
"loss": 2.5352,
"step": 25
},
{
"epoch": 0.08524590163934426,
"grad_norm": 10.194731712341309,
"learning_rate": 8.92857142857143e-06,
"loss": 2.543,
"step": 26
},
{
"epoch": 0.08852459016393442,
"grad_norm": 9.045639991760254,
"learning_rate": 9.285714285714288e-06,
"loss": 2.4961,
"step": 27
},
{
"epoch": 0.09180327868852459,
"grad_norm": 9.108716011047363,
"learning_rate": 9.642857142857144e-06,
"loss": 2.4336,
"step": 28
},
{
"epoch": 0.09508196721311475,
"grad_norm": 9.336297988891602,
"learning_rate": 1e-05,
"loss": 2.457,
"step": 29
},
{
"epoch": 0.09836065573770492,
"grad_norm": 8.170738220214844,
"learning_rate": 9.999968638844173e-06,
"loss": 2.4688,
"step": 30
},
{
"epoch": 0.10163934426229508,
"grad_norm": 8.507989883422852,
"learning_rate": 9.999874555770099e-06,
"loss": 2.4062,
"step": 31
},
{
"epoch": 0.10491803278688525,
"grad_norm": 8.38835334777832,
"learning_rate": 9.999717751958002e-06,
"loss": 2.3711,
"step": 32
},
{
"epoch": 0.10819672131147541,
"grad_norm": 8.36460018157959,
"learning_rate": 9.9994982293749e-06,
"loss": 2.4805,
"step": 33
},
{
"epoch": 0.11147540983606558,
"grad_norm": 8.547989845275879,
"learning_rate": 9.999215990774584e-06,
"loss": 2.3906,
"step": 34
},
{
"epoch": 0.11475409836065574,
"grad_norm": 8.938812255859375,
"learning_rate": 9.998871039697589e-06,
"loss": 2.3906,
"step": 35
},
{
"epoch": 0.1180327868852459,
"grad_norm": 8.230469703674316,
"learning_rate": 9.998463380471138e-06,
"loss": 2.3164,
"step": 36
},
{
"epoch": 0.12131147540983607,
"grad_norm": 7.883755683898926,
"learning_rate": 9.997993018209098e-06,
"loss": 2.3633,
"step": 37
},
{
"epoch": 0.12459016393442623,
"grad_norm": 8.220748901367188,
"learning_rate": 9.997459958811911e-06,
"loss": 2.3516,
"step": 38
},
{
"epoch": 0.12786885245901639,
"grad_norm": 8.738327980041504,
"learning_rate": 9.996864208966522e-06,
"loss": 2.3398,
"step": 39
},
{
"epoch": 0.13114754098360656,
"grad_norm": 7.944454669952393,
"learning_rate": 9.996205776146288e-06,
"loss": 2.3867,
"step": 40
},
{
"epoch": 0.13442622950819672,
"grad_norm": 7.631208419799805,
"learning_rate": 9.995484668610897e-06,
"loss": 2.3242,
"step": 41
},
{
"epoch": 0.1377049180327869,
"grad_norm": 9.301526069641113,
"learning_rate": 9.994700895406258e-06,
"loss": 2.3203,
"step": 42
},
{
"epoch": 0.14098360655737704,
"grad_norm": 7.625834941864014,
"learning_rate": 9.99385446636438e-06,
"loss": 2.3398,
"step": 43
},
{
"epoch": 0.14426229508196722,
"grad_norm": 7.908705234527588,
"learning_rate": 9.992945392103264e-06,
"loss": 2.3477,
"step": 44
},
{
"epoch": 0.14754098360655737,
"grad_norm": 8.002193450927734,
"learning_rate": 9.991973684026755e-06,
"loss": 2.2852,
"step": 45
},
{
"epoch": 0.15081967213114755,
"grad_norm": 8.058524131774902,
"learning_rate": 9.99093935432441e-06,
"loss": 2.3711,
"step": 46
},
{
"epoch": 0.1540983606557377,
"grad_norm": 8.803773880004883,
"learning_rate": 9.989842415971338e-06,
"loss": 2.2344,
"step": 47
},
{
"epoch": 0.15737704918032788,
"grad_norm": 7.851852893829346,
"learning_rate": 9.988682882728043e-06,
"loss": 2.3047,
"step": 48
},
{
"epoch": 0.16065573770491803,
"grad_norm": 8.29660415649414,
"learning_rate": 9.987460769140242e-06,
"loss": 2.3047,
"step": 49
},
{
"epoch": 0.16393442622950818,
"grad_norm": 7.779457092285156,
"learning_rate": 9.986176090538697e-06,
"loss": 2.1992,
"step": 50
},
{
"epoch": 0.16721311475409836,
"grad_norm": 8.602660179138184,
"learning_rate": 9.984828863039008e-06,
"loss": 2.25,
"step": 51
},
{
"epoch": 0.17049180327868851,
"grad_norm": 8.079516410827637,
"learning_rate": 9.983419103541421e-06,
"loss": 2.3203,
"step": 52
},
{
"epoch": 0.1737704918032787,
"grad_norm": 8.443635940551758,
"learning_rate": 9.981946829730611e-06,
"loss": 2.3242,
"step": 53
},
{
"epoch": 0.17704918032786884,
"grad_norm": 7.873276710510254,
"learning_rate": 9.980412060075459e-06,
"loss": 2.2656,
"step": 54
},
{
"epoch": 0.18032786885245902,
"grad_norm": 8.633429527282715,
"learning_rate": 9.978814813828827e-06,
"loss": 2.2227,
"step": 55
},
{
"epoch": 0.18360655737704917,
"grad_norm": 9.135279655456543,
"learning_rate": 9.97715511102731e-06,
"loss": 2.2148,
"step": 56
},
{
"epoch": 0.18688524590163935,
"grad_norm": 7.584745407104492,
"learning_rate": 9.975432972490985e-06,
"loss": 2.2773,
"step": 57
},
{
"epoch": 0.1901639344262295,
"grad_norm": 9.086216926574707,
"learning_rate": 9.973648419823161e-06,
"loss": 2.2656,
"step": 58
},
{
"epoch": 0.19344262295081968,
"grad_norm": 8.223559379577637,
"learning_rate": 9.971801475410084e-06,
"loss": 2.2773,
"step": 59
},
{
"epoch": 0.19672131147540983,
"grad_norm": 8.726387023925781,
"learning_rate": 9.969892162420682e-06,
"loss": 2.3125,
"step": 60
},
{
"epoch": 0.2,
"grad_norm": 8.304938316345215,
"learning_rate": 9.96792050480626e-06,
"loss": 2.168,
"step": 61
},
{
"epoch": 0.20327868852459016,
"grad_norm": 8.20266342163086,
"learning_rate": 9.965886527300201e-06,
"loss": 2.25,
"step": 62
},
{
"epoch": 0.20655737704918034,
"grad_norm": 7.833676815032959,
"learning_rate": 9.963790255417663e-06,
"loss": 2.2422,
"step": 63
},
{
"epoch": 0.2098360655737705,
"grad_norm": 7.891449928283691,
"learning_rate": 9.961631715455245e-06,
"loss": 2.1758,
"step": 64
},
{
"epoch": 0.21311475409836064,
"grad_norm": 8.999512672424316,
"learning_rate": 9.959410934490673e-06,
"loss": 2.1484,
"step": 65
},
{
"epoch": 0.21639344262295082,
"grad_norm": 7.718410015106201,
"learning_rate": 9.95712794038245e-06,
"loss": 2.1172,
"step": 66
},
{
"epoch": 0.21967213114754097,
"grad_norm": 8.272541999816895,
"learning_rate": 9.954782761769509e-06,
"loss": 2.25,
"step": 67
},
{
"epoch": 0.22295081967213115,
"grad_norm": 9.607975006103516,
"learning_rate": 9.952375428070853e-06,
"loss": 2.2812,
"step": 68
},
{
"epoch": 0.2262295081967213,
"grad_norm": 8.297865867614746,
"learning_rate": 9.949905969485192e-06,
"loss": 2.1406,
"step": 69
},
{
"epoch": 0.22950819672131148,
"grad_norm": 8.192915916442871,
"learning_rate": 9.947374416990554e-06,
"loss": 2.2109,
"step": 70
},
{
"epoch": 0.23278688524590163,
"grad_norm": 8.489333152770996,
"learning_rate": 9.944780802343906e-06,
"loss": 2.1914,
"step": 71
},
{
"epoch": 0.2360655737704918,
"grad_norm": 8.244175910949707,
"learning_rate": 9.942125158080747e-06,
"loss": 2.1797,
"step": 72
},
{
"epoch": 0.23934426229508196,
"grad_norm": 8.009477615356445,
"learning_rate": 9.939407517514709e-06,
"loss": 2.2578,
"step": 73
},
{
"epoch": 0.24262295081967214,
"grad_norm": 8.284704208374023,
"learning_rate": 9.936627914737129e-06,
"loss": 2.2305,
"step": 74
},
{
"epoch": 0.2459016393442623,
"grad_norm": 9.479103088378906,
"learning_rate": 9.933786384616631e-06,
"loss": 2.1406,
"step": 75
},
{
"epoch": 0.24918032786885247,
"grad_norm": 8.558723449707031,
"learning_rate": 9.930882962798683e-06,
"loss": 2.2812,
"step": 76
},
{
"epoch": 0.25245901639344265,
"grad_norm": 8.608718872070312,
"learning_rate": 9.927917685705148e-06,
"loss": 2.2461,
"step": 77
},
{
"epoch": 0.25573770491803277,
"grad_norm": 7.863804817199707,
"learning_rate": 9.924890590533837e-06,
"loss": 2.1914,
"step": 78
},
{
"epoch": 0.25901639344262295,
"grad_norm": 8.842366218566895,
"learning_rate": 9.921801715258027e-06,
"loss": 2.0996,
"step": 79
},
{
"epoch": 0.26229508196721313,
"grad_norm": 8.273311614990234,
"learning_rate": 9.918651098626e-06,
"loss": 2.1641,
"step": 80
},
{
"epoch": 0.26557377049180325,
"grad_norm": 8.011392593383789,
"learning_rate": 9.915438780160547e-06,
"loss": 2.168,
"step": 81
},
{
"epoch": 0.26885245901639343,
"grad_norm": 7.946706771850586,
"learning_rate": 9.912164800158474e-06,
"loss": 2.2227,
"step": 82
},
{
"epoch": 0.2721311475409836,
"grad_norm": 9.032291412353516,
"learning_rate": 9.908829199690106e-06,
"loss": 2.2891,
"step": 83
},
{
"epoch": 0.2754098360655738,
"grad_norm": 8.33333969116211,
"learning_rate": 9.905432020598751e-06,
"loss": 2.2344,
"step": 84
},
{
"epoch": 0.2786885245901639,
"grad_norm": 7.967901229858398,
"learning_rate": 9.901973305500197e-06,
"loss": 2.1172,
"step": 85
},
{
"epoch": 0.2819672131147541,
"grad_norm": 8.24918270111084,
"learning_rate": 9.898453097782164e-06,
"loss": 2.1602,
"step": 86
},
{
"epoch": 0.28524590163934427,
"grad_norm": 8.877111434936523,
"learning_rate": 9.894871441603766e-06,
"loss": 2.2383,
"step": 87
},
{
"epoch": 0.28852459016393445,
"grad_norm": 8.32304573059082,
"learning_rate": 9.891228381894954e-06,
"loss": 2.1719,
"step": 88
},
{
"epoch": 0.29180327868852457,
"grad_norm": 9.091806411743164,
"learning_rate": 9.887523964355953e-06,
"loss": 2.168,
"step": 89
},
{
"epoch": 0.29508196721311475,
"grad_norm": 8.54736614227295,
"learning_rate": 9.88375823545669e-06,
"loss": 2.1211,
"step": 90
},
{
"epoch": 0.2983606557377049,
"grad_norm": 9.3049955368042,
"learning_rate": 9.879931242436208e-06,
"loss": 2.1719,
"step": 91
},
{
"epoch": 0.3016393442622951,
"grad_norm": 8.523479461669922,
"learning_rate": 9.876043033302079e-06,
"loss": 2.1719,
"step": 92
},
{
"epoch": 0.30491803278688523,
"grad_norm": 7.741846561431885,
"learning_rate": 9.872093656829792e-06,
"loss": 2.0391,
"step": 93
},
{
"epoch": 0.3081967213114754,
"grad_norm": 8.36119270324707,
"learning_rate": 9.868083162562155e-06,
"loss": 2.0938,
"step": 94
},
{
"epoch": 0.3114754098360656,
"grad_norm": 8.163023948669434,
"learning_rate": 9.864011600808663e-06,
"loss": 2.0938,
"step": 95
},
{
"epoch": 0.31475409836065577,
"grad_norm": 8.827863693237305,
"learning_rate": 9.859879022644865e-06,
"loss": 2.1406,
"step": 96
},
{
"epoch": 0.3180327868852459,
"grad_norm": 8.434041023254395,
"learning_rate": 9.855685479911736e-06,
"loss": 2.168,
"step": 97
},
{
"epoch": 0.32131147540983607,
"grad_norm": 8.144036293029785,
"learning_rate": 9.851431025215012e-06,
"loss": 2.0742,
"step": 98
},
{
"epoch": 0.32459016393442625,
"grad_norm": 8.843683242797852,
"learning_rate": 9.847115711924542e-06,
"loss": 2.1016,
"step": 99
},
{
"epoch": 0.32786885245901637,
"grad_norm": 8.300407409667969,
"learning_rate": 9.842739594173607e-06,
"loss": 2.1953,
"step": 100
},
{
"epoch": 0.33114754098360655,
"grad_norm": 8.825713157653809,
"learning_rate": 9.838302726858258e-06,
"loss": 2.1406,
"step": 101
},
{
"epoch": 0.3344262295081967,
"grad_norm": 8.763128280639648,
"learning_rate": 9.833805165636603e-06,
"loss": 2.1523,
"step": 102
},
{
"epoch": 0.3377049180327869,
"grad_norm": 8.53940486907959,
"learning_rate": 9.829246966928135e-06,
"loss": 2.0703,
"step": 103
},
{
"epoch": 0.34098360655737703,
"grad_norm": 8.830872535705566,
"learning_rate": 9.824628187913001e-06,
"loss": 2.1602,
"step": 104
},
{
"epoch": 0.3442622950819672,
"grad_norm": 8.062509536743164,
"learning_rate": 9.819948886531305e-06,
"loss": 2.1211,
"step": 105
},
{
"epoch": 0.3475409836065574,
"grad_norm": 8.8229398727417,
"learning_rate": 9.815209121482363e-06,
"loss": 2.1172,
"step": 106
},
{
"epoch": 0.35081967213114756,
"grad_norm": 9.78320598602295,
"learning_rate": 9.810408952223982e-06,
"loss": 2.0586,
"step": 107
},
{
"epoch": 0.3540983606557377,
"grad_norm": 8.267208099365234,
"learning_rate": 9.805548438971702e-06,
"loss": 2.1406,
"step": 108
},
{
"epoch": 0.35737704918032787,
"grad_norm": 10.9806489944458,
"learning_rate": 9.80062764269805e-06,
"loss": 2.1035,
"step": 109
},
{
"epoch": 0.36065573770491804,
"grad_norm": 9.699831008911133,
"learning_rate": 9.795646625131771e-06,
"loss": 2.1797,
"step": 110
},
{
"epoch": 0.3639344262295082,
"grad_norm": 10.373594284057617,
"learning_rate": 9.790605448757049e-06,
"loss": 2.1445,
"step": 111
},
{
"epoch": 0.36721311475409835,
"grad_norm": 8.51201343536377,
"learning_rate": 9.785504176812733e-06,
"loss": 2.1875,
"step": 112
},
{
"epoch": 0.3704918032786885,
"grad_norm": 10.090012550354004,
"learning_rate": 9.780342873291535e-06,
"loss": 2.2109,
"step": 113
},
{
"epoch": 0.3737704918032787,
"grad_norm": 8.939981460571289,
"learning_rate": 9.775121602939234e-06,
"loss": 2.0273,
"step": 114
},
{
"epoch": 0.3770491803278688,
"grad_norm": 8.556815147399902,
"learning_rate": 9.76984043125386e-06,
"loss": 2.002,
"step": 115
},
{
"epoch": 0.380327868852459,
"grad_norm": 9.271822929382324,
"learning_rate": 9.764499424484873e-06,
"loss": 2.0742,
"step": 116
},
{
"epoch": 0.3836065573770492,
"grad_norm": 9.027301788330078,
"learning_rate": 9.759098649632326e-06,
"loss": 2.1133,
"step": 117
},
{
"epoch": 0.38688524590163936,
"grad_norm": 9.81689453125,
"learning_rate": 9.753638174446042e-06,
"loss": 2.0645,
"step": 118
},
{
"epoch": 0.3901639344262295,
"grad_norm": 9.42857551574707,
"learning_rate": 9.748118067424742e-06,
"loss": 2.1172,
"step": 119
},
{
"epoch": 0.39344262295081966,
"grad_norm": 8.636916160583496,
"learning_rate": 9.742538397815203e-06,
"loss": 2.0859,
"step": 120
},
{
"epoch": 0.39672131147540984,
"grad_norm": 9.01211166381836,
"learning_rate": 9.73689923561138e-06,
"loss": 2.0605,
"step": 121
},
{
"epoch": 0.4,
"grad_norm": 8.320965766906738,
"learning_rate": 9.73120065155353e-06,
"loss": 2.0352,
"step": 122
},
{
"epoch": 0.40327868852459015,
"grad_norm": 8.969964027404785,
"learning_rate": 9.725442717127324e-06,
"loss": 2.1055,
"step": 123
},
{
"epoch": 0.4065573770491803,
"grad_norm": 7.825037002563477,
"learning_rate": 9.719625504562959e-06,
"loss": 2.1133,
"step": 124
},
{
"epoch": 0.4098360655737705,
"grad_norm": 9.594252586364746,
"learning_rate": 9.713749086834234e-06,
"loss": 2.1367,
"step": 125
},
{
"epoch": 0.4131147540983607,
"grad_norm": 9.286437034606934,
"learning_rate": 9.707813537657652e-06,
"loss": 2.0293,
"step": 126
},
{
"epoch": 0.4163934426229508,
"grad_norm": 11.219290733337402,
"learning_rate": 9.701818931491485e-06,
"loss": 2.1992,
"step": 127
},
{
"epoch": 0.419672131147541,
"grad_norm": 8.125706672668457,
"learning_rate": 9.695765343534846e-06,
"loss": 2.0273,
"step": 128
},
{
"epoch": 0.42295081967213116,
"grad_norm": 9.294842720031738,
"learning_rate": 9.689652849726739e-06,
"loss": 2.082,
"step": 129
},
{
"epoch": 0.4262295081967213,
"grad_norm": 8.297966957092285,
"learning_rate": 9.683481526745114e-06,
"loss": 2.084,
"step": 130
},
{
"epoch": 0.42950819672131146,
"grad_norm": 9.68375015258789,
"learning_rate": 9.677251452005897e-06,
"loss": 2.0996,
"step": 131
},
{
"epoch": 0.43278688524590164,
"grad_norm": 8.27182388305664,
"learning_rate": 9.670962703662032e-06,
"loss": 2.0312,
"step": 132
},
{
"epoch": 0.4360655737704918,
"grad_norm": 9.681833267211914,
"learning_rate": 9.664615360602478e-06,
"loss": 2.0547,
"step": 133
},
{
"epoch": 0.43934426229508194,
"grad_norm": 8.213553428649902,
"learning_rate": 9.658209502451245e-06,
"loss": 1.9746,
"step": 134
},
{
"epoch": 0.4426229508196721,
"grad_norm": 9.513401985168457,
"learning_rate": 9.651745209566379e-06,
"loss": 2.0625,
"step": 135
},
{
"epoch": 0.4459016393442623,
"grad_norm": 9.364225387573242,
"learning_rate": 9.645222563038959e-06,
"loss": 2.0078,
"step": 136
},
{
"epoch": 0.4491803278688525,
"grad_norm": 8.659600257873535,
"learning_rate": 9.638641644692077e-06,
"loss": 2.0469,
"step": 137
},
{
"epoch": 0.4524590163934426,
"grad_norm": 9.638226509094238,
"learning_rate": 9.632002537079816e-06,
"loss": 2.0391,
"step": 138
},
{
"epoch": 0.4557377049180328,
"grad_norm": 9.299017906188965,
"learning_rate": 9.625305323486214e-06,
"loss": 2.0508,
"step": 139
},
{
"epoch": 0.45901639344262296,
"grad_norm": 9.940561294555664,
"learning_rate": 9.61855008792421e-06,
"loss": 2.082,
"step": 140
},
{
"epoch": 0.46229508196721314,
"grad_norm": 9.618109703063965,
"learning_rate": 9.611736915134605e-06,
"loss": 1.998,
"step": 141
},
{
"epoch": 0.46557377049180326,
"grad_norm": 8.827125549316406,
"learning_rate": 9.604865890584987e-06,
"loss": 2.1055,
"step": 142
},
{
"epoch": 0.46885245901639344,
"grad_norm": 10.079780578613281,
"learning_rate": 9.597937100468668e-06,
"loss": 2.0859,
"step": 143
},
{
"epoch": 0.4721311475409836,
"grad_norm": 9.007691383361816,
"learning_rate": 9.590950631703589e-06,
"loss": 2.125,
"step": 144
},
{
"epoch": 0.47540983606557374,
"grad_norm": 9.498886108398438,
"learning_rate": 9.583906571931248e-06,
"loss": 2.1172,
"step": 145
},
{
"epoch": 0.4786885245901639,
"grad_norm": 10.918264389038086,
"learning_rate": 9.576805009515588e-06,
"loss": 2.0508,
"step": 146
},
{
"epoch": 0.4819672131147541,
"grad_norm": 8.792062759399414,
"learning_rate": 9.569646033541888e-06,
"loss": 2.1484,
"step": 147
},
{
"epoch": 0.4852459016393443,
"grad_norm": 9.490158081054688,
"learning_rate": 9.562429733815655e-06,
"loss": 2.1133,
"step": 148
},
{
"epoch": 0.4885245901639344,
"grad_norm": 9.344221115112305,
"learning_rate": 9.55515620086149e-06,
"loss": 2.0684,
"step": 149
},
{
"epoch": 0.4918032786885246,
"grad_norm": 10.885297775268555,
"learning_rate": 9.54782552592195e-06,
"loss": 2.0957,
"step": 150
},
{
"epoch": 0.49508196721311476,
"grad_norm": 8.593646049499512,
"learning_rate": 9.540437800956412e-06,
"loss": 2.0625,
"step": 151
},
{
"epoch": 0.49836065573770494,
"grad_norm": 8.966227531433105,
"learning_rate": 9.532993118639915e-06,
"loss": 2.0,
"step": 152
},
{
"epoch": 0.5016393442622951,
"grad_norm": 10.044122695922852,
"learning_rate": 9.525491572361995e-06,
"loss": 2.0176,
"step": 153
},
{
"epoch": 0.5049180327868853,
"grad_norm": 8.53621768951416,
"learning_rate": 9.517933256225516e-06,
"loss": 2.1094,
"step": 154
},
{
"epoch": 0.5081967213114754,
"grad_norm": 10.106084823608398,
"learning_rate": 9.510318265045488e-06,
"loss": 2.0195,
"step": 155
},
{
"epoch": 0.5114754098360655,
"grad_norm": 9.872212409973145,
"learning_rate": 9.502646694347886e-06,
"loss": 2.1055,
"step": 156
},
{
"epoch": 0.5147540983606558,
"grad_norm": 8.695504188537598,
"learning_rate": 9.494918640368435e-06,
"loss": 1.9961,
"step": 157
},
{
"epoch": 0.5180327868852459,
"grad_norm": 9.583698272705078,
"learning_rate": 9.487134200051422e-06,
"loss": 2.0801,
"step": 158
},
{
"epoch": 0.521311475409836,
"grad_norm": 9.81774616241455,
"learning_rate": 9.479293471048461e-06,
"loss": 2.043,
"step": 159
},
{
"epoch": 0.5245901639344263,
"grad_norm": 8.542972564697266,
"learning_rate": 9.471396551717283e-06,
"loss": 1.9922,
"step": 160
},
{
"epoch": 0.5278688524590164,
"grad_norm": 9.894759178161621,
"learning_rate": 9.463443541120496e-06,
"loss": 2.0469,
"step": 161
},
{
"epoch": 0.5311475409836065,
"grad_norm": 9.396486282348633,
"learning_rate": 9.45543453902434e-06,
"loss": 1.9688,
"step": 162
},
{
"epoch": 0.5344262295081967,
"grad_norm": 9.620316505432129,
"learning_rate": 9.447369645897441e-06,
"loss": 2.0391,
"step": 163
},
{
"epoch": 0.5377049180327869,
"grad_norm": 10.236593246459961,
"learning_rate": 9.439248962909547e-06,
"loss": 1.957,
"step": 164
},
{
"epoch": 0.5409836065573771,
"grad_norm": 11.980149269104004,
"learning_rate": 9.431072591930261e-06,
"loss": 2.0293,
"step": 165
},
{
"epoch": 0.5442622950819672,
"grad_norm": 14.74476146697998,
"learning_rate": 9.42284063552776e-06,
"loss": 1.9844,
"step": 166
},
{
"epoch": 0.5475409836065573,
"grad_norm": 9.34021282196045,
"learning_rate": 9.414553196967508e-06,
"loss": 2.0703,
"step": 167
},
{
"epoch": 0.5508196721311476,
"grad_norm": 12.805119514465332,
"learning_rate": 9.406210380210971e-06,
"loss": 2.0117,
"step": 168
},
{
"epoch": 0.5540983606557377,
"grad_norm": 9.839883804321289,
"learning_rate": 9.397812289914297e-06,
"loss": 1.9727,
"step": 169
},
{
"epoch": 0.5573770491803278,
"grad_norm": 14.596024513244629,
"learning_rate": 9.389359031427013e-06,
"loss": 2.0742,
"step": 170
},
{
"epoch": 0.5606557377049181,
"grad_norm": 13.544656753540039,
"learning_rate": 9.380850710790701e-06,
"loss": 2.0742,
"step": 171
},
{
"epoch": 0.5639344262295082,
"grad_norm": 9.45289421081543,
"learning_rate": 9.372287434737673e-06,
"loss": 2.0586,
"step": 172
},
{
"epoch": 0.5672131147540984,
"grad_norm": 10.174077987670898,
"learning_rate": 9.363669310689617e-06,
"loss": 2.1367,
"step": 173
},
{
"epoch": 0.5704918032786885,
"grad_norm": 12.690667152404785,
"learning_rate": 9.354996446756268e-06,
"loss": 1.9395,
"step": 174
},
{
"epoch": 0.5737704918032787,
"grad_norm": 10.625876426696777,
"learning_rate": 9.346268951734042e-06,
"loss": 2.0215,
"step": 175
},
{
"epoch": 0.5770491803278689,
"grad_norm": 11.856694221496582,
"learning_rate": 9.337486935104672e-06,
"loss": 1.9766,
"step": 176
},
{
"epoch": 0.580327868852459,
"grad_norm": 10.2452974319458,
"learning_rate": 9.328650507033832e-06,
"loss": 2.0586,
"step": 177
},
{
"epoch": 0.5836065573770491,
"grad_norm": 12.020367622375488,
"learning_rate": 9.319759778369764e-06,
"loss": 2.0352,
"step": 178
},
{
"epoch": 0.5868852459016394,
"grad_norm": 11.364824295043945,
"learning_rate": 9.310814860641874e-06,
"loss": 2.0352,
"step": 179
},
{
"epoch": 0.5901639344262295,
"grad_norm": 12.439066886901855,
"learning_rate": 9.301815866059353e-06,
"loss": 2.0703,
"step": 180
},
{
"epoch": 0.5934426229508196,
"grad_norm": 12.66345500946045,
"learning_rate": 9.292762907509742e-06,
"loss": 1.9922,
"step": 181
},
{
"epoch": 0.5967213114754099,
"grad_norm": 11.868645668029785,
"learning_rate": 9.283656098557545e-06,
"loss": 1.9863,
"step": 182
},
{
"epoch": 0.6,
"grad_norm": 11.235689163208008,
"learning_rate": 9.27449555344278e-06,
"loss": 2.0195,
"step": 183
},
{
"epoch": 0.6032786885245902,
"grad_norm": 10.719315528869629,
"learning_rate": 9.26528138707956e-06,
"loss": 1.9883,
"step": 184
},
{
"epoch": 0.6065573770491803,
"grad_norm": 11.960482597351074,
"learning_rate": 9.256013715054648e-06,
"loss": 1.9082,
"step": 185
},
{
"epoch": 0.6098360655737705,
"grad_norm": 9.54014778137207,
"learning_rate": 9.24669265362601e-06,
"loss": 2.0156,
"step": 186
},
{
"epoch": 0.6131147540983607,
"grad_norm": 10.753348350524902,
"learning_rate": 9.237318319721346e-06,
"loss": 1.9746,
"step": 187
},
{
"epoch": 0.6163934426229508,
"grad_norm": 9.75783634185791,
"learning_rate": 9.227890830936634e-06,
"loss": 2.0195,
"step": 188
},
{
"epoch": 0.6196721311475409,
"grad_norm": 15.201295852661133,
"learning_rate": 9.218410305534657e-06,
"loss": 2.0117,
"step": 189
},
{
"epoch": 0.6229508196721312,
"grad_norm": 10.927349090576172,
"learning_rate": 9.208876862443504e-06,
"loss": 1.9316,
"step": 190
},
{
"epoch": 0.6262295081967213,
"grad_norm": 13.822660446166992,
"learning_rate": 9.199290621255095e-06,
"loss": 2.0371,
"step": 191
},
{
"epoch": 0.6295081967213115,
"grad_norm": 13.426775932312012,
"learning_rate": 9.189651702223672e-06,
"loss": 2.0273,
"step": 192
},
{
"epoch": 0.6327868852459017,
"grad_norm": 9.90354061126709,
"learning_rate": 9.179960226264288e-06,
"loss": 1.957,
"step": 193
},
{
"epoch": 0.6360655737704918,
"grad_norm": 20.340970993041992,
"learning_rate": 9.170216314951301e-06,
"loss": 2.0488,
"step": 194
},
{
"epoch": 0.639344262295082,
"grad_norm": 9.366877555847168,
"learning_rate": 9.16042009051684e-06,
"loss": 1.9688,
"step": 195
},
{
"epoch": 0.6426229508196721,
"grad_norm": 14.084966659545898,
"learning_rate": 9.150571675849273e-06,
"loss": 2.0938,
"step": 196
},
{
"epoch": 0.6459016393442623,
"grad_norm": 16.520889282226562,
"learning_rate": 9.140671194491666e-06,
"loss": 2.0508,
"step": 197
},
{
"epoch": 0.6491803278688525,
"grad_norm": 13.539576530456543,
"learning_rate": 9.130718770640235e-06,
"loss": 1.9922,
"step": 198
},
{
"epoch": 0.6524590163934426,
"grad_norm": 14.922486305236816,
"learning_rate": 9.120714529142784e-06,
"loss": 2.0371,
"step": 199
},
{
"epoch": 0.6557377049180327,
"grad_norm": 12.108205795288086,
"learning_rate": 9.110658595497148e-06,
"loss": 1.9902,
"step": 200
}
],
"logging_steps": 1,
"max_steps": 915,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 2.3465465892306944e+16,
"train_batch_size": 512,
"trial_name": null,
"trial_params": null
}