{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 295,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003395585738539898,
      "grad_norm": 6.320343624704885,
      "learning_rate": 0.0,
      "loss": 0.9899,
      "step": 1
    },
    {
      "epoch": 0.006791171477079796,
      "grad_norm": 16.545756923443665,
      "learning_rate": 3.3333333333333335e-07,
      "loss": 0.9683,
      "step": 2
    },
    {
      "epoch": 0.010186757215619695,
      "grad_norm": 6.124626290117518,
      "learning_rate": 6.666666666666667e-07,
      "loss": 0.9722,
      "step": 3
    },
    {
      "epoch": 0.013582342954159592,
      "grad_norm": 6.907471369837716,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.9667,
      "step": 4
    },
    {
      "epoch": 0.01697792869269949,
      "grad_norm": 6.275431523928528,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 0.9814,
      "step": 5
    },
    {
      "epoch": 0.02037351443123939,
      "grad_norm": 6.074623009583982,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 0.9502,
      "step": 6
    },
    {
      "epoch": 0.023769100169779286,
      "grad_norm": 89.99670872123082,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.9411,
      "step": 7
    },
    {
      "epoch": 0.027164685908319185,
      "grad_norm": 6.224698090768632,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 0.9075,
      "step": 8
    },
    {
      "epoch": 0.030560271646859084,
      "grad_norm": 5.913600661145363,
      "learning_rate": 2.666666666666667e-06,
      "loss": 0.8979,
      "step": 9
    },
    {
      "epoch": 0.03395585738539898,
      "grad_norm": 7.267004601688997,
      "learning_rate": 3e-06,
      "loss": 0.8589,
      "step": 10
    },
    {
      "epoch": 0.03735144312393888,
      "grad_norm": 4.920293108168373,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.8797,
      "step": 11
    },
    {
      "epoch": 0.04074702886247878,
      "grad_norm": 7.0251120254603325,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 0.785,
      "step": 12
    },
    {
      "epoch": 0.044142614601018676,
      "grad_norm": 4.620531235688851,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.7932,
      "step": 13
    },
    {
      "epoch": 0.04753820033955857,
      "grad_norm": 8.017042756220283,
      "learning_rate": 4.333333333333334e-06,
      "loss": 0.7878,
      "step": 14
    },
    {
      "epoch": 0.050933786078098474,
      "grad_norm": 7.168335735527789,
      "learning_rate": 4.666666666666667e-06,
      "loss": 0.7854,
      "step": 15
    },
    {
      "epoch": 0.05432937181663837,
      "grad_norm": 2.6102783506270137,
      "learning_rate": 5e-06,
      "loss": 0.7194,
      "step": 16
    },
    {
      "epoch": 0.057724957555178265,
      "grad_norm": 2.8281275726026074,
      "learning_rate": 5.333333333333334e-06,
      "loss": 0.7059,
      "step": 17
    },
    {
      "epoch": 0.06112054329371817,
      "grad_norm": 2.1330350735122505,
      "learning_rate": 5.666666666666667e-06,
      "loss": 0.7236,
      "step": 18
    },
    {
      "epoch": 0.06451612903225806,
      "grad_norm": 3.3828007198200694,
      "learning_rate": 6e-06,
      "loss": 0.7133,
      "step": 19
    },
    {
      "epoch": 0.06791171477079797,
      "grad_norm": 1.733407506222324,
      "learning_rate": 6.333333333333333e-06,
      "loss": 0.6807,
      "step": 20
    },
    {
      "epoch": 0.07130730050933787,
      "grad_norm": 1.5732888077219966,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.6717,
      "step": 21
    },
    {
      "epoch": 0.07470288624787776,
      "grad_norm": 2.869294784618442,
      "learning_rate": 7e-06,
      "loss": 0.6836,
      "step": 22
    },
    {
      "epoch": 0.07809847198641766,
      "grad_norm": 1.539403446338188,
      "learning_rate": 7.333333333333333e-06,
      "loss": 0.656,
      "step": 23
    },
    {
      "epoch": 0.08149405772495756,
      "grad_norm": 1.390711875015812,
      "learning_rate": 7.666666666666667e-06,
      "loss": 0.652,
      "step": 24
    },
    {
      "epoch": 0.08488964346349745,
      "grad_norm": 1.4133171062619616,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.6237,
      "step": 25
    },
    {
      "epoch": 0.08828522920203735,
      "grad_norm": 1.2675100082185478,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.5999,
      "step": 26
    },
    {
      "epoch": 0.09168081494057725,
      "grad_norm": 1.4010718655700412,
      "learning_rate": 8.666666666666668e-06,
      "loss": 0.6229,
      "step": 27
    },
    {
      "epoch": 0.09507640067911714,
      "grad_norm": 1.4302934299507057,
      "learning_rate": 9e-06,
      "loss": 0.5909,
      "step": 28
    },
    {
      "epoch": 0.09847198641765705,
      "grad_norm": 1.3417528783040296,
      "learning_rate": 9.333333333333334e-06,
      "loss": 0.6236,
      "step": 29
    },
    {
      "epoch": 0.10186757215619695,
      "grad_norm": 1.2550326498175997,
      "learning_rate": 9.666666666666667e-06,
      "loss": 0.5978,
      "step": 30
    },
    {
      "epoch": 0.10526315789473684,
      "grad_norm": 1.2949203936642646,
      "learning_rate": 1e-05,
      "loss": 0.6206,
      "step": 31
    },
    {
      "epoch": 0.10865874363327674,
      "grad_norm": 1.3404316606873072,
      "learning_rate": 9.999648647603774e-06,
      "loss": 0.6103,
      "step": 32
    },
    {
      "epoch": 0.11205432937181664,
      "grad_norm": 1.2464543142979883,
      "learning_rate": 9.998594639794502e-06,
      "loss": 0.5823,
      "step": 33
    },
    {
      "epoch": 0.11544991511035653,
      "grad_norm": 1.28881560865773,
      "learning_rate": 9.996838124703448e-06,
      "loss": 0.5724,
      "step": 34
    },
    {
      "epoch": 0.11884550084889643,
      "grad_norm": 1.236715524722473,
      "learning_rate": 9.994379349192927e-06,
      "loss": 0.6006,
      "step": 35
    },
    {
      "epoch": 0.12224108658743633,
      "grad_norm": 1.1748122882399419,
      "learning_rate": 9.991218658821609e-06,
      "loss": 0.5834,
      "step": 36
    },
    {
      "epoch": 0.12563667232597622,
      "grad_norm": 1.3335055567165142,
      "learning_rate": 9.987356497795944e-06,
      "loss": 0.5781,
      "step": 37
    },
    {
      "epoch": 0.12903225806451613,
      "grad_norm": 1.2573177388462913,
      "learning_rate": 9.982793408907747e-06,
      "loss": 0.5613,
      "step": 38
    },
    {
      "epoch": 0.13242784380305603,
      "grad_norm": 614.0974853168025,
      "learning_rate": 9.977530033457906e-06,
      "loss": 0.5518,
      "step": 39
    },
    {
      "epoch": 0.13582342954159593,
      "grad_norm": 1.8130801394796245,
      "learning_rate": 9.971567111166246e-06,
      "loss": 0.5875,
      "step": 40
    },
    {
      "epoch": 0.13921901528013583,
      "grad_norm": 3.021540974319598,
      "learning_rate": 9.964905480067585e-06,
      "loss": 0.5666,
      "step": 41
    },
    {
      "epoch": 0.14261460101867574,
      "grad_norm": 1.4307732415515269,
      "learning_rate": 9.957546076393944e-06,
      "loss": 0.5829,
      "step": 42
    },
    {
      "epoch": 0.1460101867572156,
      "grad_norm": 1.2948953962351142,
      "learning_rate": 9.949489934442966e-06,
      "loss": 0.592,
      "step": 43
    },
    {
      "epoch": 0.1494057724957555,
      "grad_norm": 1.28013431192408,
      "learning_rate": 9.940738186432565e-06,
      "loss": 0.554,
      "step": 44
    },
    {
      "epoch": 0.15280135823429541,
      "grad_norm": 1.364283730780204,
      "learning_rate": 9.931292062341793e-06,
      "loss": 0.5563,
      "step": 45
    },
    {
      "epoch": 0.15619694397283532,
      "grad_norm": 1.1789812327801241,
      "learning_rate": 9.921152889737985e-06,
      "loss": 0.5349,
      "step": 46
    },
    {
      "epoch": 0.15959252971137522,
      "grad_norm": 1.272995264216437,
      "learning_rate": 9.910322093590177e-06,
      "loss": 0.5885,
      "step": 47
    },
    {
      "epoch": 0.16298811544991512,
      "grad_norm": 1.2071797315958241,
      "learning_rate": 9.898801196068839e-06,
      "loss": 0.555,
      "step": 48
    },
    {
      "epoch": 0.166383701188455,
      "grad_norm": 1.2466300266575705,
      "learning_rate": 9.886591816331953e-06,
      "loss": 0.5412,
      "step": 49
    },
    {
      "epoch": 0.1697792869269949,
      "grad_norm": 1.1951025261533827,
      "learning_rate": 9.87369567029745e-06,
      "loss": 0.5266,
      "step": 50
    },
    {
      "epoch": 0.1731748726655348,
      "grad_norm": 1.1409578758956818,
      "learning_rate": 9.860114570402055e-06,
      "loss": 0.5364,
      "step": 51
    },
    {
      "epoch": 0.1765704584040747,
      "grad_norm": 1.194968225105999,
      "learning_rate": 9.845850425346563e-06,
      "loss": 0.544,
      "step": 52
    },
    {
      "epoch": 0.1799660441426146,
      "grad_norm": 1.105032206844106,
      "learning_rate": 9.830905239827592e-06,
      "loss": 0.526,
      "step": 53
    },
    {
      "epoch": 0.1833616298811545,
      "grad_norm": 1.0951143452700236,
      "learning_rate": 9.815281114255841e-06,
      "loss": 0.5384,
      "step": 54
    },
    {
      "epoch": 0.1867572156196944,
      "grad_norm": 1.237628011550819,
      "learning_rate": 9.798980244460892e-06,
      "loss": 0.5216,
      "step": 55
    },
    {
      "epoch": 0.19015280135823429,
      "grad_norm": 1.2049941993061364,
      "learning_rate": 9.782004921382612e-06,
      "loss": 0.5602,
      "step": 56
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 1.0963401467603866,
      "learning_rate": 9.764357530749178e-06,
      "loss": 0.5211,
      "step": 57
    },
    {
      "epoch": 0.1969439728353141,
      "grad_norm": 1.1153693044651702,
      "learning_rate": 9.74604055274178e-06,
      "loss": 0.5408,
      "step": 58
    },
    {
      "epoch": 0.200339558573854,
      "grad_norm": 1.1446372149192978,
      "learning_rate": 9.727056561646067e-06,
      "loss": 0.5297,
      "step": 59
    },
    {
      "epoch": 0.2037351443123939,
      "grad_norm": 1.1379654178567067,
      "learning_rate": 9.707408225490343e-06,
      "loss": 0.5241,
      "step": 60
    },
    {
      "epoch": 0.2071307300509338,
      "grad_norm": 1.1133344090884694,
      "learning_rate": 9.687098305670606e-06,
      "loss": 0.5373,
      "step": 61
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 1.14642700050245,
      "learning_rate": 9.66612965656245e-06,
      "loss": 0.5397,
      "step": 62
    },
    {
      "epoch": 0.21392190152801357,
      "grad_norm": 1.0251599768158084,
      "learning_rate": 9.644505225119922e-06,
      "loss": 0.5444,
      "step": 63
    },
    {
      "epoch": 0.21731748726655348,
      "grad_norm": 0.9890854380699226,
      "learning_rate": 9.622228050461345e-06,
      "loss": 0.4984,
      "step": 64
    },
    {
      "epoch": 0.22071307300509338,
      "grad_norm": 1.1117068387014335,
      "learning_rate": 9.599301263442194e-06,
      "loss": 0.5214,
      "step": 65
    },
    {
      "epoch": 0.22410865874363328,
      "grad_norm": 1.1785090789686485,
      "learning_rate": 9.575728086215093e-06,
      "loss": 0.5265,
      "step": 66
    },
    {
      "epoch": 0.22750424448217318,
      "grad_norm": 1.0565597242070186,
      "learning_rate": 9.551511831776966e-06,
      "loss": 0.5155,
      "step": 67
    },
    {
      "epoch": 0.23089983022071306,
      "grad_norm": 1.2278748412937952,
      "learning_rate": 9.526655903503423e-06,
      "loss": 0.5277,
      "step": 68
    },
    {
      "epoch": 0.23429541595925296,
      "grad_norm": 1.1213279864335683,
      "learning_rate": 9.501163794670445e-06,
      "loss": 0.5307,
      "step": 69
    },
    {
      "epoch": 0.23769100169779286,
      "grad_norm": 1.1237007845490015,
      "learning_rate": 9.475039087963443e-06,
      "loss": 0.51,
      "step": 70
    },
    {
      "epoch": 0.24108658743633277,
      "grad_norm": 1.0615774210564253,
      "learning_rate": 9.448285454973739e-06,
      "loss": 0.517,
      "step": 71
    },
    {
      "epoch": 0.24448217317487267,
      "grad_norm": 1.2104405507373044,
      "learning_rate": 9.420906655682553e-06,
      "loss": 0.5223,
      "step": 72
    },
    {
      "epoch": 0.24787775891341257,
      "grad_norm": 1.0985265075768098,
      "learning_rate": 9.392906537932582e-06,
      "loss": 0.5257,
      "step": 73
    },
    {
      "epoch": 0.25127334465195245,
      "grad_norm": 1.0626836434651132,
      "learning_rate": 9.364289036887214e-06,
      "loss": 0.5187,
      "step": 74
    },
    {
      "epoch": 0.2546689303904924,
      "grad_norm": 1.0234412989238388,
      "learning_rate": 9.335058174477472e-06,
      "loss": 0.5127,
      "step": 75
    },
    {
      "epoch": 0.25806451612903225,
      "grad_norm": 1.0702433549745685,
      "learning_rate": 9.305218058836778e-06,
      "loss": 0.4966,
      "step": 76
    },
    {
      "epoch": 0.2614601018675722,
      "grad_norm": 1.1228773632449267,
      "learning_rate": 9.274772883723587e-06,
      "loss": 0.5265,
      "step": 77
    },
    {
      "epoch": 0.26485568760611206,
      "grad_norm": 1.0076068781933833,
      "learning_rate": 9.24372692793199e-06,
      "loss": 0.5003,
      "step": 78
    },
    {
      "epoch": 0.26825127334465193,
      "grad_norm": 1.0987680518642855,
      "learning_rate": 9.21208455469037e-06,
      "loss": 0.4971,
      "step": 79
    },
    {
      "epoch": 0.27164685908319186,
      "grad_norm": 1.1051172285171345,
      "learning_rate": 9.179850211048193e-06,
      "loss": 0.491,
      "step": 80
    },
    {
      "epoch": 0.27504244482173174,
      "grad_norm": 1.0886761114445709,
      "learning_rate": 9.14702842725101e-06,
      "loss": 0.5033,
      "step": 81
    },
    {
      "epoch": 0.27843803056027167,
      "grad_norm": 1.101989710769037,
      "learning_rate": 9.113623816103775e-06,
      "loss": 0.5157,
      "step": 82
    },
    {
      "epoch": 0.28183361629881154,
      "grad_norm": 1.0846044794179348,
      "learning_rate": 9.079641072322555e-06,
      "loss": 0.5103,
      "step": 83
    },
    {
      "epoch": 0.28522920203735147,
      "grad_norm": 1.0994243701550208,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.5118,
      "step": 84
    },
    {
      "epoch": 0.28862478777589134,
      "grad_norm": 0.982679989364588,
      "learning_rate": 9.009960371307798e-06,
      "loss": 0.4753,
      "step": 85
    },
    {
      "epoch": 0.2920203735144312,
      "grad_norm": 1.0974557383394132,
      "learning_rate": 8.974272207066767e-06,
      "loss": 0.5065,
      "step": 86
    },
    {
      "epoch": 0.29541595925297115,
      "grad_norm": 1.191767026119928,
      "learning_rate": 8.938025494800454e-06,
      "loss": 0.496,
      "step": 87
    },
    {
      "epoch": 0.298811544991511,
      "grad_norm": 1.0863558052080193,
      "learning_rate": 8.901225328656543e-06,
      "loss": 0.4768,
      "step": 88
    },
    {
      "epoch": 0.30220713073005095,
      "grad_norm": 1.0292339490206497,
      "learning_rate": 8.863876880565656e-06,
      "loss": 0.4982,
      "step": 89
    },
    {
      "epoch": 0.30560271646859083,
      "grad_norm": 1.0263488228624327,
      "learning_rate": 8.825985399514488e-06,
      "loss": 0.49,
      "step": 90
    },
    {
      "epoch": 0.3089983022071307,
      "grad_norm": 1.1083934275101859,
      "learning_rate": 8.787556210808101e-06,
      "loss": 0.508,
      "step": 91
    },
    {
      "epoch": 0.31239388794567063,
      "grad_norm": 1.0759718773481644,
      "learning_rate": 8.748594715321512e-06,
      "loss": 0.493,
      "step": 92
    },
    {
      "epoch": 0.3157894736842105,
      "grad_norm": 1.1275176872041444,
      "learning_rate": 8.70910638874064e-06,
      "loss": 0.5063,
      "step": 93
    },
    {
      "epoch": 0.31918505942275044,
      "grad_norm": 1.0268180450930244,
      "learning_rate": 8.669096780792754e-06,
      "loss": 0.4953,
      "step": 94
    },
    {
      "epoch": 0.3225806451612903,
      "grad_norm": 1.0957503547012442,
      "learning_rate": 8.628571514466502e-06,
      "loss": 0.5062,
      "step": 95
    },
    {
      "epoch": 0.32597623089983024,
      "grad_norm": 1.0476160480506713,
      "learning_rate": 8.587536285221656e-06,
      "loss": 0.5254,
      "step": 96
    },
    {
      "epoch": 0.3293718166383701,
      "grad_norm": 0.973467408933069,
      "learning_rate": 8.545996860188668e-06,
      "loss": 0.5107,
      "step": 97
    },
    {
      "epoch": 0.33276740237691,
      "grad_norm": 0.9690640110532758,
      "learning_rate": 8.503959077358143e-06,
      "loss": 0.4767,
      "step": 98
    },
    {
      "epoch": 0.3361629881154499,
      "grad_norm": 1.1272513638936046,
      "learning_rate": 8.46142884476038e-06,
      "loss": 0.4991,
      "step": 99
    },
    {
      "epoch": 0.3395585738539898,
      "grad_norm": 1.089949180068211,
      "learning_rate": 8.418412139635026e-06,
      "loss": 0.5202,
      "step": 100
    },
    {
      "epoch": 0.34295415959252973,
      "grad_norm": 1.0904976650398113,
      "learning_rate": 8.374915007591053e-06,
      "loss": 0.5019,
      "step": 101
    },
    {
      "epoch": 0.3463497453310696,
      "grad_norm": 1.0132973869482662,
      "learning_rate": 8.330943561757092e-06,
      "loss": 0.4981,
      "step": 102
    },
    {
      "epoch": 0.34974533106960953,
      "grad_norm": 1.0144345934272716,
      "learning_rate": 8.286503981922284e-06,
      "loss": 0.4828,
      "step": 103
    },
    {
      "epoch": 0.3531409168081494,
      "grad_norm": 1.0708364693456955,
      "learning_rate": 8.241602513667775e-06,
      "loss": 0.4677,
      "step": 104
    },
    {
      "epoch": 0.3565365025466893,
      "grad_norm": 1.0734641865863697,
      "learning_rate": 8.19624546748895e-06,
      "loss": 0.4991,
      "step": 105
    },
    {
      "epoch": 0.3599320882852292,
      "grad_norm": 1.206730960165882,
      "learning_rate": 8.150439217908557e-06,
      "loss": 0.4874,
      "step": 106
    },
    {
      "epoch": 0.3633276740237691,
      "grad_norm": 0.990112721589732,
      "learning_rate": 8.104190202580811e-06,
      "loss": 0.4613,
      "step": 107
    },
    {
      "epoch": 0.366723259762309,
      "grad_norm": 1.135017537331381,
      "learning_rate": 8.057504921386661e-06,
      "loss": 0.4981,
      "step": 108
    },
    {
      "epoch": 0.3701188455008489,
      "grad_norm": 1.1168906124405507,
      "learning_rate": 8.010389935520269e-06,
      "loss": 0.4812,
      "step": 109
    },
    {
      "epoch": 0.3735144312393888,
      "grad_norm": 1.109365518761395,
      "learning_rate": 7.962851866566912e-06,
      "loss": 0.4901,
      "step": 110
    },
    {
      "epoch": 0.3769100169779287,
      "grad_norm": 1.1121750297406146,
      "learning_rate": 7.914897395572362e-06,
      "loss": 0.4841,
      "step": 111
    },
    {
      "epoch": 0.38030560271646857,
      "grad_norm": 1.10407244769018,
      "learning_rate": 7.866533262103937e-06,
      "loss": 0.4813,
      "step": 112
    },
    {
      "epoch": 0.3837011884550085,
      "grad_norm": 1.109502100982312,
      "learning_rate": 7.817766263303312e-06,
      "loss": 0.4731,
      "step": 113
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 1.0709239172717544,
      "learning_rate": 7.768603252931243e-06,
      "loss": 0.4988,
      "step": 114
    },
    {
      "epoch": 0.3904923599320883,
      "grad_norm": 1.0915174076138725,
      "learning_rate": 7.719051140404327e-06,
      "loss": 0.4825,
      "step": 115
    },
    {
      "epoch": 0.3938879456706282,
      "grad_norm": 1.0965896575304275,
      "learning_rate": 7.669116889823955e-06,
      "loss": 0.4742,
      "step": 116
    },
    {
      "epoch": 0.39728353140916806,
      "grad_norm": 1.0241307032424805,
      "learning_rate": 7.6188075189975644e-06,
      "loss": 0.4743,
      "step": 117
    },
    {
      "epoch": 0.400679117147708,
      "grad_norm": 0.9435780818732129,
      "learning_rate": 7.568130098452352e-06,
      "loss": 0.4739,
      "step": 118
    },
    {
      "epoch": 0.40407470288624786,
      "grad_norm": 1.0248298007190784,
      "learning_rate": 7.517091750441576e-06,
      "loss": 0.4814,
      "step": 119
    },
    {
      "epoch": 0.4074702886247878,
      "grad_norm": 1.0315243401737155,
      "learning_rate": 7.465699647943586e-06,
      "loss": 0.4805,
      "step": 120
    },
    {
      "epoch": 0.41086587436332767,
      "grad_norm": 1.091443614674752,
      "learning_rate": 7.413961013653725e-06,
      "loss": 0.4953,
      "step": 121
    },
    {
      "epoch": 0.4142614601018676,
      "grad_norm": 1.0680561484041602,
      "learning_rate": 7.361883118969248e-06,
      "loss": 0.4588,
      "step": 122
    },
    {
      "epoch": 0.41765704584040747,
      "grad_norm": 0.9761759048656751,
      "learning_rate": 7.309473282967387e-06,
      "loss": 0.469,
      "step": 123
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 1.0119186737160737,
      "learning_rate": 7.256738871376733e-06,
      "loss": 0.4812,
      "step": 124
    },
    {
      "epoch": 0.4244482173174873,
      "grad_norm": 0.9670302415957511,
      "learning_rate": 7.203687295542032e-06,
      "loss": 0.456,
      "step": 125
    },
    {
      "epoch": 0.42784380305602715,
      "grad_norm": 1.001107021910696,
      "learning_rate": 7.1503260113826035e-06,
      "loss": 0.4597,
      "step": 126
    },
    {
      "epoch": 0.4312393887945671,
      "grad_norm": 1.024857257795839,
      "learning_rate": 7.09666251834447e-06,
      "loss": 0.4884,
      "step": 127
    },
    {
      "epoch": 0.43463497453310695,
      "grad_norm": 1.0331435407774852,
      "learning_rate": 7.042704358346375e-06,
      "loss": 0.4581,
      "step": 128
    },
    {
      "epoch": 0.4380305602716469,
      "grad_norm": 1.00793619623988,
      "learning_rate": 6.988459114719849e-06,
      "loss": 0.4672,
      "step": 129
    },
    {
      "epoch": 0.44142614601018676,
      "grad_norm": 0.9145035341259203,
      "learning_rate": 6.933934411143419e-06,
      "loss": 0.4524,
      "step": 130
    },
    {
      "epoch": 0.44482173174872663,
      "grad_norm": 1.1118101624400085,
      "learning_rate": 6.879137910571191e-06,
      "loss": 0.4738,
      "step": 131
    },
    {
      "epoch": 0.44821731748726656,
      "grad_norm": 1.0044078413498474,
      "learning_rate": 6.824077314155877e-06,
      "loss": 0.4679,
      "step": 132
    },
    {
      "epoch": 0.45161290322580644,
      "grad_norm": 1.0568591065810937,
      "learning_rate": 6.768760360166471e-06,
      "loss": 0.4909,
      "step": 133
    },
    {
      "epoch": 0.45500848896434637,
      "grad_norm": 0.9937372079053678,
      "learning_rate": 6.713194822900707e-06,
      "loss": 0.4734,
      "step": 134
    },
    {
      "epoch": 0.45840407470288624,
      "grad_norm": 1.0380172124065203,
      "learning_rate": 6.657388511592453e-06,
      "loss": 0.4979,
      "step": 135
    },
    {
      "epoch": 0.4617996604414261,
      "grad_norm": 0.9710945454163669,
      "learning_rate": 6.601349269314188e-06,
      "loss": 0.4826,
      "step": 136
    },
    {
      "epoch": 0.46519524617996605,
      "grad_norm": 1.1195229655523649,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.4685,
      "step": 137
    },
    {
      "epoch": 0.4685908319185059,
      "grad_norm": 1.079871197310449,
      "learning_rate": 6.488603526712391e-06,
      "loss": 0.483,
      "step": 138
    },
    {
      "epoch": 0.47198641765704585,
      "grad_norm": 1.0089317127122743,
      "learning_rate": 6.431912871783587e-06,
      "loss": 0.5028,
      "step": 139
    },
    {
      "epoch": 0.47538200339558573,
      "grad_norm": 0.9671431137430636,
      "learning_rate": 6.3750209744473105e-06,
      "loss": 0.4669,
      "step": 140
    },
    {
      "epoch": 0.47877758913412566,
      "grad_norm": 1.0520557175009426,
      "learning_rate": 6.3179358303453386e-06,
      "loss": 0.4735,
      "step": 141
    },
    {
      "epoch": 0.48217317487266553,
      "grad_norm": 1.0538462837406155,
      "learning_rate": 6.260665462278544e-06,
      "loss": 0.5051,
      "step": 142
    },
    {
      "epoch": 0.4855687606112054,
      "grad_norm": 1.037965055623141,
      "learning_rate": 6.203217919079343e-06,
      "loss": 0.4762,
      "step": 143
    },
    {
      "epoch": 0.48896434634974534,
      "grad_norm": 1.0176275927771237,
      "learning_rate": 6.145601274480521e-06,
      "loss": 0.4869,
      "step": 144
    },
    {
      "epoch": 0.4923599320882852,
      "grad_norm": 1.0431340329887582,
      "learning_rate": 6.08782362598054e-06,
      "loss": 0.4611,
      "step": 145
    },
    {
      "epoch": 0.49575551782682514,
      "grad_norm": 1.0308571103490511,
      "learning_rate": 6.029893093705492e-06,
      "loss": 0.4763,
      "step": 146
    },
    {
      "epoch": 0.499151103565365,
      "grad_norm": 1.0355702334516321,
      "learning_rate": 5.971817819267914e-06,
      "loss": 0.4654,
      "step": 147
    },
    {
      "epoch": 0.5025466893039049,
      "grad_norm": 0.9893529623332218,
      "learning_rate": 5.9136059646225375e-06,
      "loss": 0.5044,
      "step": 148
    },
    {
      "epoch": 0.5059422750424448,
      "grad_norm": 0.960195438000855,
      "learning_rate": 5.855265710919211e-06,
      "loss": 0.4435,
      "step": 149
    },
    {
      "epoch": 0.5093378607809848,
      "grad_norm": 0.9646163806867624,
      "learning_rate": 5.796805257353109e-06,
      "loss": 0.4834,
      "step": 150
    },
    {
      "epoch": 0.5127334465195246,
      "grad_norm": 0.9931874900764169,
      "learning_rate": 5.738232820012407e-06,
      "loss": 0.461,
      "step": 151
    },
    {
      "epoch": 0.5161290322580645,
      "grad_norm": 1.0149629817842267,
      "learning_rate": 5.679556630723592e-06,
      "loss": 0.4611,
      "step": 152
    },
    {
      "epoch": 0.5195246179966044,
      "grad_norm": 1.0022805808479243,
      "learning_rate": 5.620784935894548e-06,
      "loss": 0.4712,
      "step": 153
    },
    {
      "epoch": 0.5229202037351444,
      "grad_norm": 0.9091219788824919,
      "learning_rate": 5.561925995355595e-06,
      "loss": 0.4906,
      "step": 154
    },
    {
      "epoch": 0.5263157894736842,
      "grad_norm": 0.9508691466403413,
      "learning_rate": 5.5029880811986546e-06,
      "loss": 0.4836,
      "step": 155
    },
    {
      "epoch": 0.5297113752122241,
      "grad_norm": 0.9816091460679524,
      "learning_rate": 5.443979476614674e-06,
      "loss": 0.4568,
      "step": 156
    },
    {
      "epoch": 0.533106960950764,
      "grad_norm": 0.9890714751673821,
      "learning_rate": 5.384908474729501e-06,
      "loss": 0.4669,
      "step": 157
    },
    {
      "epoch": 0.5365025466893039,
      "grad_norm": 0.9852345937629808,
      "learning_rate": 5.325783377438357e-06,
      "loss": 0.4631,
      "step": 158
    },
    {
      "epoch": 0.5398981324278438,
      "grad_norm": 0.9992258942689997,
      "learning_rate": 5.266612494239088e-06,
      "loss": 0.4824,
      "step": 159
    },
    {
      "epoch": 0.5432937181663837,
      "grad_norm": 1.0010167382867965,
      "learning_rate": 5.207404141064334e-06,
      "loss": 0.4619,
      "step": 160
    },
    {
      "epoch": 0.5466893039049237,
      "grad_norm": 1.117194827109753,
      "learning_rate": 5.148166639112799e-06,
      "loss": 0.518,
      "step": 161
    },
    {
      "epoch": 0.5500848896434635,
      "grad_norm": 0.9423156223035444,
      "learning_rate": 5.088908313679788e-06,
      "loss": 0.4567,
      "step": 162
    },
    {
      "epoch": 0.5534804753820034,
      "grad_norm": 0.919109798014551,
      "learning_rate": 5.029637492987153e-06,
      "loss": 0.4649,
      "step": 163
    },
    {
      "epoch": 0.5568760611205433,
      "grad_norm": 1.1232411363329386,
      "learning_rate": 4.970362507012848e-06,
      "loss": 0.4682,
      "step": 164
    },
    {
      "epoch": 0.5602716468590832,
      "grad_norm": 0.9560703825281516,
      "learning_rate": 4.911091686320213e-06,
      "loss": 0.4594,
      "step": 165
    },
    {
      "epoch": 0.5636672325976231,
      "grad_norm": 0.9927617945956294,
      "learning_rate": 4.8518333608872015e-06,
      "loss": 0.4266,
      "step": 166
    },
    {
      "epoch": 0.567062818336163,
      "grad_norm": 1.0053279255521879,
      "learning_rate": 4.792595858935668e-06,
      "loss": 0.4745,
      "step": 167
    },
    {
      "epoch": 0.5704584040747029,
      "grad_norm": 1.1247627665383018,
      "learning_rate": 4.733387505760913e-06,
      "loss": 0.4863,
      "step": 168
    },
    {
      "epoch": 0.5738539898132428,
      "grad_norm": 0.9506838521386066,
      "learning_rate": 4.674216622561645e-06,
      "loss": 0.4459,
      "step": 169
    },
    {
      "epoch": 0.5772495755517827,
      "grad_norm": 0.8918545360346245,
      "learning_rate": 4.6150915252705005e-06,
      "loss": 0.4571,
      "step": 170
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 0.9704497436094051,
      "learning_rate": 4.556020523385326e-06,
      "loss": 0.4701,
      "step": 171
    },
    {
      "epoch": 0.5840407470288624,
      "grad_norm": 0.9830084007827714,
      "learning_rate": 4.497011918801347e-06,
      "loss": 0.477,
      "step": 172
    },
    {
      "epoch": 0.5874363327674024,
      "grad_norm": 1.0446285595393363,
      "learning_rate": 4.438074004644407e-06,
      "loss": 0.4963,
      "step": 173
    },
    {
      "epoch": 0.5908319185059423,
      "grad_norm": 0.9430261659822053,
      "learning_rate": 4.379215064105454e-06,
      "loss": 0.4502,
      "step": 174
    },
    {
      "epoch": 0.5942275042444821,
      "grad_norm": 0.9528761706191039,
      "learning_rate": 4.32044336927641e-06,
      "loss": 0.4667,
      "step": 175
    },
    {
      "epoch": 0.597623089983022,
      "grad_norm": 0.9712945630496029,
      "learning_rate": 4.261767179987595e-06,
      "loss": 0.469,
      "step": 176
    },
    {
      "epoch": 0.601018675721562,
      "grad_norm": 0.9203712027671123,
      "learning_rate": 4.203194742646893e-06,
      "loss": 0.4549,
      "step": 177
    },
    {
      "epoch": 0.6044142614601019,
      "grad_norm": 0.9335750991253596,
      "learning_rate": 4.1447342890807905e-06,
      "loss": 0.4589,
      "step": 178
    },
    {
      "epoch": 0.6078098471986417,
      "grad_norm": 0.9302840956981335,
      "learning_rate": 4.086394035377463e-06,
      "loss": 0.4525,
      "step": 179
    },
    {
      "epoch": 0.6112054329371817,
      "grad_norm": 0.9205763667360544,
      "learning_rate": 4.028182180732088e-06,
      "loss": 0.4652,
      "step": 180
    },
    {
      "epoch": 0.6146010186757216,
      "grad_norm": 0.9053159533350853,
      "learning_rate": 3.970106906294509e-06,
      "loss": 0.4887,
      "step": 181
    },
    {
      "epoch": 0.6179966044142614,
      "grad_norm": 0.9639072424421392,
      "learning_rate": 3.912176374019462e-06,
      "loss": 0.4928,
      "step": 182
    },
    {
      "epoch": 0.6213921901528013,
      "grad_norm": 0.8934882136410526,
      "learning_rate": 3.85439872551948e-06,
      "loss": 0.4613,
      "step": 183
    },
    {
      "epoch": 0.6247877758913413,
      "grad_norm": 0.9142153731395757,
      "learning_rate": 3.796782080920659e-06,
      "loss": 0.4489,
      "step": 184
    },
    {
      "epoch": 0.6281833616298812,
      "grad_norm": 0.9197655133200423,
      "learning_rate": 3.7393345377214584e-06,
      "loss": 0.4433,
      "step": 185
    },
    {
      "epoch": 0.631578947368421,
      "grad_norm": 0.9569197839619695,
      "learning_rate": 3.682064169654663e-06,
      "loss": 0.4665,
      "step": 186
    },
    {
      "epoch": 0.634974533106961,
      "grad_norm": 0.9362494462879061,
      "learning_rate": 3.6249790255526916e-06,
      "loss": 0.4363,
      "step": 187
    },
    {
      "epoch": 0.6383701188455009,
      "grad_norm": 0.8671267136185908,
      "learning_rate": 3.568087128216414e-06,
      "loss": 0.4646,
      "step": 188
    },
    {
      "epoch": 0.6417657045840407,
      "grad_norm": 0.9203019838984255,
      "learning_rate": 3.511396473287611e-06,
      "loss": 0.4571,
      "step": 189
    },
    {
      "epoch": 0.6451612903225806,
      "grad_norm": 0.9768723974984408,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.4484,
      "step": 190
    },
    {
      "epoch": 0.6485568760611206,
      "grad_norm": 0.9396504959111149,
      "learning_rate": 3.398650730685813e-06,
      "loss": 0.457,
      "step": 191
    },
    {
      "epoch": 0.6519524617996605,
      "grad_norm": 0.9336335579059931,
      "learning_rate": 3.3426114884075488e-06,
      "loss": 0.4467,
      "step": 192
    },
    {
      "epoch": 0.6553480475382003,
      "grad_norm": 0.8976643217426382,
      "learning_rate": 3.2868051770992935e-06,
      "loss": 0.4572,
      "step": 193
    },
    {
      "epoch": 0.6587436332767402,
      "grad_norm": 0.9373033603378723,
      "learning_rate": 3.2312396398335312e-06,
      "loss": 0.4687,
      "step": 194
    },
    {
      "epoch": 0.6621392190152802,
      "grad_norm": 0.9707294952319975,
      "learning_rate": 3.175922685844125e-06,
      "loss": 0.4336,
      "step": 195
    },
    {
      "epoch": 0.66553480475382,
      "grad_norm": 0.9583493306758707,
      "learning_rate": 3.1208620894288105e-06,
      "loss": 0.4486,
      "step": 196
    },
    {
      "epoch": 0.6689303904923599,
      "grad_norm": 0.9820021819293268,
      "learning_rate": 3.0660655888565827e-06,
      "loss": 0.4588,
      "step": 197
    },
    {
      "epoch": 0.6723259762308998,
      "grad_norm": 0.9400174461623624,
      "learning_rate": 3.0115408852801535e-06,
      "loss": 0.4423,
      "step": 198
    },
    {
      "epoch": 0.6757215619694398,
      "grad_norm": 0.9603934801371697,
      "learning_rate": 2.9572956416536267e-06,
      "loss": 0.444,
      "step": 199
    },
    {
      "epoch": 0.6791171477079796,
      "grad_norm": 0.9537870436181752,
      "learning_rate": 2.9033374816555338e-06,
      "loss": 0.4556,
      "step": 200
    },
    {
      "epoch": 0.6825127334465195,
      "grad_norm": 0.9149135094841083,
      "learning_rate": 2.8496739886173994e-06,
      "loss": 0.4727,
      "step": 201
    },
    {
      "epoch": 0.6859083191850595,
      "grad_norm": 0.9486316397589806,
      "learning_rate": 2.7963127044579697e-06,
      "loss": 0.432,
      "step": 202
    },
    {
      "epoch": 0.6893039049235993,
      "grad_norm": 0.9240748880967119,
      "learning_rate": 2.743261128623269e-06,
      "loss": 0.4596,
      "step": 203
    },
    {
      "epoch": 0.6926994906621392,
      "grad_norm": 0.9573847569611018,
      "learning_rate": 2.6905267170326143e-06,
      "loss": 0.4679,
      "step": 204
    },
    {
      "epoch": 0.6960950764006791,
      "grad_norm": 0.8845992056166951,
      "learning_rate": 2.6381168810307536e-06,
      "loss": 0.4631,
      "step": 205
    },
    {
      "epoch": 0.6994906621392191,
      "grad_norm": 1.004797015683845,
      "learning_rate": 2.5860389863462765e-06,
      "loss": 0.4475,
      "step": 206
    },
    {
      "epoch": 0.7028862478777589,
      "grad_norm": 0.9408081397458115,
      "learning_rate": 2.534300352056416e-06,
      "loss": 0.4529,
      "step": 207
    },
    {
      "epoch": 0.7062818336162988,
      "grad_norm": 0.8830371296945094,
      "learning_rate": 2.4829082495584244e-06,
      "loss": 0.4431,
      "step": 208
    },
    {
      "epoch": 0.7096774193548387,
      "grad_norm": 0.918591969840702,
      "learning_rate": 2.4318699015476495e-06,
      "loss": 0.4295,
      "step": 209
    },
    {
      "epoch": 0.7130730050933786,
      "grad_norm": 0.9126233293580099,
      "learning_rate": 2.3811924810024385e-06,
      "loss": 0.4462,
      "step": 210
    },
    {
      "epoch": 0.7164685908319185,
      "grad_norm": 0.944049132172904,
      "learning_rate": 2.330883110176049e-06,
      "loss": 0.4764,
      "step": 211
    },
    {
      "epoch": 0.7198641765704584,
      "grad_norm": 0.8594916774059891,
      "learning_rate": 2.2809488595956746e-06,
      "loss": 0.4245,
      "step": 212
    },
    {
      "epoch": 0.7232597623089984,
      "grad_norm": 0.9004237430035651,
      "learning_rate": 2.2313967470687593e-06,
      "loss": 0.4618,
      "step": 213
    },
    {
      "epoch": 0.7266553480475382,
      "grad_norm": 0.9087973453481795,
      "learning_rate": 2.18223373669669e-06,
      "loss": 0.4614,
      "step": 214
    },
    {
      "epoch": 0.7300509337860781,
      "grad_norm": 0.8919978483526617,
      "learning_rate": 2.1334667378960642e-06,
      "loss": 0.462,
      "step": 215
    },
    {
      "epoch": 0.733446519524618,
      "grad_norm": 0.9099211098465206,
      "learning_rate": 2.0851026044276405e-06,
      "loss": 0.4634,
      "step": 216
    },
    {
      "epoch": 0.7368421052631579,
      "grad_norm": 0.8896204484265512,
      "learning_rate": 2.0371481334330913e-06,
      "loss": 0.463,
      "step": 217
    },
    {
      "epoch": 0.7402376910016978,
      "grad_norm": 0.9519257493559129,
      "learning_rate": 1.9896100644797316e-06,
      "loss": 0.4788,
      "step": 218
    },
    {
      "epoch": 0.7436332767402377,
      "grad_norm": 0.9116718849117885,
      "learning_rate": 1.9424950786133414e-06,
      "loss": 0.4634,
      "step": 219
    },
    {
      "epoch": 0.7470288624787776,
      "grad_norm": 0.9263410885293661,
      "learning_rate": 1.8958097974191909e-06,
      "loss": 0.484,
      "step": 220
    },
    {
      "epoch": 0.7504244482173175,
      "grad_norm": 0.9023645875611066,
      "learning_rate": 1.8495607820914451e-06,
      "loss": 0.4442,
      "step": 221
    },
    {
      "epoch": 0.7538200339558574,
      "grad_norm": 0.8875288134755263,
      "learning_rate": 1.8037545325110506e-06,
      "loss": 0.4241,
      "step": 222
    },
    {
      "epoch": 0.7572156196943973,
      "grad_norm": 0.9432820849930534,
      "learning_rate": 1.7583974863322272e-06,
      "loss": 0.4837,
      "step": 223
    },
    {
      "epoch": 0.7606112054329371,
      "grad_norm": 0.9658199086993665,
      "learning_rate": 1.7134960180777171e-06,
      "loss": 0.4728,
      "step": 224
    },
    {
      "epoch": 0.7640067911714771,
      "grad_norm": 0.8457866400541412,
      "learning_rate": 1.6690564382429104e-06,
      "loss": 0.4301,
      "step": 225
    },
    {
      "epoch": 0.767402376910017,
      "grad_norm": 0.8989697214841547,
      "learning_rate": 1.6250849924089485e-06,
      "loss": 0.4527,
      "step": 226
    },
    {
      "epoch": 0.7707979626485568,
      "grad_norm": 0.9022700419018949,
      "learning_rate": 1.581587860364977e-06,
      "loss": 0.4543,
      "step": 227
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 0.9173241233268217,
      "learning_rate": 1.5385711552396227e-06,
      "loss": 0.4466,
      "step": 228
    },
    {
      "epoch": 0.7775891341256367,
      "grad_norm": 0.8617337826425147,
      "learning_rate": 1.4960409226418576e-06,
      "loss": 0.4386,
      "step": 229
    },
    {
      "epoch": 0.7809847198641766,
      "grad_norm": 0.8625616538339923,
      "learning_rate": 1.4540031398113335e-06,
      "loss": 0.4533,
      "step": 230
    },
    {
      "epoch": 0.7843803056027164,
      "grad_norm": 0.894048701617508,
      "learning_rate": 1.4124637147783431e-06,
      "loss": 0.4447,
      "step": 231
    },
    {
      "epoch": 0.7877758913412564,
      "grad_norm": 0.9208844935282854,
      "learning_rate": 1.371428485533498e-06,
      "loss": 0.4826,
      "step": 232
    },
    {
      "epoch": 0.7911714770797963,
      "grad_norm": 0.8883039726822113,
      "learning_rate": 1.3309032192072463e-06,
      "loss": 0.4486,
      "step": 233
    },
    {
      "epoch": 0.7945670628183361,
      "grad_norm": 0.899703690304073,
      "learning_rate": 1.29089361125936e-06,
      "loss": 0.4396,
      "step": 234
    },
    {
      "epoch": 0.797962648556876,
      "grad_norm": 0.942126628969688,
      "learning_rate": 1.251405284678488e-06,
      "loss": 0.4666,
      "step": 235
    },
    {
      "epoch": 0.801358234295416,
      "grad_norm": 0.876574882692073,
      "learning_rate": 1.2124437891918995e-06,
      "loss": 0.4338,
      "step": 236
    },
    {
      "epoch": 0.8047538200339559,
      "grad_norm": 0.8738541998828565,
      "learning_rate": 1.1740146004855141e-06,
      "loss": 0.4609,
      "step": 237
    },
    {
      "epoch": 0.8081494057724957,
      "grad_norm": 0.8732256896169818,
      "learning_rate": 1.1361231194343436e-06,
      "loss": 0.4505,
      "step": 238
    },
    {
      "epoch": 0.8115449915110357,
      "grad_norm": 0.8634374293174691,
      "learning_rate": 1.0987746713434578e-06,
      "loss": 0.4626,
      "step": 239
    },
    {
      "epoch": 0.8149405772495756,
      "grad_norm": 0.8709360064531477,
      "learning_rate": 1.0619745051995473e-06,
      "loss": 0.465,
      "step": 240
    },
    {
      "epoch": 0.8183361629881154,
      "grad_norm": 0.8612129831209381,
      "learning_rate": 1.0257277929332332e-06,
      "loss": 0.4543,
      "step": 241
    },
    {
      "epoch": 0.8217317487266553,
      "grad_norm": 0.9089176413771565,
      "learning_rate": 9.900396286922025e-07,
      "loss": 0.4676,
      "step": 242
    },
    {
      "epoch": 0.8251273344651953,
      "grad_norm": 0.8856710278778406,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.4366,
      "step": 243
    },
    {
      "epoch": 0.8285229202037352,
      "grad_norm": 0.8188623584178454,
      "learning_rate": 9.203589276774438e-07,
      "loss": 0.4404,
      "step": 244
    },
    {
      "epoch": 0.831918505942275,
      "grad_norm": 0.9252917982729971,
      "learning_rate": 8.86376183896226e-07,
      "loss": 0.4447,
      "step": 245
    },
    {
      "epoch": 0.8353140916808149,
      "grad_norm": 0.8784423429363009,
      "learning_rate": 8.529715727489912e-07,
      "loss": 0.4488,
      "step": 246
    },
    {
      "epoch": 0.8387096774193549,
      "grad_norm": 0.9207047553546756,
      "learning_rate": 8.201497889518073e-07,
      "loss": 0.4561,
      "step": 247
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 0.886908726068121,
      "learning_rate": 7.879154453096305e-07,
      "loss": 0.4556,
      "step": 248
    },
    {
      "epoch": 0.8455008488964346,
      "grad_norm": 0.8585705366949494,
      "learning_rate": 7.562730720680111e-07,
      "loss": 0.4333,
      "step": 249
    },
    {
      "epoch": 0.8488964346349746,
      "grad_norm": 0.8504815527006262,
      "learning_rate": 7.25227116276413e-07,
      "loss": 0.4512,
      "step": 250
    },
    {
      "epoch": 0.8522920203735145,
      "grad_norm": 0.8201572218259242,
      "learning_rate": 6.947819411632223e-07,
      "loss": 0.4492,
      "step": 251
    },
    {
      "epoch": 0.8556876061120543,
      "grad_norm": 0.8579475897548395,
      "learning_rate": 6.649418255225298e-07,
      "loss": 0.4633,
      "step": 252
    },
    {
      "epoch": 0.8590831918505942,
      "grad_norm": 0.8140416489339838,
      "learning_rate": 6.357109631127889e-07,
      "loss": 0.4418,
      "step": 253
    },
    {
      "epoch": 0.8624787775891342,
      "grad_norm": 0.8552881051968678,
      "learning_rate": 6.07093462067419e-07,
      "loss": 0.4572,
      "step": 254
    },
    {
      "epoch": 0.865874363327674,
      "grad_norm": 0.8971540169404577,
      "learning_rate": 5.79093344317449e-07,
      "loss": 0.4775,
      "step": 255
    },
    {
      "epoch": 0.8692699490662139,
      "grad_norm": 0.8058254068183305,
      "learning_rate": 5.517145450262639e-07,
      "loss": 0.4313,
      "step": 256
    },
    {
      "epoch": 0.8726655348047538,
      "grad_norm": 0.8530487589073922,
      "learning_rate": 5.249609120365579e-07,
      "loss": 0.4397,
      "step": 257
    },
    {
      "epoch": 0.8760611205432938,
      "grad_norm": 0.9429999718413201,
      "learning_rate": 4.988362053295564e-07,
      "loss": 0.4496,
      "step": 258
    },
    {
      "epoch": 0.8794567062818336,
      "grad_norm": 0.8375356078246441,
      "learning_rate": 4.733440964965791e-07,
      "loss": 0.4303,
      "step": 259
    },
    {
      "epoch": 0.8828522920203735,
      "grad_norm": 0.8256703834683652,
      "learning_rate": 4.484881682230341e-07,
      "loss": 0.4268,
      "step": 260
    },
    {
      "epoch": 0.8862478777589134,
      "grad_norm": 0.8543078561007811,
      "learning_rate": 4.242719137849077e-07,
      "loss": 0.4514,
      "step": 261
    },
    {
      "epoch": 0.8896434634974533,
      "grad_norm": 0.905414358204414,
      "learning_rate": 4.00698736557808e-07,
      "loss": 0.4475,
      "step": 262
    },
    {
      "epoch": 0.8930390492359932,
      "grad_norm": 0.8479836834072867,
      "learning_rate": 3.777719495386567e-07,
      "loss": 0.4479,
      "step": 263
    },
    {
      "epoch": 0.8964346349745331,
      "grad_norm": 0.8646327130622585,
      "learning_rate": 3.5549477488007853e-07,
      "loss": 0.4563,
      "step": 264
    },
    {
      "epoch": 0.8998302207130731,
      "grad_norm": 0.8363483102479007,
      "learning_rate": 3.3387034343755063e-07,
      "loss": 0.4447,
      "step": 265
    },
    {
      "epoch": 0.9032258064516129,
      "grad_norm": 0.8772923486186023,
      "learning_rate": 3.1290169432939556e-07,
      "loss": 0.4416,
      "step": 266
    },
    {
      "epoch": 0.9066213921901528,
      "grad_norm": 0.8320996733730202,
      "learning_rate": 2.925917745096568e-07,
      "loss": 0.4263,
      "step": 267
    },
    {
      "epoch": 0.9100169779286927,
      "grad_norm": 0.8735391402072182,
      "learning_rate": 2.7294343835393366e-07,
      "loss": 0.4635,
      "step": 268
    },
    {
      "epoch": 0.9134125636672326,
      "grad_norm": 0.8476560390812548,
      "learning_rate": 2.539594472582213e-07,
      "loss": 0.4387,
      "step": 269
    },
    {
      "epoch": 0.9168081494057725,
      "grad_norm": 0.8265472830061668,
      "learning_rate": 2.3564246925082358e-07,
      "loss": 0.4409,
      "step": 270
    },
    {
      "epoch": 0.9202037351443124,
      "grad_norm": 0.8275085794146296,
      "learning_rate": 2.179950786173879e-07,
      "loss": 0.422,
      "step": 271
    },
    {
      "epoch": 0.9235993208828522,
      "grad_norm": 0.8378368477541204,
      "learning_rate": 2.01019755539108e-07,
      "loss": 0.4533,
      "step": 272
    },
    {
      "epoch": 0.9269949066213922,
      "grad_norm": 0.8225280576542648,
      "learning_rate": 1.8471888574415953e-07,
      "loss": 0.4504,
      "step": 273
    },
    {
      "epoch": 0.9303904923599321,
      "grad_norm": 0.8689138287376853,
      "learning_rate": 1.690947601724091e-07,
      "loss": 0.4616,
      "step": 274
    },
    {
      "epoch": 0.933786078098472,
      "grad_norm": 0.8153963652578823,
      "learning_rate": 1.5414957465343883e-07,
      "loss": 0.4362,
      "step": 275
    },
    {
      "epoch": 0.9371816638370118,
      "grad_norm": 0.7970051219455544,
      "learning_rate": 1.3988542959794627e-07,
      "loss": 0.4476,
      "step": 276
    },
    {
      "epoch": 0.9405772495755518,
      "grad_norm": 0.8286263852975556,
      "learning_rate": 1.2630432970255014e-07,
      "loss": 0.4317,
      "step": 277
    },
    {
      "epoch": 0.9439728353140917,
      "grad_norm": 0.819703548732724,
      "learning_rate": 1.1340818366804728e-07,
      "loss": 0.4374,
      "step": 278
    },
    {
      "epoch": 0.9473684210526315,
      "grad_norm": 0.8630335285550543,
      "learning_rate": 1.0119880393116177e-07,
      "loss": 0.4463,
      "step": 279
    },
    {
      "epoch": 0.9507640067911715,
      "grad_norm": 0.8905416016998572,
      "learning_rate": 8.967790640982466e-08,
      "loss": 0.4534,
      "step": 280
    },
    {
      "epoch": 0.9541595925297114,
      "grad_norm": 0.869553726844365,
      "learning_rate": 7.884711026201586e-08,
      "loss": 0.4474,
      "step": 281
    },
    {
      "epoch": 0.9575551782682513,
      "grad_norm": 0.8516101851144577,
      "learning_rate": 6.870793765820783e-08,
      "loss": 0.4735,
      "step": 282
    },
    {
      "epoch": 0.9609507640067911,
      "grad_norm": 0.834166916764426,
      "learning_rate": 5.92618135674361e-08,
      "loss": 0.4506,
      "step": 283
    },
    {
      "epoch": 0.9643463497453311,
      "grad_norm": 0.8500281237762543,
      "learning_rate": 5.0510065557034526e-08,
      "loss": 0.4677,
      "step": 284
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 0.833367943844974,
      "learning_rate": 4.245392360605727e-08,
      "loss": 0.4582,
      "step": 285
    },
    {
      "epoch": 0.9711375212224108,
      "grad_norm": 0.8458107328647104,
      "learning_rate": 3.5094519932415417e-08,
      "loss": 0.4505,
      "step": 286
    },
    {
      "epoch": 0.9745331069609507,
      "grad_norm": 0.8443895579435003,
      "learning_rate": 2.843288883375539e-08,
      "loss": 0.4538,
      "step": 287
    },
    {
      "epoch": 0.9779286926994907,
      "grad_norm": 0.8631958241337926,
      "learning_rate": 2.2469966542096323e-08,
      "loss": 0.4432,
      "step": 288
    },
    {
      "epoch": 0.9813242784380306,
      "grad_norm": 0.886260366562854,
      "learning_rate": 1.7206591092253642e-08,
      "loss": 0.4567,
      "step": 289
    },
    {
      "epoch": 0.9847198641765704,
      "grad_norm": 0.7994144470255627,
      "learning_rate": 1.264350220405719e-08,
      "loss": 0.4317,
      "step": 290
    },
    {
      "epoch": 0.9881154499151104,
      "grad_norm": 0.8700681537853684,
      "learning_rate": 8.781341178393244e-09,
      "loss": 0.4555,
      "step": 291
    },
    {
      "epoch": 0.9915110356536503,
      "grad_norm": 0.8575898812488901,
      "learning_rate": 5.620650807073857e-09,
      "loss": 0.4606,
      "step": 292
    },
    {
      "epoch": 0.9949066213921901,
      "grad_norm": 0.8616135380601608,
      "learning_rate": 3.1618752965534295e-09,
      "loss": 0.4408,
      "step": 293
    },
    {
      "epoch": 0.99830220713073,
      "grad_norm": 0.7784658740487929,
      "learning_rate": 1.4053602054991954e-09,
      "loss": 0.4157,
      "step": 294
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.2063313976605035,
      "learning_rate": 3.513523962256349e-10,
      "loss": 0.4174,
      "step": 295
    },
    {
      "epoch": 1.0,
      "step": 295,
      "total_flos": 44242544295936.0,
      "train_loss": 0.5097209928399425,
      "train_runtime": 1256.4644,
      "train_samples_per_second": 14.995,
      "train_steps_per_second": 0.235
    }
  ],
  "logging_steps": 1,
  "max_steps": 295,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 2500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 44242544295936.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}