{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.986425339366516,
  "eval_steps": 500,
  "global_step": 165,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01809954751131222,
      "grad_norm": 4.270520950401436,
      "learning_rate": 5.882352941176471e-07,
      "loss": 0.8651,
      "step": 1
    },
    {
      "epoch": 0.03619909502262444,
      "grad_norm": 4.08130709590183,
      "learning_rate": 1.1764705882352942e-06,
      "loss": 0.834,
      "step": 2
    },
    {
      "epoch": 0.05429864253393665,
      "grad_norm": 6.3327514730586545,
      "learning_rate": 1.7647058823529414e-06,
      "loss": 0.8286,
      "step": 3
    },
    {
      "epoch": 0.07239819004524888,
      "grad_norm": 4.619785546975456,
      "learning_rate": 2.3529411764705885e-06,
      "loss": 0.8592,
      "step": 4
    },
    {
      "epoch": 0.09049773755656108,
      "grad_norm": 3.2305077950239336,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 0.8714,
      "step": 5
    },
    {
      "epoch": 0.1085972850678733,
      "grad_norm": 2.7370616230365483,
      "learning_rate": 3.529411764705883e-06,
      "loss": 0.846,
      "step": 6
    },
    {
      "epoch": 0.12669683257918551,
      "grad_norm": 2.0471907108946947,
      "learning_rate": 4.11764705882353e-06,
      "loss": 0.8033,
      "step": 7
    },
    {
      "epoch": 0.14479638009049775,
      "grad_norm": 1.7780057810998475,
      "learning_rate": 4.705882352941177e-06,
      "loss": 0.7808,
      "step": 8
    },
    {
      "epoch": 0.16289592760180996,
      "grad_norm": 2.3908751757347773,
      "learning_rate": 5.294117647058824e-06,
      "loss": 0.7251,
      "step": 9
    },
    {
      "epoch": 0.18099547511312217,
      "grad_norm": 2.25104000061671,
      "learning_rate": 5.882352941176471e-06,
      "loss": 0.756,
      "step": 10
    },
    {
      "epoch": 0.19909502262443438,
      "grad_norm": 1.8941739130758755,
      "learning_rate": 6.470588235294119e-06,
      "loss": 0.7291,
      "step": 11
    },
    {
      "epoch": 0.2171945701357466,
      "grad_norm": 1.6227195871257627,
      "learning_rate": 7.058823529411766e-06,
      "loss": 0.7187,
      "step": 12
    },
    {
      "epoch": 0.23529411764705882,
      "grad_norm": 2.117879729057304,
      "learning_rate": 7.647058823529411e-06,
      "loss": 0.7101,
      "step": 13
    },
    {
      "epoch": 0.25339366515837103,
      "grad_norm": 1.6475677575928487,
      "learning_rate": 8.23529411764706e-06,
      "loss": 0.693,
      "step": 14
    },
    {
      "epoch": 0.27149321266968324,
      "grad_norm": 1.352033468624444,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.7142,
      "step": 15
    },
    {
      "epoch": 0.2895927601809955,
      "grad_norm": 1.0129577643301186,
      "learning_rate": 9.411764705882354e-06,
      "loss": 0.6941,
      "step": 16
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 1.3363060312344113,
      "learning_rate": 1e-05,
      "loss": 0.6946,
      "step": 17
    },
    {
      "epoch": 0.3257918552036199,
      "grad_norm": 0.9818113208945979,
      "learning_rate": 9.998873580873848e-06,
      "loss": 0.6559,
      "step": 18
    },
    {
      "epoch": 0.3438914027149321,
      "grad_norm": 0.9497100630620063,
      "learning_rate": 9.99549483102341e-06,
      "loss": 0.6293,
      "step": 19
    },
    {
      "epoch": 0.36199095022624433,
      "grad_norm": 0.7666994164661116,
      "learning_rate": 9.989865272804064e-06,
      "loss": 0.6363,
      "step": 20
    },
    {
      "epoch": 0.38009049773755654,
      "grad_norm": 0.7657473460823022,
      "learning_rate": 9.981987442712634e-06,
      "loss": 0.619,
      "step": 21
    },
    {
      "epoch": 0.39819004524886875,
      "grad_norm": 0.7395769869216733,
      "learning_rate": 9.971864890244514e-06,
      "loss": 0.6137,
      "step": 22
    },
    {
      "epoch": 0.416289592760181,
      "grad_norm": 0.6202139618434414,
      "learning_rate": 9.959502176294384e-06,
      "loss": 0.6142,
      "step": 23
    },
    {
      "epoch": 0.4343891402714932,
      "grad_norm": 0.7479430347437638,
      "learning_rate": 9.944904871101227e-06,
      "loss": 0.618,
      "step": 24
    },
    {
      "epoch": 0.45248868778280543,
      "grad_norm": 0.6779505395481803,
      "learning_rate": 9.928079551738542e-06,
      "loss": 0.6177,
      "step": 25
    },
    {
      "epoch": 0.47058823529411764,
      "grad_norm": 0.5363485701627213,
      "learning_rate": 9.909033799150947e-06,
      "loss": 0.6323,
      "step": 26
    },
    {
      "epoch": 0.48868778280542985,
      "grad_norm": 0.5974955089825978,
      "learning_rate": 9.887776194738433e-06,
      "loss": 0.5826,
      "step": 27
    },
    {
      "epoch": 0.5067873303167421,
      "grad_norm": 0.5444695893455697,
      "learning_rate": 9.864316316489873e-06,
      "loss": 0.578,
      "step": 28
    },
    {
      "epoch": 0.5248868778280543,
      "grad_norm": 0.4489447311178371,
      "learning_rate": 9.838664734667496e-06,
      "loss": 0.6192,
      "step": 29
    },
    {
      "epoch": 0.5429864253393665,
      "grad_norm": 0.5254652960581311,
      "learning_rate": 9.810833007044247e-06,
      "loss": 0.5858,
      "step": 30
    },
    {
      "epoch": 0.5610859728506787,
      "grad_norm": 0.43507105065822094,
      "learning_rate": 9.780833673696255e-06,
      "loss": 0.5912,
      "step": 31
    },
    {
      "epoch": 0.579185520361991,
      "grad_norm": 0.5884456720678961,
      "learning_rate": 9.74868025135266e-06,
      "loss": 0.601,
      "step": 32
    },
    {
      "epoch": 0.5972850678733032,
      "grad_norm": 0.446124158858029,
      "learning_rate": 9.714387227305422e-06,
      "loss": 0.5716,
      "step": 33
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.4999259943183043,
      "learning_rate": 9.677970052881811e-06,
      "loss": 0.6004,
      "step": 34
    },
    {
      "epoch": 0.6334841628959276,
      "grad_norm": 0.5452059230819422,
      "learning_rate": 9.639445136482549e-06,
      "loss": 0.5771,
      "step": 35
    },
    {
      "epoch": 0.6515837104072398,
      "grad_norm": 0.45682271068627184,
      "learning_rate": 9.598829836188694e-06,
      "loss": 0.5837,
      "step": 36
    },
    {
      "epoch": 0.669683257918552,
      "grad_norm": 0.43493826579652684,
      "learning_rate": 9.55614245194068e-06,
      "loss": 0.5963,
      "step": 37
    },
    {
      "epoch": 0.6877828054298643,
      "grad_norm": 0.42543550395893537,
      "learning_rate": 9.511402217292927e-06,
      "loss": 0.5633,
      "step": 38
    },
    {
      "epoch": 0.7058823529411765,
      "grad_norm": 0.3891549600693992,
      "learning_rate": 9.464629290747844e-06,
      "loss": 0.5607,
      "step": 39
    },
    {
      "epoch": 0.7239819004524887,
      "grad_norm": 0.4002335108145771,
      "learning_rate": 9.415844746673047e-06,
      "loss": 0.5778,
      "step": 40
    },
    {
      "epoch": 0.7420814479638009,
      "grad_norm": 0.4287499076319389,
      "learning_rate": 9.365070565805941e-06,
      "loss": 0.5649,
      "step": 41
    },
    {
      "epoch": 0.7601809954751131,
      "grad_norm": 0.38942600340952016,
      "learning_rate": 9.312329625349903e-06,
      "loss": 0.5701,
      "step": 42
    },
    {
      "epoch": 0.7782805429864253,
      "grad_norm": 0.3306475208382961,
      "learning_rate": 9.257645688666557e-06,
      "loss": 0.5641,
      "step": 43
    },
    {
      "epoch": 0.7963800904977375,
      "grad_norm": 0.37084164933852937,
      "learning_rate": 9.201043394568773e-06,
      "loss": 0.5511,
      "step": 44
    },
    {
      "epoch": 0.8144796380090498,
      "grad_norm": 0.3494129880763865,
      "learning_rate": 9.142548246219212e-06,
      "loss": 0.5626,
      "step": 45
    },
    {
      "epoch": 0.832579185520362,
      "grad_norm": 0.36906169844507203,
      "learning_rate": 9.082186599639429e-06,
      "loss": 0.5659,
      "step": 46
    },
    {
      "epoch": 0.8506787330316742,
      "grad_norm": 0.3758901401117832,
      "learning_rate": 9.019985651834703e-06,
      "loss": 0.572,
      "step": 47
    },
    {
      "epoch": 0.8687782805429864,
      "grad_norm": 0.32671844293334396,
      "learning_rate": 8.955973428539943e-06,
      "loss": 0.5755,
      "step": 48
    },
    {
      "epoch": 0.8868778280542986,
      "grad_norm": 0.4489028958210117,
      "learning_rate": 8.890178771592198e-06,
      "loss": 0.578,
      "step": 49
    },
    {
      "epoch": 0.9049773755656109,
      "grad_norm": 0.39698421743018597,
      "learning_rate": 8.822631325935463e-06,
      "loss": 0.5742,
      "step": 50
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.40912494828005824,
      "learning_rate": 8.753361526263622e-06,
      "loss": 0.5581,
      "step": 51
    },
    {
      "epoch": 0.9411764705882353,
      "grad_norm": 0.46734953420703257,
      "learning_rate": 8.682400583307562e-06,
      "loss": 0.5684,
      "step": 52
    },
    {
      "epoch": 0.9592760180995475,
      "grad_norm": 0.3467889011356855,
      "learning_rate": 8.609780469772623e-06,
      "loss": 0.556,
      "step": 53
    },
    {
      "epoch": 0.9773755656108597,
      "grad_norm": 0.5085559424990912,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.5514,
      "step": 54
    },
    {
      "epoch": 0.995475113122172,
      "grad_norm": 0.4523513658790744,
      "learning_rate": 8.459694344887732e-06,
      "loss": 0.5925,
      "step": 55
    },
    {
      "epoch": 1.0135746606334841,
      "grad_norm": 0.7340348802970464,
      "learning_rate": 8.382295957490435e-06,
      "loss": 0.9574,
      "step": 56
    },
    {
      "epoch": 1.0316742081447963,
      "grad_norm": 0.420046446649172,
      "learning_rate": 8.303373616950408e-06,
      "loss": 0.5356,
      "step": 57
    },
    {
      "epoch": 1.0497737556561086,
      "grad_norm": 0.3421282037573979,
      "learning_rate": 8.222962883121196e-06,
      "loss": 0.5521,
      "step": 58
    },
    {
      "epoch": 1.0678733031674208,
      "grad_norm": 0.4190729595247935,
      "learning_rate": 8.141099986478212e-06,
      "loss": 0.5362,
      "step": 59
    },
    {
      "epoch": 1.085972850678733,
      "grad_norm": 0.359499883552521,
      "learning_rate": 8.057821811794457e-06,
      "loss": 0.541,
      "step": 60
    },
    {
      "epoch": 1.1040723981900453,
      "grad_norm": 0.3756476194471776,
      "learning_rate": 7.973165881521435e-06,
      "loss": 0.5467,
      "step": 61
    },
    {
      "epoch": 1.1221719457013575,
      "grad_norm": 0.35661012462013447,
      "learning_rate": 7.887170338882742e-06,
      "loss": 0.5462,
      "step": 62
    },
    {
      "epoch": 1.1402714932126696,
      "grad_norm": 0.3498507388990045,
      "learning_rate": 7.799873930687979e-06,
      "loss": 0.5171,
      "step": 63
    },
    {
      "epoch": 1.1583710407239818,
      "grad_norm": 0.3170076180552228,
      "learning_rate": 7.711315989874677e-06,
      "loss": 0.507,
      "step": 64
    },
    {
      "epoch": 1.1764705882352942,
      "grad_norm": 0.3037529085890902,
      "learning_rate": 7.621536417786159e-06,
      "loss": 0.5174,
      "step": 65
    },
    {
      "epoch": 1.1945701357466063,
      "grad_norm": 0.342570759484934,
      "learning_rate": 7.530575666193283e-06,
      "loss": 0.5317,
      "step": 66
    },
    {
      "epoch": 1.2126696832579185,
      "grad_norm": 0.30104088055723055,
      "learning_rate": 7.438474719068174e-06,
      "loss": 0.5274,
      "step": 67
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 0.326669875074583,
      "learning_rate": 7.3452750741181855e-06,
      "loss": 0.5135,
      "step": 68
    },
    {
      "epoch": 1.248868778280543,
      "grad_norm": 0.3551023399546242,
      "learning_rate": 7.251018724088367e-06,
      "loss": 0.5204,
      "step": 69
    },
    {
      "epoch": 1.2669683257918551,
      "grad_norm": 0.37895672243853146,
      "learning_rate": 7.155748137840892e-06,
      "loss": 0.5479,
      "step": 70
    },
    {
      "epoch": 1.2850678733031673,
      "grad_norm": 0.36133670714468175,
      "learning_rate": 7.059506241219964e-06,
      "loss": 0.5434,
      "step": 71
    },
    {
      "epoch": 1.3031674208144797,
      "grad_norm": 0.33317369601816094,
      "learning_rate": 6.962336397710819e-06,
      "loss": 0.5253,
      "step": 72
    },
    {
      "epoch": 1.3212669683257918,
      "grad_norm": 0.28803695913966876,
      "learning_rate": 6.864282388901544e-06,
      "loss": 0.5076,
      "step": 73
    },
    {
      "epoch": 1.3393665158371042,
      "grad_norm": 0.3913327961715048,
      "learning_rate": 6.765388394756504e-06,
      "loss": 0.5365,
      "step": 74
    },
    {
      "epoch": 1.3574660633484164,
      "grad_norm": 0.2977675871279601,
      "learning_rate": 6.665698973710289e-06,
      "loss": 0.5393,
      "step": 75
    },
    {
      "epoch": 1.3755656108597285,
      "grad_norm": 0.3702071945183516,
      "learning_rate": 6.565259042591112e-06,
      "loss": 0.517,
      "step": 76
    },
    {
      "epoch": 1.3936651583710407,
      "grad_norm": 0.35663025248537605,
      "learning_rate": 6.464113856382752e-06,
      "loss": 0.5218,
      "step": 77
    },
    {
      "epoch": 1.4117647058823528,
      "grad_norm": 0.3846006097732817,
      "learning_rate": 6.3623089878341146e-06,
      "loss": 0.5281,
      "step": 78
    },
    {
      "epoch": 1.4298642533936652,
      "grad_norm": 0.38145233359354735,
      "learning_rate": 6.259890306925627e-06,
      "loss": 0.5494,
      "step": 79
    },
    {
      "epoch": 1.4479638009049773,
      "grad_norm": 0.321686326326639,
      "learning_rate": 6.156903960201709e-06,
      "loss": 0.5215,
      "step": 80
    },
    {
      "epoch": 1.4660633484162897,
      "grad_norm": 0.3553248299309718,
      "learning_rate": 6.053396349978632e-06,
      "loss": 0.5124,
      "step": 81
    },
    {
      "epoch": 1.4841628959276019,
      "grad_norm": 0.3688316984975195,
      "learning_rate": 5.949414113437142e-06,
      "loss": 0.5297,
      "step": 82
    },
    {
      "epoch": 1.502262443438914,
      "grad_norm": 0.31268728687027997,
      "learning_rate": 5.8450041016092465e-06,
      "loss": 0.5395,
      "step": 83
    },
    {
      "epoch": 1.5203619909502262,
      "grad_norm": 0.36390895213403174,
      "learning_rate": 5.740213358268658e-06,
      "loss": 0.5247,
      "step": 84
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.3532101356565584,
      "learning_rate": 5.635089098734394e-06,
      "loss": 0.5121,
      "step": 85
    },
    {
      "epoch": 1.5565610859728507,
      "grad_norm": 0.30616495012538836,
      "learning_rate": 5.529678688597081e-06,
      "loss": 0.5301,
      "step": 86
    },
    {
      "epoch": 1.5746606334841629,
      "grad_norm": 0.41168813574652363,
      "learning_rate": 5.4240296223775465e-06,
      "loss": 0.5297,
      "step": 87
    },
    {
      "epoch": 1.5927601809954752,
      "grad_norm": 0.3480419851531957,
      "learning_rate": 5.318189502127332e-06,
      "loss": 0.5346,
      "step": 88
    },
    {
      "epoch": 1.6108597285067874,
      "grad_norm": 0.3951211138988914,
      "learning_rate": 5.212206015980742e-06,
      "loss": 0.5442,
      "step": 89
    },
    {
      "epoch": 1.6289592760180995,
      "grad_norm": 0.32142900262246665,
      "learning_rate": 5.106126916668118e-06,
      "loss": 0.5139,
      "step": 90
    },
    {
      "epoch": 1.6470588235294117,
      "grad_norm": 0.372934660354091,
      "learning_rate": 5e-06,
      "loss": 0.537,
      "step": 91
    },
    {
      "epoch": 1.6651583710407238,
      "grad_norm": 0.31940542344772543,
      "learning_rate": 4.8938730833318825e-06,
      "loss": 0.5255,
      "step": 92
    },
    {
      "epoch": 1.6832579185520362,
      "grad_norm": 0.3484072513306141,
      "learning_rate": 4.78779398401926e-06,
      "loss": 0.5312,
      "step": 93
    },
    {
      "epoch": 1.7013574660633484,
      "grad_norm": 0.305688116719012,
      "learning_rate": 4.6818104978726685e-06,
      "loss": 0.5404,
      "step": 94
    },
    {
      "epoch": 1.7194570135746607,
      "grad_norm": 0.2610608476826038,
      "learning_rate": 4.575970377622456e-06,
      "loss": 0.5308,
      "step": 95
    },
    {
      "epoch": 1.737556561085973,
      "grad_norm": 0.323315649537396,
      "learning_rate": 4.47032131140292e-06,
      "loss": 0.5129,
      "step": 96
    },
    {
      "epoch": 1.755656108597285,
      "grad_norm": 0.2868605984916918,
      "learning_rate": 4.364910901265607e-06,
      "loss": 0.5324,
      "step": 97
    },
    {
      "epoch": 1.7737556561085972,
      "grad_norm": 0.2480631165360626,
      "learning_rate": 4.259786641731344e-06,
      "loss": 0.5204,
      "step": 98
    },
    {
      "epoch": 1.7918552036199094,
      "grad_norm": 0.2845617409603977,
      "learning_rate": 4.154995898390756e-06,
      "loss": 0.5207,
      "step": 99
    },
    {
      "epoch": 1.8099547511312217,
      "grad_norm": 0.2901557749018309,
      "learning_rate": 4.050585886562858e-06,
      "loss": 0.5401,
      "step": 100
    },
    {
      "epoch": 1.8280542986425339,
      "grad_norm": 0.2577789829212818,
      "learning_rate": 3.94660365002137e-06,
      "loss": 0.525,
      "step": 101
    },
    {
      "epoch": 1.8461538461538463,
      "grad_norm": 0.28060392539372403,
      "learning_rate": 3.843096039798293e-06,
      "loss": 0.5267,
      "step": 102
    },
    {
      "epoch": 1.8642533936651584,
      "grad_norm": 0.27493292579589645,
      "learning_rate": 3.7401096930743753e-06,
      "loss": 0.5225,
      "step": 103
    },
    {
      "epoch": 1.8823529411764706,
      "grad_norm": 0.2503147317440557,
      "learning_rate": 3.6376910121658867e-06,
      "loss": 0.5183,
      "step": 104
    },
    {
      "epoch": 1.9004524886877827,
      "grad_norm": 0.29883473530138427,
      "learning_rate": 3.5358861436172487e-06,
      "loss": 0.5338,
      "step": 105
    },
    {
      "epoch": 1.9185520361990949,
      "grad_norm": 0.3003751720185607,
      "learning_rate": 3.4347409574088896e-06,
      "loss": 0.5248,
      "step": 106
    },
    {
      "epoch": 1.9366515837104072,
      "grad_norm": 0.296798930565972,
      "learning_rate": 3.3343010262897125e-06,
      "loss": 0.5172,
      "step": 107
    },
    {
      "epoch": 1.9547511312217196,
      "grad_norm": 0.29960841020898205,
      "learning_rate": 3.234611605243496e-06,
      "loss": 0.5416,
      "step": 108
    },
    {
      "epoch": 1.9728506787330318,
      "grad_norm": 0.24656341443926255,
      "learning_rate": 3.1357176110984578e-06,
      "loss": 0.5169,
      "step": 109
    },
    {
      "epoch": 1.990950226244344,
      "grad_norm": 0.24416915079855595,
      "learning_rate": 3.0376636022891813e-06,
      "loss": 0.5031,
      "step": 110
    },
    {
      "epoch": 2.009049773755656,
      "grad_norm": 0.5106478330596561,
      "learning_rate": 2.9404937587800374e-06,
      "loss": 0.8733,
      "step": 111
    },
    {
      "epoch": 2.0271493212669682,
      "grad_norm": 0.28895565841398857,
      "learning_rate": 2.8442518621591085e-06,
      "loss": 0.5102,
      "step": 112
    },
    {
      "epoch": 2.0452488687782804,
      "grad_norm": 0.274250226920425,
      "learning_rate": 2.748981275911633e-06,
      "loss": 0.4991,
      "step": 113
    },
    {
      "epoch": 2.0633484162895925,
      "grad_norm": 0.2683303919910382,
      "learning_rate": 2.6547249258818162e-06,
      "loss": 0.4811,
      "step": 114
    },
    {
      "epoch": 2.081447963800905,
      "grad_norm": 0.2670782977498998,
      "learning_rate": 2.5615252809318287e-06,
      "loss": 0.5246,
      "step": 115
    },
    {
      "epoch": 2.0995475113122173,
      "grad_norm": 0.25059501980164506,
      "learning_rate": 2.469424333806718e-06,
      "loss": 0.5181,
      "step": 116
    },
    {
      "epoch": 2.1176470588235294,
      "grad_norm": 0.28254518452392197,
      "learning_rate": 2.3784635822138424e-06,
      "loss": 0.5112,
      "step": 117
    },
    {
      "epoch": 2.1357466063348416,
      "grad_norm": 0.29324248893783117,
      "learning_rate": 2.288684010125325e-06,
      "loss": 0.5114,
      "step": 118
    },
    {
      "epoch": 2.1538461538461537,
      "grad_norm": 0.2503268575452895,
      "learning_rate": 2.2001260693120236e-06,
      "loss": 0.4967,
      "step": 119
    },
    {
      "epoch": 2.171945701357466,
      "grad_norm": 0.2699479211486462,
      "learning_rate": 2.1128296611172593e-06,
      "loss": 0.4959,
      "step": 120
    },
    {
      "epoch": 2.1900452488687785,
      "grad_norm": 0.2580177426665245,
      "learning_rate": 2.0268341184785674e-06,
      "loss": 0.5088,
      "step": 121
    },
    {
      "epoch": 2.2081447963800906,
      "grad_norm": 0.2508244895204738,
      "learning_rate": 1.9421781882055447e-06,
      "loss": 0.4917,
      "step": 122
    },
    {
      "epoch": 2.226244343891403,
      "grad_norm": 0.2792571659281552,
      "learning_rate": 1.8589000135217882e-06,
      "loss": 0.5023,
      "step": 123
    },
    {
      "epoch": 2.244343891402715,
      "grad_norm": 0.23158505061350954,
      "learning_rate": 1.7770371168788042e-06,
      "loss": 0.4863,
      "step": 124
    },
    {
      "epoch": 2.262443438914027,
      "grad_norm": 0.2683239496999239,
      "learning_rate": 1.6966263830495939e-06,
      "loss": 0.4873,
      "step": 125
    },
    {
      "epoch": 2.2805429864253393,
      "grad_norm": 0.24178359031260568,
      "learning_rate": 1.6177040425095664e-06,
      "loss": 0.4977,
      "step": 126
    },
    {
      "epoch": 2.2986425339366514,
      "grad_norm": 0.2542786308206507,
      "learning_rate": 1.5403056551122697e-06,
      "loss": 0.4998,
      "step": 127
    },
    {
      "epoch": 2.3167420814479636,
      "grad_norm": 0.2345038790062461,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.5061,
      "step": 128
    },
    {
      "epoch": 2.334841628959276,
      "grad_norm": 0.2494018788793317,
      "learning_rate": 1.390219530227378e-06,
      "loss": 0.5169,
      "step": 129
    },
    {
      "epoch": 2.3529411764705883,
      "grad_norm": 0.23720925343998778,
      "learning_rate": 1.3175994166924394e-06,
      "loss": 0.5023,
      "step": 130
    },
    {
      "epoch": 2.3710407239819005,
      "grad_norm": 0.20440215406384443,
      "learning_rate": 1.246638473736378e-06,
      "loss": 0.4753,
      "step": 131
    },
    {
      "epoch": 2.3891402714932126,
      "grad_norm": 0.21449535787548765,
      "learning_rate": 1.1773686740645384e-06,
      "loss": 0.5136,
      "step": 132
    },
    {
      "epoch": 2.4072398190045248,
      "grad_norm": 0.24588391549876998,
      "learning_rate": 1.1098212284078037e-06,
      "loss": 0.5009,
      "step": 133
    },
    {
      "epoch": 2.425339366515837,
      "grad_norm": 0.2388777734423218,
      "learning_rate": 1.0440265714600573e-06,
      "loss": 0.5246,
      "step": 134
    },
    {
      "epoch": 2.4434389140271495,
      "grad_norm": 0.23579449100780014,
      "learning_rate": 9.80014348165298e-07,
      "loss": 0.5026,
      "step": 135
    },
    {
      "epoch": 2.4615384615384617,
      "grad_norm": 0.22823611772484245,
      "learning_rate": 9.178134003605721e-07,
      "loss": 0.492,
      "step": 136
    },
    {
      "epoch": 2.479638009049774,
      "grad_norm": 0.21111741055317057,
      "learning_rate": 8.574517537807897e-07,
      "loss": 0.5105,
      "step": 137
    },
    {
      "epoch": 2.497737556561086,
      "grad_norm": 0.23249058681576157,
      "learning_rate": 7.989566054312286e-07,
      "loss": 0.5143,
      "step": 138
    },
    {
      "epoch": 2.515837104072398,
      "grad_norm": 0.21764828402899594,
      "learning_rate": 7.423543113334436e-07,
      "loss": 0.4849,
      "step": 139
    },
    {
      "epoch": 2.5339366515837103,
      "grad_norm": 0.24396341429275906,
      "learning_rate": 6.876703746500984e-07,
      "loss": 0.4903,
      "step": 140
    },
    {
      "epoch": 2.5520361990950224,
      "grad_norm": 0.22499941187789713,
      "learning_rate": 6.349294341940593e-07,
      "loss": 0.4772,
      "step": 141
    },
    {
      "epoch": 2.5701357466063346,
      "grad_norm": 0.23288553371110793,
      "learning_rate": 5.841552533269534e-07,
      "loss": 0.5162,
      "step": 142
    },
    {
      "epoch": 2.588235294117647,
      "grad_norm": 0.205460117436211,
      "learning_rate": 5.353707092521581e-07,
      "loss": 0.4976,
      "step": 143
    },
    {
      "epoch": 2.6063348416289593,
      "grad_norm": 0.2214846135707125,
      "learning_rate": 4.885977827070748e-07,
      "loss": 0.5116,
      "step": 144
    },
    {
      "epoch": 2.6244343891402715,
      "grad_norm": 0.22509550835701087,
      "learning_rate": 4.43857548059321e-07,
      "loss": 0.5115,
      "step": 145
    },
    {
      "epoch": 2.6425339366515836,
      "grad_norm": 0.2041589989166428,
      "learning_rate": 4.0117016381130636e-07,
      "loss": 0.4798,
      "step": 146
    },
    {
      "epoch": 2.660633484162896,
      "grad_norm": 0.22016570643213332,
      "learning_rate": 3.6055486351745327e-07,
      "loss": 0.4998,
      "step": 147
    },
    {
      "epoch": 2.6787330316742084,
      "grad_norm": 0.2478568518194293,
      "learning_rate": 3.220299471181898e-07,
      "loss": 0.5039,
      "step": 148
    },
    {
      "epoch": 2.6968325791855206,
      "grad_norm": 0.20717209753663476,
      "learning_rate": 2.85612772694579e-07,
      "loss": 0.4928,
      "step": 149
    },
    {
      "epoch": 2.7149321266968327,
      "grad_norm": 0.21806938834032513,
      "learning_rate": 2.5131974864734063e-07,
      "loss": 0.5131,
      "step": 150
    },
    {
      "epoch": 2.733031674208145,
      "grad_norm": 0.21612495922499686,
      "learning_rate": 2.1916632630374579e-07,
      "loss": 0.5145,
      "step": 151
    },
    {
      "epoch": 2.751131221719457,
      "grad_norm": 0.21635896851957898,
      "learning_rate": 1.8916699295575324e-07,
      "loss": 0.4964,
      "step": 152
    },
    {
      "epoch": 2.769230769230769,
      "grad_norm": 0.19798724526464564,
      "learning_rate": 1.6133526533250566e-07,
      "loss": 0.4855,
      "step": 153
    },
    {
      "epoch": 2.7873303167420813,
      "grad_norm": 0.20783663967394034,
      "learning_rate": 1.3568368351012718e-07,
      "loss": 0.4966,
      "step": 154
    },
    {
      "epoch": 2.8054298642533935,
      "grad_norm": 0.19191009162032222,
      "learning_rate": 1.1222380526156929e-07,
      "loss": 0.4974,
      "step": 155
    },
    {
      "epoch": 2.8235294117647056,
      "grad_norm": 0.21216955411007024,
      "learning_rate": 9.096620084905472e-08,
      "loss": 0.5191,
      "step": 156
    },
    {
      "epoch": 2.841628959276018,
      "grad_norm": 0.21639489053110292,
      "learning_rate": 7.192044826145772e-08,
      "loss": 0.5268,
      "step": 157
    },
    {
      "epoch": 2.8597285067873304,
      "grad_norm": 0.21244774609292955,
      "learning_rate": 5.509512889877333e-08,
      "loss": 0.5088,
      "step": 158
    },
    {
      "epoch": 2.8778280542986425,
      "grad_norm": 0.21215837031481655,
      "learning_rate": 4.0497823705615836e-08,
      "loss": 0.4942,
      "step": 159
    },
    {
      "epoch": 2.8959276018099547,
      "grad_norm": 0.2070665647344241,
      "learning_rate": 2.8135109755487723e-08,
      "loss": 0.5331,
      "step": 160
    },
    {
      "epoch": 2.914027149321267,
      "grad_norm": 0.19955418175982154,
      "learning_rate": 1.8012557287367394e-08,
      "loss": 0.4901,
      "step": 161
    },
    {
      "epoch": 2.9321266968325794,
      "grad_norm": 0.2052841051806031,
      "learning_rate": 1.0134727195937332e-08,
      "loss": 0.5096,
      "step": 162
    },
    {
      "epoch": 2.9502262443438916,
      "grad_norm": 0.22007204293809482,
      "learning_rate": 4.505168976592922e-09,
      "loss": 0.5248,
      "step": 163
    },
    {
      "epoch": 2.9683257918552037,
      "grad_norm": 0.21247616301770453,
      "learning_rate": 1.1264191261528557e-09,
      "loss": 0.5081,
      "step": 164
    },
    {
      "epoch": 2.986425339366516,
      "grad_norm": 0.20780388742772332,
      "learning_rate": 0.0,
      "loss": 0.5126,
      "step": 165
    },
    {
      "epoch": 2.986425339366516,
      "step": 165,
      "total_flos": 518749773889536.0,
      "train_loss": 0.5635366864276655,
      "train_runtime": 44509.1752,
      "train_samples_per_second": 0.357,
      "train_steps_per_second": 0.004
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 165,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 518749773889536.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}