{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 336,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008928571428571428,
      "grad_norm": 5.458911610311603,
      "learning_rate": 2.3529411764705885e-06,
      "loss": 0.9075,
      "step": 1
    },
    {
      "epoch": 0.017857142857142856,
      "grad_norm": 5.459968806476413,
      "learning_rate": 4.705882352941177e-06,
      "loss": 0.9041,
      "step": 2
    },
    {
      "epoch": 0.026785714285714284,
      "grad_norm": 5.116219510257077,
      "learning_rate": 7.058823529411766e-06,
      "loss": 0.8974,
      "step": 3
    },
    {
      "epoch": 0.03571428571428571,
      "grad_norm": 3.7608817755792816,
      "learning_rate": 9.411764705882354e-06,
      "loss": 0.8634,
      "step": 4
    },
    {
      "epoch": 0.044642857142857144,
      "grad_norm": 1.9769831033319734,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 0.8282,
      "step": 5
    },
    {
      "epoch": 0.05357142857142857,
      "grad_norm": 5.097457631844493,
      "learning_rate": 1.4117647058823532e-05,
      "loss": 0.8469,
      "step": 6
    },
    {
      "epoch": 0.0625,
      "grad_norm": 6.677710780281878,
      "learning_rate": 1.647058823529412e-05,
      "loss": 0.8328,
      "step": 7
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 6.801682007638335,
      "learning_rate": 1.8823529411764708e-05,
      "loss": 0.8458,
      "step": 8
    },
    {
      "epoch": 0.08035714285714286,
      "grad_norm": 3.7515376420989193,
      "learning_rate": 2.1176470588235296e-05,
      "loss": 0.7872,
      "step": 9
    },
    {
      "epoch": 0.08928571428571429,
      "grad_norm": 2.8621020997538817,
      "learning_rate": 2.3529411764705884e-05,
      "loss": 0.7587,
      "step": 10
    },
    {
      "epoch": 0.09821428571428571,
      "grad_norm": 2.451873169585479,
      "learning_rate": 2.5882352941176475e-05,
      "loss": 0.7254,
      "step": 11
    },
    {
      "epoch": 0.10714285714285714,
      "grad_norm": 1.409713255422223,
      "learning_rate": 2.8235294117647063e-05,
      "loss": 0.7001,
      "step": 12
    },
    {
      "epoch": 0.11607142857142858,
      "grad_norm": 1.3093345645984427,
      "learning_rate": 3.0588235294117644e-05,
      "loss": 0.6808,
      "step": 13
    },
    {
      "epoch": 0.125,
      "grad_norm": 1.0595350381669622,
      "learning_rate": 3.294117647058824e-05,
      "loss": 0.6725,
      "step": 14
    },
    {
      "epoch": 0.13392857142857142,
      "grad_norm": 1.1345682356803515,
      "learning_rate": 3.529411764705883e-05,
      "loss": 0.6625,
      "step": 15
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 1.1508222129857923,
      "learning_rate": 3.7647058823529415e-05,
      "loss": 0.6508,
      "step": 16
    },
    {
      "epoch": 0.15178571428571427,
      "grad_norm": 1.3093404043153851,
      "learning_rate": 4e-05,
      "loss": 0.6344,
      "step": 17
    },
    {
      "epoch": 0.16071428571428573,
      "grad_norm": 0.9342090430783941,
      "learning_rate": 4.235294117647059e-05,
      "loss": 0.6347,
      "step": 18
    },
    {
      "epoch": 0.16964285714285715,
      "grad_norm": 1.1694538889722852,
      "learning_rate": 4.470588235294118e-05,
      "loss": 0.626,
      "step": 19
    },
    {
      "epoch": 0.17857142857142858,
      "grad_norm": 1.3017562899458819,
      "learning_rate": 4.705882352941177e-05,
      "loss": 0.6116,
      "step": 20
    },
    {
      "epoch": 0.1875,
      "grad_norm": 1.3227965952570264,
      "learning_rate": 4.941176470588236e-05,
      "loss": 0.6116,
      "step": 21
    },
    {
      "epoch": 0.19642857142857142,
      "grad_norm": 0.6130823943695296,
      "learning_rate": 5.176470588235295e-05,
      "loss": 0.607,
      "step": 22
    },
    {
      "epoch": 0.20535714285714285,
      "grad_norm": 1.251454832596592,
      "learning_rate": 5.411764705882354e-05,
      "loss": 0.6033,
      "step": 23
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 0.8156899349803868,
      "learning_rate": 5.6470588235294126e-05,
      "loss": 0.6052,
      "step": 24
    },
    {
      "epoch": 0.22321428571428573,
      "grad_norm": 1.37947321601158,
      "learning_rate": 5.8823529411764714e-05,
      "loss": 0.6063,
      "step": 25
    },
    {
      "epoch": 0.23214285714285715,
      "grad_norm": 1.0776438582531838,
      "learning_rate": 6.117647058823529e-05,
      "loss": 0.5931,
      "step": 26
    },
    {
      "epoch": 0.24107142857142858,
      "grad_norm": 1.3960841262426464,
      "learning_rate": 6.352941176470589e-05,
      "loss": 0.584,
      "step": 27
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.8924864474776818,
      "learning_rate": 6.588235294117648e-05,
      "loss": 0.5893,
      "step": 28
    },
    {
      "epoch": 0.25892857142857145,
      "grad_norm": 0.969188961164555,
      "learning_rate": 6.823529411764707e-05,
      "loss": 0.5852,
      "step": 29
    },
    {
      "epoch": 0.26785714285714285,
      "grad_norm": 1.3198098256340314,
      "learning_rate": 7.058823529411765e-05,
      "loss": 0.5904,
      "step": 30
    },
    {
      "epoch": 0.2767857142857143,
      "grad_norm": 1.4766678918535208,
      "learning_rate": 7.294117647058824e-05,
      "loss": 0.588,
      "step": 31
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 1.0600070593717186,
      "learning_rate": 7.529411764705883e-05,
      "loss": 0.5903,
      "step": 32
    },
    {
      "epoch": 0.29464285714285715,
      "grad_norm": 1.4709971965977178,
      "learning_rate": 7.764705882352942e-05,
      "loss": 0.5856,
      "step": 33
    },
    {
      "epoch": 0.30357142857142855,
      "grad_norm": 9.732884272846816,
      "learning_rate": 8e-05,
      "loss": 0.5782,
      "step": 34
    },
    {
      "epoch": 0.3125,
      "grad_norm": 1.471329125996165,
      "learning_rate": 7.999783572751229e-05,
      "loss": 0.591,
      "step": 35
    },
    {
      "epoch": 0.32142857142857145,
      "grad_norm": 0.9582405842286696,
      "learning_rate": 7.999134314425292e-05,
      "loss": 0.5654,
      "step": 36
    },
    {
      "epoch": 0.33035714285714285,
      "grad_norm": 1.3392752737259712,
      "learning_rate": 7.998052295280786e-05,
      "loss": 0.5629,
      "step": 37
    },
    {
      "epoch": 0.3392857142857143,
      "grad_norm": 1.0198091139639283,
      "learning_rate": 7.996537632406925e-05,
      "loss": 0.5701,
      "step": 38
    },
    {
      "epoch": 0.3482142857142857,
      "grad_norm": 0.9130649378368526,
      "learning_rate": 7.994590489710867e-05,
      "loss": 0.5667,
      "step": 39
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 1.1018536674455315,
      "learning_rate": 7.992211077899981e-05,
      "loss": 0.5621,
      "step": 40
    },
    {
      "epoch": 0.36607142857142855,
      "grad_norm": 1.3069162163076586,
      "learning_rate": 7.989399654459043e-05,
      "loss": 0.557,
      "step": 41
    },
    {
      "epoch": 0.375,
      "grad_norm": 0.9331234760948489,
      "learning_rate": 7.986156523622373e-05,
      "loss": 0.5621,
      "step": 42
    },
    {
      "epoch": 0.38392857142857145,
      "grad_norm": 0.72739280302451,
      "learning_rate": 7.982482036340912e-05,
      "loss": 0.5562,
      "step": 43
    },
    {
      "epoch": 0.39285714285714285,
      "grad_norm": 0.9580174839293588,
      "learning_rate": 7.978376590244249e-05,
      "loss": 0.5648,
      "step": 44
    },
    {
      "epoch": 0.4017857142857143,
      "grad_norm": 1.171231893769441,
      "learning_rate": 7.973840629597584e-05,
      "loss": 0.5549,
      "step": 45
    },
    {
      "epoch": 0.4107142857142857,
      "grad_norm": 0.6910616601450175,
      "learning_rate": 7.968874645253661e-05,
      "loss": 0.5554,
      "step": 46
    },
    {
      "epoch": 0.41964285714285715,
      "grad_norm": 0.6712508262420084,
      "learning_rate": 7.963479174599642e-05,
      "loss": 0.5539,
      "step": 47
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 0.8981493488966513,
      "learning_rate": 7.957654801498961e-05,
      "loss": 0.551,
      "step": 48
    },
    {
      "epoch": 0.4375,
      "grad_norm": 0.9743397904480438,
      "learning_rate": 7.951402156228145e-05,
      "loss": 0.5454,
      "step": 49
    },
    {
      "epoch": 0.44642857142857145,
      "grad_norm": 0.6631707178264241,
      "learning_rate": 7.944721915408596e-05,
      "loss": 0.534,
      "step": 50
    },
    {
      "epoch": 0.45535714285714285,
      "grad_norm": 0.6793775277783014,
      "learning_rate": 7.937614801933388e-05,
      "loss": 0.5451,
      "step": 51
    },
    {
      "epoch": 0.4642857142857143,
      "grad_norm": 0.8605556642034515,
      "learning_rate": 7.930081584889029e-05,
      "loss": 0.5403,
      "step": 52
    },
    {
      "epoch": 0.4732142857142857,
      "grad_norm": 0.7496935145980926,
      "learning_rate": 7.922123079472237e-05,
      "loss": 0.5437,
      "step": 53
    },
    {
      "epoch": 0.48214285714285715,
      "grad_norm": 0.5579609752590933,
      "learning_rate": 7.913740146901729e-05,
      "loss": 0.5352,
      "step": 54
    },
    {
      "epoch": 0.49107142857142855,
      "grad_norm": 0.7574194668910744,
      "learning_rate": 7.90493369432502e-05,
      "loss": 0.5402,
      "step": 55
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.9921442886534483,
      "learning_rate": 7.895704674720265e-05,
      "loss": 0.53,
      "step": 56
    },
    {
      "epoch": 0.5089285714285714,
      "grad_norm": 0.8243330121689626,
      "learning_rate": 7.88605408679312e-05,
      "loss": 0.5456,
      "step": 57
    },
    {
      "epoch": 0.5178571428571429,
      "grad_norm": 0.5792225695439277,
      "learning_rate": 7.875982974868684e-05,
      "loss": 0.5354,
      "step": 58
    },
    {
      "epoch": 0.5267857142857143,
      "grad_norm": 0.7457644638076486,
      "learning_rate": 7.865492428778483e-05,
      "loss": 0.5389,
      "step": 59
    },
    {
      "epoch": 0.5357142857142857,
      "grad_norm": 1.0227407044588295,
      "learning_rate": 7.854583583742527e-05,
      "loss": 0.5323,
      "step": 60
    },
    {
      "epoch": 0.5446428571428571,
      "grad_norm": 1.1493686113568151,
      "learning_rate": 7.843257620246477e-05,
      "loss": 0.5365,
      "step": 61
    },
    {
      "epoch": 0.5535714285714286,
      "grad_norm": 0.6003138642474546,
      "learning_rate": 7.831515763913891e-05,
      "loss": 0.5301,
      "step": 62
    },
    {
      "epoch": 0.5625,
      "grad_norm": 0.5676139643526338,
      "learning_rate": 7.819359285373604e-05,
      "loss": 0.5246,
      "step": 63
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.42378345040062193,
      "learning_rate": 7.806789500122212e-05,
      "loss": 0.5237,
      "step": 64
    },
    {
      "epoch": 0.5803571428571429,
      "grad_norm": 0.8692395127139948,
      "learning_rate": 7.793807768381743e-05,
      "loss": 0.52,
      "step": 65
    },
    {
      "epoch": 0.5892857142857143,
      "grad_norm": 0.7123093455414804,
      "learning_rate": 7.780415494952431e-05,
      "loss": 0.5313,
      "step": 66
    },
    {
      "epoch": 0.5982142857142857,
      "grad_norm": 0.7693658767139915,
      "learning_rate": 7.766614129060728e-05,
      "loss": 0.5206,
      "step": 67
    },
    {
      "epoch": 0.6071428571428571,
      "grad_norm": 0.5764493737404597,
      "learning_rate": 7.752405164202459e-05,
      "loss": 0.531,
      "step": 68
    },
    {
      "epoch": 0.6160714285714286,
      "grad_norm": 2.718091673505057,
      "learning_rate": 7.737790137981205e-05,
      "loss": 0.5296,
      "step": 69
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.8095852508927421,
      "learning_rate": 7.722770631941928e-05,
      "loss": 0.5288,
      "step": 70
    },
    {
      "epoch": 0.6339285714285714,
      "grad_norm": 3.3841935199500925,
      "learning_rate": 7.707348271399813e-05,
      "loss": 0.5502,
      "step": 71
    },
    {
      "epoch": 0.6428571428571429,
      "grad_norm": 1.745098535789061,
      "learning_rate": 7.691524725264389e-05,
      "loss": 0.5516,
      "step": 72
    },
    {
      "epoch": 0.6517857142857143,
      "grad_norm": 0.5725808597680306,
      "learning_rate": 7.675301705858935e-05,
      "loss": 0.5363,
      "step": 73
    },
    {
      "epoch": 0.6607142857142857,
      "grad_norm": 1.2584222907391829,
      "learning_rate": 7.658680968735181e-05,
      "loss": 0.5402,
      "step": 74
    },
    {
      "epoch": 0.6696428571428571,
      "grad_norm": 0.6752011986304464,
      "learning_rate": 7.641664312483328e-05,
      "loss": 0.5321,
      "step": 75
    },
    {
      "epoch": 0.6785714285714286,
      "grad_norm": 0.9309963276595731,
      "learning_rate": 7.624253578537424e-05,
      "loss": 0.532,
      "step": 76
    },
    {
      "epoch": 0.6875,
      "grad_norm": 0.6408984967666898,
      "learning_rate": 7.606450650976095e-05,
      "loss": 0.5367,
      "step": 77
    },
    {
      "epoch": 0.6964285714285714,
      "grad_norm": 0.6349526215998039,
      "learning_rate": 7.588257456318657e-05,
      "loss": 0.5302,
      "step": 78
    },
    {
      "epoch": 0.7053571428571429,
      "grad_norm": 0.5614114341482542,
      "learning_rate": 7.56967596331664e-05,
      "loss": 0.5202,
      "step": 79
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 0.5310297157289734,
      "learning_rate": 7.550708182740751e-05,
      "loss": 0.5327,
      "step": 80
    },
    {
      "epoch": 0.7232142857142857,
      "grad_norm": 0.4999950300313544,
      "learning_rate": 7.531356167163272e-05,
      "loss": 0.5256,
      "step": 81
    },
    {
      "epoch": 0.7321428571428571,
      "grad_norm": 0.41402163485309396,
      "learning_rate": 7.511622010735948e-05,
      "loss": 0.5241,
      "step": 82
    },
    {
      "epoch": 0.7410714285714286,
      "grad_norm": 0.43962685575693505,
      "learning_rate": 7.49150784896337e-05,
      "loss": 0.5215,
      "step": 83
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.3656133936292797,
      "learning_rate": 7.471015858471882e-05,
      "loss": 0.5081,
      "step": 84
    },
    {
      "epoch": 0.7589285714285714,
      "grad_norm": 0.3744198516273023,
      "learning_rate": 7.450148256774053e-05,
      "loss": 0.5142,
      "step": 85
    },
    {
      "epoch": 0.7678571428571429,
      "grad_norm": 0.40929219272400663,
      "learning_rate": 7.428907302028687e-05,
      "loss": 0.5196,
      "step": 86
    },
    {
      "epoch": 0.7767857142857143,
      "grad_norm": 0.4065715708969531,
      "learning_rate": 7.407295292796488e-05,
      "loss": 0.5204,
      "step": 87
    },
    {
      "epoch": 0.7857142857142857,
      "grad_norm": 0.34204445173011166,
      "learning_rate": 7.385314567791302e-05,
      "loss": 0.5183,
      "step": 88
    },
    {
      "epoch": 0.7946428571428571,
      "grad_norm": 0.32805261246572137,
      "learning_rate": 7.362967505627051e-05,
      "loss": 0.5213,
      "step": 89
    },
    {
      "epoch": 0.8035714285714286,
      "grad_norm": 0.3272013216504384,
      "learning_rate": 7.340256524560325e-05,
      "loss": 0.512,
      "step": 90
    },
    {
      "epoch": 0.8125,
      "grad_norm": 0.293354381788003,
      "learning_rate": 7.317184082228697e-05,
      "loss": 0.5069,
      "step": 91
    },
    {
      "epoch": 0.8214285714285714,
      "grad_norm": 0.4821061440980201,
      "learning_rate": 7.293752675384777e-05,
      "loss": 0.5107,
      "step": 92
    },
    {
      "epoch": 0.8303571428571429,
      "grad_norm": 0.693462071819636,
      "learning_rate": 7.269964839626024e-05,
      "loss": 0.5184,
      "step": 93
    },
    {
      "epoch": 0.8392857142857143,
      "grad_norm": 0.9969190523365147,
      "learning_rate": 7.245823149120362e-05,
      "loss": 0.5249,
      "step": 94
    },
    {
      "epoch": 0.8482142857142857,
      "grad_norm": 0.9850693615579191,
      "learning_rate": 7.221330216327619e-05,
      "loss": 0.5218,
      "step": 95
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.7805590557073407,
      "learning_rate": 7.196488691716824e-05,
      "loss": 0.5152,
      "step": 96
    },
    {
      "epoch": 0.8660714285714286,
      "grad_norm": 0.5570109379564657,
      "learning_rate": 7.17130126347939e-05,
      "loss": 0.5118,
      "step": 97
    },
    {
      "epoch": 0.875,
      "grad_norm": 0.5791838534781877,
      "learning_rate": 7.145770657238215e-05,
      "loss": 0.5095,
      "step": 98
    },
    {
      "epoch": 0.8839285714285714,
      "grad_norm": 0.6535836061766679,
      "learning_rate": 7.119899635752737e-05,
      "loss": 0.5066,
      "step": 99
    },
    {
      "epoch": 0.8928571428571429,
      "grad_norm": 0.4994295705575278,
      "learning_rate": 7.093690998619953e-05,
      "loss": 0.5108,
      "step": 100
    },
    {
      "epoch": 0.9017857142857143,
      "grad_norm": 0.5215314045730167,
      "learning_rate": 7.067147581971478e-05,
      "loss": 0.5144,
      "step": 101
    },
    {
      "epoch": 0.9107142857142857,
      "grad_norm": 0.556364903677944,
      "learning_rate": 7.040272258166633e-05,
      "loss": 0.5088,
      "step": 102
    },
    {
      "epoch": 0.9196428571428571,
      "grad_norm": 0.3522985441955947,
      "learning_rate": 7.013067935481614e-05,
      "loss": 0.5236,
      "step": 103
    },
    {
      "epoch": 0.9285714285714286,
      "grad_norm": 0.5091651011552728,
      "learning_rate": 6.985537557794776e-05,
      "loss": 0.501,
      "step": 104
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.3561680464897058,
      "learning_rate": 6.957684104268068e-05,
      "loss": 0.5023,
      "step": 105
    },
    {
      "epoch": 0.9464285714285714,
      "grad_norm": 0.40001485875356707,
      "learning_rate": 6.92951058902465e-05,
      "loss": 0.5066,
      "step": 106
    },
    {
      "epoch": 0.9553571428571429,
      "grad_norm": 0.36789962565963513,
      "learning_rate": 6.901020060822718e-05,
      "loss": 0.5042,
      "step": 107
    },
    {
      "epoch": 0.9642857142857143,
      "grad_norm": 0.4184528762777128,
      "learning_rate": 6.872215602725589e-05,
      "loss": 0.5128,
      "step": 108
    },
    {
      "epoch": 0.9732142857142857,
      "grad_norm": 0.4469489989551305,
      "learning_rate": 6.843100331768071e-05,
      "loss": 0.5042,
      "step": 109
    },
    {
      "epoch": 0.9821428571428571,
      "grad_norm": 0.3927827934106694,
      "learning_rate": 6.813677398619161e-05,
      "loss": 0.5014,
      "step": 110
    },
    {
      "epoch": 0.9910714285714286,
      "grad_norm": 0.30551724357758264,
      "learning_rate": 6.783949987241095e-05,
      "loss": 0.4939,
      "step": 111
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.3878355524761787,
      "learning_rate": 6.7539213145448e-05,
      "loss": 0.5016,
      "step": 112
    },
    {
      "epoch": 1.0089285714285714,
      "grad_norm": 0.3788672363521083,
      "learning_rate": 6.723594630041786e-05,
      "loss": 0.4729,
      "step": 113
    },
    {
      "epoch": 1.0178571428571428,
      "grad_norm": 0.3055300238162846,
      "learning_rate": 6.692973215492498e-05,
      "loss": 0.4692,
      "step": 114
    },
    {
      "epoch": 1.0267857142857142,
      "grad_norm": 0.3859310266273548,
      "learning_rate": 6.662060384551188e-05,
      "loss": 0.4707,
      "step": 115
    },
    {
      "epoch": 1.0357142857142858,
      "grad_norm": 0.5582243479324142,
      "learning_rate": 6.630859482407331e-05,
      "loss": 0.4722,
      "step": 116
    },
    {
      "epoch": 1.0446428571428572,
      "grad_norm": 0.6769506937788301,
      "learning_rate": 6.599373885423635e-05,
      "loss": 0.4729,
      "step": 117
    },
    {
      "epoch": 1.0535714285714286,
      "grad_norm": 0.6799095444990083,
      "learning_rate": 6.567607000770664e-05,
      "loss": 0.4778,
      "step": 118
    },
    {
      "epoch": 1.0625,
      "grad_norm": 0.6711015115485335,
      "learning_rate": 6.535562266058141e-05,
      "loss": 0.4783,
      "step": 119
    },
    {
      "epoch": 1.0714285714285714,
      "grad_norm": 0.6616384329423337,
      "learning_rate": 6.50324314896295e-05,
      "loss": 0.483,
      "step": 120
    },
    {
      "epoch": 1.0803571428571428,
      "grad_norm": 0.5335329011878119,
      "learning_rate": 6.470653146853895e-05,
      "loss": 0.4676,
      "step": 121
    },
    {
      "epoch": 1.0892857142857142,
      "grad_norm": 0.5280180658388947,
      "learning_rate": 6.437795786413217e-05,
      "loss": 0.4676,
      "step": 122
    },
    {
      "epoch": 1.0982142857142858,
      "grad_norm": 0.5203647507319533,
      "learning_rate": 6.40467462325498e-05,
      "loss": 0.4767,
      "step": 123
    },
    {
      "epoch": 1.1071428571428572,
      "grad_norm": 0.33866738617528636,
      "learning_rate": 6.371293241540292e-05,
      "loss": 0.4649,
      "step": 124
    },
    {
      "epoch": 1.1160714285714286,
      "grad_norm": 0.39685847712765815,
      "learning_rate": 6.337655253589456e-05,
      "loss": 0.4654,
      "step": 125
    },
    {
      "epoch": 1.125,
      "grad_norm": 0.2986911683728224,
      "learning_rate": 6.303764299491065e-05,
      "loss": 0.4656,
      "step": 126
    },
    {
      "epoch": 1.1339285714285714,
      "grad_norm": 0.27795327083232535,
      "learning_rate": 6.269624046708094e-05,
      "loss": 0.4674,
      "step": 127
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 0.2582126483978543,
      "learning_rate": 6.235238189681039e-05,
      "loss": 0.4653,
      "step": 128
    },
    {
      "epoch": 1.1517857142857142,
      "grad_norm": 0.26393000429448243,
      "learning_rate": 6.200610449428111e-05,
      "loss": 0.4643,
      "step": 129
    },
    {
      "epoch": 1.1607142857142858,
      "grad_norm": 0.2717121850307768,
      "learning_rate": 6.16574457314259e-05,
      "loss": 0.475,
      "step": 130
    },
    {
      "epoch": 1.1696428571428572,
      "grad_norm": 0.29888138759375577,
      "learning_rate": 6.130644333787317e-05,
      "loss": 0.4627,
      "step": 131
    },
    {
      "epoch": 1.1785714285714286,
      "grad_norm": 0.32040905560153704,
      "learning_rate": 6.095313529686406e-05,
      "loss": 0.4609,
      "step": 132
    },
    {
      "epoch": 1.1875,
      "grad_norm": 0.31544315623213,
      "learning_rate": 6.059755984114224e-05,
      "loss": 0.4591,
      "step": 133
    },
    {
      "epoch": 1.1964285714285714,
      "grad_norm": 0.35573100943666236,
      "learning_rate": 6.02397554488165e-05,
      "loss": 0.4655,
      "step": 134
    },
    {
      "epoch": 1.2053571428571428,
      "grad_norm": 0.3371073630997141,
      "learning_rate": 5.987976083919699e-05,
      "loss": 0.4602,
      "step": 135
    },
    {
      "epoch": 1.2142857142857142,
      "grad_norm": 0.24367143463905358,
      "learning_rate": 5.951761496860513e-05,
      "loss": 0.4686,
      "step": 136
    },
    {
      "epoch": 1.2232142857142858,
      "grad_norm": 0.18490249219086996,
      "learning_rate": 5.915335702615817e-05,
      "loss": 0.4694,
      "step": 137
    },
    {
      "epoch": 1.2321428571428572,
      "grad_norm": 0.3662203733341645,
      "learning_rate": 5.878702642952824e-05,
      "loss": 0.4562,
      "step": 138
    },
    {
      "epoch": 1.2410714285714286,
      "grad_norm": 0.44890191448322514,
      "learning_rate": 5.8418662820676954e-05,
      "loss": 0.4639,
      "step": 139
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.4261072543726084,
      "learning_rate": 5.80483060615655e-05,
      "loss": 0.4732,
      "step": 140
    },
    {
      "epoch": 1.2589285714285714,
      "grad_norm": 0.3422728954134477,
      "learning_rate": 5.7675996229841107e-05,
      "loss": 0.4619,
      "step": 141
    },
    {
      "epoch": 1.2678571428571428,
      "grad_norm": 0.3076389147366431,
      "learning_rate": 5.730177361450005e-05,
      "loss": 0.4722,
      "step": 142
    },
    {
      "epoch": 1.2767857142857144,
      "grad_norm": 0.2820370998236569,
      "learning_rate": 5.6925678711527865e-05,
      "loss": 0.468,
      "step": 143
    },
    {
      "epoch": 1.2857142857142856,
      "grad_norm": 0.21191838816193023,
      "learning_rate": 5.6547752219517136e-05,
      "loss": 0.4595,
      "step": 144
    },
    {
      "epoch": 1.2946428571428572,
      "grad_norm": 0.18589392586021203,
      "learning_rate": 5.616803503526328e-05,
      "loss": 0.4644,
      "step": 145
    },
    {
      "epoch": 1.3035714285714286,
      "grad_norm": 0.20528736580535784,
      "learning_rate": 5.578656824933906e-05,
      "loss": 0.458,
      "step": 146
    },
    {
      "epoch": 1.3125,
      "grad_norm": 0.25935591546202547,
      "learning_rate": 5.540339314164798e-05,
      "loss": 0.4625,
      "step": 147
    },
    {
      "epoch": 1.3214285714285714,
      "grad_norm": 0.2603875779161184,
      "learning_rate": 5.501855117695721e-05,
      "loss": 0.464,
      "step": 148
    },
    {
      "epoch": 1.3303571428571428,
      "grad_norm": 0.259347292325694,
      "learning_rate": 5.463208400041055e-05,
      "loss": 0.4697,
      "step": 149
    },
    {
      "epoch": 1.3392857142857144,
      "grad_norm": 0.2644029291060694,
      "learning_rate": 5.424403343302188e-05,
      "loss": 0.4576,
      "step": 150
    },
    {
      "epoch": 1.3482142857142856,
      "grad_norm": 0.2308783303320916,
      "learning_rate": 5.385444146714957e-05,
      "loss": 0.4564,
      "step": 151
    },
    {
      "epoch": 1.3571428571428572,
      "grad_norm": 0.22882568254344396,
      "learning_rate": 5.346335026195226e-05,
      "loss": 0.4721,
      "step": 152
    },
    {
      "epoch": 1.3660714285714286,
      "grad_norm": 0.23616899606363104,
      "learning_rate": 5.307080213882672e-05,
      "loss": 0.4662,
      "step": 153
    },
    {
      "epoch": 1.375,
      "grad_norm": 0.2885371827319689,
      "learning_rate": 5.267683957682812e-05,
      "loss": 0.462,
      "step": 154
    },
    {
      "epoch": 1.3839285714285714,
      "grad_norm": 0.2820779719230081,
      "learning_rate": 5.2281505208073147e-05,
      "loss": 0.4632,
      "step": 155
    },
    {
      "epoch": 1.3928571428571428,
      "grad_norm": 0.22468099441098213,
      "learning_rate": 5.18848418131267e-05,
      "loss": 0.4551,
      "step": 156
    },
    {
      "epoch": 1.4017857142857144,
      "grad_norm": 0.21751416007716054,
      "learning_rate": 5.1486892316372404e-05,
      "loss": 0.4722,
      "step": 157
    },
    {
      "epoch": 1.4107142857142856,
      "grad_norm": 0.27461792920575495,
      "learning_rate": 5.1087699781367625e-05,
      "loss": 0.463,
      "step": 158
    },
    {
      "epoch": 1.4196428571428572,
      "grad_norm": 0.2393265717043113,
      "learning_rate": 5.0687307406183406e-05,
      "loss": 0.4566,
      "step": 159
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.20221953908786175,
      "learning_rate": 5.028575851872984e-05,
      "loss": 0.4646,
      "step": 160
    },
    {
      "epoch": 1.4375,
      "grad_norm": 0.2208471324202507,
      "learning_rate": 4.988309657206742e-05,
      "loss": 0.4599,
      "step": 161
    },
    {
      "epoch": 1.4464285714285714,
      "grad_norm": 0.19928106325960415,
      "learning_rate": 4.947936513970476e-05,
      "loss": 0.4587,
      "step": 162
    },
    {
      "epoch": 1.4553571428571428,
      "grad_norm": 0.16352276342020416,
      "learning_rate": 4.907460791088347e-05,
      "loss": 0.4563,
      "step": 163
    },
    {
      "epoch": 1.4642857142857144,
      "grad_norm": 0.22322494415515243,
      "learning_rate": 4.8668868685850265e-05,
      "loss": 0.4607,
      "step": 164
    },
    {
      "epoch": 1.4732142857142856,
      "grad_norm": 0.28312396593030925,
      "learning_rate": 4.826219137111724e-05,
      "loss": 0.4596,
      "step": 165
    },
    {
      "epoch": 1.4821428571428572,
      "grad_norm": 0.20863485843019422,
      "learning_rate": 4.785461997471058e-05,
      "loss": 0.4572,
      "step": 166
    },
    {
      "epoch": 1.4910714285714286,
      "grad_norm": 0.18263430431801883,
      "learning_rate": 4.7446198601408274e-05,
      "loss": 0.461,
      "step": 167
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.21990339290691535,
      "learning_rate": 4.703697144796742e-05,
      "loss": 0.4546,
      "step": 168
    },
    {
      "epoch": 1.5089285714285714,
      "grad_norm": 0.1919102767561462,
      "learning_rate": 4.662698279834149e-05,
      "loss": 0.4592,
      "step": 169
    },
    {
      "epoch": 1.5178571428571428,
      "grad_norm": 0.17372269475870397,
      "learning_rate": 4.62162770188882e-05,
      "loss": 0.469,
      "step": 170
    },
    {
      "epoch": 1.5267857142857144,
      "grad_norm": 0.1654682068782294,
      "learning_rate": 4.580489855356852e-05,
      "loss": 0.4559,
      "step": 171
    },
    {
      "epoch": 1.5357142857142856,
      "grad_norm": 0.17629220459825287,
      "learning_rate": 4.5392891919137155e-05,
      "loss": 0.4561,
      "step": 172
    },
    {
      "epoch": 1.5446428571428572,
      "grad_norm": 0.21172057493487084,
      "learning_rate": 4.498030170032531e-05,
      "loss": 0.461,
      "step": 173
    },
    {
      "epoch": 1.5535714285714286,
      "grad_norm": 0.17570649332026045,
      "learning_rate": 4.456717254501595e-05,
      "loss": 0.461,
      "step": 174
    },
    {
      "epoch": 1.5625,
      "grad_norm": 0.16836022815630258,
      "learning_rate": 4.4153549159412276e-05,
      "loss": 0.4576,
      "step": 175
    },
    {
      "epoch": 1.5714285714285714,
      "grad_norm": 0.1749663199663638,
      "learning_rate": 4.37394763032e-05,
      "loss": 0.4549,
      "step": 176
    },
    {
      "epoch": 1.5803571428571428,
      "grad_norm": 0.17474917301910808,
      "learning_rate": 4.332499878470367e-05,
      "loss": 0.4631,
      "step": 177
    },
    {
      "epoch": 1.5892857142857144,
      "grad_norm": 0.14004770259306568,
      "learning_rate": 4.291016145603776e-05,
      "loss": 0.4562,
      "step": 178
    },
    {
      "epoch": 1.5982142857142856,
      "grad_norm": 0.19029016036379953,
      "learning_rate": 4.249500920825314e-05,
      "loss": 0.4616,
      "step": 179
    },
    {
      "epoch": 1.6071428571428572,
      "grad_norm": 0.19188224946309962,
      "learning_rate": 4.2079586966479214e-05,
      "loss": 0.4636,
      "step": 180
    },
    {
      "epoch": 1.6160714285714286,
      "grad_norm": 0.18536862750852307,
      "learning_rate": 4.166393968506244e-05,
      "loss": 0.4596,
      "step": 181
    },
    {
      "epoch": 1.625,
      "grad_norm": 0.17003638172457017,
      "learning_rate": 4.124811234270157e-05,
      "loss": 0.4546,
      "step": 182
    },
    {
      "epoch": 1.6339285714285714,
      "grad_norm": 0.18025420888895052,
      "learning_rate": 4.0832149937580457e-05,
      "loss": 0.4583,
      "step": 183
    },
    {
      "epoch": 1.6428571428571428,
      "grad_norm": 0.17702049953400414,
      "learning_rate": 4.0416097482498575e-05,
      "loss": 0.4583,
      "step": 184
    },
    {
      "epoch": 1.6517857142857144,
      "grad_norm": 0.16060960803693247,
      "learning_rate": 4e-05,
      "loss": 0.458,
      "step": 185
    },
    {
      "epoch": 1.6607142857142856,
      "grad_norm": 0.17703603102767604,
      "learning_rate": 3.958390251750144e-05,
      "loss": 0.4633,
      "step": 186
    },
    {
      "epoch": 1.6696428571428572,
      "grad_norm": 0.1646046678567389,
      "learning_rate": 3.916785006241955e-05,
      "loss": 0.4626,
      "step": 187
    },
    {
      "epoch": 1.6785714285714286,
      "grad_norm": 0.1722881349469007,
      "learning_rate": 3.875188765729843e-05,
      "loss": 0.4567,
      "step": 188
    },
    {
      "epoch": 1.6875,
      "grad_norm": 0.14735936478640962,
      "learning_rate": 3.833606031493758e-05,
      "loss": 0.4604,
      "step": 189
    },
    {
      "epoch": 1.6964285714285714,
      "grad_norm": 0.14292807283154454,
      "learning_rate": 3.792041303352079e-05,
      "loss": 0.4583,
      "step": 190
    },
    {
      "epoch": 1.7053571428571428,
      "grad_norm": 0.16040731375651313,
      "learning_rate": 3.7504990791746877e-05,
      "loss": 0.4551,
      "step": 191
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 0.12644736716718807,
      "learning_rate": 3.708983854396225e-05,
      "loss": 0.4596,
      "step": 192
    },
    {
      "epoch": 1.7232142857142856,
      "grad_norm": 0.15010655874057,
      "learning_rate": 3.667500121529634e-05,
      "loss": 0.4523,
      "step": 193
    },
    {
      "epoch": 1.7321428571428572,
      "grad_norm": 0.1433646952462073,
      "learning_rate": 3.626052369679999e-05,
      "loss": 0.4598,
      "step": 194
    },
    {
      "epoch": 1.7410714285714286,
      "grad_norm": 0.11933250859224095,
      "learning_rate": 3.584645084058774e-05,
      "loss": 0.4526,
      "step": 195
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.14269745540138357,
      "learning_rate": 3.543282745498407e-05,
      "loss": 0.4609,
      "step": 196
    },
    {
      "epoch": 1.7589285714285714,
      "grad_norm": 0.13719631088911224,
      "learning_rate": 3.50196982996747e-05,
      "loss": 0.4569,
      "step": 197
    },
    {
      "epoch": 1.7678571428571428,
      "grad_norm": 0.14123727092169555,
      "learning_rate": 3.460710808086285e-05,
      "loss": 0.4598,
      "step": 198
    },
    {
      "epoch": 1.7767857142857144,
      "grad_norm": 0.13589211534733778,
      "learning_rate": 3.419510144643149e-05,
      "loss": 0.4583,
      "step": 199
    },
    {
      "epoch": 1.7857142857142856,
      "grad_norm": 0.14463228336420042,
      "learning_rate": 3.378372298111181e-05,
      "loss": 0.4547,
      "step": 200
    },
    {
      "epoch": 1.7946428571428572,
      "grad_norm": 0.13381794986766524,
      "learning_rate": 3.3373017201658514e-05,
      "loss": 0.4588,
      "step": 201
    },
    {
      "epoch": 1.8035714285714286,
      "grad_norm": 0.13115685617204254,
      "learning_rate": 3.2963028552032594e-05,
      "loss": 0.4566,
      "step": 202
    },
    {
      "epoch": 1.8125,
      "grad_norm": 0.13196620026552933,
      "learning_rate": 3.2553801398591746e-05,
      "loss": 0.4557,
      "step": 203
    },
    {
      "epoch": 1.8214285714285714,
      "grad_norm": 0.12289630749221309,
      "learning_rate": 3.214538002528943e-05,
      "loss": 0.4597,
      "step": 204
    },
    {
      "epoch": 1.8303571428571428,
      "grad_norm": 0.12502731241075712,
      "learning_rate": 3.173780862888277e-05,
      "loss": 0.4567,
      "step": 205
    },
    {
      "epoch": 1.8392857142857144,
      "grad_norm": 0.11886410880699969,
      "learning_rate": 3.133113131414975e-05,
      "loss": 0.4628,
      "step": 206
    },
    {
      "epoch": 1.8482142857142856,
      "grad_norm": 0.12359267043234633,
      "learning_rate": 3.092539208911654e-05,
      "loss": 0.4582,
      "step": 207
    },
    {
      "epoch": 1.8571428571428572,
      "grad_norm": 0.13017549097611902,
      "learning_rate": 3.0520634860295254e-05,
      "loss": 0.4521,
      "step": 208
    },
    {
      "epoch": 1.8660714285714286,
      "grad_norm": 0.11869071958078599,
      "learning_rate": 3.0116903427932603e-05,
      "loss": 0.4536,
      "step": 209
    },
    {
      "epoch": 1.875,
      "grad_norm": 0.13940208007631327,
      "learning_rate": 2.9714241481270168e-05,
      "loss": 0.4489,
      "step": 210
    },
    {
      "epoch": 1.8839285714285714,
      "grad_norm": 0.11974765997048924,
      "learning_rate": 2.9312692593816604e-05,
      "loss": 0.4487,
      "step": 211
    },
    {
      "epoch": 1.8928571428571428,
      "grad_norm": 0.12948893942349837,
      "learning_rate": 2.8912300218632385e-05,
      "loss": 0.4582,
      "step": 212
    },
    {
      "epoch": 1.9017857142857144,
      "grad_norm": 0.12275899875447714,
      "learning_rate": 2.8513107683627602e-05,
      "loss": 0.4662,
      "step": 213
    },
    {
      "epoch": 1.9107142857142856,
      "grad_norm": 0.12457402598657341,
      "learning_rate": 2.8115158186873307e-05,
      "loss": 0.4498,
      "step": 214
    },
    {
      "epoch": 1.9196428571428572,
      "grad_norm": 0.13773022448582894,
      "learning_rate": 2.771849479192687e-05,
      "loss": 0.4638,
      "step": 215
    },
    {
      "epoch": 1.9285714285714286,
      "grad_norm": 0.1382501463811384,
      "learning_rate": 2.7323160423171894e-05,
      "loss": 0.4587,
      "step": 216
    },
    {
      "epoch": 1.9375,
      "grad_norm": 0.13537863495707703,
      "learning_rate": 2.692919786117329e-05,
      "loss": 0.4536,
      "step": 217
    },
    {
      "epoch": 1.9464285714285714,
      "grad_norm": 0.1455865352799823,
      "learning_rate": 2.6536649738047756e-05,
      "loss": 0.4529,
      "step": 218
    },
    {
      "epoch": 1.9553571428571428,
      "grad_norm": 0.12729730828758748,
      "learning_rate": 2.614555853285044e-05,
      "loss": 0.4512,
      "step": 219
    },
    {
      "epoch": 1.9642857142857144,
      "grad_norm": 0.12357176809760773,
      "learning_rate": 2.5755966566978124e-05,
      "loss": 0.4549,
      "step": 220
    },
    {
      "epoch": 1.9732142857142856,
      "grad_norm": 0.1192751623548519,
      "learning_rate": 2.5367915999589462e-05,
      "loss": 0.4469,
      "step": 221
    },
    {
      "epoch": 1.9821428571428572,
      "grad_norm": 0.12358485840017747,
      "learning_rate": 2.498144882304281e-05,
      "loss": 0.4506,
      "step": 222
    },
    {
      "epoch": 1.9910714285714286,
      "grad_norm": 0.11913180318943509,
      "learning_rate": 2.4596606858352036e-05,
      "loss": 0.4574,
      "step": 223
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.12792742986464758,
      "learning_rate": 2.421343175066095e-05,
      "loss": 0.4567,
      "step": 224
    },
    {
      "epoch": 2.0089285714285716,
      "grad_norm": 0.18610386161673095,
      "learning_rate": 2.3831964964736737e-05,
      "loss": 0.4285,
      "step": 225
    },
    {
      "epoch": 2.017857142857143,
      "grad_norm": 0.1317157519843888,
      "learning_rate": 2.3452247780482877e-05,
      "loss": 0.4245,
      "step": 226
    },
    {
      "epoch": 2.0267857142857144,
      "grad_norm": 0.15732732092952154,
      "learning_rate": 2.3074321288472135e-05,
      "loss": 0.4143,
      "step": 227
    },
    {
      "epoch": 2.0357142857142856,
      "grad_norm": 0.16814184352306275,
      "learning_rate": 2.269822638549997e-05,
      "loss": 0.4182,
      "step": 228
    },
    {
      "epoch": 2.044642857142857,
      "grad_norm": 0.158387600350679,
      "learning_rate": 2.2324003770158917e-05,
      "loss": 0.4266,
      "step": 229
    },
    {
      "epoch": 2.0535714285714284,
      "grad_norm": 0.16005630429705614,
      "learning_rate": 2.1951693938434517e-05,
      "loss": 0.4163,
      "step": 230
    },
    {
      "epoch": 2.0625,
      "grad_norm": 0.1541823036416597,
      "learning_rate": 2.1581337179323062e-05,
      "loss": 0.4202,
      "step": 231
    },
    {
      "epoch": 2.0714285714285716,
      "grad_norm": 0.15428909223565776,
      "learning_rate": 2.1212973570471758e-05,
      "loss": 0.4204,
      "step": 232
    },
    {
      "epoch": 2.080357142857143,
      "grad_norm": 0.13935809750720052,
      "learning_rate": 2.0846642973841833e-05,
      "loss": 0.4255,
      "step": 233
    },
    {
      "epoch": 2.0892857142857144,
      "grad_norm": 0.1481665794140764,
      "learning_rate": 2.0482385031394864e-05,
      "loss": 0.4173,
      "step": 234
    },
    {
      "epoch": 2.0982142857142856,
      "grad_norm": 0.14270794298166453,
      "learning_rate": 2.0120239160803026e-05,
      "loss": 0.4253,
      "step": 235
    },
    {
      "epoch": 2.107142857142857,
      "grad_norm": 0.12890317989622468,
      "learning_rate": 1.9760244551183503e-05,
      "loss": 0.4234,
      "step": 236
    },
    {
      "epoch": 2.1160714285714284,
      "grad_norm": 0.14547199967318059,
      "learning_rate": 1.9402440158857775e-05,
      "loss": 0.4241,
      "step": 237
    },
    {
      "epoch": 2.125,
      "grad_norm": 0.13117058221551006,
      "learning_rate": 1.9046864703135958e-05,
      "loss": 0.4174,
      "step": 238
    },
    {
      "epoch": 2.1339285714285716,
      "grad_norm": 0.13210270356792833,
      "learning_rate": 1.8693556662126857e-05,
      "loss": 0.4211,
      "step": 239
    },
    {
      "epoch": 2.142857142857143,
      "grad_norm": 0.1282730434203236,
      "learning_rate": 1.8342554268574114e-05,
      "loss": 0.4158,
      "step": 240
    },
    {
      "epoch": 2.1517857142857144,
      "grad_norm": 0.12878778566197982,
      "learning_rate": 1.7993895505718907e-05,
      "loss": 0.4164,
      "step": 241
    },
    {
      "epoch": 2.1607142857142856,
      "grad_norm": 0.12711242314558882,
      "learning_rate": 1.7647618103189632e-05,
      "loss": 0.4213,
      "step": 242
    },
    {
      "epoch": 2.169642857142857,
      "grad_norm": 0.12225869755757035,
      "learning_rate": 1.7303759532919066e-05,
      "loss": 0.4194,
      "step": 243
    },
    {
      "epoch": 2.1785714285714284,
      "grad_norm": 0.12636232755876087,
      "learning_rate": 1.6962357005089368e-05,
      "loss": 0.4171,
      "step": 244
    },
    {
      "epoch": 2.1875,
      "grad_norm": 0.11732096389813856,
      "learning_rate": 1.6623447464105455e-05,
      "loss": 0.4218,
      "step": 245
    },
    {
      "epoch": 2.1964285714285716,
      "grad_norm": 0.12702977153503237,
      "learning_rate": 1.628706758459709e-05,
      "loss": 0.4184,
      "step": 246
    },
    {
      "epoch": 2.205357142857143,
      "grad_norm": 0.11036816052659255,
      "learning_rate": 1.5953253767450214e-05,
      "loss": 0.4233,
      "step": 247
    },
    {
      "epoch": 2.2142857142857144,
      "grad_norm": 0.12343828025142765,
      "learning_rate": 1.5622042135867838e-05,
      "loss": 0.4189,
      "step": 248
    },
    {
      "epoch": 2.2232142857142856,
      "grad_norm": 0.1157421817524145,
      "learning_rate": 1.5293468531461063e-05,
      "loss": 0.4232,
      "step": 249
    },
    {
      "epoch": 2.232142857142857,
      "grad_norm": 0.10768701281522594,
      "learning_rate": 1.4967568510370498e-05,
      "loss": 0.4259,
      "step": 250
    },
    {
      "epoch": 2.2410714285714284,
      "grad_norm": 0.11855236951996624,
      "learning_rate": 1.464437733941861e-05,
      "loss": 0.4181,
      "step": 251
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.09981025907626229,
      "learning_rate": 1.4323929992293378e-05,
      "loss": 0.4196,
      "step": 252
    },
    {
      "epoch": 2.2589285714285716,
      "grad_norm": 0.11114796288657297,
      "learning_rate": 1.4006261145763657e-05,
      "loss": 0.4218,
      "step": 253
    },
    {
      "epoch": 2.267857142857143,
      "grad_norm": 0.09998416016387277,
      "learning_rate": 1.3691405175926705e-05,
      "loss": 0.4179,
      "step": 254
    },
    {
      "epoch": 2.2767857142857144,
      "grad_norm": 0.11145317342624324,
      "learning_rate": 1.3379396154488142e-05,
      "loss": 0.426,
      "step": 255
    },
    {
      "epoch": 2.2857142857142856,
      "grad_norm": 0.10809564376234657,
      "learning_rate": 1.3070267845075026e-05,
      "loss": 0.4154,
      "step": 256
    },
    {
      "epoch": 2.294642857142857,
      "grad_norm": 0.09865265999187162,
      "learning_rate": 1.2764053699582145e-05,
      "loss": 0.4154,
      "step": 257
    },
    {
      "epoch": 2.3035714285714284,
      "grad_norm": 0.10356277308081305,
      "learning_rate": 1.2460786854552e-05,
      "loss": 0.4249,
      "step": 258
    },
    {
      "epoch": 2.3125,
      "grad_norm": 0.10554928423562794,
      "learning_rate": 1.2160500127589057e-05,
      "loss": 0.4215,
      "step": 259
    },
    {
      "epoch": 2.3214285714285716,
      "grad_norm": 0.10007716549085707,
      "learning_rate": 1.1863226013808392e-05,
      "loss": 0.4221,
      "step": 260
    },
    {
      "epoch": 2.330357142857143,
      "grad_norm": 0.11646959225366664,
      "learning_rate": 1.1568996682319304e-05,
      "loss": 0.4197,
      "step": 261
    },
    {
      "epoch": 2.3392857142857144,
      "grad_norm": 0.10158651685471926,
      "learning_rate": 1.1277843972744127e-05,
      "loss": 0.4135,
      "step": 262
    },
    {
      "epoch": 2.3482142857142856,
      "grad_norm": 0.10217204948795446,
      "learning_rate": 1.0989799391772835e-05,
      "loss": 0.413,
      "step": 263
    },
    {
      "epoch": 2.357142857142857,
      "grad_norm": 0.1080995691928109,
      "learning_rate": 1.070489410975351e-05,
      "loss": 0.4211,
      "step": 264
    },
    {
      "epoch": 2.3660714285714284,
      "grad_norm": 0.08877756673000493,
      "learning_rate": 1.0423158957319331e-05,
      "loss": 0.4199,
      "step": 265
    },
    {
      "epoch": 2.375,
      "grad_norm": 0.0962041869064699,
      "learning_rate": 1.014462442205225e-05,
      "loss": 0.4212,
      "step": 266
    },
    {
      "epoch": 2.3839285714285716,
      "grad_norm": 0.1045928238806768,
      "learning_rate": 9.869320645183861e-06,
      "loss": 0.416,
      "step": 267
    },
    {
      "epoch": 2.392857142857143,
      "grad_norm": 0.09145965237465084,
      "learning_rate": 9.597277418333673e-06,
      "loss": 0.4174,
      "step": 268
    },
    {
      "epoch": 2.4017857142857144,
      "grad_norm": 0.0946988755325339,
      "learning_rate": 9.328524180285226e-06,
      "loss": 0.423,
      "step": 269
    },
    {
      "epoch": 2.4107142857142856,
      "grad_norm": 0.10152439460621095,
      "learning_rate": 9.063090013800488e-06,
      "loss": 0.4247,
      "step": 270
    },
    {
      "epoch": 2.419642857142857,
      "grad_norm": 0.09916208935998831,
      "learning_rate": 8.80100364247265e-06,
      "loss": 0.4201,
      "step": 271
    },
    {
      "epoch": 2.4285714285714284,
      "grad_norm": 0.09330957695586287,
      "learning_rate": 8.542293427617853e-06,
      "loss": 0.4174,
      "step": 272
    },
    {
      "epoch": 2.4375,
      "grad_norm": 0.10014401502360695,
      "learning_rate": 8.286987365206114e-06,
      "loss": 0.4177,
      "step": 273
    },
    {
      "epoch": 2.4464285714285716,
      "grad_norm": 0.09946787106111032,
      "learning_rate": 8.035113082831776e-06,
      "loss": 0.4182,
      "step": 274
    },
    {
      "epoch": 2.455357142857143,
      "grad_norm": 0.09531413850190633,
      "learning_rate": 7.78669783672382e-06,
      "loss": 0.4217,
      "step": 275
    },
    {
      "epoch": 2.4642857142857144,
      "grad_norm": 0.09377218198215026,
      "learning_rate": 7.5417685087963855e-06,
      "loss": 0.4224,
      "step": 276
    },
    {
      "epoch": 2.4732142857142856,
      "grad_norm": 0.09494424619498867,
      "learning_rate": 7.300351603739759e-06,
      "loss": 0.422,
      "step": 277
    },
    {
      "epoch": 2.482142857142857,
      "grad_norm": 0.09167604547691827,
      "learning_rate": 7.062473246152231e-06,
      "loss": 0.4182,
      "step": 278
    },
    {
      "epoch": 2.4910714285714284,
      "grad_norm": 0.09414104830978579,
      "learning_rate": 6.828159177713036e-06,
      "loss": 0.4185,
      "step": 279
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.09349403020134545,
      "learning_rate": 6.597434754396763e-06,
      "loss": 0.4233,
      "step": 280
    },
    {
      "epoch": 2.508928571428571,
      "grad_norm": 0.0893604755299499,
      "learning_rate": 6.370324943729498e-06,
      "loss": 0.4205,
      "step": 281
    },
    {
      "epoch": 2.517857142857143,
      "grad_norm": 0.08281221144451797,
      "learning_rate": 6.146854322086984e-06,
      "loss": 0.4224,
      "step": 282
    },
    {
      "epoch": 2.5267857142857144,
      "grad_norm": 0.08578488209234744,
      "learning_rate": 5.9270470720351305e-06,
      "loss": 0.4133,
      "step": 283
    },
    {
      "epoch": 2.5357142857142856,
      "grad_norm": 0.09120482056235919,
      "learning_rate": 5.7109269797131335e-06,
      "loss": 0.4192,
      "step": 284
    },
    {
      "epoch": 2.544642857142857,
      "grad_norm": 0.0950792789717829,
      "learning_rate": 5.498517432259487e-06,
      "loss": 0.4196,
      "step": 285
    },
    {
      "epoch": 2.553571428571429,
      "grad_norm": 0.09565727805504355,
      "learning_rate": 5.2898414152811715e-06,
      "loss": 0.4223,
      "step": 286
    },
    {
      "epoch": 2.5625,
      "grad_norm": 0.08845162118376791,
      "learning_rate": 5.084921510366321e-06,
      "loss": 0.4207,
      "step": 287
    },
    {
      "epoch": 2.571428571428571,
      "grad_norm": 0.08997378993572731,
      "learning_rate": 4.883779892640532e-06,
      "loss": 0.4235,
      "step": 288
    },
    {
      "epoch": 2.580357142857143,
      "grad_norm": 0.08520848023146284,
      "learning_rate": 4.686438328367287e-06,
      "loss": 0.4169,
      "step": 289
    },
    {
      "epoch": 2.5892857142857144,
      "grad_norm": 0.08661793332561409,
      "learning_rate": 4.492918172592489e-06,
      "loss": 0.4234,
      "step": 290
    },
    {
      "epoch": 2.5982142857142856,
      "grad_norm": 0.08401595957495671,
      "learning_rate": 4.303240366833596e-06,
      "loss": 0.4139,
      "step": 291
    },
    {
      "epoch": 2.607142857142857,
      "grad_norm": 0.08238451078646887,
      "learning_rate": 4.117425436813438e-06,
      "loss": 0.4202,
      "step": 292
    },
    {
      "epoch": 2.616071428571429,
      "grad_norm": 0.08738609784414623,
      "learning_rate": 3.935493490239046e-06,
      "loss": 0.4208,
      "step": 293
    },
    {
      "epoch": 2.625,
      "grad_norm": 0.08259864478704049,
      "learning_rate": 3.7574642146257634e-06,
      "loss": 0.4186,
      "step": 294
    },
    {
      "epoch": 2.633928571428571,
      "grad_norm": 0.08659177417529826,
      "learning_rate": 3.583356875166737e-06,
      "loss": 0.4237,
      "step": 295
    },
    {
      "epoch": 2.642857142857143,
      "grad_norm": 0.08119845593756564,
      "learning_rate": 3.4131903126482045e-06,
      "loss": 0.4154,
      "step": 296
    },
    {
      "epoch": 2.6517857142857144,
      "grad_norm": 0.08611600596660274,
      "learning_rate": 3.2469829414106503e-06,
      "loss": 0.4201,
      "step": 297
    },
    {
      "epoch": 2.6607142857142856,
      "grad_norm": 0.08276786453261639,
      "learning_rate": 3.0847527473561167e-06,
      "loss": 0.4248,
      "step": 298
    },
    {
      "epoch": 2.669642857142857,
      "grad_norm": 0.08240160307178532,
      "learning_rate": 2.9265172860018754e-06,
      "loss": 0.415,
      "step": 299
    },
    {
      "epoch": 2.678571428571429,
      "grad_norm": 0.08006883576509521,
      "learning_rate": 2.7722936805807267e-06,
      "loss": 0.4171,
      "step": 300
    },
    {
      "epoch": 2.6875,
      "grad_norm": 0.08381933167379557,
      "learning_rate": 2.622098620187963e-06,
      "loss": 0.427,
      "step": 301
    },
    {
      "epoch": 2.696428571428571,
      "grad_norm": 0.08303705152303609,
      "learning_rate": 2.475948357975435e-06,
      "loss": 0.4198,
      "step": 302
    },
    {
      "epoch": 2.705357142857143,
      "grad_norm": 0.08870666410228789,
      "learning_rate": 2.3338587093927245e-06,
      "loss": 0.4219,
      "step": 303
    },
    {
      "epoch": 2.7142857142857144,
      "grad_norm": 0.0818167400851114,
      "learning_rate": 2.195845050475698e-06,
      "loss": 0.4223,
      "step": 304
    },
    {
      "epoch": 2.7232142857142856,
      "grad_norm": 0.08139799765060797,
      "learning_rate": 2.0619223161825942e-06,
      "loss": 0.4184,
      "step": 305
    },
    {
      "epoch": 2.732142857142857,
      "grad_norm": 0.07642422600278949,
      "learning_rate": 1.932104998777877e-06,
      "loss": 0.4179,
      "step": 306
    },
    {
      "epoch": 2.741071428571429,
      "grad_norm": 0.07996683628078224,
      "learning_rate": 1.8064071462639798e-06,
      "loss": 0.4184,
      "step": 307
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.07773519961892873,
      "learning_rate": 1.684842360861092e-06,
      "loss": 0.4261,
      "step": 308
    },
    {
      "epoch": 2.758928571428571,
      "grad_norm": 0.07860341250434275,
      "learning_rate": 1.5674237975352457e-06,
      "loss": 0.4171,
      "step": 309
    },
    {
      "epoch": 2.767857142857143,
      "grad_norm": 0.0784236768243464,
      "learning_rate": 1.4541641625747428e-06,
      "loss": 0.4192,
      "step": 310
    },
    {
      "epoch": 2.7767857142857144,
      "grad_norm": 0.0761682477040851,
      "learning_rate": 1.3450757122151826e-06,
      "loss": 0.4129,
      "step": 311
    },
    {
      "epoch": 2.7857142857142856,
      "grad_norm": 0.07767572155515515,
      "learning_rate": 1.2401702513131597e-06,
      "loss": 0.4182,
      "step": 312
    },
    {
      "epoch": 2.794642857142857,
      "grad_norm": 0.07941885760590452,
      "learning_rate": 1.1394591320688098e-06,
      "loss": 0.4186,
      "step": 313
    },
    {
      "epoch": 2.803571428571429,
      "grad_norm": 0.07998708863714857,
      "learning_rate": 1.0429532527973695e-06,
      "loss": 0.4263,
      "step": 314
    },
    {
      "epoch": 2.8125,
      "grad_norm": 0.0783378129960652,
      "learning_rate": 9.50663056749801e-07,
      "loss": 0.4141,
      "step": 315
    },
    {
      "epoch": 2.821428571428571,
      "grad_norm": 0.07670652617883925,
      "learning_rate": 8.625985309827211e-07,
      "loss": 0.4192,
      "step": 316
    },
    {
      "epoch": 2.830357142857143,
      "grad_norm": 0.07288684797979382,
      "learning_rate": 7.787692052776363e-07,
      "loss": 0.409,
      "step": 317
    },
    {
      "epoch": 2.8392857142857144,
      "grad_norm": 0.07510033336815654,
      "learning_rate": 6.991841511097175e-07,
      "loss": 0.4167,
      "step": 318
    },
    {
      "epoch": 2.8482142857142856,
      "grad_norm": 0.07666734447787482,
      "learning_rate": 6.238519806661192e-07,
      "loss": 0.4191,
      "step": 319
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.07597379543452941,
      "learning_rate": 5.527808459140449e-07,
      "loss": 0.4196,
      "step": 320
    },
    {
      "epoch": 2.866071428571429,
      "grad_norm": 0.07587470302303759,
      "learning_rate": 4.859784377185639e-07,
      "loss": 0.4188,
      "step": 321
    },
    {
      "epoch": 2.875,
      "grad_norm": 0.07535633004319045,
      "learning_rate": 4.2345198501038796e-07,
      "loss": 0.4138,
      "step": 322
    },
    {
      "epoch": 2.883928571428571,
      "grad_norm": 0.0777275889451964,
      "learning_rate": 3.652082540035906e-07,
      "loss": 0.4231,
      "step": 323
    },
    {
      "epoch": 2.892857142857143,
      "grad_norm": 0.07632262719194602,
      "learning_rate": 3.112535474633971e-07,
      "loss": 0.4214,
      "step": 324
    },
    {
      "epoch": 2.9017857142857144,
      "grad_norm": 0.07651730781426783,
      "learning_rate": 2.6159370402415674e-07,
      "loss": 0.4186,
      "step": 325
    },
    {
      "epoch": 2.9107142857142856,
      "grad_norm": 0.07477013510660417,
      "learning_rate": 2.1623409755751056e-07,
      "loss": 0.4248,
      "step": 326
    },
    {
      "epoch": 2.919642857142857,
      "grad_norm": 0.07897064466517018,
      "learning_rate": 1.751796365908831e-07,
      "loss": 0.4233,
      "step": 327
    },
    {
      "epoch": 2.928571428571429,
      "grad_norm": 0.07717251326428437,
      "learning_rate": 1.3843476377628507e-07,
      "loss": 0.4177,
      "step": 328
    },
    {
      "epoch": 2.9375,
      "grad_norm": 0.07474269162329465,
      "learning_rate": 1.0600345540958234e-07,
      "loss": 0.4185,
      "step": 329
    },
    {
      "epoch": 2.946428571428571,
      "grad_norm": 0.07632913690300189,
      "learning_rate": 7.788922100020024e-08,
      "loss": 0.4227,
      "step": 330
    },
    {
      "epoch": 2.955357142857143,
      "grad_norm": 0.07484327906269052,
      "learning_rate": 5.4095102891338344e-08,
      "loss": 0.4296,
      "step": 331
    },
    {
      "epoch": 2.9642857142857144,
      "grad_norm": 0.07247458214414504,
      "learning_rate": 3.462367593075833e-08,
      "loss": 0.4137,
      "step": 332
    },
    {
      "epoch": 2.9732142857142856,
      "grad_norm": 0.07522031543191121,
      "learning_rate": 1.9477047192140164e-08,
      "loss": 0.4274,
      "step": 333
    },
    {
      "epoch": 2.982142857142857,
      "grad_norm": 0.07361261056246672,
      "learning_rate": 8.656855747082305e-09,
      "loss": 0.4179,
      "step": 334
    },
    {
      "epoch": 2.991071428571429,
      "grad_norm": 0.07312212144143204,
      "learning_rate": 2.1642724877146693e-09,
      "loss": 0.411,
      "step": 335
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.07511641768667605,
      "learning_rate": 0.0,
      "loss": 0.4162,
      "step": 336
    },
    {
      "epoch": 3.0,
      "step": 336,
      "total_flos": 8.070235818242867e+18,
      "train_loss": 0.4864790922119504,
      "train_runtime": 20111.4838,
      "train_samples_per_second": 8.547,
      "train_steps_per_second": 0.017
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 336,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.070235818242867e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}