| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 2502, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0012, |
| "grad_norm": 5.4833939764880055, |
| "learning_rate": 0.0, |
| "loss": 0.7607, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0024, |
| "grad_norm": 5.645497868805024, |
| "learning_rate": 3.984063745019921e-08, |
| "loss": 0.7813, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0036, |
| "grad_norm": 6.379959917142346, |
| "learning_rate": 7.968127490039842e-08, |
| "loss": 0.831, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0048, |
| "grad_norm": 5.8802628972123365, |
| "learning_rate": 1.195219123505976e-07, |
| "loss": 0.8058, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.006, |
| "grad_norm": 6.3940729810393915, |
| "learning_rate": 1.5936254980079683e-07, |
| "loss": 0.8678, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0072, |
| "grad_norm": 5.766068577494406, |
| "learning_rate": 1.9920318725099604e-07, |
| "loss": 0.7789, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0084, |
| "grad_norm": 6.045841004732535, |
| "learning_rate": 2.390438247011952e-07, |
| "loss": 0.7945, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.0096, |
| "grad_norm": 5.904260263294003, |
| "learning_rate": 2.7888446215139444e-07, |
| "loss": 0.7843, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.0108, |
| "grad_norm": 5.8665611721938005, |
| "learning_rate": 3.1872509960159367e-07, |
| "loss": 0.8259, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.012, |
| "grad_norm": 5.927624645803215, |
| "learning_rate": 3.585657370517929e-07, |
| "loss": 0.802, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.0132, |
| "grad_norm": 5.2336347881583345, |
| "learning_rate": 3.9840637450199207e-07, |
| "loss": 0.7326, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.0144, |
| "grad_norm": 5.744302914470887, |
| "learning_rate": 4.382470119521913e-07, |
| "loss": 0.8034, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.0156, |
| "grad_norm": 5.569314249586872, |
| "learning_rate": 4.780876494023904e-07, |
| "loss": 0.8058, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.0168, |
| "grad_norm": 5.279680827326481, |
| "learning_rate": 5.179282868525898e-07, |
| "loss": 0.7743, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.018, |
| "grad_norm": 5.386497992251826, |
| "learning_rate": 5.577689243027889e-07, |
| "loss": 0.7688, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.0192, |
| "grad_norm": 4.254944734918593, |
| "learning_rate": 5.976095617529881e-07, |
| "loss": 0.7054, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.0204, |
| "grad_norm": 4.523233221330768, |
| "learning_rate": 6.374501992031873e-07, |
| "loss": 0.7534, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.0216, |
| "grad_norm": 4.1987378310333545, |
| "learning_rate": 6.772908366533865e-07, |
| "loss": 0.708, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.0228, |
| "grad_norm": 4.574293688793641, |
| "learning_rate": 7.171314741035858e-07, |
| "loss": 0.7916, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.024, |
| "grad_norm": 4.3308716519686525, |
| "learning_rate": 7.569721115537849e-07, |
| "loss": 0.7431, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.0252, |
| "grad_norm": 3.7190318716723603, |
| "learning_rate": 7.968127490039841e-07, |
| "loss": 0.7258, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.0264, |
| "grad_norm": 2.543036752039916, |
| "learning_rate": 8.366533864541833e-07, |
| "loss": 0.7224, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.0276, |
| "grad_norm": 2.4950258096616382, |
| "learning_rate": 8.764940239043826e-07, |
| "loss": 0.6736, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.0288, |
| "grad_norm": 2.5069650630469984, |
| "learning_rate": 9.163346613545817e-07, |
| "loss": 0.6854, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.03, |
| "grad_norm": 2.4088611292545825, |
| "learning_rate": 9.561752988047808e-07, |
| "loss": 0.7204, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.0312, |
| "grad_norm": 2.186249677635318, |
| "learning_rate": 9.9601593625498e-07, |
| "loss": 0.6998, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.0324, |
| "grad_norm": 2.1736221592657285, |
| "learning_rate": 1.0358565737051795e-06, |
| "loss": 0.7414, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.0336, |
| "grad_norm": 2.1914465779108747, |
| "learning_rate": 1.0756972111553785e-06, |
| "loss": 0.6722, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.0348, |
| "grad_norm": 1.6670960514305742, |
| "learning_rate": 1.1155378486055778e-06, |
| "loss": 0.6422, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.036, |
| "grad_norm": 2.077075223596808, |
| "learning_rate": 1.155378486055777e-06, |
| "loss": 0.6731, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.0372, |
| "grad_norm": 2.4897778527200183, |
| "learning_rate": 1.1952191235059762e-06, |
| "loss": 0.6803, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.0384, |
| "grad_norm": 2.4711412460186373, |
| "learning_rate": 1.2350597609561754e-06, |
| "loss": 0.6559, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.0396, |
| "grad_norm": 2.700674504910902, |
| "learning_rate": 1.2749003984063747e-06, |
| "loss": 0.6834, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.0408, |
| "grad_norm": 2.682606066253942, |
| "learning_rate": 1.3147410358565737e-06, |
| "loss": 0.6516, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.042, |
| "grad_norm": 2.1676293521182077, |
| "learning_rate": 1.354581673306773e-06, |
| "loss": 0.579, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.0432, |
| "grad_norm": 2.173873959031573, |
| "learning_rate": 1.3944223107569721e-06, |
| "loss": 0.6436, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.0444, |
| "grad_norm": 2.151838164329406, |
| "learning_rate": 1.4342629482071716e-06, |
| "loss": 0.698, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.0456, |
| "grad_norm": 1.9621644411146546, |
| "learning_rate": 1.4741035856573708e-06, |
| "loss": 0.6488, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.0468, |
| "grad_norm": 1.5368598635090958, |
| "learning_rate": 1.5139442231075698e-06, |
| "loss": 0.6302, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.048, |
| "grad_norm": 1.2190139458412592, |
| "learning_rate": 1.553784860557769e-06, |
| "loss": 0.619, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.0492, |
| "grad_norm": 1.165215618150115, |
| "learning_rate": 1.5936254980079683e-06, |
| "loss": 0.6194, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.0504, |
| "grad_norm": 1.1142120578252954, |
| "learning_rate": 1.6334661354581673e-06, |
| "loss": 0.6337, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.0516, |
| "grad_norm": 1.2057442448808577, |
| "learning_rate": 1.6733067729083665e-06, |
| "loss": 0.6082, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.0528, |
| "grad_norm": 1.1247636014697693, |
| "learning_rate": 1.7131474103585658e-06, |
| "loss": 0.5601, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.054, |
| "grad_norm": 1.41137183436536, |
| "learning_rate": 1.7529880478087652e-06, |
| "loss": 0.6557, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.0552, |
| "grad_norm": 1.052690589137755, |
| "learning_rate": 1.7928286852589644e-06, |
| "loss": 0.5787, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.0564, |
| "grad_norm": 1.0601978966012295, |
| "learning_rate": 1.8326693227091634e-06, |
| "loss": 0.5694, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.0576, |
| "grad_norm": 1.1070630189694626, |
| "learning_rate": 1.8725099601593627e-06, |
| "loss": 0.5738, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.0588, |
| "grad_norm": 0.9889388769001813, |
| "learning_rate": 1.9123505976095617e-06, |
| "loss": 0.6186, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 1.034868436810796, |
| "learning_rate": 1.952191235059761e-06, |
| "loss": 0.5675, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.0612, |
| "grad_norm": 0.9562269599632713, |
| "learning_rate": 1.99203187250996e-06, |
| "loss": 0.5942, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.0624, |
| "grad_norm": 0.9684662774229134, |
| "learning_rate": 2.03187250996016e-06, |
| "loss": 0.6069, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.0636, |
| "grad_norm": 0.8597378190067636, |
| "learning_rate": 2.071713147410359e-06, |
| "loss": 0.5945, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.0648, |
| "grad_norm": 0.8824670909191019, |
| "learning_rate": 2.111553784860558e-06, |
| "loss": 0.5827, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.066, |
| "grad_norm": 0.8654040011891053, |
| "learning_rate": 2.151394422310757e-06, |
| "loss": 0.5595, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.0672, |
| "grad_norm": 0.8364907260530519, |
| "learning_rate": 2.1912350597609563e-06, |
| "loss": 0.5796, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.0684, |
| "grad_norm": 1.1546546198663707, |
| "learning_rate": 2.2310756972111555e-06, |
| "loss": 0.5941, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.0696, |
| "grad_norm": 0.9420669315033214, |
| "learning_rate": 2.2709163346613547e-06, |
| "loss": 0.5198, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.0708, |
| "grad_norm": 0.8528290907483634, |
| "learning_rate": 2.310756972111554e-06, |
| "loss": 0.552, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.072, |
| "grad_norm": 0.687703387092168, |
| "learning_rate": 2.350597609561753e-06, |
| "loss": 0.5594, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.0732, |
| "grad_norm": 0.8167294442985146, |
| "learning_rate": 2.3904382470119524e-06, |
| "loss": 0.5207, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.0744, |
| "grad_norm": 0.7156708048117644, |
| "learning_rate": 2.4302788844621517e-06, |
| "loss": 0.5407, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.0756, |
| "grad_norm": 0.7154050872369027, |
| "learning_rate": 2.470119521912351e-06, |
| "loss": 0.5025, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.0768, |
| "grad_norm": 0.7943440935347835, |
| "learning_rate": 2.50996015936255e-06, |
| "loss": 0.5001, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.078, |
| "grad_norm": 0.7903112896611016, |
| "learning_rate": 2.5498007968127493e-06, |
| "loss": 0.5137, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.0792, |
| "grad_norm": 0.8102722270709948, |
| "learning_rate": 2.589641434262948e-06, |
| "loss": 0.556, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.0804, |
| "grad_norm": 0.7763350160286278, |
| "learning_rate": 2.6294820717131474e-06, |
| "loss": 0.5618, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.0816, |
| "grad_norm": 0.682413072872721, |
| "learning_rate": 2.6693227091633466e-06, |
| "loss": 0.4881, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.0828, |
| "grad_norm": 0.737478801790203, |
| "learning_rate": 2.709163346613546e-06, |
| "loss": 0.5233, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.084, |
| "grad_norm": 0.6670536824914544, |
| "learning_rate": 2.749003984063745e-06, |
| "loss": 0.5001, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.0852, |
| "grad_norm": 0.7172377448602175, |
| "learning_rate": 2.7888446215139443e-06, |
| "loss": 0.4963, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.0864, |
| "grad_norm": 0.7354871896590247, |
| "learning_rate": 2.828685258964144e-06, |
| "loss": 0.5183, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.0876, |
| "grad_norm": 0.7366911418089935, |
| "learning_rate": 2.868525896414343e-06, |
| "loss": 0.5217, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.0888, |
| "grad_norm": 0.7134400568775913, |
| "learning_rate": 2.9083665338645424e-06, |
| "loss": 0.4876, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.09, |
| "grad_norm": 0.6927096104174592, |
| "learning_rate": 2.9482071713147416e-06, |
| "loss": 0.4966, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.0912, |
| "grad_norm": 0.8230762404086284, |
| "learning_rate": 2.9880478087649404e-06, |
| "loss": 0.5208, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.0924, |
| "grad_norm": 0.6292358040686521, |
| "learning_rate": 3.0278884462151397e-06, |
| "loss": 0.4882, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.0936, |
| "grad_norm": 0.6147701154442085, |
| "learning_rate": 3.067729083665339e-06, |
| "loss": 0.5072, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.0948, |
| "grad_norm": 0.676809508051408, |
| "learning_rate": 3.107569721115538e-06, |
| "loss": 0.5037, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.096, |
| "grad_norm": 0.725489569911828, |
| "learning_rate": 3.1474103585657373e-06, |
| "loss": 0.5643, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.0972, |
| "grad_norm": 0.7145147926010281, |
| "learning_rate": 3.1872509960159366e-06, |
| "loss": 0.497, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.0984, |
| "grad_norm": 0.7170590419899483, |
| "learning_rate": 3.227091633466136e-06, |
| "loss": 0.5332, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.0996, |
| "grad_norm": 0.7150216583981881, |
| "learning_rate": 3.2669322709163346e-06, |
| "loss": 0.529, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.1008, |
| "grad_norm": 0.8613846664442866, |
| "learning_rate": 3.306772908366534e-06, |
| "loss": 0.5657, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.102, |
| "grad_norm": 0.6494754319315506, |
| "learning_rate": 3.346613545816733e-06, |
| "loss": 0.5012, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.1032, |
| "grad_norm": 0.6471011282897726, |
| "learning_rate": 3.3864541832669323e-06, |
| "loss": 0.5433, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.1044, |
| "grad_norm": 0.750892718766989, |
| "learning_rate": 3.4262948207171315e-06, |
| "loss": 0.492, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.1056, |
| "grad_norm": 0.687548849485851, |
| "learning_rate": 3.466135458167331e-06, |
| "loss": 0.5056, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.1068, |
| "grad_norm": 0.5756351359995551, |
| "learning_rate": 3.5059760956175304e-06, |
| "loss": 0.5009, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.108, |
| "grad_norm": 0.553648661965167, |
| "learning_rate": 3.5458167330677296e-06, |
| "loss": 0.5012, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.1092, |
| "grad_norm": 0.6913182105350174, |
| "learning_rate": 3.585657370517929e-06, |
| "loss": 0.5025, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.1104, |
| "grad_norm": 0.6951766377383839, |
| "learning_rate": 3.625498007968128e-06, |
| "loss": 0.4856, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.1116, |
| "grad_norm": 0.8777499137224892, |
| "learning_rate": 3.665338645418327e-06, |
| "loss": 0.5467, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.1128, |
| "grad_norm": 0.6508644020236586, |
| "learning_rate": 3.705179282868526e-06, |
| "loss": 0.5167, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.114, |
| "grad_norm": 0.7808006683075185, |
| "learning_rate": 3.7450199203187254e-06, |
| "loss": 0.5321, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.1152, |
| "grad_norm": 0.7388512378176182, |
| "learning_rate": 3.7848605577689246e-06, |
| "loss": 0.4805, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.1164, |
| "grad_norm": 0.6408885474401719, |
| "learning_rate": 3.824701195219123e-06, |
| "loss": 0.5082, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.1176, |
| "grad_norm": 0.69317399782124, |
| "learning_rate": 3.864541832669323e-06, |
| "loss": 0.4837, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.1188, |
| "grad_norm": 0.6576371657994416, |
| "learning_rate": 3.904382470119522e-06, |
| "loss": 0.5152, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 0.6828846265642615, |
| "learning_rate": 3.9442231075697215e-06, |
| "loss": 0.4694, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.1212, |
| "grad_norm": 0.7013083213096467, |
| "learning_rate": 3.98406374501992e-06, |
| "loss": 0.4717, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.1224, |
| "grad_norm": 0.7085053688033561, |
| "learning_rate": 4.02390438247012e-06, |
| "loss": 0.5022, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.1236, |
| "grad_norm": 0.622698153537725, |
| "learning_rate": 4.06374501992032e-06, |
| "loss": 0.472, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.1248, |
| "grad_norm": 0.6203547767857722, |
| "learning_rate": 4.103585657370518e-06, |
| "loss": 0.5088, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.126, |
| "grad_norm": 0.650054802792842, |
| "learning_rate": 4.143426294820718e-06, |
| "loss": 0.4799, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.1272, |
| "grad_norm": 0.8174829915460686, |
| "learning_rate": 4.183266932270917e-06, |
| "loss": 0.5355, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.1284, |
| "grad_norm": 0.7598310150274463, |
| "learning_rate": 4.223107569721116e-06, |
| "loss": 0.4735, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.1296, |
| "grad_norm": 0.6175762629568949, |
| "learning_rate": 4.262948207171315e-06, |
| "loss": 0.4794, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.1308, |
| "grad_norm": 0.6900127333389381, |
| "learning_rate": 4.302788844621514e-06, |
| "loss": 0.5021, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.132, |
| "grad_norm": 0.7055276817230187, |
| "learning_rate": 4.342629482071714e-06, |
| "loss": 0.4629, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.1332, |
| "grad_norm": 0.7842646328067162, |
| "learning_rate": 4.382470119521913e-06, |
| "loss": 0.488, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.1344, |
| "grad_norm": 0.5941563107721001, |
| "learning_rate": 4.422310756972112e-06, |
| "loss": 0.4486, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.1356, |
| "grad_norm": 0.7237315324309976, |
| "learning_rate": 4.462151394422311e-06, |
| "loss": 0.4986, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.1368, |
| "grad_norm": 0.7257207423897334, |
| "learning_rate": 4.50199203187251e-06, |
| "loss": 0.5096, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.138, |
| "grad_norm": 0.7465709953170436, |
| "learning_rate": 4.5418326693227095e-06, |
| "loss": 0.5142, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.1392, |
| "grad_norm": 0.732912113525707, |
| "learning_rate": 4.581673306772908e-06, |
| "loss": 0.461, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.1404, |
| "grad_norm": 0.7210355960852634, |
| "learning_rate": 4.621513944223108e-06, |
| "loss": 0.4912, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.1416, |
| "grad_norm": 0.7445245458464089, |
| "learning_rate": 4.661354581673307e-06, |
| "loss": 0.4772, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.1428, |
| "grad_norm": 0.7799623601896731, |
| "learning_rate": 4.701195219123506e-06, |
| "loss": 0.4767, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.144, |
| "grad_norm": 0.7129943430352418, |
| "learning_rate": 4.741035856573706e-06, |
| "loss": 0.4656, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.1452, |
| "grad_norm": 0.7461185079641656, |
| "learning_rate": 4.780876494023905e-06, |
| "loss": 0.4931, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.1464, |
| "grad_norm": 0.7611752287702312, |
| "learning_rate": 4.8207171314741045e-06, |
| "loss": 0.5384, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.1476, |
| "grad_norm": 0.7308153552924613, |
| "learning_rate": 4.860557768924303e-06, |
| "loss": 0.5044, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.1488, |
| "grad_norm": 0.7504274125405789, |
| "learning_rate": 4.900398406374502e-06, |
| "loss": 0.4739, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.15, |
| "grad_norm": 0.770948215484845, |
| "learning_rate": 4.940239043824702e-06, |
| "loss": 0.4838, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.1512, |
| "grad_norm": 0.6552840020750029, |
| "learning_rate": 4.980079681274901e-06, |
| "loss": 0.4546, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.1524, |
| "grad_norm": 0.6499796185675982, |
| "learning_rate": 5.0199203187251e-06, |
| "loss": 0.4281, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.1536, |
| "grad_norm": 0.7061114170956655, |
| "learning_rate": 5.059760956175299e-06, |
| "loss": 0.4866, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.1548, |
| "grad_norm": 0.6691189853551326, |
| "learning_rate": 5.099601593625499e-06, |
| "loss": 0.4656, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.156, |
| "grad_norm": 0.6054265360566154, |
| "learning_rate": 5.1394422310756975e-06, |
| "loss": 0.4613, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.1572, |
| "grad_norm": 0.7027909113103062, |
| "learning_rate": 5.179282868525896e-06, |
| "loss": 0.4839, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.1584, |
| "grad_norm": 0.7587486957676393, |
| "learning_rate": 5.219123505976096e-06, |
| "loss": 0.4712, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.1596, |
| "grad_norm": 0.6686724879335956, |
| "learning_rate": 5.258964143426295e-06, |
| "loss": 0.4807, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.1608, |
| "grad_norm": 0.7646309087614517, |
| "learning_rate": 5.298804780876494e-06, |
| "loss": 0.5157, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.162, |
| "grad_norm": 0.7313825683191312, |
| "learning_rate": 5.338645418326693e-06, |
| "loss": 0.4933, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.1632, |
| "grad_norm": 0.6627767030401844, |
| "learning_rate": 5.378486055776893e-06, |
| "loss": 0.4536, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.1644, |
| "grad_norm": 0.6888832810971692, |
| "learning_rate": 5.418326693227092e-06, |
| "loss": 0.529, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.1656, |
| "grad_norm": 0.6727192064732628, |
| "learning_rate": 5.4581673306772905e-06, |
| "loss": 0.4668, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.1668, |
| "grad_norm": 0.6712098985581779, |
| "learning_rate": 5.49800796812749e-06, |
| "loss": 0.5003, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.168, |
| "grad_norm": 0.7449187354193221, |
| "learning_rate": 5.537848605577689e-06, |
| "loss": 0.4908, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.1692, |
| "grad_norm": 0.6855809836685066, |
| "learning_rate": 5.577689243027889e-06, |
| "loss": 0.4957, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.1704, |
| "grad_norm": 0.7057372872081618, |
| "learning_rate": 5.617529880478087e-06, |
| "loss": 0.4927, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.1716, |
| "grad_norm": 0.7145902018121971, |
| "learning_rate": 5.657370517928288e-06, |
| "loss": 0.4798, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.1728, |
| "grad_norm": 0.7178330938670102, |
| "learning_rate": 5.697211155378487e-06, |
| "loss": 0.487, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.174, |
| "grad_norm": 0.6176066325572566, |
| "learning_rate": 5.737051792828686e-06, |
| "loss": 0.4484, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.1752, |
| "grad_norm": 0.699828650914944, |
| "learning_rate": 5.776892430278885e-06, |
| "loss": 0.5061, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.1764, |
| "grad_norm": 0.6256805259332396, |
| "learning_rate": 5.816733067729085e-06, |
| "loss": 0.4718, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.1776, |
| "grad_norm": 0.671096546707992, |
| "learning_rate": 5.856573705179284e-06, |
| "loss": 0.4496, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.1788, |
| "grad_norm": 0.6365351919676361, |
| "learning_rate": 5.896414342629483e-06, |
| "loss": 0.4771, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 0.7462898845468449, |
| "learning_rate": 5.936254980079682e-06, |
| "loss": 0.4875, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.1812, |
| "grad_norm": 0.7734601052023893, |
| "learning_rate": 5.976095617529881e-06, |
| "loss": 0.4913, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.1824, |
| "grad_norm": 0.6552628757396356, |
| "learning_rate": 6.0159362549800805e-06, |
| "loss": 0.4287, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.1836, |
| "grad_norm": 0.7160128628436617, |
| "learning_rate": 6.055776892430279e-06, |
| "loss": 0.5023, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.1848, |
| "grad_norm": 0.6975627744114324, |
| "learning_rate": 6.095617529880479e-06, |
| "loss": 0.5021, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.186, |
| "grad_norm": 0.6958244479341551, |
| "learning_rate": 6.135458167330678e-06, |
| "loss": 0.5021, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.1872, |
| "grad_norm": 0.7162813897115381, |
| "learning_rate": 6.1752988047808774e-06, |
| "loss": 0.4672, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.1884, |
| "grad_norm": 0.7789415750652668, |
| "learning_rate": 6.215139442231076e-06, |
| "loss": 0.4896, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.1896, |
| "grad_norm": 0.7168463214778908, |
| "learning_rate": 6.254980079681275e-06, |
| "loss": 0.4777, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.1908, |
| "grad_norm": 0.8135516887242896, |
| "learning_rate": 6.294820717131475e-06, |
| "loss": 0.4925, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.192, |
| "grad_norm": 0.6803879928943569, |
| "learning_rate": 6.3346613545816735e-06, |
| "loss": 0.4055, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.1932, |
| "grad_norm": 0.7986039712293413, |
| "learning_rate": 6.374501992031873e-06, |
| "loss": 0.4986, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.1944, |
| "grad_norm": 0.820290402644595, |
| "learning_rate": 6.414342629482072e-06, |
| "loss": 0.5198, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.1956, |
| "grad_norm": 0.7151437249829041, |
| "learning_rate": 6.454183266932272e-06, |
| "loss": 0.4781, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.1968, |
| "grad_norm": 0.7216410394435034, |
| "learning_rate": 6.49402390438247e-06, |
| "loss": 0.4695, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.198, |
| "grad_norm": 0.811009148225086, |
| "learning_rate": 6.533864541832669e-06, |
| "loss": 0.5092, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.1992, |
| "grad_norm": 0.7295087084663571, |
| "learning_rate": 6.573705179282869e-06, |
| "loss": 0.443, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.2004, |
| "grad_norm": 0.7762881643704093, |
| "learning_rate": 6.613545816733068e-06, |
| "loss": 0.463, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.2016, |
| "grad_norm": 0.6598351418548212, |
| "learning_rate": 6.653386454183267e-06, |
| "loss": 0.4849, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.2028, |
| "grad_norm": 0.6153969557744298, |
| "learning_rate": 6.693227091633466e-06, |
| "loss": 0.4856, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.204, |
| "grad_norm": 0.7406116382206025, |
| "learning_rate": 6.733067729083666e-06, |
| "loss": 0.5247, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.2052, |
| "grad_norm": 0.7368196241047612, |
| "learning_rate": 6.772908366533865e-06, |
| "loss": 0.4725, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.2064, |
| "grad_norm": 0.8195250297003582, |
| "learning_rate": 6.812749003984063e-06, |
| "loss": 0.4776, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.2076, |
| "grad_norm": 0.7902293780066731, |
| "learning_rate": 6.852589641434263e-06, |
| "loss": 0.4758, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.2088, |
| "grad_norm": 0.6707040821942747, |
| "learning_rate": 6.8924302788844635e-06, |
| "loss": 0.4627, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.21, |
| "grad_norm": 0.7130948005327684, |
| "learning_rate": 6.932270916334662e-06, |
| "loss": 0.5399, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.2112, |
| "grad_norm": 0.6547820529997068, |
| "learning_rate": 6.972111553784862e-06, |
| "loss": 0.4766, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.2124, |
| "grad_norm": 0.7146947814036898, |
| "learning_rate": 7.011952191235061e-06, |
| "loss": 0.4411, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.2136, |
| "grad_norm": 0.7396734814075536, |
| "learning_rate": 7.05179282868526e-06, |
| "loss": 0.4819, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.2148, |
| "grad_norm": 0.9240207511689131, |
| "learning_rate": 7.091633466135459e-06, |
| "loss": 0.5158, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.216, |
| "grad_norm": 0.6587494748649532, |
| "learning_rate": 7.131474103585658e-06, |
| "loss": 0.4513, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.2172, |
| "grad_norm": 0.7451455583479791, |
| "learning_rate": 7.171314741035858e-06, |
| "loss": 0.5298, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.2184, |
| "grad_norm": 0.7183108926930426, |
| "learning_rate": 7.2111553784860565e-06, |
| "loss": 0.455, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.2196, |
| "grad_norm": 0.7294845173799966, |
| "learning_rate": 7.250996015936256e-06, |
| "loss": 0.5282, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.2208, |
| "grad_norm": 0.7829206050862241, |
| "learning_rate": 7.290836653386455e-06, |
| "loss": 0.4999, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.222, |
| "grad_norm": 0.8144601373674129, |
| "learning_rate": 7.330677290836654e-06, |
| "loss": 0.4479, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.2232, |
| "grad_norm": 0.686434544081989, |
| "learning_rate": 7.3705179282868534e-06, |
| "loss": 0.4779, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.2244, |
| "grad_norm": 0.7662036672164863, |
| "learning_rate": 7.410358565737052e-06, |
| "loss": 0.4406, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.2256, |
| "grad_norm": 0.7372149806305216, |
| "learning_rate": 7.450199203187252e-06, |
| "loss": 0.4739, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.2268, |
| "grad_norm": 0.7018904889059249, |
| "learning_rate": 7.490039840637451e-06, |
| "loss": 0.4739, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.228, |
| "grad_norm": 0.8034958475921105, |
| "learning_rate": 7.52988047808765e-06, |
| "loss": 0.4871, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.2292, |
| "grad_norm": 0.6664145984053934, |
| "learning_rate": 7.569721115537849e-06, |
| "loss": 0.4785, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.2304, |
| "grad_norm": 0.8349490725943484, |
| "learning_rate": 7.609561752988048e-06, |
| "loss": 0.4728, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.2316, |
| "grad_norm": 0.8371769404608206, |
| "learning_rate": 7.649402390438247e-06, |
| "loss": 0.49, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.2328, |
| "grad_norm": 0.6234603463023238, |
| "learning_rate": 7.689243027888446e-06, |
| "loss": 0.4656, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.234, |
| "grad_norm": 0.7514666066051947, |
| "learning_rate": 7.729083665338646e-06, |
| "loss": 0.4555, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.2352, |
| "grad_norm": 0.7598762484166676, |
| "learning_rate": 7.768924302788846e-06, |
| "loss": 0.522, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.2364, |
| "grad_norm": 0.6424752977780204, |
| "learning_rate": 7.808764940239044e-06, |
| "loss": 0.4565, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.2376, |
| "grad_norm": 0.755381522376294, |
| "learning_rate": 7.848605577689243e-06, |
| "loss": 0.5013, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.2388, |
| "grad_norm": 0.6978248462713278, |
| "learning_rate": 7.888446215139443e-06, |
| "loss": 0.4662, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 0.5877441481282527, |
| "learning_rate": 7.928286852589641e-06, |
| "loss": 0.4795, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.2412, |
| "grad_norm": 0.6401561354692661, |
| "learning_rate": 7.96812749003984e-06, |
| "loss": 0.4533, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.2424, |
| "grad_norm": 0.7088942657140168, |
| "learning_rate": 8.00796812749004e-06, |
| "loss": 0.4473, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.2436, |
| "grad_norm": 0.7601768924017626, |
| "learning_rate": 8.04780876494024e-06, |
| "loss": 0.4763, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.2448, |
| "grad_norm": 0.7125101036178078, |
| "learning_rate": 8.087649402390438e-06, |
| "loss": 0.4672, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.246, |
| "grad_norm": 0.8043890918009915, |
| "learning_rate": 8.12749003984064e-06, |
| "loss": 0.437, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.2472, |
| "grad_norm": 0.6323381607535531, |
| "learning_rate": 8.167330677290837e-06, |
| "loss": 0.4562, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.2484, |
| "grad_norm": 0.6219753280994653, |
| "learning_rate": 8.207171314741037e-06, |
| "loss": 0.5002, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.2496, |
| "grad_norm": 0.6933892431644402, |
| "learning_rate": 8.247011952191236e-06, |
| "loss": 0.4716, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.2508, |
| "grad_norm": 0.7369573747691623, |
| "learning_rate": 8.286852589641436e-06, |
| "loss": 0.4511, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.252, |
| "grad_norm": 0.7463770460556426, |
| "learning_rate": 8.326693227091634e-06, |
| "loss": 0.5064, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.2532, |
| "grad_norm": 0.7244915397995428, |
| "learning_rate": 8.366533864541834e-06, |
| "loss": 0.4673, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.2544, |
| "grad_norm": 0.8064801832473215, |
| "learning_rate": 8.406374501992033e-06, |
| "loss": 0.5211, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.2556, |
| "grad_norm": 0.7652570679960387, |
| "learning_rate": 8.446215139442231e-06, |
| "loss": 0.5202, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.2568, |
| "grad_norm": 0.7589679248842585, |
| "learning_rate": 8.486055776892431e-06, |
| "loss": 0.4553, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.258, |
| "grad_norm": 0.7966754694324576, |
| "learning_rate": 8.52589641434263e-06, |
| "loss": 0.5175, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.2592, |
| "grad_norm": 0.7377325050844753, |
| "learning_rate": 8.56573705179283e-06, |
| "loss": 0.4496, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.2604, |
| "grad_norm": 0.7749878316688397, |
| "learning_rate": 8.605577689243028e-06, |
| "loss": 0.4658, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.2616, |
| "grad_norm": 0.6787616893433324, |
| "learning_rate": 8.645418326693228e-06, |
| "loss": 0.4414, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.2628, |
| "grad_norm": 0.7222976534056557, |
| "learning_rate": 8.685258964143428e-06, |
| "loss": 0.4508, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.264, |
| "grad_norm": 0.8075552569283245, |
| "learning_rate": 8.725099601593626e-06, |
| "loss": 0.4243, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.2652, |
| "grad_norm": 0.6491598307437855, |
| "learning_rate": 8.764940239043825e-06, |
| "loss": 0.4997, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.2664, |
| "grad_norm": 0.6844778520577768, |
| "learning_rate": 8.804780876494025e-06, |
| "loss": 0.4858, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.2676, |
| "grad_norm": 0.7299226965380259, |
| "learning_rate": 8.844621513944224e-06, |
| "loss": 0.4667, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.2688, |
| "grad_norm": 0.7229205916891049, |
| "learning_rate": 8.884462151394422e-06, |
| "loss": 0.4917, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.27, |
| "grad_norm": 0.748705120455487, |
| "learning_rate": 8.924302788844622e-06, |
| "loss": 0.4817, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.2712, |
| "grad_norm": 0.677971001063236, |
| "learning_rate": 8.964143426294822e-06, |
| "loss": 0.4709, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.2724, |
| "grad_norm": 0.7616943137332174, |
| "learning_rate": 9.00398406374502e-06, |
| "loss": 0.4808, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.2736, |
| "grad_norm": 0.7143523129028485, |
| "learning_rate": 9.04382470119522e-06, |
| "loss": 0.412, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.2748, |
| "grad_norm": 0.8915146206619335, |
| "learning_rate": 9.083665338645419e-06, |
| "loss": 0.5016, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.276, |
| "grad_norm": 0.7424475381270349, |
| "learning_rate": 9.123505976095619e-06, |
| "loss": 0.4243, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.2772, |
| "grad_norm": 0.7559501710877834, |
| "learning_rate": 9.163346613545817e-06, |
| "loss": 0.458, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.2784, |
| "grad_norm": 0.7142866700220379, |
| "learning_rate": 9.203187250996016e-06, |
| "loss": 0.4185, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.2796, |
| "grad_norm": 0.7452270504572424, |
| "learning_rate": 9.243027888446216e-06, |
| "loss": 0.4892, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.2808, |
| "grad_norm": 0.8804844254437856, |
| "learning_rate": 9.282868525896414e-06, |
| "loss": 0.5244, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.282, |
| "grad_norm": 0.8102009036568012, |
| "learning_rate": 9.322709163346614e-06, |
| "loss": 0.5066, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.2832, |
| "grad_norm": 0.7933963116447116, |
| "learning_rate": 9.362549800796813e-06, |
| "loss": 0.4986, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.2844, |
| "grad_norm": 0.7483634628646818, |
| "learning_rate": 9.402390438247013e-06, |
| "loss": 0.5034, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.2856, |
| "grad_norm": 0.8277784663677117, |
| "learning_rate": 9.442231075697212e-06, |
| "loss": 0.5524, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.2868, |
| "grad_norm": 0.6631534504892171, |
| "learning_rate": 9.482071713147412e-06, |
| "loss": 0.4541, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.288, |
| "grad_norm": 0.78921647058052, |
| "learning_rate": 9.52191235059761e-06, |
| "loss": 0.4768, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.2892, |
| "grad_norm": 0.8301076778286669, |
| "learning_rate": 9.56175298804781e-06, |
| "loss": 0.4848, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.2904, |
| "grad_norm": 0.7697646502273703, |
| "learning_rate": 9.60159362549801e-06, |
| "loss": 0.4503, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.2916, |
| "grad_norm": 0.6985190325570851, |
| "learning_rate": 9.641434262948209e-06, |
| "loss": 0.4828, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.2928, |
| "grad_norm": 0.7025564878021328, |
| "learning_rate": 9.681274900398407e-06, |
| "loss": 0.438, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.294, |
| "grad_norm": 0.7620206097667034, |
| "learning_rate": 9.721115537848607e-06, |
| "loss": 0.43, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.2952, |
| "grad_norm": 0.7424593029148787, |
| "learning_rate": 9.760956175298806e-06, |
| "loss": 0.4403, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.2964, |
| "grad_norm": 0.7360561757895007, |
| "learning_rate": 9.800796812749004e-06, |
| "loss": 0.4622, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.2976, |
| "grad_norm": 0.8243384149245737, |
| "learning_rate": 9.840637450199204e-06, |
| "loss": 0.4751, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.2988, |
| "grad_norm": 0.8199288515333606, |
| "learning_rate": 9.880478087649404e-06, |
| "loss": 0.462, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 0.7221117049284725, |
| "learning_rate": 9.920318725099603e-06, |
| "loss": 0.458, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.3012, |
| "grad_norm": 0.7821380388196548, |
| "learning_rate": 9.960159362549801e-06, |
| "loss": 0.471, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.3024, |
| "grad_norm": 0.726002114022043, |
| "learning_rate": 1e-05, |
| "loss": 0.4695, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.3036, |
| "grad_norm": 0.8286938369018219, |
| "learning_rate": 9.999995130451526e-06, |
| "loss": 0.4656, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.3048, |
| "grad_norm": 0.7564878742577376, |
| "learning_rate": 9.999980521815582e-06, |
| "loss": 0.4948, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.306, |
| "grad_norm": 0.7000729873027478, |
| "learning_rate": 9.999956174120626e-06, |
| "loss": 0.4332, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.3072, |
| "grad_norm": 0.8115383428570657, |
| "learning_rate": 9.999922087414084e-06, |
| "loss": 0.4798, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.3084, |
| "grad_norm": 0.7624880545561682, |
| "learning_rate": 9.99987826176235e-06, |
| "loss": 0.4566, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.3096, |
| "grad_norm": 0.6533963701995729, |
| "learning_rate": 9.999824697250786e-06, |
| "loss": 0.4639, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.3108, |
| "grad_norm": 0.787105570960542, |
| "learning_rate": 9.999761393983728e-06, |
| "loss": 0.4788, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.312, |
| "grad_norm": 0.7293361363854345, |
| "learning_rate": 9.999688352084482e-06, |
| "loss": 0.4786, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.3132, |
| "grad_norm": 0.6894092015944603, |
| "learning_rate": 9.999605571695317e-06, |
| "loss": 0.4621, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.3144, |
| "grad_norm": 0.7674479288861019, |
| "learning_rate": 9.999513052977473e-06, |
| "loss": 0.4585, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.3156, |
| "grad_norm": 0.6382731142387786, |
| "learning_rate": 9.999410796111163e-06, |
| "loss": 0.4536, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.3168, |
| "grad_norm": 0.662827100843487, |
| "learning_rate": 9.999298801295564e-06, |
| "loss": 0.453, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.318, |
| "grad_norm": 0.6997882828244627, |
| "learning_rate": 9.99917706874882e-06, |
| "loss": 0.4207, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.3192, |
| "grad_norm": 0.6673329177111227, |
| "learning_rate": 9.999045598708047e-06, |
| "loss": 0.4303, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.3204, |
| "grad_norm": 0.7751069968264359, |
| "learning_rate": 9.998904391429323e-06, |
| "loss": 0.4857, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.3216, |
| "grad_norm": 0.6924789628677505, |
| "learning_rate": 9.998753447187693e-06, |
| "loss": 0.4352, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.3228, |
| "grad_norm": 0.6780594398982913, |
| "learning_rate": 9.998592766277173e-06, |
| "loss": 0.4944, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.324, |
| "grad_norm": 0.665053499034254, |
| "learning_rate": 9.998422349010736e-06, |
| "loss": 0.4452, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.3252, |
| "grad_norm": 0.6702681281204724, |
| "learning_rate": 9.998242195720327e-06, |
| "loss": 0.4325, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.3264, |
| "grad_norm": 0.726150711484855, |
| "learning_rate": 9.998052306756852e-06, |
| "loss": 0.5118, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.3276, |
| "grad_norm": 0.6717001483141275, |
| "learning_rate": 9.997852682490179e-06, |
| "loss": 0.5304, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.3288, |
| "grad_norm": 0.7846459341475974, |
| "learning_rate": 9.997643323309139e-06, |
| "loss": 0.4412, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.33, |
| "grad_norm": 0.6081818827878402, |
| "learning_rate": 9.997424229621529e-06, |
| "loss": 0.4469, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.3312, |
| "grad_norm": 0.7262395716436079, |
| "learning_rate": 9.997195401854102e-06, |
| "loss": 0.4597, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.3324, |
| "grad_norm": 0.7600492053225586, |
| "learning_rate": 9.996956840452573e-06, |
| "loss": 0.4595, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.3336, |
| "grad_norm": 0.8932083636988103, |
| "learning_rate": 9.996708545881617e-06, |
| "loss": 0.5023, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.3348, |
| "grad_norm": 0.8005056158540009, |
| "learning_rate": 9.996450518624868e-06, |
| "loss": 0.5398, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.336, |
| "grad_norm": 0.6480206111944329, |
| "learning_rate": 9.996182759184916e-06, |
| "loss": 0.4722, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.3372, |
| "grad_norm": 0.7959325904926813, |
| "learning_rate": 9.995905268083306e-06, |
| "loss": 0.4481, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.3384, |
| "grad_norm": 0.7204153062584008, |
| "learning_rate": 9.995618045860545e-06, |
| "loss": 0.4842, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.3396, |
| "grad_norm": 0.72854290136139, |
| "learning_rate": 9.995321093076085e-06, |
| "loss": 0.4987, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.3408, |
| "grad_norm": 0.897424392822921, |
| "learning_rate": 9.995014410308336e-06, |
| "loss": 0.4771, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.342, |
| "grad_norm": 0.6147300343692838, |
| "learning_rate": 9.994697998154668e-06, |
| "loss": 0.4521, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.3432, |
| "grad_norm": 0.6948770085824113, |
| "learning_rate": 9.994371857231388e-06, |
| "loss": 0.4669, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.3444, |
| "grad_norm": 0.6152586871071891, |
| "learning_rate": 9.99403598817376e-06, |
| "loss": 0.4563, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.3456, |
| "grad_norm": 0.6314128897864619, |
| "learning_rate": 9.993690391636e-06, |
| "loss": 0.4731, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.3468, |
| "grad_norm": 0.7829857719356524, |
| "learning_rate": 9.993335068291264e-06, |
| "loss": 0.4622, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.348, |
| "grad_norm": 0.6737044799083258, |
| "learning_rate": 9.99297001883166e-06, |
| "loss": 0.4813, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.3492, |
| "grad_norm": 0.5917541108922811, |
| "learning_rate": 9.992595243968238e-06, |
| "loss": 0.4081, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.3504, |
| "grad_norm": 0.6643962397169046, |
| "learning_rate": 9.99221074443099e-06, |
| "loss": 0.4286, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.3516, |
| "grad_norm": 0.7065581838104028, |
| "learning_rate": 9.991816520968853e-06, |
| "loss": 0.4936, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.3528, |
| "grad_norm": 0.75814172453645, |
| "learning_rate": 9.991412574349704e-06, |
| "loss": 0.4056, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.354, |
| "grad_norm": 0.8194643924909449, |
| "learning_rate": 9.990998905360357e-06, |
| "loss": 0.5801, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.3552, |
| "grad_norm": 0.763485244925637, |
| "learning_rate": 9.990575514806563e-06, |
| "loss": 0.5059, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.3564, |
| "grad_norm": 0.7735447262387536, |
| "learning_rate": 9.990142403513012e-06, |
| "loss": 0.4766, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.3576, |
| "grad_norm": 0.7264381504889462, |
| "learning_rate": 9.989699572323328e-06, |
| "loss": 0.4333, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.3588, |
| "grad_norm": 0.7271709281599659, |
| "learning_rate": 9.989247022100065e-06, |
| "loss": 0.4666, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 0.8211659316661323, |
| "learning_rate": 9.988784753724707e-06, |
| "loss": 0.5099, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.3612, |
| "grad_norm": 0.659254969233651, |
| "learning_rate": 9.988312768097673e-06, |
| "loss": 0.4643, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.3624, |
| "grad_norm": 0.7484170942453734, |
| "learning_rate": 9.987831066138302e-06, |
| "loss": 0.5009, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.3636, |
| "grad_norm": 0.7864892723792284, |
| "learning_rate": 9.987339648784866e-06, |
| "loss": 0.4203, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.3648, |
| "grad_norm": 0.6463979301616607, |
| "learning_rate": 9.986838516994555e-06, |
| "loss": 0.4561, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.366, |
| "grad_norm": 0.6423447978300654, |
| "learning_rate": 9.986327671743484e-06, |
| "loss": 0.4574, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.3672, |
| "grad_norm": 0.7542345067080994, |
| "learning_rate": 9.985807114026684e-06, |
| "loss": 0.4578, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.3684, |
| "grad_norm": 0.7439657684658759, |
| "learning_rate": 9.985276844858114e-06, |
| "loss": 0.4514, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.3696, |
| "grad_norm": 0.6667505301910438, |
| "learning_rate": 9.984736865270637e-06, |
| "loss": 0.4599, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.3708, |
| "grad_norm": 0.8943884014425674, |
| "learning_rate": 9.984187176316038e-06, |
| "loss": 0.4623, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.372, |
| "grad_norm": 0.696770872305506, |
| "learning_rate": 9.983627779065012e-06, |
| "loss": 0.4571, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.3732, |
| "grad_norm": 0.7386939862481439, |
| "learning_rate": 9.983058674607164e-06, |
| "loss": 0.5217, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.3744, |
| "grad_norm": 0.696077560396179, |
| "learning_rate": 9.982479864051005e-06, |
| "loss": 0.4162, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.3756, |
| "grad_norm": 0.8530728224344589, |
| "learning_rate": 9.981891348523955e-06, |
| "loss": 0.4728, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.3768, |
| "grad_norm": 0.771830568256366, |
| "learning_rate": 9.981293129172334e-06, |
| "loss": 0.485, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.378, |
| "grad_norm": 0.7063337203590513, |
| "learning_rate": 9.980685207161368e-06, |
| "loss": 0.4422, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.3792, |
| "grad_norm": 0.7224325864884874, |
| "learning_rate": 9.980067583675177e-06, |
| "loss": 0.4256, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.3804, |
| "grad_norm": 0.6246003822583702, |
| "learning_rate": 9.979440259916782e-06, |
| "loss": 0.4678, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.3816, |
| "grad_norm": 0.7559983887815412, |
| "learning_rate": 9.978803237108095e-06, |
| "loss": 0.4484, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.3828, |
| "grad_norm": 0.6978566836204545, |
| "learning_rate": 9.97815651648992e-06, |
| "loss": 0.4573, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.384, |
| "grad_norm": 0.6182832620711831, |
| "learning_rate": 9.977500099321956e-06, |
| "loss": 0.4565, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.3852, |
| "grad_norm": 0.7203458615299193, |
| "learning_rate": 9.97683398688278e-06, |
| "loss": 0.471, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.3864, |
| "grad_norm": 0.6931652914141438, |
| "learning_rate": 9.976158180469866e-06, |
| "loss": 0.4371, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.3876, |
| "grad_norm": 0.7575323963763007, |
| "learning_rate": 9.975472681399556e-06, |
| "loss": 0.4439, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.3888, |
| "grad_norm": 0.5994997176503296, |
| "learning_rate": 9.97477749100708e-06, |
| "loss": 0.4709, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.39, |
| "grad_norm": 1.2959886351973398, |
| "learning_rate": 9.974072610646543e-06, |
| "loss": 0.496, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.3912, |
| "grad_norm": 0.7430734453560549, |
| "learning_rate": 9.973358041690926e-06, |
| "loss": 0.4611, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.3924, |
| "grad_norm": 0.5893403359291437, |
| "learning_rate": 9.972633785532082e-06, |
| "loss": 0.4519, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.3936, |
| "grad_norm": 0.641216294197375, |
| "learning_rate": 9.971899843580728e-06, |
| "loss": 0.4632, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.3948, |
| "grad_norm": 0.7207213873714433, |
| "learning_rate": 9.971156217266451e-06, |
| "loss": 0.4196, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.396, |
| "grad_norm": 0.6968100390508554, |
| "learning_rate": 9.970402908037703e-06, |
| "loss": 0.4658, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.3972, |
| "grad_norm": 0.6672738370323194, |
| "learning_rate": 9.96963991736179e-06, |
| "loss": 0.5153, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.3984, |
| "grad_norm": 0.738487994232809, |
| "learning_rate": 9.968867246724882e-06, |
| "loss": 0.4853, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.3996, |
| "grad_norm": 0.6741190624842869, |
| "learning_rate": 9.968084897632004e-06, |
| "loss": 0.4555, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.4008, |
| "grad_norm": 0.6061071531934571, |
| "learning_rate": 9.96729287160703e-06, |
| "loss": 0.4528, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.402, |
| "grad_norm": 0.668424285013486, |
| "learning_rate": 9.966491170192682e-06, |
| "loss": 0.4664, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.4032, |
| "grad_norm": 0.71365063515845, |
| "learning_rate": 9.96567979495053e-06, |
| "loss": 0.4182, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.4044, |
| "grad_norm": 0.6537179787219397, |
| "learning_rate": 9.964858747460989e-06, |
| "loss": 0.4339, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.4056, |
| "grad_norm": 0.6696665849925841, |
| "learning_rate": 9.964028029323305e-06, |
| "loss": 0.4289, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.4068, |
| "grad_norm": 0.6317580155063051, |
| "learning_rate": 9.963187642155573e-06, |
| "loss": 0.4945, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.408, |
| "grad_norm": 0.6204662379721577, |
| "learning_rate": 9.962337587594713e-06, |
| "loss": 0.4567, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.4092, |
| "grad_norm": 0.790028299585896, |
| "learning_rate": 9.961477867296479e-06, |
| "loss": 0.4402, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.4104, |
| "grad_norm": 0.7057465563218057, |
| "learning_rate": 9.96060848293545e-06, |
| "loss": 0.4367, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.4116, |
| "grad_norm": 0.7048538659143536, |
| "learning_rate": 9.959729436205027e-06, |
| "loss": 0.4263, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.4128, |
| "grad_norm": 0.7532569063688792, |
| "learning_rate": 9.95884072881744e-06, |
| "loss": 0.4515, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.414, |
| "grad_norm": 0.6432955870583814, |
| "learning_rate": 9.957942362503728e-06, |
| "loss": 0.4401, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.4152, |
| "grad_norm": 0.8190892141495807, |
| "learning_rate": 9.957034339013742e-06, |
| "loss": 0.4806, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.4164, |
| "grad_norm": 0.6171924188310567, |
| "learning_rate": 9.956116660116155e-06, |
| "loss": 0.4543, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.4176, |
| "grad_norm": 0.7490952693283688, |
| "learning_rate": 9.955189327598435e-06, |
| "loss": 0.4771, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.4188, |
| "grad_norm": 0.6659631701706887, |
| "learning_rate": 9.954252343266859e-06, |
| "loss": 0.4746, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 0.7006208304084274, |
| "learning_rate": 9.953305708946504e-06, |
| "loss": 0.4549, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.4212, |
| "grad_norm": 0.7723073626680784, |
| "learning_rate": 9.952349426481243e-06, |
| "loss": 0.451, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.4224, |
| "grad_norm": 0.6455592166376946, |
| "learning_rate": 9.95138349773374e-06, |
| "loss": 0.4469, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.4236, |
| "grad_norm": 0.6168164821410606, |
| "learning_rate": 9.95040792458545e-06, |
| "loss": 0.446, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.4248, |
| "grad_norm": 0.7884092477255151, |
| "learning_rate": 9.949422708936616e-06, |
| "loss": 0.422, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.426, |
| "grad_norm": 0.698668525342129, |
| "learning_rate": 9.948427852706257e-06, |
| "loss": 0.4477, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.4272, |
| "grad_norm": 0.6442795235670659, |
| "learning_rate": 9.947423357832176e-06, |
| "loss": 0.4609, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.4284, |
| "grad_norm": 0.642341192268614, |
| "learning_rate": 9.946409226270945e-06, |
| "loss": 0.4454, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.4296, |
| "grad_norm": 0.8390627888315508, |
| "learning_rate": 9.945385459997909e-06, |
| "loss": 0.4801, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.4308, |
| "grad_norm": 0.6449530595213682, |
| "learning_rate": 9.944352061007182e-06, |
| "loss": 0.4386, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.432, |
| "grad_norm": 0.7703127556763665, |
| "learning_rate": 9.943309031311637e-06, |
| "loss": 0.4715, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.4332, |
| "grad_norm": 0.6509249606252259, |
| "learning_rate": 9.942256372942909e-06, |
| "loss": 0.4483, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.4344, |
| "grad_norm": 0.7656766131252136, |
| "learning_rate": 9.941194087951384e-06, |
| "loss": 0.4601, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.4356, |
| "grad_norm": 0.6898891431076177, |
| "learning_rate": 9.940122178406205e-06, |
| "loss": 0.4399, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.4368, |
| "grad_norm": 0.7159579906071247, |
| "learning_rate": 9.939040646395252e-06, |
| "loss": 0.476, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.438, |
| "grad_norm": 0.627792587615608, |
| "learning_rate": 9.93794949402516e-06, |
| "loss": 0.4547, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.4392, |
| "grad_norm": 0.645333115151413, |
| "learning_rate": 9.936848723421295e-06, |
| "loss": 0.4094, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.4404, |
| "grad_norm": 0.7175839194918624, |
| "learning_rate": 9.93573833672776e-06, |
| "loss": 0.4823, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.4416, |
| "grad_norm": 0.6214300099183283, |
| "learning_rate": 9.934618336107385e-06, |
| "loss": 0.4408, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.4428, |
| "grad_norm": 0.5518265976503514, |
| "learning_rate": 9.933488723741731e-06, |
| "loss": 0.4457, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.444, |
| "grad_norm": 0.6409718617060514, |
| "learning_rate": 9.932349501831077e-06, |
| "loss": 0.4173, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.4452, |
| "grad_norm": 0.7792630397126719, |
| "learning_rate": 9.931200672594425e-06, |
| "loss": 0.4454, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.4464, |
| "grad_norm": 0.6344199240027713, |
| "learning_rate": 9.930042238269485e-06, |
| "loss": 0.4566, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.4476, |
| "grad_norm": 0.6594867191224307, |
| "learning_rate": 9.928874201112677e-06, |
| "loss": 0.4321, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.4488, |
| "grad_norm": 0.6791155653346912, |
| "learning_rate": 9.927696563399127e-06, |
| "loss": 0.4569, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.45, |
| "grad_norm": 0.6886786049932618, |
| "learning_rate": 9.926509327422661e-06, |
| "loss": 0.4628, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.4512, |
| "grad_norm": 0.6660330432227839, |
| "learning_rate": 9.9253124954958e-06, |
| "loss": 0.4295, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.4524, |
| "grad_norm": 0.6814134075798867, |
| "learning_rate": 9.924106069949756e-06, |
| "loss": 0.4327, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.4536, |
| "grad_norm": 0.5875291506684054, |
| "learning_rate": 9.922890053134428e-06, |
| "loss": 0.3928, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.4548, |
| "grad_norm": 0.6922382672621923, |
| "learning_rate": 9.9216644474184e-06, |
| "loss": 0.4442, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.456, |
| "grad_norm": 0.771249714605511, |
| "learning_rate": 9.920429255188926e-06, |
| "loss": 0.4714, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.4572, |
| "grad_norm": 0.676431519188336, |
| "learning_rate": 9.91918447885194e-06, |
| "loss": 0.4332, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.4584, |
| "grad_norm": 0.6945908407546308, |
| "learning_rate": 9.91793012083204e-06, |
| "loss": 0.4321, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.4596, |
| "grad_norm": 0.7552120817204823, |
| "learning_rate": 9.916666183572492e-06, |
| "loss": 0.474, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.4608, |
| "grad_norm": 0.6574840581158748, |
| "learning_rate": 9.915392669535214e-06, |
| "loss": 0.486, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.462, |
| "grad_norm": 0.6484938793376102, |
| "learning_rate": 9.914109581200785e-06, |
| "loss": 0.4083, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.4632, |
| "grad_norm": 0.6996866528066688, |
| "learning_rate": 9.912816921068424e-06, |
| "loss": 0.4566, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.4644, |
| "grad_norm": 0.7430007400183946, |
| "learning_rate": 9.911514691656003e-06, |
| "loss": 0.4219, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.4656, |
| "grad_norm": 0.6289704944260223, |
| "learning_rate": 9.910202895500031e-06, |
| "loss": 0.4275, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.4668, |
| "grad_norm": 0.7107427907702816, |
| "learning_rate": 9.908881535155647e-06, |
| "loss": 0.4258, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.468, |
| "grad_norm": 0.6733732649843523, |
| "learning_rate": 9.907550613196624e-06, |
| "loss": 0.4277, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.4692, |
| "grad_norm": 0.7252156396982233, |
| "learning_rate": 9.906210132215357e-06, |
| "loss": 0.439, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.4704, |
| "grad_norm": 0.7527036675475188, |
| "learning_rate": 9.904860094822861e-06, |
| "loss": 0.4545, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.4716, |
| "grad_norm": 0.7048228902693239, |
| "learning_rate": 9.903500503648766e-06, |
| "loss": 0.4374, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.4728, |
| "grad_norm": 0.5448140600277945, |
| "learning_rate": 9.902131361341307e-06, |
| "loss": 0.4112, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.474, |
| "grad_norm": 0.8345035569254616, |
| "learning_rate": 9.900752670567331e-06, |
| "loss": 0.4775, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.4752, |
| "grad_norm": 0.6443454053956338, |
| "learning_rate": 9.899364434012273e-06, |
| "loss": 0.4229, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.4764, |
| "grad_norm": 0.6230088120223173, |
| "learning_rate": 9.897966654380172e-06, |
| "loss": 0.4615, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.4776, |
| "grad_norm": 0.6088046653465384, |
| "learning_rate": 9.896559334393644e-06, |
| "loss": 0.4317, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.4788, |
| "grad_norm": 0.7279533322927584, |
| "learning_rate": 9.895142476793902e-06, |
| "loss": 0.4514, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 0.7745953818595627, |
| "learning_rate": 9.893716084340723e-06, |
| "loss": 0.458, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.4812, |
| "grad_norm": 0.6405817762556505, |
| "learning_rate": 9.892280159812465e-06, |
| "loss": 0.4883, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.4824, |
| "grad_norm": 0.7685486452534169, |
| "learning_rate": 9.890834706006048e-06, |
| "loss": 0.4661, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.4836, |
| "grad_norm": 0.7003098986825237, |
| "learning_rate": 9.889379725736953e-06, |
| "loss": 0.4714, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.4848, |
| "grad_norm": 0.6254581564414184, |
| "learning_rate": 9.887915221839223e-06, |
| "loss": 0.4878, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.486, |
| "grad_norm": 0.6311252500153688, |
| "learning_rate": 9.886441197165446e-06, |
| "loss": 0.4539, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.4872, |
| "grad_norm": 0.7820317979670909, |
| "learning_rate": 9.884957654586753e-06, |
| "loss": 0.4518, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.4884, |
| "grad_norm": 0.6004530584614425, |
| "learning_rate": 9.88346459699282e-06, |
| "loss": 0.4094, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.4896, |
| "grad_norm": 0.6654376969151062, |
| "learning_rate": 9.881962027291855e-06, |
| "loss": 0.4348, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.4908, |
| "grad_norm": 0.6679415371212997, |
| "learning_rate": 9.880449948410587e-06, |
| "loss": 0.4545, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.492, |
| "grad_norm": 0.5547515670600671, |
| "learning_rate": 9.878928363294275e-06, |
| "loss": 0.4536, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.4932, |
| "grad_norm": 0.7125897617236266, |
| "learning_rate": 9.877397274906694e-06, |
| "loss": 0.4473, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.4944, |
| "grad_norm": 0.8192773426910452, |
| "learning_rate": 9.875856686230125e-06, |
| "loss": 0.4249, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.4956, |
| "grad_norm": 0.6649787950976529, |
| "learning_rate": 9.87430660026536e-06, |
| "loss": 0.4264, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.4968, |
| "grad_norm": 0.7064920470637832, |
| "learning_rate": 9.872747020031682e-06, |
| "loss": 0.4363, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.498, |
| "grad_norm": 0.827612732425241, |
| "learning_rate": 9.871177948566875e-06, |
| "loss": 0.461, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.4992, |
| "grad_norm": 0.5894682708442801, |
| "learning_rate": 9.869599388927204e-06, |
| "loss": 0.4355, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.5004, |
| "grad_norm": 0.7652566876221197, |
| "learning_rate": 9.868011344187421e-06, |
| "loss": 0.4847, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.5016, |
| "grad_norm": 0.651422164836644, |
| "learning_rate": 9.866413817440748e-06, |
| "loss": 0.4502, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.5028, |
| "grad_norm": 0.5806540058251656, |
| "learning_rate": 9.864806811798881e-06, |
| "loss": 0.4249, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.504, |
| "grad_norm": 0.674941136160933, |
| "learning_rate": 9.863190330391974e-06, |
| "loss": 0.4216, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.5052, |
| "grad_norm": 0.680982850770216, |
| "learning_rate": 9.861564376368645e-06, |
| "loss": 0.4387, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.5064, |
| "grad_norm": 0.6080701465778402, |
| "learning_rate": 9.859928952895952e-06, |
| "loss": 0.4404, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.5076, |
| "grad_norm": 0.6543859823085602, |
| "learning_rate": 9.858284063159411e-06, |
| "loss": 0.4346, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.5088, |
| "grad_norm": 0.7544442628769478, |
| "learning_rate": 9.856629710362966e-06, |
| "loss": 0.5297, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.51, |
| "grad_norm": 0.6970683012667609, |
| "learning_rate": 9.854965897729001e-06, |
| "loss": 0.4346, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.5112, |
| "grad_norm": 0.701699016606531, |
| "learning_rate": 9.853292628498319e-06, |
| "loss": 0.4832, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.5124, |
| "grad_norm": 0.6531688259436925, |
| "learning_rate": 9.851609905930149e-06, |
| "loss": 0.4374, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.5136, |
| "grad_norm": 0.6459875412165775, |
| "learning_rate": 9.849917733302128e-06, |
| "loss": 0.4342, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.5148, |
| "grad_norm": 0.7824619238063619, |
| "learning_rate": 9.848216113910306e-06, |
| "loss": 0.4755, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.516, |
| "grad_norm": 0.7068114804561866, |
| "learning_rate": 9.846505051069126e-06, |
| "loss": 0.4359, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.5172, |
| "grad_norm": 0.620420764465856, |
| "learning_rate": 9.844784548111433e-06, |
| "loss": 0.4773, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.5184, |
| "grad_norm": 0.7342607747683098, |
| "learning_rate": 9.843054608388455e-06, |
| "loss": 0.4684, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.5196, |
| "grad_norm": 0.6222896944551798, |
| "learning_rate": 9.8413152352698e-06, |
| "loss": 0.4572, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.5208, |
| "grad_norm": 0.6870261388789632, |
| "learning_rate": 9.839566432143459e-06, |
| "loss": 0.4877, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.522, |
| "grad_norm": 0.708094366948388, |
| "learning_rate": 9.837808202415778e-06, |
| "loss": 0.4594, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.5232, |
| "grad_norm": 0.7280627126935877, |
| "learning_rate": 9.836040549511472e-06, |
| "loss": 0.3944, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.5244, |
| "grad_norm": 0.6534507741530826, |
| "learning_rate": 9.83426347687361e-06, |
| "loss": 0.4493, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.5256, |
| "grad_norm": 0.6739264885892525, |
| "learning_rate": 9.832476987963613e-06, |
| "loss": 0.4679, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.5268, |
| "grad_norm": 0.7234520229667384, |
| "learning_rate": 9.830681086261234e-06, |
| "loss": 0.4702, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.528, |
| "grad_norm": 0.7828665277714815, |
| "learning_rate": 9.828875775264564e-06, |
| "loss": 0.4368, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.5292, |
| "grad_norm": 0.6490715970064068, |
| "learning_rate": 9.827061058490027e-06, |
| "loss": 0.4551, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.5304, |
| "grad_norm": 0.6224193032887139, |
| "learning_rate": 9.82523693947236e-06, |
| "loss": 0.4118, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.5316, |
| "grad_norm": 0.7860434173772851, |
| "learning_rate": 9.82340342176462e-06, |
| "loss": 0.4743, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.5328, |
| "grad_norm": 0.6563230618839756, |
| "learning_rate": 9.821560508938167e-06, |
| "loss": 0.4464, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.534, |
| "grad_norm": 0.6357292241353918, |
| "learning_rate": 9.819708204582664e-06, |
| "loss": 0.4408, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.5352, |
| "grad_norm": 0.6339997044952623, |
| "learning_rate": 9.817846512306062e-06, |
| "loss": 0.4349, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.5364, |
| "grad_norm": 0.6905877592322843, |
| "learning_rate": 9.815975435734604e-06, |
| "loss": 0.4519, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.5376, |
| "grad_norm": 0.6436677936539605, |
| "learning_rate": 9.814094978512808e-06, |
| "loss": 0.4515, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.5388, |
| "grad_norm": 0.736711209053276, |
| "learning_rate": 9.812205144303466e-06, |
| "loss": 0.4405, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 0.570921962141912, |
| "learning_rate": 9.810305936787633e-06, |
| "loss": 0.431, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.5412, |
| "grad_norm": 0.7409880194062697, |
| "learning_rate": 9.808397359664624e-06, |
| "loss": 0.4526, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.5424, |
| "grad_norm": 0.7175975384895951, |
| "learning_rate": 9.806479416652e-06, |
| "loss": 0.485, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.5436, |
| "grad_norm": 0.6253351575776439, |
| "learning_rate": 9.804552111485568e-06, |
| "loss": 0.431, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.5448, |
| "grad_norm": 0.6802528057657675, |
| "learning_rate": 9.80261544791937e-06, |
| "loss": 0.49, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.546, |
| "grad_norm": 0.6486533789851607, |
| "learning_rate": 9.80066942972568e-06, |
| "loss": 0.4338, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.5472, |
| "grad_norm": 0.7020644490479623, |
| "learning_rate": 9.798714060694988e-06, |
| "loss": 0.4304, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.5484, |
| "grad_norm": 0.6211547222543867, |
| "learning_rate": 9.796749344635996e-06, |
| "loss": 0.4478, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.5496, |
| "grad_norm": 0.7757775131158096, |
| "learning_rate": 9.794775285375623e-06, |
| "loss": 0.5034, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.5508, |
| "grad_norm": 0.6482029929415141, |
| "learning_rate": 9.792791886758976e-06, |
| "loss": 0.4419, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.552, |
| "grad_norm": 0.6538846576233442, |
| "learning_rate": 9.790799152649356e-06, |
| "loss": 0.4192, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.5532, |
| "grad_norm": 0.6824916183880225, |
| "learning_rate": 9.788797086928252e-06, |
| "loss": 0.434, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.5544, |
| "grad_norm": 0.5985857501737958, |
| "learning_rate": 9.786785693495327e-06, |
| "loss": 0.4472, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.5556, |
| "grad_norm": 0.5360509289546986, |
| "learning_rate": 9.784764976268408e-06, |
| "loss": 0.4622, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.5568, |
| "grad_norm": 0.5953489959775399, |
| "learning_rate": 9.78273493918349e-06, |
| "loss": 0.4202, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.558, |
| "grad_norm": 0.6533019600801288, |
| "learning_rate": 9.780695586194719e-06, |
| "loss": 0.4935, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.5592, |
| "grad_norm": 0.6660863843497694, |
| "learning_rate": 9.778646921274385e-06, |
| "loss": 0.4377, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.5604, |
| "grad_norm": 0.6203187626727608, |
| "learning_rate": 9.776588948412917e-06, |
| "loss": 0.4375, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.5616, |
| "grad_norm": 0.6898987375410509, |
| "learning_rate": 9.774521671618877e-06, |
| "loss": 0.4353, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.5628, |
| "grad_norm": 0.6234056463809782, |
| "learning_rate": 9.772445094918944e-06, |
| "loss": 0.4783, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.564, |
| "grad_norm": 0.49658034146463864, |
| "learning_rate": 9.770359222357914e-06, |
| "loss": 0.3945, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.5652, |
| "grad_norm": 0.533972255693177, |
| "learning_rate": 9.768264057998693e-06, |
| "loss": 0.4071, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.5664, |
| "grad_norm": 0.6337264376454038, |
| "learning_rate": 9.766159605922282e-06, |
| "loss": 0.4484, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.5676, |
| "grad_norm": 0.6491123411466448, |
| "learning_rate": 9.764045870227772e-06, |
| "loss": 0.4221, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.5688, |
| "grad_norm": 0.6154543475969276, |
| "learning_rate": 9.761922855032339e-06, |
| "loss": 0.4246, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.57, |
| "grad_norm": 0.7818066340168961, |
| "learning_rate": 9.759790564471233e-06, |
| "loss": 0.439, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.5712, |
| "grad_norm": 0.6369517648681066, |
| "learning_rate": 9.757649002697771e-06, |
| "loss": 0.4417, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.5724, |
| "grad_norm": 0.6329140386406038, |
| "learning_rate": 9.755498173883331e-06, |
| "loss": 0.4695, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.5736, |
| "grad_norm": 0.650969631762846, |
| "learning_rate": 9.753338082217334e-06, |
| "loss": 0.4413, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.5748, |
| "grad_norm": 0.6647141567277757, |
| "learning_rate": 9.751168731907253e-06, |
| "loss": 0.4302, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.576, |
| "grad_norm": 0.5711206546908786, |
| "learning_rate": 9.748990127178589e-06, |
| "loss": 0.4357, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.5772, |
| "grad_norm": 0.7567029621367047, |
| "learning_rate": 9.746802272274868e-06, |
| "loss": 0.4505, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.5784, |
| "grad_norm": 0.7223332864810444, |
| "learning_rate": 9.74460517145764e-06, |
| "loss": 0.5013, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.5796, |
| "grad_norm": 0.7025765949826398, |
| "learning_rate": 9.74239882900646e-06, |
| "loss": 0.4562, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.5808, |
| "grad_norm": 0.599300464196571, |
| "learning_rate": 9.740183249218883e-06, |
| "loss": 0.454, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.582, |
| "grad_norm": 0.5782068939359957, |
| "learning_rate": 9.737958436410459e-06, |
| "loss": 0.4423, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.5832, |
| "grad_norm": 0.6703880092235986, |
| "learning_rate": 9.73572439491472e-06, |
| "loss": 0.4431, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.5844, |
| "grad_norm": 0.6345048946696713, |
| "learning_rate": 9.73348112908318e-06, |
| "loss": 0.4652, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.5856, |
| "grad_norm": 0.6233074474085821, |
| "learning_rate": 9.73122864328531e-06, |
| "loss": 0.4039, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.5868, |
| "grad_norm": 0.7197278707098507, |
| "learning_rate": 9.72896694190855e-06, |
| "loss": 0.4603, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.588, |
| "grad_norm": 0.8162957949734152, |
| "learning_rate": 9.726696029358283e-06, |
| "loss": 0.4793, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.5892, |
| "grad_norm": 0.601510162216323, |
| "learning_rate": 9.724415910057839e-06, |
| "loss": 0.4162, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.5904, |
| "grad_norm": 0.5806154181617217, |
| "learning_rate": 9.722126588448473e-06, |
| "loss": 0.3978, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.5916, |
| "grad_norm": 0.8352938422444114, |
| "learning_rate": 9.719828068989378e-06, |
| "loss": 0.4814, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.5928, |
| "grad_norm": 0.6607407237224796, |
| "learning_rate": 9.717520356157648e-06, |
| "loss": 0.4352, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.594, |
| "grad_norm": 0.6769495331597323, |
| "learning_rate": 9.715203454448297e-06, |
| "loss": 0.4888, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.5952, |
| "grad_norm": 0.582792575005291, |
| "learning_rate": 9.712877368374226e-06, |
| "loss": 0.4169, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.5964, |
| "grad_norm": 0.6969663052525739, |
| "learning_rate": 9.710542102466229e-06, |
| "loss": 0.427, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.5976, |
| "grad_norm": 0.6940068268557692, |
| "learning_rate": 9.708197661272989e-06, |
| "loss": 0.4432, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.5988, |
| "grad_norm": 0.7417084803159525, |
| "learning_rate": 9.70584404936105e-06, |
| "loss": 0.4439, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 0.609730769080397, |
| "learning_rate": 9.703481271314823e-06, |
| "loss": 0.4365, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.6012, |
| "grad_norm": 0.777232315860157, |
| "learning_rate": 9.701109331736573e-06, |
| "loss": 0.4083, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.6024, |
| "grad_norm": 0.7618896885321611, |
| "learning_rate": 9.69872823524641e-06, |
| "loss": 0.4805, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.6036, |
| "grad_norm": 0.5887187702529323, |
| "learning_rate": 9.69633798648228e-06, |
| "loss": 0.4099, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.6048, |
| "grad_norm": 0.6133967037667464, |
| "learning_rate": 9.693938590099958e-06, |
| "loss": 0.4899, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.606, |
| "grad_norm": 0.7578058357125181, |
| "learning_rate": 9.691530050773031e-06, |
| "loss": 0.4358, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.6072, |
| "grad_norm": 0.6773190619603114, |
| "learning_rate": 9.689112373192899e-06, |
| "loss": 0.4971, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.6084, |
| "grad_norm": 0.5854080621024346, |
| "learning_rate": 9.686685562068765e-06, |
| "loss": 0.4166, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.6096, |
| "grad_norm": 0.6400199675000283, |
| "learning_rate": 9.684249622127616e-06, |
| "loss": 0.4471, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.6108, |
| "grad_norm": 0.7258315288657347, |
| "learning_rate": 9.681804558114222e-06, |
| "loss": 0.435, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.612, |
| "grad_norm": 0.6272130773016463, |
| "learning_rate": 9.67935037479113e-06, |
| "loss": 0.4332, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.6132, |
| "grad_norm": 0.7048388710386582, |
| "learning_rate": 9.676887076938642e-06, |
| "loss": 0.4581, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.6144, |
| "grad_norm": 0.641338293198559, |
| "learning_rate": 9.674414669354819e-06, |
| "loss": 0.4486, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.6156, |
| "grad_norm": 0.6312876175673993, |
| "learning_rate": 9.671933156855464e-06, |
| "loss": 0.439, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.6168, |
| "grad_norm": 0.611370421206214, |
| "learning_rate": 9.669442544274115e-06, |
| "loss": 0.411, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.618, |
| "grad_norm": 0.6083285462122053, |
| "learning_rate": 9.666942836462036e-06, |
| "loss": 0.4717, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.6192, |
| "grad_norm": 0.6095089710667009, |
| "learning_rate": 9.664434038288207e-06, |
| "loss": 0.4442, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.6204, |
| "grad_norm": 0.5498795949636963, |
| "learning_rate": 9.661916154639312e-06, |
| "loss": 0.4446, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.6216, |
| "grad_norm": 0.5672758183613847, |
| "learning_rate": 9.659389190419735e-06, |
| "loss": 0.4346, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.6228, |
| "grad_norm": 0.5420075396704381, |
| "learning_rate": 9.656853150551543e-06, |
| "loss": 0.4343, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.624, |
| "grad_norm": 0.6174137897150176, |
| "learning_rate": 9.654308039974489e-06, |
| "loss": 0.4635, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.6252, |
| "grad_norm": 0.6180765800998059, |
| "learning_rate": 9.651753863645985e-06, |
| "loss": 0.4549, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.6264, |
| "grad_norm": 0.5533027840877576, |
| "learning_rate": 9.649190626541105e-06, |
| "loss": 0.4173, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.6276, |
| "grad_norm": 0.6897757672749916, |
| "learning_rate": 9.646618333652574e-06, |
| "loss": 0.4569, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.6288, |
| "grad_norm": 0.6526298629600158, |
| "learning_rate": 9.644036989990753e-06, |
| "loss": 0.4641, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.63, |
| "grad_norm": 0.6251092549491414, |
| "learning_rate": 9.641446600583632e-06, |
| "loss": 0.4122, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.6312, |
| "grad_norm": 0.7292256985425808, |
| "learning_rate": 9.638847170476824e-06, |
| "loss": 0.4506, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.6324, |
| "grad_norm": 0.7109600305730553, |
| "learning_rate": 9.636238704733547e-06, |
| "loss": 0.4456, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.6336, |
| "grad_norm": 0.6486870015904621, |
| "learning_rate": 9.633621208434623e-06, |
| "loss": 0.4099, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.6348, |
| "grad_norm": 0.7260733786982925, |
| "learning_rate": 9.630994686678462e-06, |
| "loss": 0.4787, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.636, |
| "grad_norm": 0.6972415633425663, |
| "learning_rate": 9.628359144581052e-06, |
| "loss": 0.4376, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.6372, |
| "grad_norm": 0.6575368501965961, |
| "learning_rate": 9.625714587275954e-06, |
| "loss": 0.4236, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.6384, |
| "grad_norm": 0.70852769192471, |
| "learning_rate": 9.623061019914291e-06, |
| "loss": 0.4051, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.6396, |
| "grad_norm": 0.5920331266680952, |
| "learning_rate": 9.620398447664727e-06, |
| "loss": 0.4555, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.6408, |
| "grad_norm": 0.6512018747591982, |
| "learning_rate": 9.617726875713477e-06, |
| "loss": 0.4562, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.642, |
| "grad_norm": 0.7030860335919872, |
| "learning_rate": 9.615046309264278e-06, |
| "loss": 0.4325, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.6432, |
| "grad_norm": 0.6451559251581166, |
| "learning_rate": 9.612356753538392e-06, |
| "loss": 0.4534, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.6444, |
| "grad_norm": 0.6555638840456719, |
| "learning_rate": 9.609658213774584e-06, |
| "loss": 0.4506, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.6456, |
| "grad_norm": 0.7724431049478658, |
| "learning_rate": 9.606950695229125e-06, |
| "loss": 0.4332, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.6468, |
| "grad_norm": 0.6458233985243111, |
| "learning_rate": 9.60423420317577e-06, |
| "loss": 0.4548, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.648, |
| "grad_norm": 0.7365226748584056, |
| "learning_rate": 9.601508742905757e-06, |
| "loss": 0.4159, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.6492, |
| "grad_norm": 0.6345441906843313, |
| "learning_rate": 9.59877431972779e-06, |
| "loss": 0.4575, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.6504, |
| "grad_norm": 0.6608103527291914, |
| "learning_rate": 9.596030938968028e-06, |
| "loss": 0.4494, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.6516, |
| "grad_norm": 0.6815011264185148, |
| "learning_rate": 9.593278605970086e-06, |
| "loss": 0.4243, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.6528, |
| "grad_norm": 0.6647489003833473, |
| "learning_rate": 9.590517326095012e-06, |
| "loss": 0.4215, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.654, |
| "grad_norm": 0.5306957908700147, |
| "learning_rate": 9.587747104721275e-06, |
| "loss": 0.4095, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.6552, |
| "grad_norm": 0.623430114444001, |
| "learning_rate": 9.58496794724477e-06, |
| "loss": 0.4202, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.6564, |
| "grad_norm": 0.707639118232904, |
| "learning_rate": 9.582179859078793e-06, |
| "loss": 0.4321, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.6576, |
| "grad_norm": 0.6187571241832408, |
| "learning_rate": 9.579382845654038e-06, |
| "loss": 0.445, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.6588, |
| "grad_norm": 0.5968971585377222, |
| "learning_rate": 9.576576912418577e-06, |
| "loss": 0.4513, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 0.6060109622440814, |
| "learning_rate": 9.573762064837866e-06, |
| "loss": 0.4371, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.6612, |
| "grad_norm": 0.64789777702885, |
| "learning_rate": 9.570938308394717e-06, |
| "loss": 0.4028, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.6624, |
| "grad_norm": 0.6958071032383644, |
| "learning_rate": 9.568105648589299e-06, |
| "loss": 0.4675, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.6636, |
| "grad_norm": 0.5822451096374223, |
| "learning_rate": 9.56526409093912e-06, |
| "loss": 0.4172, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.6648, |
| "grad_norm": 0.7601888523954846, |
| "learning_rate": 9.562413640979024e-06, |
| "loss": 0.4297, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.666, |
| "grad_norm": 0.6567049323975153, |
| "learning_rate": 9.55955430426117e-06, |
| "loss": 0.4847, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.6672, |
| "grad_norm": 0.7131405370806122, |
| "learning_rate": 9.556686086355032e-06, |
| "loss": 0.4297, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.6684, |
| "grad_norm": 0.5589981609294136, |
| "learning_rate": 9.553808992847377e-06, |
| "loss": 0.4024, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.6696, |
| "grad_norm": 0.5931996857224913, |
| "learning_rate": 9.550923029342266e-06, |
| "loss": 0.4292, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.6708, |
| "grad_norm": 0.5971466573228367, |
| "learning_rate": 9.548028201461034e-06, |
| "loss": 0.4534, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.672, |
| "grad_norm": 0.6272928947603887, |
| "learning_rate": 9.545124514842284e-06, |
| "loss": 0.4836, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.6732, |
| "grad_norm": 0.5489911002674158, |
| "learning_rate": 9.542211975141871e-06, |
| "loss": 0.4315, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.6744, |
| "grad_norm": 0.6087915426456199, |
| "learning_rate": 9.5392905880329e-06, |
| "loss": 0.4049, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.6756, |
| "grad_norm": 0.7304570976047295, |
| "learning_rate": 9.5363603592057e-06, |
| "loss": 0.472, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.6768, |
| "grad_norm": 0.5930708966353481, |
| "learning_rate": 9.53342129436783e-06, |
| "loss": 0.4363, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.678, |
| "grad_norm": 0.7121202398856948, |
| "learning_rate": 9.530473399244061e-06, |
| "loss": 0.4731, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.6792, |
| "grad_norm": 0.6019249811320259, |
| "learning_rate": 9.527516679576353e-06, |
| "loss": 0.4384, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.6804, |
| "grad_norm": 0.6200731562962547, |
| "learning_rate": 9.52455114112387e-06, |
| "loss": 0.4585, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.6816, |
| "grad_norm": 0.6093242073566054, |
| "learning_rate": 9.52157678966294e-06, |
| "loss": 0.3909, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.6828, |
| "grad_norm": 0.5730476216253085, |
| "learning_rate": 9.518593630987063e-06, |
| "loss": 0.3917, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.684, |
| "grad_norm": 0.631650579006344, |
| "learning_rate": 9.515601670906895e-06, |
| "loss": 0.4188, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.6852, |
| "grad_norm": 0.5969798975988179, |
| "learning_rate": 9.512600915250232e-06, |
| "loss": 0.4429, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.6864, |
| "grad_norm": 0.7154797074011033, |
| "learning_rate": 9.509591369862007e-06, |
| "loss": 0.4747, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.6876, |
| "grad_norm": 0.631301195463087, |
| "learning_rate": 9.506573040604268e-06, |
| "loss": 0.4594, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.6888, |
| "grad_norm": 0.6430321252881295, |
| "learning_rate": 9.503545933356175e-06, |
| "loss": 0.413, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.69, |
| "grad_norm": 0.7152067265164149, |
| "learning_rate": 9.500510054013989e-06, |
| "loss": 0.4309, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.6912, |
| "grad_norm": 0.7111876494885973, |
| "learning_rate": 9.49746540849105e-06, |
| "loss": 0.4497, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.6924, |
| "grad_norm": 0.6212907351663844, |
| "learning_rate": 9.494412002717784e-06, |
| "loss": 0.4646, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.6936, |
| "grad_norm": 0.730034488610609, |
| "learning_rate": 9.49134984264167e-06, |
| "loss": 0.4624, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.6948, |
| "grad_norm": 0.5959601487634612, |
| "learning_rate": 9.488278934227242e-06, |
| "loss": 0.4428, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.696, |
| "grad_norm": 0.7697382736559194, |
| "learning_rate": 9.485199283456078e-06, |
| "loss": 0.4148, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.6972, |
| "grad_norm": 0.6914659012199768, |
| "learning_rate": 9.48211089632678e-06, |
| "loss": 0.4493, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.6984, |
| "grad_norm": 0.6626411891434971, |
| "learning_rate": 9.479013778854966e-06, |
| "loss": 0.4443, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.6996, |
| "grad_norm": 0.633938170597033, |
| "learning_rate": 9.475907937073265e-06, |
| "loss": 0.3769, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.7008, |
| "grad_norm": 0.7858253823595117, |
| "learning_rate": 9.472793377031293e-06, |
| "loss": 0.4217, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.702, |
| "grad_norm": 0.6575698835345714, |
| "learning_rate": 9.469670104795655e-06, |
| "loss": 0.3947, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.7032, |
| "grad_norm": 0.6759835247423801, |
| "learning_rate": 9.466538126449915e-06, |
| "loss": 0.4487, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.7044, |
| "grad_norm": 0.7224436257813325, |
| "learning_rate": 9.463397448094605e-06, |
| "loss": 0.4517, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.7056, |
| "grad_norm": 0.7738499274833498, |
| "learning_rate": 9.460248075847199e-06, |
| "loss": 0.4717, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.7068, |
| "grad_norm": 0.6335072145475474, |
| "learning_rate": 9.457090015842104e-06, |
| "loss": 0.4718, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.708, |
| "grad_norm": 0.5694752122818949, |
| "learning_rate": 9.453923274230653e-06, |
| "loss": 0.4133, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.7092, |
| "grad_norm": 0.6712010029402053, |
| "learning_rate": 9.450747857181084e-06, |
| "loss": 0.4704, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.7104, |
| "grad_norm": 0.5758341569803189, |
| "learning_rate": 9.447563770878535e-06, |
| "loss": 0.4155, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.7116, |
| "grad_norm": 0.5917081772026239, |
| "learning_rate": 9.444371021525036e-06, |
| "loss": 0.4439, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.7128, |
| "grad_norm": 0.7370046552121895, |
| "learning_rate": 9.441169615339482e-06, |
| "loss": 0.4448, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.714, |
| "grad_norm": 0.5867359493833982, |
| "learning_rate": 9.437959558557635e-06, |
| "loss": 0.4492, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.7152, |
| "grad_norm": 0.5916408949328779, |
| "learning_rate": 9.434740857432105e-06, |
| "loss": 0.4659, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.7164, |
| "grad_norm": 0.6755568158069253, |
| "learning_rate": 9.431513518232343e-06, |
| "loss": 0.4589, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.7176, |
| "grad_norm": 0.6870614012337154, |
| "learning_rate": 9.42827754724462e-06, |
| "loss": 0.429, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.7188, |
| "grad_norm": 0.5993193342124471, |
| "learning_rate": 9.425032950772025e-06, |
| "loss": 0.422, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 0.7078484042302691, |
| "learning_rate": 9.421779735134446e-06, |
| "loss": 0.4191, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.7212, |
| "grad_norm": 0.599792674357655, |
| "learning_rate": 9.418517906668556e-06, |
| "loss": 0.4043, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.7224, |
| "grad_norm": 0.6845611310806173, |
| "learning_rate": 9.415247471727813e-06, |
| "loss": 0.4655, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.7236, |
| "grad_norm": 0.6932010017308168, |
| "learning_rate": 9.41196843668243e-06, |
| "loss": 0.4682, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.7248, |
| "grad_norm": 0.5990590128220633, |
| "learning_rate": 9.408680807919377e-06, |
| "loss": 0.4526, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.726, |
| "grad_norm": 0.5695053632896584, |
| "learning_rate": 9.405384591842358e-06, |
| "loss": 0.4438, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.7272, |
| "grad_norm": 0.615343410887285, |
| "learning_rate": 9.402079794871812e-06, |
| "loss": 0.3965, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.7284, |
| "grad_norm": 0.6294619092300135, |
| "learning_rate": 9.398766423444883e-06, |
| "loss": 0.4019, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.7296, |
| "grad_norm": 0.6684737765394154, |
| "learning_rate": 9.39544448401542e-06, |
| "loss": 0.4529, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.7308, |
| "grad_norm": 0.6732998913927993, |
| "learning_rate": 9.392113983053958e-06, |
| "loss": 0.4552, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.732, |
| "grad_norm": 0.6195247143898496, |
| "learning_rate": 9.38877492704772e-06, |
| "loss": 0.4395, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.7332, |
| "grad_norm": 0.5872506201056751, |
| "learning_rate": 9.385427322500575e-06, |
| "loss": 0.4559, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.7344, |
| "grad_norm": 0.5536526539474482, |
| "learning_rate": 9.382071175933058e-06, |
| "loss": 0.4194, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.7356, |
| "grad_norm": 0.7582072512128472, |
| "learning_rate": 9.378706493882335e-06, |
| "loss": 0.4653, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.7368, |
| "grad_norm": 0.6614836961213285, |
| "learning_rate": 9.375333282902198e-06, |
| "loss": 0.3983, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.738, |
| "grad_norm": 0.5175199737865152, |
| "learning_rate": 9.37195154956305e-06, |
| "loss": 0.4068, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.7392, |
| "grad_norm": 0.5792350511810672, |
| "learning_rate": 9.368561300451902e-06, |
| "loss": 0.4449, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.7404, |
| "grad_norm": 0.7508510167859355, |
| "learning_rate": 9.365162542172346e-06, |
| "loss": 0.4546, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.7416, |
| "grad_norm": 0.614188583793761, |
| "learning_rate": 9.361755281344547e-06, |
| "loss": 0.4308, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.7428, |
| "grad_norm": 0.5853450714355288, |
| "learning_rate": 9.358339524605233e-06, |
| "loss": 0.4072, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.744, |
| "grad_norm": 0.6606079599644, |
| "learning_rate": 9.354915278607685e-06, |
| "loss": 0.4253, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.7452, |
| "grad_norm": 0.5495365865735293, |
| "learning_rate": 9.351482550021713e-06, |
| "loss": 0.3939, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.7464, |
| "grad_norm": 0.5874360289468493, |
| "learning_rate": 9.348041345533653e-06, |
| "loss": 0.3773, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.7476, |
| "grad_norm": 0.5717129934949987, |
| "learning_rate": 9.34459167184635e-06, |
| "loss": 0.4145, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.7488, |
| "grad_norm": 0.6136224031869074, |
| "learning_rate": 9.341133535679145e-06, |
| "loss": 0.4236, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.6951415308062633, |
| "learning_rate": 9.337666943767863e-06, |
| "loss": 0.4306, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.7512, |
| "grad_norm": 0.6009861219564681, |
| "learning_rate": 9.334191902864799e-06, |
| "loss": 0.4254, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.7524, |
| "grad_norm": 0.6379389773803311, |
| "learning_rate": 9.330708419738704e-06, |
| "loss": 0.4147, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.7536, |
| "grad_norm": 0.6669128113033436, |
| "learning_rate": 9.327216501174775e-06, |
| "loss": 0.3833, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.7548, |
| "grad_norm": 0.7104418292195664, |
| "learning_rate": 9.323716153974639e-06, |
| "loss": 0.4726, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.756, |
| "grad_norm": 0.654440647321272, |
| "learning_rate": 9.320207384956339e-06, |
| "loss": 0.4296, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.7572, |
| "grad_norm": 0.6619054881639291, |
| "learning_rate": 9.316690200954324e-06, |
| "loss": 0.4188, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.7584, |
| "grad_norm": 0.6841333972952182, |
| "learning_rate": 9.313164608819434e-06, |
| "loss": 0.4478, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.7596, |
| "grad_norm": 0.5710359981446784, |
| "learning_rate": 9.309630615418884e-06, |
| "loss": 0.4287, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.7608, |
| "grad_norm": 0.6989870896838141, |
| "learning_rate": 9.306088227636257e-06, |
| "loss": 0.4468, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.762, |
| "grad_norm": 0.6612144448025201, |
| "learning_rate": 9.302537452371482e-06, |
| "loss": 0.425, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.7632, |
| "grad_norm": 0.6969697409579375, |
| "learning_rate": 9.298978296540829e-06, |
| "loss": 0.4517, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.7644, |
| "grad_norm": 0.7919369251237935, |
| "learning_rate": 9.295410767076891e-06, |
| "loss": 0.4607, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.7656, |
| "grad_norm": 0.5458427570437363, |
| "learning_rate": 9.291834870928573e-06, |
| "loss": 0.4031, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.7668, |
| "grad_norm": 0.6381806148795917, |
| "learning_rate": 9.288250615061073e-06, |
| "loss": 0.4361, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.768, |
| "grad_norm": 0.5567043441199502, |
| "learning_rate": 9.284658006455871e-06, |
| "loss": 0.4447, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.7692, |
| "grad_norm": 0.6820050719757548, |
| "learning_rate": 9.281057052110725e-06, |
| "loss": 0.4319, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.7704, |
| "grad_norm": 0.6191273927150611, |
| "learning_rate": 9.27744775903964e-06, |
| "loss": 0.429, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.7716, |
| "grad_norm": 0.6025505058454256, |
| "learning_rate": 9.27383013427287e-06, |
| "loss": 0.4405, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.7728, |
| "grad_norm": 0.5645308546428628, |
| "learning_rate": 9.270204184856893e-06, |
| "loss": 0.4113, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.774, |
| "grad_norm": 0.6987379901636854, |
| "learning_rate": 9.266569917854403e-06, |
| "loss": 0.4446, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.7752, |
| "grad_norm": 0.567658261872694, |
| "learning_rate": 9.262927340344296e-06, |
| "loss": 0.4271, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.7764, |
| "grad_norm": 0.6148385805143911, |
| "learning_rate": 9.259276459421655e-06, |
| "loss": 0.4583, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.7776, |
| "grad_norm": 0.5384768224567317, |
| "learning_rate": 9.255617282197739e-06, |
| "loss": 0.4199, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.7788, |
| "grad_norm": 0.5664509797096036, |
| "learning_rate": 9.25194981579996e-06, |
| "loss": 0.4072, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.78, |
| "grad_norm": 0.6345199889121595, |
| "learning_rate": 9.248274067371886e-06, |
| "loss": 0.4061, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.7812, |
| "grad_norm": 0.668319016739173, |
| "learning_rate": 9.244590044073205e-06, |
| "loss": 0.4665, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.7824, |
| "grad_norm": 0.617592082191704, |
| "learning_rate": 9.240897753079734e-06, |
| "loss": 0.4726, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.7836, |
| "grad_norm": 0.528719226788771, |
| "learning_rate": 9.237197201583386e-06, |
| "loss": 0.437, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.7848, |
| "grad_norm": 0.5521191630194192, |
| "learning_rate": 9.233488396792167e-06, |
| "loss": 0.4632, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.786, |
| "grad_norm": 0.5494094848077772, |
| "learning_rate": 9.22977134593016e-06, |
| "loss": 0.4233, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.7872, |
| "grad_norm": 0.6498032978943626, |
| "learning_rate": 9.226046056237508e-06, |
| "loss": 0.4354, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.7884, |
| "grad_norm": 0.565056383687927, |
| "learning_rate": 9.222312534970403e-06, |
| "loss": 0.4424, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.7896, |
| "grad_norm": 0.6451984327966027, |
| "learning_rate": 9.218570789401071e-06, |
| "loss": 0.4216, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.7908, |
| "grad_norm": 0.6069367627302308, |
| "learning_rate": 9.214820826817754e-06, |
| "loss": 0.4254, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.792, |
| "grad_norm": 0.5691604792064953, |
| "learning_rate": 9.211062654524705e-06, |
| "loss": 0.4563, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.7932, |
| "grad_norm": 0.5776804270149507, |
| "learning_rate": 9.207296279842162e-06, |
| "loss": 0.4283, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.7944, |
| "grad_norm": 0.6999394766859208, |
| "learning_rate": 9.203521710106344e-06, |
| "loss": 0.4602, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.7956, |
| "grad_norm": 0.6438802975614829, |
| "learning_rate": 9.199738952669431e-06, |
| "loss": 0.4393, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.7968, |
| "grad_norm": 0.5992918054119178, |
| "learning_rate": 9.195948014899551e-06, |
| "loss": 0.4283, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.798, |
| "grad_norm": 0.6202168032693136, |
| "learning_rate": 9.192148904180769e-06, |
| "loss": 0.4188, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.7992, |
| "grad_norm": 0.7288678745681955, |
| "learning_rate": 9.188341627913061e-06, |
| "loss": 0.4312, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.8004, |
| "grad_norm": 0.5808399736514862, |
| "learning_rate": 9.184526193512318e-06, |
| "loss": 0.4176, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.8016, |
| "grad_norm": 0.5787103392680376, |
| "learning_rate": 9.180702608410314e-06, |
| "loss": 0.4474, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.8028, |
| "grad_norm": 0.5832821317833403, |
| "learning_rate": 9.176870880054704e-06, |
| "loss": 0.3817, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.804, |
| "grad_norm": 0.6566019896631167, |
| "learning_rate": 9.173031015909005e-06, |
| "loss": 0.4272, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.8052, |
| "grad_norm": 0.6120582912743191, |
| "learning_rate": 9.169183023452574e-06, |
| "loss": 0.4293, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.8064, |
| "grad_norm": 0.6226293645151404, |
| "learning_rate": 9.165326910180608e-06, |
| "loss": 0.384, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.8076, |
| "grad_norm": 0.5943686082952457, |
| "learning_rate": 9.161462683604118e-06, |
| "loss": 0.4525, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.8088, |
| "grad_norm": 0.6657427880686746, |
| "learning_rate": 9.157590351249923e-06, |
| "loss": 0.3887, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.81, |
| "grad_norm": 0.6213739366625514, |
| "learning_rate": 9.153709920660624e-06, |
| "loss": 0.4349, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.8112, |
| "grad_norm": 0.5478996164341824, |
| "learning_rate": 9.149821399394597e-06, |
| "loss": 0.4224, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.8124, |
| "grad_norm": 0.6161518824983402, |
| "learning_rate": 9.145924795025984e-06, |
| "loss": 0.4267, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.8136, |
| "grad_norm": 0.653475446858111, |
| "learning_rate": 9.142020115144662e-06, |
| "loss": 0.4256, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.8148, |
| "grad_norm": 0.7258034801778298, |
| "learning_rate": 9.138107367356247e-06, |
| "loss": 0.4782, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.816, |
| "grad_norm": 0.5964600131823877, |
| "learning_rate": 9.134186559282058e-06, |
| "loss": 0.417, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.8172, |
| "grad_norm": 0.6665321408235314, |
| "learning_rate": 9.130257698559129e-06, |
| "loss": 0.4563, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.8184, |
| "grad_norm": 0.6353290941165448, |
| "learning_rate": 9.126320792840165e-06, |
| "loss": 0.4097, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.8196, |
| "grad_norm": 0.5550868353040516, |
| "learning_rate": 9.12237584979355e-06, |
| "loss": 0.4417, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.8208, |
| "grad_norm": 0.6341329284133028, |
| "learning_rate": 9.11842287710332e-06, |
| "loss": 0.4447, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.822, |
| "grad_norm": 0.6840045072161562, |
| "learning_rate": 9.114461882469154e-06, |
| "loss": 0.4093, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.8232, |
| "grad_norm": 0.666702761146514, |
| "learning_rate": 9.110492873606351e-06, |
| "loss": 0.4351, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.8244, |
| "grad_norm": 0.6770837060218068, |
| "learning_rate": 9.106515858245825e-06, |
| "loss": 0.4331, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.8256, |
| "grad_norm": 0.6138322525456078, |
| "learning_rate": 9.102530844134084e-06, |
| "loss": 0.4153, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.8268, |
| "grad_norm": 0.679029102207529, |
| "learning_rate": 9.098537839033213e-06, |
| "loss": 0.4697, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.828, |
| "grad_norm": 0.5730739203701445, |
| "learning_rate": 9.094536850720867e-06, |
| "loss": 0.4137, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.8292, |
| "grad_norm": 0.676913973932885, |
| "learning_rate": 9.090527886990249e-06, |
| "loss": 0.4377, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.8304, |
| "grad_norm": 0.589745753186363, |
| "learning_rate": 9.086510955650095e-06, |
| "loss": 0.439, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.8316, |
| "grad_norm": 0.6562093394558783, |
| "learning_rate": 9.082486064524663e-06, |
| "loss": 0.4702, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.8328, |
| "grad_norm": 0.7014623843789226, |
| "learning_rate": 9.078453221453714e-06, |
| "loss": 0.4241, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.834, |
| "grad_norm": 0.7211409839747195, |
| "learning_rate": 9.074412434292496e-06, |
| "loss": 0.466, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.8352, |
| "grad_norm": 0.6327658360652593, |
| "learning_rate": 9.070363710911736e-06, |
| "loss": 0.4038, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.8364, |
| "grad_norm": 0.7117499494134616, |
| "learning_rate": 9.066307059197612e-06, |
| "loss": 0.4542, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.8376, |
| "grad_norm": 0.6779934892161844, |
| "learning_rate": 9.062242487051752e-06, |
| "loss": 0.4527, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.8388, |
| "grad_norm": 0.6132662026238554, |
| "learning_rate": 9.058170002391205e-06, |
| "loss": 0.3867, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.84, |
| "grad_norm": 0.6328253145414058, |
| "learning_rate": 9.05408961314844e-06, |
| "loss": 0.4552, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.8412, |
| "grad_norm": 0.6093284392860376, |
| "learning_rate": 9.050001327271314e-06, |
| "loss": 0.3813, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.8424, |
| "grad_norm": 0.5175203763355374, |
| "learning_rate": 9.045905152723074e-06, |
| "loss": 0.389, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.8436, |
| "grad_norm": 0.5842103018209932, |
| "learning_rate": 9.041801097482323e-06, |
| "loss": 0.4245, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.8448, |
| "grad_norm": 0.599455910511483, |
| "learning_rate": 9.037689169543024e-06, |
| "loss": 0.47, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.846, |
| "grad_norm": 0.5465215060060533, |
| "learning_rate": 9.033569376914467e-06, |
| "loss": 0.4238, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.8472, |
| "grad_norm": 0.5470742118288643, |
| "learning_rate": 9.029441727621267e-06, |
| "loss": 0.4145, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.8484, |
| "grad_norm": 0.5969139676060078, |
| "learning_rate": 9.025306229703334e-06, |
| "loss": 0.4382, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.8496, |
| "grad_norm": 0.5840979426058325, |
| "learning_rate": 9.021162891215879e-06, |
| "loss": 0.3982, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.8508, |
| "grad_norm": 0.6069802881120857, |
| "learning_rate": 9.017011720229368e-06, |
| "loss": 0.4569, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.852, |
| "grad_norm": 0.603696871584329, |
| "learning_rate": 9.012852724829539e-06, |
| "loss": 0.473, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.8532, |
| "grad_norm": 0.6423008232878531, |
| "learning_rate": 9.008685913117361e-06, |
| "loss": 0.4572, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.8544, |
| "grad_norm": 0.5264477044024208, |
| "learning_rate": 9.00451129320903e-06, |
| "loss": 0.4183, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.8556, |
| "grad_norm": 0.5893466589452381, |
| "learning_rate": 9.000328873235955e-06, |
| "loss": 0.4322, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.8568, |
| "grad_norm": 0.6272192483642774, |
| "learning_rate": 8.996138661344734e-06, |
| "loss": 0.4471, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.858, |
| "grad_norm": 0.6509496086878485, |
| "learning_rate": 8.99194066569714e-06, |
| "loss": 0.4292, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.8592, |
| "grad_norm": 0.5828069494646737, |
| "learning_rate": 8.987734894470111e-06, |
| "loss": 0.4054, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.8604, |
| "grad_norm": 0.6121311166005416, |
| "learning_rate": 8.983521355855731e-06, |
| "loss": 0.4549, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.8616, |
| "grad_norm": 0.5311697585062636, |
| "learning_rate": 8.979300058061214e-06, |
| "loss": 0.4543, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.8628, |
| "grad_norm": 0.5954597656920868, |
| "learning_rate": 8.97507100930888e-06, |
| "loss": 0.4127, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.864, |
| "grad_norm": 0.6603824524906432, |
| "learning_rate": 8.97083421783616e-06, |
| "loss": 0.4689, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.8652, |
| "grad_norm": 0.5873463266140205, |
| "learning_rate": 8.96658969189555e-06, |
| "loss": 0.4581, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.8664, |
| "grad_norm": 0.6302692465732681, |
| "learning_rate": 8.962337439754627e-06, |
| "loss": 0.3957, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.8676, |
| "grad_norm": 0.5660832560318835, |
| "learning_rate": 8.958077469696007e-06, |
| "loss": 0.4216, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.8688, |
| "grad_norm": 0.6057306547058764, |
| "learning_rate": 8.953809790017342e-06, |
| "loss": 0.4203, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.87, |
| "grad_norm": 0.5791265869794112, |
| "learning_rate": 8.949534409031305e-06, |
| "loss": 0.4125, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.8712, |
| "grad_norm": 0.6074661846992012, |
| "learning_rate": 8.94525133506556e-06, |
| "loss": 0.425, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.8724, |
| "grad_norm": 0.6041559413723928, |
| "learning_rate": 8.940960576462763e-06, |
| "loss": 0.4132, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.8736, |
| "grad_norm": 0.5119184403985256, |
| "learning_rate": 8.93666214158054e-06, |
| "loss": 0.4173, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.8748, |
| "grad_norm": 0.6184156720209869, |
| "learning_rate": 8.932356038791465e-06, |
| "loss": 0.47, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.876, |
| "grad_norm": 0.6445492952041065, |
| "learning_rate": 8.928042276483048e-06, |
| "loss": 0.442, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.8772, |
| "grad_norm": 0.560388170961086, |
| "learning_rate": 8.923720863057718e-06, |
| "loss": 0.4378, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.8784, |
| "grad_norm": 0.6295368599127688, |
| "learning_rate": 8.919391806932807e-06, |
| "loss": 0.4294, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.8796, |
| "grad_norm": 0.6154129494849543, |
| "learning_rate": 8.915055116540538e-06, |
| "loss": 0.4734, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.8808, |
| "grad_norm": 0.6775101992228028, |
| "learning_rate": 8.910710800327996e-06, |
| "loss": 0.4273, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.882, |
| "grad_norm": 0.5810481242782447, |
| "learning_rate": 8.906358866757128e-06, |
| "loss": 0.4419, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.8832, |
| "grad_norm": 0.6474016395952771, |
| "learning_rate": 8.901999324304713e-06, |
| "loss": 0.4576, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.8844, |
| "grad_norm": 0.5687417809242015, |
| "learning_rate": 8.897632181462354e-06, |
| "loss": 0.3953, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.8856, |
| "grad_norm": 0.626015511992478, |
| "learning_rate": 8.893257446736455e-06, |
| "loss": 0.458, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.8868, |
| "grad_norm": 0.5936874146722684, |
| "learning_rate": 8.888875128648208e-06, |
| "loss": 0.4139, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.888, |
| "grad_norm": 0.5445910669585022, |
| "learning_rate": 8.884485235733579e-06, |
| "loss": 0.4083, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.8892, |
| "grad_norm": 0.5871241121470941, |
| "learning_rate": 8.880087776543287e-06, |
| "loss": 0.4161, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.8904, |
| "grad_norm": 0.5861156757463913, |
| "learning_rate": 8.875682759642786e-06, |
| "loss": 0.4572, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.8916, |
| "grad_norm": 0.6524639315283577, |
| "learning_rate": 8.871270193612254e-06, |
| "loss": 0.415, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.8928, |
| "grad_norm": 0.5579578541929966, |
| "learning_rate": 8.866850087046574e-06, |
| "loss": 0.4599, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.894, |
| "grad_norm": 0.5983372699604969, |
| "learning_rate": 8.862422448555317e-06, |
| "loss": 0.4685, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.8952, |
| "grad_norm": 0.5313257499086619, |
| "learning_rate": 8.857987286762718e-06, |
| "loss": 0.4482, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.8964, |
| "grad_norm": 0.5741224075650853, |
| "learning_rate": 8.853544610307675e-06, |
| "loss": 0.4593, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.8976, |
| "grad_norm": 0.5180035885365164, |
| "learning_rate": 8.84909442784372e-06, |
| "loss": 0.4393, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.8988, |
| "grad_norm": 0.6267083133045676, |
| "learning_rate": 8.844636748038999e-06, |
| "loss": 0.4835, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.9, |
| "grad_norm": 0.5831683863999229, |
| "learning_rate": 8.840171579576273e-06, |
| "loss": 0.4201, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.9012, |
| "grad_norm": 0.5405806665890253, |
| "learning_rate": 8.83569893115288e-06, |
| "loss": 0.4114, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.9024, |
| "grad_norm": 0.5885634994888672, |
| "learning_rate": 8.831218811480735e-06, |
| "loss": 0.4217, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.9036, |
| "grad_norm": 0.5630756675541398, |
| "learning_rate": 8.8267312292863e-06, |
| "loss": 0.4205, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.9048, |
| "grad_norm": 0.5589821186819856, |
| "learning_rate": 8.822236193310574e-06, |
| "loss": 0.3911, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.906, |
| "grad_norm": 0.5377971370223851, |
| "learning_rate": 8.817733712309078e-06, |
| "loss": 0.416, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.9072, |
| "grad_norm": 0.6594746319910847, |
| "learning_rate": 8.813223795051828e-06, |
| "loss": 0.449, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.9084, |
| "grad_norm": 0.6337830271660636, |
| "learning_rate": 8.80870645032333e-06, |
| "loss": 0.4733, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.9096, |
| "grad_norm": 0.5096341057306893, |
| "learning_rate": 8.804181686922555e-06, |
| "loss": 0.3898, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.9108, |
| "grad_norm": 0.5551424785674842, |
| "learning_rate": 8.799649513662926e-06, |
| "loss": 0.4354, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.912, |
| "grad_norm": 0.5430810494161301, |
| "learning_rate": 8.795109939372298e-06, |
| "loss": 0.4314, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.9132, |
| "grad_norm": 0.7303510387293701, |
| "learning_rate": 8.79056297289294e-06, |
| "loss": 0.4608, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.9144, |
| "grad_norm": 0.58979590436832, |
| "learning_rate": 8.786008623081526e-06, |
| "loss": 0.408, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.9156, |
| "grad_norm": 0.6431267478376745, |
| "learning_rate": 8.781446898809101e-06, |
| "loss": 0.4256, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.9168, |
| "grad_norm": 0.6800621940281214, |
| "learning_rate": 8.776877808961082e-06, |
| "loss": 0.4125, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.918, |
| "grad_norm": 0.6078542654953756, |
| "learning_rate": 8.772301362437233e-06, |
| "loss": 0.4037, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.9192, |
| "grad_norm": 0.6667156303614546, |
| "learning_rate": 8.767717568151643e-06, |
| "loss": 0.451, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.9204, |
| "grad_norm": 0.6067648015386979, |
| "learning_rate": 8.763126435032717e-06, |
| "loss": 0.4565, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.9216, |
| "grad_norm": 0.6192295222939654, |
| "learning_rate": 8.758527972023151e-06, |
| "loss": 0.4849, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.9228, |
| "grad_norm": 0.6777113466817958, |
| "learning_rate": 8.753922188079923e-06, |
| "loss": 0.4293, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.924, |
| "grad_norm": 0.5627832110347925, |
| "learning_rate": 8.749309092174267e-06, |
| "loss": 0.4395, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.9252, |
| "grad_norm": 0.5973915994189181, |
| "learning_rate": 8.744688693291658e-06, |
| "loss": 0.4326, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.9264, |
| "grad_norm": 0.617631648752449, |
| "learning_rate": 8.740061000431805e-06, |
| "loss": 0.431, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.9276, |
| "grad_norm": 0.6257907130189103, |
| "learning_rate": 8.735426022608611e-06, |
| "loss": 0.4335, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.9288, |
| "grad_norm": 0.7140143377367802, |
| "learning_rate": 8.73078376885018e-06, |
| "loss": 0.4372, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.93, |
| "grad_norm": 0.59771651363443, |
| "learning_rate": 8.726134248198782e-06, |
| "loss": 0.4215, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.9312, |
| "grad_norm": 0.5943798731936335, |
| "learning_rate": 8.721477469710845e-06, |
| "loss": 0.4462, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.9324, |
| "grad_norm": 0.5461113902503175, |
| "learning_rate": 8.71681344245693e-06, |
| "loss": 0.4113, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.9336, |
| "grad_norm": 0.515942973208093, |
| "learning_rate": 8.712142175521723e-06, |
| "loss": 0.4043, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.9348, |
| "grad_norm": 0.5451413502419838, |
| "learning_rate": 8.707463678004004e-06, |
| "loss": 0.3996, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.936, |
| "grad_norm": 0.5670643504057882, |
| "learning_rate": 8.702777959016647e-06, |
| "loss": 0.408, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.9372, |
| "grad_norm": 0.6228540869849157, |
| "learning_rate": 8.698085027686581e-06, |
| "loss": 0.4175, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.9384, |
| "grad_norm": 0.664468192479229, |
| "learning_rate": 8.69338489315479e-06, |
| "loss": 0.4633, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.9396, |
| "grad_norm": 0.6484118822851579, |
| "learning_rate": 8.68867756457629e-06, |
| "loss": 0.4073, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.9408, |
| "grad_norm": 0.6014610628440286, |
| "learning_rate": 8.683963051120103e-06, |
| "loss": 0.4577, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.942, |
| "grad_norm": 0.6898835243028868, |
| "learning_rate": 8.679241361969252e-06, |
| "loss": 0.4383, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.9432, |
| "grad_norm": 0.6872534552572807, |
| "learning_rate": 8.674512506320733e-06, |
| "loss": 0.4585, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.9444, |
| "grad_norm": 0.5229387300614929, |
| "learning_rate": 8.669776493385506e-06, |
| "loss": 0.4234, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.9456, |
| "grad_norm": 0.5528374158454885, |
| "learning_rate": 8.665033332388466e-06, |
| "loss": 0.4342, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.9468, |
| "grad_norm": 0.6163928252539012, |
| "learning_rate": 8.660283032568435e-06, |
| "loss": 0.4114, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.948, |
| "grad_norm": 0.6580057062480736, |
| "learning_rate": 8.655525603178137e-06, |
| "loss": 0.444, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.9492, |
| "grad_norm": 0.5104550641112756, |
| "learning_rate": 8.650761053484188e-06, |
| "loss": 0.3754, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.9504, |
| "grad_norm": 0.6206032119821056, |
| "learning_rate": 8.645989392767068e-06, |
| "loss": 0.4312, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.9516, |
| "grad_norm": 0.619139263946645, |
| "learning_rate": 8.641210630321115e-06, |
| "loss": 0.4523, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.9528, |
| "grad_norm": 0.6111017449033144, |
| "learning_rate": 8.636424775454489e-06, |
| "loss": 0.4259, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.954, |
| "grad_norm": 0.6401410230121702, |
| "learning_rate": 8.631631837489173e-06, |
| "loss": 0.439, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.9552, |
| "grad_norm": 0.5181884328548102, |
| "learning_rate": 8.626831825760946e-06, |
| "loss": 0.436, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.9564, |
| "grad_norm": 0.5334649831223063, |
| "learning_rate": 8.622024749619363e-06, |
| "loss": 0.3736, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.9576, |
| "grad_norm": 0.5454287713548887, |
| "learning_rate": 8.61721061842774e-06, |
| "loss": 0.393, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.9588, |
| "grad_norm": 0.6269409723008791, |
| "learning_rate": 8.612389441563136e-06, |
| "loss": 0.4473, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.594447646809085, |
| "learning_rate": 8.60756122841633e-06, |
| "loss": 0.4438, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.9612, |
| "grad_norm": 0.7102600986895227, |
| "learning_rate": 8.602725988391814e-06, |
| "loss": 0.4139, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.9624, |
| "grad_norm": 0.5254806738684399, |
| "learning_rate": 8.597883730907757e-06, |
| "loss": 0.4293, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.9636, |
| "grad_norm": 0.6746355278803491, |
| "learning_rate": 8.593034465396007e-06, |
| "loss": 0.4331, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.9648, |
| "grad_norm": 0.5759430041057655, |
| "learning_rate": 8.588178201302052e-06, |
| "loss": 0.4639, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.966, |
| "grad_norm": 0.4887389359434623, |
| "learning_rate": 8.583314948085023e-06, |
| "loss": 0.4314, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.9672, |
| "grad_norm": 0.6240592097801354, |
| "learning_rate": 8.578444715217652e-06, |
| "loss": 0.4693, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.9684, |
| "grad_norm": 0.5906677636695606, |
| "learning_rate": 8.57356751218628e-06, |
| "loss": 0.4128, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.9696, |
| "grad_norm": 0.5522058124741028, |
| "learning_rate": 8.568683348490817e-06, |
| "loss": 0.423, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.9708, |
| "grad_norm": 0.519899853308098, |
| "learning_rate": 8.563792233644725e-06, |
| "loss": 0.3736, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.972, |
| "grad_norm": 0.6138196751505098, |
| "learning_rate": 8.558894177175019e-06, |
| "loss": 0.4196, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.9732, |
| "grad_norm": 0.5873280535324105, |
| "learning_rate": 8.553989188622228e-06, |
| "loss": 0.4282, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.9744, |
| "grad_norm": 0.5976713589531968, |
| "learning_rate": 8.549077277540379e-06, |
| "loss": 0.4445, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.9756, |
| "grad_norm": 0.6502292535424105, |
| "learning_rate": 8.544158453496992e-06, |
| "loss": 0.4108, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.9768, |
| "grad_norm": 0.578412995801094, |
| "learning_rate": 8.539232726073046e-06, |
| "loss": 0.4226, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.978, |
| "grad_norm": 0.5707868572285869, |
| "learning_rate": 8.53430010486297e-06, |
| "loss": 0.4008, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.9792, |
| "grad_norm": 0.5852269887281469, |
| "learning_rate": 8.529360599474616e-06, |
| "loss": 0.4257, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.9804, |
| "grad_norm": 0.609048557208369, |
| "learning_rate": 8.524414219529253e-06, |
| "loss": 0.4436, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.9816, |
| "grad_norm": 0.5406348011212092, |
| "learning_rate": 8.519460974661533e-06, |
| "loss": 0.4204, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.9828, |
| "grad_norm": 0.5677734036143112, |
| "learning_rate": 8.514500874519483e-06, |
| "loss": 0.4348, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.984, |
| "grad_norm": 0.5388294590032593, |
| "learning_rate": 8.509533928764482e-06, |
| "loss": 0.4105, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.9852, |
| "grad_norm": 0.519324821092748, |
| "learning_rate": 8.50456014707124e-06, |
| "loss": 0.3875, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.9864, |
| "grad_norm": 0.6125545198699792, |
| "learning_rate": 8.499579539127794e-06, |
| "loss": 0.4901, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.9876, |
| "grad_norm": 0.6206780765007368, |
| "learning_rate": 8.494592114635458e-06, |
| "loss": 0.4046, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.9888, |
| "grad_norm": 0.6222065962769607, |
| "learning_rate": 8.489597883308844e-06, |
| "loss": 0.4488, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.99, |
| "grad_norm": 0.5329723921756123, |
| "learning_rate": 8.484596854875806e-06, |
| "loss": 0.4141, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.9912, |
| "grad_norm": 0.5993982737276878, |
| "learning_rate": 8.479589039077446e-06, |
| "loss": 0.4308, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.9924, |
| "grad_norm": 0.5584722292572113, |
| "learning_rate": 8.474574445668085e-06, |
| "loss": 0.4568, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.9936, |
| "grad_norm": 0.6091566316507482, |
| "learning_rate": 8.469553084415247e-06, |
| "loss": 0.4556, |
| "step": 828 |
| }, |
| { |
| "epoch": 0.9948, |
| "grad_norm": 0.6233664933735439, |
| "learning_rate": 8.464524965099632e-06, |
| "loss": 0.4206, |
| "step": 829 |
| }, |
| { |
| "epoch": 0.996, |
| "grad_norm": 0.5293099738021775, |
| "learning_rate": 8.459490097515114e-06, |
| "loss": 0.4071, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.9972, |
| "grad_norm": 0.5350545129837626, |
| "learning_rate": 8.454448491468702e-06, |
| "loss": 0.461, |
| "step": 831 |
| }, |
| { |
| "epoch": 0.9984, |
| "grad_norm": 0.5326198305245626, |
| "learning_rate": 8.449400156780536e-06, |
| "loss": 0.4713, |
| "step": 832 |
| }, |
| { |
| "epoch": 0.9996, |
| "grad_norm": 0.5901616953038845, |
| "learning_rate": 8.444345103283858e-06, |
| "loss": 0.4088, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.5901616953038845, |
| "learning_rate": 8.439283340825002e-06, |
| "loss": 0.4155, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.0012, |
| "grad_norm": 1.1368981411522645, |
| "learning_rate": 8.434214879263365e-06, |
| "loss": 0.3407, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.0024, |
| "grad_norm": 0.5681339768011925, |
| "learning_rate": 8.429139728471395e-06, |
| "loss": 0.3837, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.0036, |
| "grad_norm": 0.60245776707601, |
| "learning_rate": 8.424057898334569e-06, |
| "loss": 0.3865, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.0048, |
| "grad_norm": 0.5452067685083533, |
| "learning_rate": 8.418969398751375e-06, |
| "loss": 0.3761, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.006, |
| "grad_norm": 0.5432481480755952, |
| "learning_rate": 8.413874239633291e-06, |
| "loss": 0.3312, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.0072, |
| "grad_norm": 0.5850088244625832, |
| "learning_rate": 8.408772430904768e-06, |
| "loss": 0.3936, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.0084, |
| "grad_norm": 0.6134958489823171, |
| "learning_rate": 8.403663982503205e-06, |
| "loss": 0.3783, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.0096, |
| "grad_norm": 0.7310393810023733, |
| "learning_rate": 8.398548904378938e-06, |
| "loss": 0.3633, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.0108, |
| "grad_norm": 0.7594188380669021, |
| "learning_rate": 8.393427206495217e-06, |
| "loss": 0.3487, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.012, |
| "grad_norm": 0.5430393582609394, |
| "learning_rate": 8.388298898828182e-06, |
| "loss": 0.3709, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.0132, |
| "grad_norm": 0.5470009445017966, |
| "learning_rate": 8.383163991366852e-06, |
| "loss": 0.3646, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.0144, |
| "grad_norm": 0.6955376746178842, |
| "learning_rate": 8.378022494113099e-06, |
| "loss": 0.3942, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.0156, |
| "grad_norm": 0.5443192423069556, |
| "learning_rate": 8.372874417081632e-06, |
| "loss": 0.3858, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.0168, |
| "grad_norm": 0.5383937902578093, |
| "learning_rate": 8.367719770299972e-06, |
| "loss": 0.3424, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.018, |
| "grad_norm": 0.5753603289032625, |
| "learning_rate": 8.36255856380844e-06, |
| "loss": 0.3775, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.0192, |
| "grad_norm": 0.5817346658130989, |
| "learning_rate": 8.35739080766014e-06, |
| "loss": 0.3656, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.0204, |
| "grad_norm": 0.5106554756161691, |
| "learning_rate": 8.352216511920921e-06, |
| "loss": 0.3676, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.0216, |
| "grad_norm": 0.7331147307012948, |
| "learning_rate": 8.34703568666938e-06, |
| "loss": 0.416, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.0228, |
| "grad_norm": 0.6003405960110348, |
| "learning_rate": 8.341848341996828e-06, |
| "loss": 0.3731, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.024, |
| "grad_norm": 0.6343146490156437, |
| "learning_rate": 8.336654488007277e-06, |
| "loss": 0.3696, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.0252, |
| "grad_norm": 0.6076533649109559, |
| "learning_rate": 8.331454134817414e-06, |
| "loss": 0.3635, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.0264, |
| "grad_norm": 0.5107426303437482, |
| "learning_rate": 8.326247292556588e-06, |
| "loss": 0.365, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.0276, |
| "grad_norm": 0.6148391495624019, |
| "learning_rate": 8.321033971366788e-06, |
| "loss": 0.3838, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.0288, |
| "grad_norm": 0.5880138628410642, |
| "learning_rate": 8.315814181402623e-06, |
| "loss": 0.3796, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.03, |
| "grad_norm": 0.587112143817172, |
| "learning_rate": 8.310587932831302e-06, |
| "loss": 0.3873, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.0312, |
| "grad_norm": 0.6083837513556604, |
| "learning_rate": 8.305355235832611e-06, |
| "loss": 0.3788, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.0324, |
| "grad_norm": 0.611985092855698, |
| "learning_rate": 8.300116100598899e-06, |
| "loss": 0.3979, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.0336, |
| "grad_norm": 0.5127321006506305, |
| "learning_rate": 8.294870537335054e-06, |
| "loss": 0.3646, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.0348, |
| "grad_norm": 0.6133913722569919, |
| "learning_rate": 8.28961855625849e-06, |
| "loss": 0.3477, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.036, |
| "grad_norm": 0.5450221570823442, |
| "learning_rate": 8.284360167599113e-06, |
| "loss": 0.3767, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.0372, |
| "grad_norm": 0.5590817088934886, |
| "learning_rate": 8.279095381599318e-06, |
| "loss": 0.4125, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.0384, |
| "grad_norm": 0.6700002859155857, |
| "learning_rate": 8.273824208513956e-06, |
| "loss": 0.3979, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.0396, |
| "grad_norm": 0.5752277444681116, |
| "learning_rate": 8.268546658610319e-06, |
| "loss": 0.3758, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.0408, |
| "grad_norm": 0.468366324162763, |
| "learning_rate": 8.26326274216812e-06, |
| "loss": 0.3532, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.042, |
| "grad_norm": 0.5970315093052967, |
| "learning_rate": 8.257972469479478e-06, |
| "loss": 0.3935, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.0432, |
| "grad_norm": 0.5782502505738331, |
| "learning_rate": 8.252675850848886e-06, |
| "loss": 0.4055, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.0444, |
| "grad_norm": 0.6631298992386631, |
| "learning_rate": 8.247372896593203e-06, |
| "loss": 0.4402, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.0456, |
| "grad_norm": 0.6584560435617298, |
| "learning_rate": 8.24206361704162e-06, |
| "loss": 0.3878, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.0468, |
| "grad_norm": 0.5480440560061248, |
| "learning_rate": 8.236748022535662e-06, |
| "loss": 0.3733, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.048, |
| "grad_norm": 0.486477480838575, |
| "learning_rate": 8.231426123429143e-06, |
| "loss": 0.3764, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.0492, |
| "grad_norm": 0.6142299872341441, |
| "learning_rate": 8.226097930088162e-06, |
| "loss": 0.3728, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.0504, |
| "grad_norm": 0.5858378638083889, |
| "learning_rate": 8.220763452891078e-06, |
| "loss": 0.3613, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.0516, |
| "grad_norm": 0.6139402721613479, |
| "learning_rate": 8.215422702228487e-06, |
| "loss": 0.3953, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.0528, |
| "grad_norm": 0.4753434037158642, |
| "learning_rate": 8.210075688503209e-06, |
| "loss": 0.3758, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.054, |
| "grad_norm": 0.6564325725197425, |
| "learning_rate": 8.20472242213026e-06, |
| "loss": 0.3856, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.0552, |
| "grad_norm": 0.5887890156582547, |
| "learning_rate": 8.199362913536837e-06, |
| "loss": 0.3843, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.0564, |
| "grad_norm": 1.364183705833157, |
| "learning_rate": 8.193997173162293e-06, |
| "loss": 0.409, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.0576, |
| "grad_norm": 0.6165333073646311, |
| "learning_rate": 8.188625211458123e-06, |
| "loss": 0.382, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.0588, |
| "grad_norm": 0.6041705166135537, |
| "learning_rate": 8.183247038887937e-06, |
| "loss": 0.3749, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.06, |
| "grad_norm": 0.5109914794108413, |
| "learning_rate": 8.177862665927445e-06, |
| "loss": 0.4021, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.0612, |
| "grad_norm": 0.742930894603376, |
| "learning_rate": 8.17247210306443e-06, |
| "loss": 0.3833, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.0624, |
| "grad_norm": 0.5932978751229475, |
| "learning_rate": 8.167075360798739e-06, |
| "loss": 0.421, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.0636, |
| "grad_norm": 0.6758659142255161, |
| "learning_rate": 8.161672449642248e-06, |
| "loss": 0.414, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.0648, |
| "grad_norm": 0.4904299584222583, |
| "learning_rate": 8.156263380118855e-06, |
| "loss": 0.3795, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.066, |
| "grad_norm": 0.6513253017015571, |
| "learning_rate": 8.150848162764448e-06, |
| "loss": 0.3342, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.0672, |
| "grad_norm": 0.5473246180694088, |
| "learning_rate": 8.145426808126894e-06, |
| "loss": 0.3682, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.0684, |
| "grad_norm": 0.6249870992905973, |
| "learning_rate": 8.139999326766011e-06, |
| "loss": 0.392, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.0695999999999999, |
| "grad_norm": 0.6798970371707683, |
| "learning_rate": 8.134565729253554e-06, |
| "loss": 0.3877, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.0708, |
| "grad_norm": 0.6216883250382137, |
| "learning_rate": 8.129126026173189e-06, |
| "loss": 0.3796, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.072, |
| "grad_norm": 0.5807715354692247, |
| "learning_rate": 8.123680228120474e-06, |
| "loss": 0.3777, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.0732, |
| "grad_norm": 0.5740983667800384, |
| "learning_rate": 8.118228345702843e-06, |
| "loss": 0.3837, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.0744, |
| "grad_norm": 0.5956483364277361, |
| "learning_rate": 8.112770389539574e-06, |
| "loss": 0.4024, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.0756000000000001, |
| "grad_norm": 0.6140423109873733, |
| "learning_rate": 8.107306370261785e-06, |
| "loss": 0.3918, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.0768, |
| "grad_norm": 0.568523839966626, |
| "learning_rate": 8.101836298512396e-06, |
| "loss": 0.3879, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.078, |
| "grad_norm": 0.6580373875828848, |
| "learning_rate": 8.096360184946117e-06, |
| "loss": 0.3843, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.0792, |
| "grad_norm": 0.7056683024436424, |
| "learning_rate": 8.09087804022943e-06, |
| "loss": 0.3941, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.0804, |
| "grad_norm": 0.6575712990687944, |
| "learning_rate": 8.085389875040566e-06, |
| "loss": 0.3931, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.0816, |
| "grad_norm": 0.5930671138814431, |
| "learning_rate": 8.079895700069473e-06, |
| "loss": 0.3731, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.0828, |
| "grad_norm": 0.6630649504900762, |
| "learning_rate": 8.074395526017816e-06, |
| "loss": 0.406, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.084, |
| "grad_norm": 0.6261787992626612, |
| "learning_rate": 8.06888936359894e-06, |
| "loss": 0.3594, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.0852, |
| "grad_norm": 0.7502706494959577, |
| "learning_rate": 8.063377223537853e-06, |
| "loss": 0.4491, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.0864, |
| "grad_norm": 0.5534701644541706, |
| "learning_rate": 8.057859116571213e-06, |
| "loss": 0.3703, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.0876, |
| "grad_norm": 0.49799763923502727, |
| "learning_rate": 8.05233505344729e-06, |
| "loss": 0.3543, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.0888, |
| "grad_norm": 1.0019839872604202, |
| "learning_rate": 8.046805044925964e-06, |
| "loss": 0.4528, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.09, |
| "grad_norm": 0.5461445023349752, |
| "learning_rate": 8.041269101778694e-06, |
| "loss": 0.3737, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.0912, |
| "grad_norm": 0.628865585347978, |
| "learning_rate": 8.035727234788496e-06, |
| "loss": 0.4015, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.0924, |
| "grad_norm": 0.6079959467058422, |
| "learning_rate": 8.030179454749925e-06, |
| "loss": 0.3825, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.0936, |
| "grad_norm": 0.5305429021193003, |
| "learning_rate": 8.024625772469055e-06, |
| "loss": 0.3451, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.0948, |
| "grad_norm": 0.7032844908658168, |
| "learning_rate": 8.019066198763458e-06, |
| "loss": 0.3684, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.096, |
| "grad_norm": 0.6134951245718815, |
| "learning_rate": 8.013500744462177e-06, |
| "loss": 0.3723, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.0972, |
| "grad_norm": 0.6064703933463657, |
| "learning_rate": 8.007929420405714e-06, |
| "loss": 0.3598, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.0984, |
| "grad_norm": 0.5129566311438788, |
| "learning_rate": 8.002352237446e-06, |
| "loss": 0.3657, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.0996, |
| "grad_norm": 0.7027224551032671, |
| "learning_rate": 7.996769206446383e-06, |
| "loss": 0.3788, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.1008, |
| "grad_norm": 0.5759047752415848, |
| "learning_rate": 7.991180338281594e-06, |
| "loss": 0.407, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.102, |
| "grad_norm": 0.6444902877949182, |
| "learning_rate": 7.985585643837743e-06, |
| "loss": 0.3525, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.1032, |
| "grad_norm": 0.5481881054619812, |
| "learning_rate": 7.979985134012285e-06, |
| "loss": 0.3501, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.1044, |
| "grad_norm": 0.733075166199247, |
| "learning_rate": 7.974378819713998e-06, |
| "loss": 0.3672, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.1056, |
| "grad_norm": 0.5614221050399015, |
| "learning_rate": 7.968766711862971e-06, |
| "loss": 0.4116, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.1068, |
| "grad_norm": 0.5800899794679014, |
| "learning_rate": 7.963148821390578e-06, |
| "loss": 0.3827, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.108, |
| "grad_norm": 0.6622601415747164, |
| "learning_rate": 7.957525159239454e-06, |
| "loss": 0.3893, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.1092, |
| "grad_norm": 0.8333817588459229, |
| "learning_rate": 7.951895736363477e-06, |
| "loss": 0.3963, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.1104, |
| "grad_norm": 0.5559197241751965, |
| "learning_rate": 7.946260563727746e-06, |
| "loss": 0.4014, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.1116, |
| "grad_norm": 0.5241069356216917, |
| "learning_rate": 7.940619652308562e-06, |
| "loss": 0.3773, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.1128, |
| "grad_norm": 0.620024828062137, |
| "learning_rate": 7.934973013093397e-06, |
| "loss": 0.3507, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.114, |
| "grad_norm": 0.5232386711797898, |
| "learning_rate": 7.929320657080886e-06, |
| "loss": 0.4054, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.1152, |
| "grad_norm": 0.5683648527722198, |
| "learning_rate": 7.923662595280799e-06, |
| "loss": 0.3905, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.1164, |
| "grad_norm": 0.6032191740051801, |
| "learning_rate": 7.917998838714019e-06, |
| "loss": 0.4023, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.1176, |
| "grad_norm": 0.6257951900611615, |
| "learning_rate": 7.91232939841252e-06, |
| "loss": 0.3718, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.1188, |
| "grad_norm": 0.5722970175151099, |
| "learning_rate": 7.906654285419347e-06, |
| "loss": 0.3609, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.12, |
| "grad_norm": 0.8443882209730433, |
| "learning_rate": 7.900973510788595e-06, |
| "loss": 0.4205, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.1212, |
| "grad_norm": 0.6536057882938714, |
| "learning_rate": 7.895287085585386e-06, |
| "loss": 0.4226, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.1224, |
| "grad_norm": 0.6275687443932316, |
| "learning_rate": 7.889595020885853e-06, |
| "loss": 0.3833, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.1236, |
| "grad_norm": 0.6708387846357547, |
| "learning_rate": 7.883897327777108e-06, |
| "loss": 0.3703, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.1248, |
| "grad_norm": 0.557455511462114, |
| "learning_rate": 7.878194017357229e-06, |
| "loss": 0.3761, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.126, |
| "grad_norm": 0.6825974121921209, |
| "learning_rate": 7.87248510073523e-06, |
| "loss": 0.3611, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.1272, |
| "grad_norm": 0.6790308408992597, |
| "learning_rate": 7.866770589031057e-06, |
| "loss": 0.3928, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.1284, |
| "grad_norm": 0.6309280591439027, |
| "learning_rate": 7.86105049337554e-06, |
| "loss": 0.3772, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.1296, |
| "grad_norm": 0.48897990635157607, |
| "learning_rate": 7.855324824910395e-06, |
| "loss": 0.3747, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.1308, |
| "grad_norm": 0.7680633972491443, |
| "learning_rate": 7.849593594788192e-06, |
| "loss": 0.3887, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.1320000000000001, |
| "grad_norm": 0.700393461346577, |
| "learning_rate": 7.843856814172329e-06, |
| "loss": 0.4446, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.1332, |
| "grad_norm": 0.6410912266603231, |
| "learning_rate": 7.83811449423702e-06, |
| "loss": 0.3871, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.1344, |
| "grad_norm": 0.576018077563102, |
| "learning_rate": 7.832366646167268e-06, |
| "loss": 0.3697, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.1356, |
| "grad_norm": 0.5828135175960664, |
| "learning_rate": 7.82661328115884e-06, |
| "loss": 0.3619, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.1368, |
| "grad_norm": 0.6198683619402525, |
| "learning_rate": 7.820854410418255e-06, |
| "loss": 0.3734, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.138, |
| "grad_norm": 0.5457064722309137, |
| "learning_rate": 7.815090045162752e-06, |
| "loss": 0.3895, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.1392, |
| "grad_norm": 0.6253352691670119, |
| "learning_rate": 7.809320196620272e-06, |
| "loss": 0.3516, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.1404, |
| "grad_norm": 0.46672264103831795, |
| "learning_rate": 7.80354487602944e-06, |
| "loss": 0.3616, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.1416, |
| "grad_norm": 0.56828089422725, |
| "learning_rate": 7.797764094639537e-06, |
| "loss": 0.4059, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.1428, |
| "grad_norm": 0.7154646105300466, |
| "learning_rate": 7.79197786371048e-06, |
| "loss": 0.4187, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.144, |
| "grad_norm": 0.46637500730882037, |
| "learning_rate": 7.786186194512802e-06, |
| "loss": 0.3673, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.1452, |
| "grad_norm": 0.6072905735893732, |
| "learning_rate": 7.780389098327629e-06, |
| "loss": 0.3819, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.1464, |
| "grad_norm": 0.6043532507563107, |
| "learning_rate": 7.774586586446658e-06, |
| "loss": 0.3882, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.1476, |
| "grad_norm": 0.5917199850834677, |
| "learning_rate": 7.768778670172135e-06, |
| "loss": 0.4069, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.1488, |
| "grad_norm": 0.5379580116927375, |
| "learning_rate": 7.762965360816828e-06, |
| "loss": 0.3949, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.15, |
| "grad_norm": 0.6510481738337858, |
| "learning_rate": 7.757146669704016e-06, |
| "loss": 0.3803, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.1512, |
| "grad_norm": 0.5541510150757831, |
| "learning_rate": 7.75132260816746e-06, |
| "loss": 0.3924, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.1524, |
| "grad_norm": 0.5678044715290077, |
| "learning_rate": 7.745493187551378e-06, |
| "loss": 0.3413, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.1536, |
| "grad_norm": 0.5214786220289697, |
| "learning_rate": 7.739658419210429e-06, |
| "loss": 0.3447, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.1548, |
| "grad_norm": 0.5553831003825472, |
| "learning_rate": 7.733818314509689e-06, |
| "loss": 0.3828, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.156, |
| "grad_norm": 0.5394612049022063, |
| "learning_rate": 7.727972884824625e-06, |
| "loss": 0.3932, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.1572, |
| "grad_norm": 0.5718005170648759, |
| "learning_rate": 7.72212214154108e-06, |
| "loss": 0.3618, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.1584, |
| "grad_norm": 0.5460985344820174, |
| "learning_rate": 7.716266096055243e-06, |
| "loss": 0.3664, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.1596, |
| "grad_norm": 0.5277059242679342, |
| "learning_rate": 7.710404759773637e-06, |
| "loss": 0.3593, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.1608, |
| "grad_norm": 0.4587152815171526, |
| "learning_rate": 7.704538144113082e-06, |
| "loss": 0.387, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.162, |
| "grad_norm": 0.5779733417720779, |
| "learning_rate": 7.698666260500688e-06, |
| "loss": 0.431, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.1632, |
| "grad_norm": 0.6005822867829625, |
| "learning_rate": 7.692789120373824e-06, |
| "loss": 0.363, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.1644, |
| "grad_norm": 0.5935009091208785, |
| "learning_rate": 7.686906735180099e-06, |
| "loss": 0.3753, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.1656, |
| "grad_norm": 0.5669008223119324, |
| "learning_rate": 7.681019116377331e-06, |
| "loss": 0.3478, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.1668, |
| "grad_norm": 0.5573181478984236, |
| "learning_rate": 7.675126275433545e-06, |
| "loss": 0.3835, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.168, |
| "grad_norm": 0.5473700764940981, |
| "learning_rate": 7.669228223826926e-06, |
| "loss": 0.384, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.1692, |
| "grad_norm": 0.5107739763185454, |
| "learning_rate": 7.663324973045818e-06, |
| "loss": 0.3832, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.1703999999999999, |
| "grad_norm": 0.6102163681788227, |
| "learning_rate": 7.657416534588683e-06, |
| "loss": 0.3549, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.1716, |
| "grad_norm": 0.5532021885497728, |
| "learning_rate": 7.651502919964092e-06, |
| "loss": 0.3714, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.1728, |
| "grad_norm": 0.5185588376669081, |
| "learning_rate": 7.645584140690702e-06, |
| "loss": 0.378, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.174, |
| "grad_norm": 0.5389544442206081, |
| "learning_rate": 7.639660208297221e-06, |
| "loss": 0.3629, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.1752, |
| "grad_norm": 0.5796224788639072, |
| "learning_rate": 7.633731134322404e-06, |
| "loss": 0.3561, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.1764000000000001, |
| "grad_norm": 0.5724657571915197, |
| "learning_rate": 7.6277969303150155e-06, |
| "loss": 0.413, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.1776, |
| "grad_norm": 0.5651233783215182, |
| "learning_rate": 7.6218576078338115e-06, |
| "loss": 0.3978, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.1788, |
| "grad_norm": 0.704877889693732, |
| "learning_rate": 7.615913178447518e-06, |
| "loss": 0.4037, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.18, |
| "grad_norm": 0.5952367164096336, |
| "learning_rate": 7.609963653734814e-06, |
| "loss": 0.3816, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.1812, |
| "grad_norm": 0.5324116366363447, |
| "learning_rate": 7.604009045284295e-06, |
| "loss": 0.3808, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.1824, |
| "grad_norm": 0.6910948988868738, |
| "learning_rate": 7.598049364694466e-06, |
| "loss": 0.4147, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.1836, |
| "grad_norm": 0.5296354520478062, |
| "learning_rate": 7.592084623573708e-06, |
| "loss": 0.3543, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.1848, |
| "grad_norm": 0.5566407528272007, |
| "learning_rate": 7.586114833540257e-06, |
| "loss": 0.353, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.186, |
| "grad_norm": 0.6015296281547915, |
| "learning_rate": 7.58014000622219e-06, |
| "loss": 0.3453, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.1872, |
| "grad_norm": 0.6228830366597151, |
| "learning_rate": 7.574160153257386e-06, |
| "loss": 0.3881, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.1884000000000001, |
| "grad_norm": 0.6334169507119521, |
| "learning_rate": 7.568175286293522e-06, |
| "loss": 0.392, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.1896, |
| "grad_norm": 0.6236183748838798, |
| "learning_rate": 7.562185416988039e-06, |
| "loss": 0.3918, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.1908, |
| "grad_norm": 0.7794773133283003, |
| "learning_rate": 7.556190557008116e-06, |
| "loss": 0.4051, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.192, |
| "grad_norm": 0.7274930809023531, |
| "learning_rate": 7.550190718030663e-06, |
| "loss": 0.3829, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.1932, |
| "grad_norm": 0.5707170114861568, |
| "learning_rate": 7.54418591174228e-06, |
| "loss": 0.3346, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.1944, |
| "grad_norm": 0.6243157977136253, |
| "learning_rate": 7.5381761498392435e-06, |
| "loss": 0.3908, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.1956, |
| "grad_norm": 0.4674660238025524, |
| "learning_rate": 7.532161444027488e-06, |
| "loss": 0.3703, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.1968, |
| "grad_norm": 0.6270586839157299, |
| "learning_rate": 7.526141806022571e-06, |
| "loss": 0.3858, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.198, |
| "grad_norm": 0.6317796196938065, |
| "learning_rate": 7.520117247549661e-06, |
| "loss": 0.3841, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.1992, |
| "grad_norm": 0.6180551084846545, |
| "learning_rate": 7.514087780343511e-06, |
| "loss": 0.3466, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.2004, |
| "grad_norm": 0.543272841474728, |
| "learning_rate": 7.508053416148433e-06, |
| "loss": 0.4188, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.2016, |
| "grad_norm": 0.7036612686305553, |
| "learning_rate": 7.502014166718279e-06, |
| "loss": 0.3691, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.2028, |
| "grad_norm": 0.6190104178533024, |
| "learning_rate": 7.495970043816416e-06, |
| "loss": 0.3798, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.204, |
| "grad_norm": 0.6451355580120752, |
| "learning_rate": 7.489921059215703e-06, |
| "loss": 0.3796, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.2052, |
| "grad_norm": 0.5804373693313714, |
| "learning_rate": 7.483867224698471e-06, |
| "loss": 0.4045, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.2064, |
| "grad_norm": 0.5248356312926055, |
| "learning_rate": 7.477808552056496e-06, |
| "loss": 0.3557, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.2076, |
| "grad_norm": 0.4393039138466907, |
| "learning_rate": 7.471745053090976e-06, |
| "loss": 0.363, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.2088, |
| "grad_norm": 0.5104233947455193, |
| "learning_rate": 7.465676739612514e-06, |
| "loss": 0.3686, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.21, |
| "grad_norm": 0.5918547266203569, |
| "learning_rate": 7.45960362344109e-06, |
| "loss": 0.4017, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.2112, |
| "grad_norm": 0.6442980718898021, |
| "learning_rate": 7.4535257164060324e-06, |
| "loss": 0.3556, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.2124, |
| "grad_norm": 0.539978003071063, |
| "learning_rate": 7.447443030346011e-06, |
| "loss": 0.3552, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.2136, |
| "grad_norm": 0.5167136332720779, |
| "learning_rate": 7.441355577108998e-06, |
| "loss": 0.3099, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.2147999999999999, |
| "grad_norm": 0.547961404686605, |
| "learning_rate": 7.4352633685522535e-06, |
| "loss": 0.3788, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.216, |
| "grad_norm": 0.5739201487486998, |
| "learning_rate": 7.4291664165422985e-06, |
| "loss": 0.3749, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.2172, |
| "grad_norm": 0.6136292174850035, |
| "learning_rate": 7.423064732954895e-06, |
| "loss": 0.3518, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.2184, |
| "grad_norm": 0.6084805729019309, |
| "learning_rate": 7.4169583296750194e-06, |
| "loss": 0.367, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.2196, |
| "grad_norm": 0.6012984604908796, |
| "learning_rate": 7.410847218596846e-06, |
| "loss": 0.3727, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.2208, |
| "grad_norm": 0.5630545476779787, |
| "learning_rate": 7.40473141162371e-06, |
| "loss": 0.3787, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.222, |
| "grad_norm": 0.5608030101506133, |
| "learning_rate": 7.398610920668102e-06, |
| "loss": 0.3708, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.2232, |
| "grad_norm": 0.658915854539811, |
| "learning_rate": 7.392485757651634e-06, |
| "loss": 0.3602, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.2244, |
| "grad_norm": 0.5656184716996064, |
| "learning_rate": 7.386355934505015e-06, |
| "loss": 0.3878, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.2256, |
| "grad_norm": 0.598806868596272, |
| "learning_rate": 7.380221463168036e-06, |
| "loss": 0.3574, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.2268, |
| "grad_norm": 0.6170648083398085, |
| "learning_rate": 7.374082355589536e-06, |
| "loss": 0.4131, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.228, |
| "grad_norm": 0.6838329966789305, |
| "learning_rate": 7.367938623727389e-06, |
| "loss": 0.3419, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.2292, |
| "grad_norm": 0.6205390501504368, |
| "learning_rate": 7.361790279548476e-06, |
| "loss": 0.4052, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.2304, |
| "grad_norm": 0.6888179942809703, |
| "learning_rate": 7.35563733502866e-06, |
| "loss": 0.4047, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.2316, |
| "grad_norm": 0.4939596619401398, |
| "learning_rate": 7.3494798021527665e-06, |
| "loss": 0.3842, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.2328000000000001, |
| "grad_norm": 0.7400721543380634, |
| "learning_rate": 7.3433176929145574e-06, |
| "loss": 0.3556, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.234, |
| "grad_norm": 0.6168688148164273, |
| "learning_rate": 7.337151019316708e-06, |
| "loss": 0.4099, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.2352, |
| "grad_norm": 0.8003583257346282, |
| "learning_rate": 7.330979793370784e-06, |
| "loss": 0.3968, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.2364, |
| "grad_norm": 0.47326526074914016, |
| "learning_rate": 7.324804027097221e-06, |
| "loss": 0.3813, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.2376, |
| "grad_norm": 0.5472467555562863, |
| "learning_rate": 7.318623732525294e-06, |
| "loss": 0.3877, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.2388, |
| "grad_norm": 0.6594200360662721, |
| "learning_rate": 7.312438921693101e-06, |
| "loss": 0.3954, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.24, |
| "grad_norm": 0.6448566701959111, |
| "learning_rate": 7.30624960664754e-06, |
| "loss": 0.3618, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.2412, |
| "grad_norm": 0.5416473187116394, |
| "learning_rate": 7.300055799444273e-06, |
| "loss": 0.3671, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.2424, |
| "grad_norm": 0.5737841757940133, |
| "learning_rate": 7.293857512147723e-06, |
| "loss": 0.3956, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.2436, |
| "grad_norm": 0.7038531065019307, |
| "learning_rate": 7.287654756831031e-06, |
| "loss": 0.3735, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.2448, |
| "grad_norm": 0.5648836478829721, |
| "learning_rate": 7.2814475455760445e-06, |
| "loss": 0.337, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.246, |
| "grad_norm": 0.5734291395414816, |
| "learning_rate": 7.275235890473291e-06, |
| "loss": 0.3759, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.2472, |
| "grad_norm": 0.535389124757447, |
| "learning_rate": 7.269019803621953e-06, |
| "loss": 0.3489, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.2484, |
| "grad_norm": 0.6260068951532562, |
| "learning_rate": 7.262799297129843e-06, |
| "loss": 0.356, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.2496, |
| "grad_norm": 0.5626553670499461, |
| "learning_rate": 7.256574383113386e-06, |
| "loss": 0.3432, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.2508, |
| "grad_norm": 0.5979399572770037, |
| "learning_rate": 7.25034507369759e-06, |
| "loss": 0.3963, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.252, |
| "grad_norm": 0.7627246495578786, |
| "learning_rate": 7.244111381016024e-06, |
| "loss": 0.3589, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.2532, |
| "grad_norm": 0.5924793390470331, |
| "learning_rate": 7.237873317210796e-06, |
| "loss": 0.3863, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.2544, |
| "grad_norm": 0.5618731305329973, |
| "learning_rate": 7.231630894432527e-06, |
| "loss": 0.3686, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.2556, |
| "grad_norm": 0.5484996166547617, |
| "learning_rate": 7.22538412484033e-06, |
| "loss": 0.3638, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.2568, |
| "grad_norm": 0.9948159562952937, |
| "learning_rate": 7.219133020601783e-06, |
| "loss": 0.4089, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.258, |
| "grad_norm": 0.6944598116037035, |
| "learning_rate": 7.2128775938929095e-06, |
| "loss": 0.3984, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.2591999999999999, |
| "grad_norm": 0.6757195579443828, |
| "learning_rate": 7.206617856898149e-06, |
| "loss": 0.3778, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.2604, |
| "grad_norm": 0.5920958068986326, |
| "learning_rate": 7.20035382181034e-06, |
| "loss": 0.3819, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.2616, |
| "grad_norm": 0.6417635133827952, |
| "learning_rate": 7.194085500830691e-06, |
| "loss": 0.403, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.2628, |
| "grad_norm": 0.5568650988903112, |
| "learning_rate": 7.1878129061687595e-06, |
| "loss": 0.3743, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.264, |
| "grad_norm": 0.6533788297396388, |
| "learning_rate": 7.181536050042427e-06, |
| "loss": 0.3706, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.2652, |
| "grad_norm": 0.5362349579796641, |
| "learning_rate": 7.175254944677874e-06, |
| "loss": 0.3644, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.2664, |
| "grad_norm": 0.5692213887579612, |
| "learning_rate": 7.1689696023095625e-06, |
| "loss": 0.3808, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.2676, |
| "grad_norm": 0.5795212902620743, |
| "learning_rate": 7.162680035180201e-06, |
| "loss": 0.3998, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.2688, |
| "grad_norm": 0.5209493310515746, |
| "learning_rate": 7.156386255540732e-06, |
| "loss": 0.3532, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.27, |
| "grad_norm": 0.5424829545715086, |
| "learning_rate": 7.150088275650302e-06, |
| "loss": 0.392, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.2711999999999999, |
| "grad_norm": 0.6464063223655275, |
| "learning_rate": 7.143786107776236e-06, |
| "loss": 0.3422, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.2724, |
| "grad_norm": 0.4813528214550613, |
| "learning_rate": 7.137479764194022e-06, |
| "loss": 0.3491, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.2736, |
| "grad_norm": 0.5800825959591446, |
| "learning_rate": 7.131169257187276e-06, |
| "loss": 0.377, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.2748, |
| "grad_norm": 0.4796442134420188, |
| "learning_rate": 7.1248545990477256e-06, |
| "loss": 0.4, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.276, |
| "grad_norm": 0.5534864415940866, |
| "learning_rate": 7.1185358020751875e-06, |
| "loss": 0.3917, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.2772000000000001, |
| "grad_norm": 0.5579998901657407, |
| "learning_rate": 7.112212878577533e-06, |
| "loss": 0.4256, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.2784, |
| "grad_norm": 0.6136435635336623, |
| "learning_rate": 7.1058858408706765e-06, |
| "loss": 0.358, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.2796, |
| "grad_norm": 0.5559228232636972, |
| "learning_rate": 7.099554701278547e-06, |
| "loss": 0.3811, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.2808, |
| "grad_norm": 0.5429673734481284, |
| "learning_rate": 7.093219472133059e-06, |
| "loss": 0.3952, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.282, |
| "grad_norm": 0.5013586384163251, |
| "learning_rate": 7.086880165774093e-06, |
| "loss": 0.3772, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.2832, |
| "grad_norm": 0.586078174565493, |
| "learning_rate": 7.080536794549477e-06, |
| "loss": 0.3987, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.2844, |
| "grad_norm": 0.5979682949821524, |
| "learning_rate": 7.0741893708149475e-06, |
| "loss": 0.3832, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.2856, |
| "grad_norm": 0.5450858560697013, |
| "learning_rate": 7.067837906934143e-06, |
| "loss": 0.3889, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.2868, |
| "grad_norm": 0.5251321928301862, |
| "learning_rate": 7.061482415278569e-06, |
| "loss": 0.3332, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.288, |
| "grad_norm": 0.5441331999246979, |
| "learning_rate": 7.055122908227571e-06, |
| "loss": 0.3732, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.2892000000000001, |
| "grad_norm": 0.623583931716984, |
| "learning_rate": 7.0487593981683246e-06, |
| "loss": 0.4046, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.2904, |
| "grad_norm": 0.5914739695772331, |
| "learning_rate": 7.042391897495795e-06, |
| "loss": 0.3659, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.2916, |
| "grad_norm": 0.4592194257354245, |
| "learning_rate": 7.036020418612724e-06, |
| "loss": 0.3283, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.2928, |
| "grad_norm": 0.5090800599047531, |
| "learning_rate": 7.029644973929604e-06, |
| "loss": 0.3703, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.294, |
| "grad_norm": 0.48246491923077817, |
| "learning_rate": 7.023265575864648e-06, |
| "loss": 0.4136, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.2952, |
| "grad_norm": 0.5743016554708543, |
| "learning_rate": 7.016882236843769e-06, |
| "loss": 0.3541, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.2964, |
| "grad_norm": 0.6693297930478648, |
| "learning_rate": 7.0104949693005645e-06, |
| "loss": 0.3687, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.2976, |
| "grad_norm": 0.45548226688407856, |
| "learning_rate": 7.00410378567627e-06, |
| "loss": 0.395, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.2988, |
| "grad_norm": 0.58335044226178, |
| "learning_rate": 6.997708698419765e-06, |
| "loss": 0.3464, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.3, |
| "grad_norm": 0.513898806820525, |
| "learning_rate": 6.99130971998752e-06, |
| "loss": 0.3728, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.3012000000000001, |
| "grad_norm": 0.6606013764400083, |
| "learning_rate": 6.98490686284359e-06, |
| "loss": 0.4032, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.3024, |
| "grad_norm": 0.5495677779568732, |
| "learning_rate": 6.978500139459583e-06, |
| "loss": 0.3893, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.3035999999999999, |
| "grad_norm": 0.6833083037662147, |
| "learning_rate": 6.972089562314644e-06, |
| "loss": 0.4113, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.3048, |
| "grad_norm": 0.5759574990861633, |
| "learning_rate": 6.9656751438954115e-06, |
| "loss": 0.3968, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.306, |
| "grad_norm": 0.5115832604155884, |
| "learning_rate": 6.959256896696021e-06, |
| "loss": 0.432, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.3072, |
| "grad_norm": 0.7911985158075923, |
| "learning_rate": 6.952834833218056e-06, |
| "loss": 0.3883, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.3084, |
| "grad_norm": 0.4993447860656908, |
| "learning_rate": 6.946408965970536e-06, |
| "loss": 0.3802, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.3096, |
| "grad_norm": 0.4400466240534706, |
| "learning_rate": 6.939979307469892e-06, |
| "loss": 0.3579, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.3108, |
| "grad_norm": 0.5685894710046548, |
| "learning_rate": 6.933545870239933e-06, |
| "loss": 0.3482, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.312, |
| "grad_norm": 0.583902836607969, |
| "learning_rate": 6.927108666811837e-06, |
| "loss": 0.3556, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.3132, |
| "grad_norm": 0.6469186054837391, |
| "learning_rate": 6.920667709724113e-06, |
| "loss": 0.4031, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.3144, |
| "grad_norm": 0.5135019797765831, |
| "learning_rate": 6.914223011522581e-06, |
| "loss": 0.3853, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.3155999999999999, |
| "grad_norm": 0.6132864607455859, |
| "learning_rate": 6.90777458476035e-06, |
| "loss": 0.3856, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.3168, |
| "grad_norm": 0.5260723911564891, |
| "learning_rate": 6.901322441997791e-06, |
| "loss": 0.3722, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.318, |
| "grad_norm": 0.5769135122339621, |
| "learning_rate": 6.894866595802509e-06, |
| "loss": 0.3929, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.3192, |
| "grad_norm": 0.5217068431943552, |
| "learning_rate": 6.888407058749331e-06, |
| "loss": 0.4039, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.3204, |
| "grad_norm": 0.5882564291044068, |
| "learning_rate": 6.881943843420268e-06, |
| "loss": 0.3685, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.3216, |
| "grad_norm": 0.5397540009109018, |
| "learning_rate": 6.875476962404495e-06, |
| "loss": 0.412, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.3228, |
| "grad_norm": 0.5426493873301287, |
| "learning_rate": 6.869006428298328e-06, |
| "loss": 0.374, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.324, |
| "grad_norm": 0.6918593386156987, |
| "learning_rate": 6.862532253705199e-06, |
| "loss": 0.3746, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.3252, |
| "grad_norm": 0.6622425191632842, |
| "learning_rate": 6.85605445123563e-06, |
| "loss": 0.3446, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.3264, |
| "grad_norm": 0.49566249352496705, |
| "learning_rate": 6.849573033507213e-06, |
| "loss": 0.3509, |
| "step": 1106 |
| }, |
| { |
| "epoch": 1.3276, |
| "grad_norm": 0.6281029533094057, |
| "learning_rate": 6.843088013144575e-06, |
| "loss": 0.3732, |
| "step": 1107 |
| }, |
| { |
| "epoch": 1.3288, |
| "grad_norm": 0.5339005742427865, |
| "learning_rate": 6.8365994027793695e-06, |
| "loss": 0.384, |
| "step": 1108 |
| }, |
| { |
| "epoch": 1.33, |
| "grad_norm": 0.6084133196447026, |
| "learning_rate": 6.830107215050232e-06, |
| "loss": 0.3865, |
| "step": 1109 |
| }, |
| { |
| "epoch": 1.3312, |
| "grad_norm": 0.5362040240692425, |
| "learning_rate": 6.823611462602777e-06, |
| "loss": 0.3574, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.3324, |
| "grad_norm": 0.5946541050024513, |
| "learning_rate": 6.817112158089554e-06, |
| "loss": 0.3923, |
| "step": 1111 |
| }, |
| { |
| "epoch": 1.3336000000000001, |
| "grad_norm": 0.6799270385641254, |
| "learning_rate": 6.8106093141700336e-06, |
| "loss": 0.3851, |
| "step": 1112 |
| }, |
| { |
| "epoch": 1.3348, |
| "grad_norm": 0.5447366160367654, |
| "learning_rate": 6.804102943510583e-06, |
| "loss": 0.3994, |
| "step": 1113 |
| }, |
| { |
| "epoch": 1.336, |
| "grad_norm": 0.7605032504216259, |
| "learning_rate": 6.797593058784437e-06, |
| "loss": 0.3779, |
| "step": 1114 |
| }, |
| { |
| "epoch": 1.3372, |
| "grad_norm": 0.44318202194361933, |
| "learning_rate": 6.791079672671677e-06, |
| "loss": 0.388, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.3384, |
| "grad_norm": 0.6594966278078546, |
| "learning_rate": 6.784562797859198e-06, |
| "loss": 0.3966, |
| "step": 1116 |
| }, |
| { |
| "epoch": 1.3396, |
| "grad_norm": 0.7278372919818764, |
| "learning_rate": 6.7780424470407004e-06, |
| "loss": 0.3981, |
| "step": 1117 |
| }, |
| { |
| "epoch": 1.3408, |
| "grad_norm": 0.6394652933854147, |
| "learning_rate": 6.771518632916645e-06, |
| "loss": 0.4393, |
| "step": 1118 |
| }, |
| { |
| "epoch": 1.342, |
| "grad_norm": 0.5205892552379503, |
| "learning_rate": 6.7649913681942455e-06, |
| "loss": 0.3487, |
| "step": 1119 |
| }, |
| { |
| "epoch": 1.3432, |
| "grad_norm": 0.6518308231968244, |
| "learning_rate": 6.758460665587437e-06, |
| "loss": 0.3918, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.3444, |
| "grad_norm": 0.5518171765062463, |
| "learning_rate": 6.751926537816846e-06, |
| "loss": 0.3874, |
| "step": 1121 |
| }, |
| { |
| "epoch": 1.3456000000000001, |
| "grad_norm": 0.5680320375122355, |
| "learning_rate": 6.745388997609774e-06, |
| "loss": 0.3748, |
| "step": 1122 |
| }, |
| { |
| "epoch": 1.3468, |
| "grad_norm": 0.5430526903558245, |
| "learning_rate": 6.738848057700169e-06, |
| "loss": 0.3537, |
| "step": 1123 |
| }, |
| { |
| "epoch": 1.3479999999999999, |
| "grad_norm": 0.5320208214764721, |
| "learning_rate": 6.732303730828601e-06, |
| "loss": 0.4052, |
| "step": 1124 |
| }, |
| { |
| "epoch": 1.3492, |
| "grad_norm": 0.6325336919288119, |
| "learning_rate": 6.725756029742234e-06, |
| "loss": 0.358, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.3504, |
| "grad_norm": 0.685515799326995, |
| "learning_rate": 6.7192049671948115e-06, |
| "loss": 0.3678, |
| "step": 1126 |
| }, |
| { |
| "epoch": 1.3516, |
| "grad_norm": 0.41920896936731294, |
| "learning_rate": 6.712650555946616e-06, |
| "loss": 0.3517, |
| "step": 1127 |
| }, |
| { |
| "epoch": 1.3528, |
| "grad_norm": 0.5881032868010501, |
| "learning_rate": 6.706092808764459e-06, |
| "loss": 0.3582, |
| "step": 1128 |
| }, |
| { |
| "epoch": 1.354, |
| "grad_norm": 0.6483636072262811, |
| "learning_rate": 6.699531738421648e-06, |
| "loss": 0.36, |
| "step": 1129 |
| }, |
| { |
| "epoch": 1.3552, |
| "grad_norm": 0.5061302602252388, |
| "learning_rate": 6.692967357697961e-06, |
| "loss": 0.3615, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.3564, |
| "grad_norm": 0.6545204627775875, |
| "learning_rate": 6.6863996793796286e-06, |
| "loss": 0.3633, |
| "step": 1131 |
| }, |
| { |
| "epoch": 1.3576, |
| "grad_norm": 0.44178673336282853, |
| "learning_rate": 6.6798287162593e-06, |
| "loss": 0.3496, |
| "step": 1132 |
| }, |
| { |
| "epoch": 1.3588, |
| "grad_norm": 0.5475017826386849, |
| "learning_rate": 6.6732544811360255e-06, |
| "loss": 0.3429, |
| "step": 1133 |
| }, |
| { |
| "epoch": 1.3599999999999999, |
| "grad_norm": 0.6564333403411471, |
| "learning_rate": 6.666676986815227e-06, |
| "loss": 0.3973, |
| "step": 1134 |
| }, |
| { |
| "epoch": 1.3612, |
| "grad_norm": 0.5747932244479428, |
| "learning_rate": 6.660096246108677e-06, |
| "loss": 0.3733, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.3624, |
| "grad_norm": 0.505681834990622, |
| "learning_rate": 6.653512271834468e-06, |
| "loss": 0.3694, |
| "step": 1136 |
| }, |
| { |
| "epoch": 1.3636, |
| "grad_norm": 0.5302728720800068, |
| "learning_rate": 6.646925076816994e-06, |
| "loss": 0.3804, |
| "step": 1137 |
| }, |
| { |
| "epoch": 1.3648, |
| "grad_norm": 0.6137108947228674, |
| "learning_rate": 6.640334673886921e-06, |
| "loss": 0.3591, |
| "step": 1138 |
| }, |
| { |
| "epoch": 1.366, |
| "grad_norm": 0.49332652913525415, |
| "learning_rate": 6.633741075881163e-06, |
| "loss": 0.3868, |
| "step": 1139 |
| }, |
| { |
| "epoch": 1.3672, |
| "grad_norm": 0.6555162595538057, |
| "learning_rate": 6.627144295642859e-06, |
| "loss": 0.3583, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.3684, |
| "grad_norm": 0.7103276751363496, |
| "learning_rate": 6.6205443460213445e-06, |
| "loss": 0.3558, |
| "step": 1141 |
| }, |
| { |
| "epoch": 1.3696, |
| "grad_norm": 0.5512752934225792, |
| "learning_rate": 6.613941239872129e-06, |
| "loss": 0.3512, |
| "step": 1142 |
| }, |
| { |
| "epoch": 1.3708, |
| "grad_norm": 0.502186227760979, |
| "learning_rate": 6.607334990056873e-06, |
| "loss": 0.3604, |
| "step": 1143 |
| }, |
| { |
| "epoch": 1.3719999999999999, |
| "grad_norm": 0.6927880455165364, |
| "learning_rate": 6.600725609443356e-06, |
| "loss": 0.4005, |
| "step": 1144 |
| }, |
| { |
| "epoch": 1.3732, |
| "grad_norm": 0.44954852300084996, |
| "learning_rate": 6.594113110905458e-06, |
| "loss": 0.3637, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.3744, |
| "grad_norm": 0.554225409355741, |
| "learning_rate": 6.587497507323132e-06, |
| "loss": 0.3559, |
| "step": 1146 |
| }, |
| { |
| "epoch": 1.3756, |
| "grad_norm": 0.4711345253979762, |
| "learning_rate": 6.580878811582379e-06, |
| "loss": 0.3701, |
| "step": 1147 |
| }, |
| { |
| "epoch": 1.3768, |
| "grad_norm": 0.4722647339207153, |
| "learning_rate": 6.574257036575224e-06, |
| "loss": 0.3597, |
| "step": 1148 |
| }, |
| { |
| "epoch": 1.3780000000000001, |
| "grad_norm": 0.5324128787221746, |
| "learning_rate": 6.567632195199686e-06, |
| "loss": 0.3456, |
| "step": 1149 |
| }, |
| { |
| "epoch": 1.3792, |
| "grad_norm": 0.588026631619211, |
| "learning_rate": 6.5610043003597615e-06, |
| "loss": 0.3792, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.3804, |
| "grad_norm": 1.0073668433242489, |
| "learning_rate": 6.554373364965392e-06, |
| "loss": 0.3638, |
| "step": 1151 |
| }, |
| { |
| "epoch": 1.3816, |
| "grad_norm": 0.6643904374505395, |
| "learning_rate": 6.547739401932443e-06, |
| "loss": 0.3575, |
| "step": 1152 |
| }, |
| { |
| "epoch": 1.3828, |
| "grad_norm": 0.5292843395135677, |
| "learning_rate": 6.541102424182676e-06, |
| "loss": 0.3811, |
| "step": 1153 |
| }, |
| { |
| "epoch": 1.384, |
| "grad_norm": 0.5932814470820361, |
| "learning_rate": 6.5344624446437234e-06, |
| "loss": 0.3827, |
| "step": 1154 |
| }, |
| { |
| "epoch": 1.3852, |
| "grad_norm": 0.4782195713849791, |
| "learning_rate": 6.527819476249066e-06, |
| "loss": 0.3692, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.3864, |
| "grad_norm": 0.6540604048296909, |
| "learning_rate": 6.521173531938011e-06, |
| "loss": 0.3641, |
| "step": 1156 |
| }, |
| { |
| "epoch": 1.3876, |
| "grad_norm": 0.4800644925234897, |
| "learning_rate": 6.514524624655654e-06, |
| "loss": 0.3472, |
| "step": 1157 |
| }, |
| { |
| "epoch": 1.3888, |
| "grad_norm": 0.5509070760157515, |
| "learning_rate": 6.507872767352863e-06, |
| "loss": 0.3396, |
| "step": 1158 |
| }, |
| { |
| "epoch": 1.3900000000000001, |
| "grad_norm": 0.5622639222865433, |
| "learning_rate": 6.50121797298626e-06, |
| "loss": 0.3962, |
| "step": 1159 |
| }, |
| { |
| "epoch": 1.3912, |
| "grad_norm": 0.5231156502515467, |
| "learning_rate": 6.494560254518179e-06, |
| "loss": 0.3661, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.3924, |
| "grad_norm": 0.5081529588369621, |
| "learning_rate": 6.487899624916654e-06, |
| "loss": 0.3483, |
| "step": 1161 |
| }, |
| { |
| "epoch": 1.3936, |
| "grad_norm": 0.5391745904936082, |
| "learning_rate": 6.481236097155389e-06, |
| "loss": 0.3793, |
| "step": 1162 |
| }, |
| { |
| "epoch": 1.3948, |
| "grad_norm": 0.6615024814667997, |
| "learning_rate": 6.4745696842137305e-06, |
| "loss": 0.4145, |
| "step": 1163 |
| }, |
| { |
| "epoch": 1.396, |
| "grad_norm": 0.6356413469309103, |
| "learning_rate": 6.467900399076651e-06, |
| "loss": 0.3496, |
| "step": 1164 |
| }, |
| { |
| "epoch": 1.3972, |
| "grad_norm": 0.5487989053399741, |
| "learning_rate": 6.461228254734711e-06, |
| "loss": 0.3639, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.3984, |
| "grad_norm": 0.5278545281830197, |
| "learning_rate": 6.454553264184041e-06, |
| "loss": 0.3506, |
| "step": 1166 |
| }, |
| { |
| "epoch": 1.3996, |
| "grad_norm": 0.5829669117942872, |
| "learning_rate": 6.447875440426319e-06, |
| "loss": 0.3787, |
| "step": 1167 |
| }, |
| { |
| "epoch": 1.4008, |
| "grad_norm": 0.5682179378418761, |
| "learning_rate": 6.441194796468739e-06, |
| "loss": 0.3667, |
| "step": 1168 |
| }, |
| { |
| "epoch": 1.4020000000000001, |
| "grad_norm": 0.4888397389782733, |
| "learning_rate": 6.434511345323988e-06, |
| "loss": 0.3634, |
| "step": 1169 |
| }, |
| { |
| "epoch": 1.4032, |
| "grad_norm": 0.530620756337448, |
| "learning_rate": 6.427825100010225e-06, |
| "loss": 0.3964, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.4043999999999999, |
| "grad_norm": 0.6363987713172962, |
| "learning_rate": 6.421136073551047e-06, |
| "loss": 0.3841, |
| "step": 1171 |
| }, |
| { |
| "epoch": 1.4056, |
| "grad_norm": 0.5568572793629121, |
| "learning_rate": 6.414444278975465e-06, |
| "loss": 0.3633, |
| "step": 1172 |
| }, |
| { |
| "epoch": 1.4068, |
| "grad_norm": 0.49553189194984, |
| "learning_rate": 6.407749729317892e-06, |
| "loss": 0.3976, |
| "step": 1173 |
| }, |
| { |
| "epoch": 1.408, |
| "grad_norm": 0.5728937899487624, |
| "learning_rate": 6.401052437618098e-06, |
| "loss": 0.387, |
| "step": 1174 |
| }, |
| { |
| "epoch": 1.4092, |
| "grad_norm": 0.6518243725832273, |
| "learning_rate": 6.394352416921201e-06, |
| "loss": 0.3849, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.4104, |
| "grad_norm": 0.6043382361141673, |
| "learning_rate": 6.387649680277629e-06, |
| "loss": 0.4052, |
| "step": 1176 |
| }, |
| { |
| "epoch": 1.4116, |
| "grad_norm": 0.5404652461010379, |
| "learning_rate": 6.380944240743101e-06, |
| "loss": 0.4085, |
| "step": 1177 |
| }, |
| { |
| "epoch": 1.4128, |
| "grad_norm": 0.579388596697836, |
| "learning_rate": 6.374236111378605e-06, |
| "loss": 0.3551, |
| "step": 1178 |
| }, |
| { |
| "epoch": 1.414, |
| "grad_norm": 0.5523484176312922, |
| "learning_rate": 6.3675253052503645e-06, |
| "loss": 0.3907, |
| "step": 1179 |
| }, |
| { |
| "epoch": 1.4152, |
| "grad_norm": 0.5561093182555912, |
| "learning_rate": 6.360811835429817e-06, |
| "loss": 0.4324, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.4163999999999999, |
| "grad_norm": 0.5535367271687733, |
| "learning_rate": 6.35409571499359e-06, |
| "loss": 0.3714, |
| "step": 1181 |
| }, |
| { |
| "epoch": 1.4176, |
| "grad_norm": 0.6083376911119112, |
| "learning_rate": 6.347376957023471e-06, |
| "loss": 0.3501, |
| "step": 1182 |
| }, |
| { |
| "epoch": 1.4188, |
| "grad_norm": 0.4654771508132885, |
| "learning_rate": 6.340655574606391e-06, |
| "loss": 0.4048, |
| "step": 1183 |
| }, |
| { |
| "epoch": 1.42, |
| "grad_norm": 0.7318252472693728, |
| "learning_rate": 6.333931580834387e-06, |
| "loss": 0.4087, |
| "step": 1184 |
| }, |
| { |
| "epoch": 1.4212, |
| "grad_norm": 0.5945086209964745, |
| "learning_rate": 6.327204988804583e-06, |
| "loss": 0.3741, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.4224, |
| "grad_norm": 0.6152246104938062, |
| "learning_rate": 6.320475811619167e-06, |
| "loss": 0.3861, |
| "step": 1186 |
| }, |
| { |
| "epoch": 1.4236, |
| "grad_norm": 0.545937458552413, |
| "learning_rate": 6.313744062385363e-06, |
| "loss": 0.3716, |
| "step": 1187 |
| }, |
| { |
| "epoch": 1.4248, |
| "grad_norm": 0.4799280381039969, |
| "learning_rate": 6.307009754215397e-06, |
| "loss": 0.3337, |
| "step": 1188 |
| }, |
| { |
| "epoch": 1.426, |
| "grad_norm": 0.5334617138994628, |
| "learning_rate": 6.30027290022649e-06, |
| "loss": 0.3791, |
| "step": 1189 |
| }, |
| { |
| "epoch": 1.4272, |
| "grad_norm": 0.5466526534649941, |
| "learning_rate": 6.2935335135408135e-06, |
| "loss": 0.3937, |
| "step": 1190 |
| }, |
| { |
| "epoch": 1.4284, |
| "grad_norm": 0.5904387210760845, |
| "learning_rate": 6.286791607285478e-06, |
| "loss": 0.3826, |
| "step": 1191 |
| }, |
| { |
| "epoch": 1.4296, |
| "grad_norm": 0.4768564178143852, |
| "learning_rate": 6.2800471945925e-06, |
| "loss": 0.3774, |
| "step": 1192 |
| }, |
| { |
| "epoch": 1.4308, |
| "grad_norm": 0.5895377413883869, |
| "learning_rate": 6.2733002885987734e-06, |
| "loss": 0.378, |
| "step": 1193 |
| }, |
| { |
| "epoch": 1.432, |
| "grad_norm": 0.691532734331932, |
| "learning_rate": 6.2665509024460554e-06, |
| "loss": 0.3676, |
| "step": 1194 |
| }, |
| { |
| "epoch": 1.4332, |
| "grad_norm": 0.6050439284597378, |
| "learning_rate": 6.259799049280932e-06, |
| "loss": 0.3932, |
| "step": 1195 |
| }, |
| { |
| "epoch": 1.4344000000000001, |
| "grad_norm": 0.5905694488707581, |
| "learning_rate": 6.253044742254791e-06, |
| "loss": 0.3585, |
| "step": 1196 |
| }, |
| { |
| "epoch": 1.4356, |
| "grad_norm": 0.5868666557800802, |
| "learning_rate": 6.246287994523805e-06, |
| "loss": 0.3688, |
| "step": 1197 |
| }, |
| { |
| "epoch": 1.4368, |
| "grad_norm": 0.4676587208954629, |
| "learning_rate": 6.239528819248898e-06, |
| "loss": 0.3975, |
| "step": 1198 |
| }, |
| { |
| "epoch": 1.438, |
| "grad_norm": 0.5699774072418529, |
| "learning_rate": 6.232767229595719e-06, |
| "loss": 0.382, |
| "step": 1199 |
| }, |
| { |
| "epoch": 1.4392, |
| "grad_norm": 0.5678736625348831, |
| "learning_rate": 6.226003238734628e-06, |
| "loss": 0.3743, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.4404, |
| "grad_norm": 0.6577077891365315, |
| "learning_rate": 6.219236859840656e-06, |
| "loss": 0.361, |
| "step": 1201 |
| }, |
| { |
| "epoch": 1.4416, |
| "grad_norm": 0.5749292581736202, |
| "learning_rate": 6.2124681060934866e-06, |
| "loss": 0.358, |
| "step": 1202 |
| }, |
| { |
| "epoch": 1.4428, |
| "grad_norm": 0.590931833530579, |
| "learning_rate": 6.205696990677431e-06, |
| "loss": 0.3669, |
| "step": 1203 |
| }, |
| { |
| "epoch": 1.444, |
| "grad_norm": 0.5397996359057199, |
| "learning_rate": 6.1989235267813964e-06, |
| "loss": 0.3555, |
| "step": 1204 |
| }, |
| { |
| "epoch": 1.4452, |
| "grad_norm": 0.5743513481789311, |
| "learning_rate": 6.192147727598869e-06, |
| "loss": 0.421, |
| "step": 1205 |
| }, |
| { |
| "epoch": 1.4464000000000001, |
| "grad_norm": 0.505200072259294, |
| "learning_rate": 6.185369606327882e-06, |
| "loss": 0.359, |
| "step": 1206 |
| }, |
| { |
| "epoch": 1.4476, |
| "grad_norm": 0.5937106634500684, |
| "learning_rate": 6.178589176170991e-06, |
| "loss": 0.3997, |
| "step": 1207 |
| }, |
| { |
| "epoch": 1.4487999999999999, |
| "grad_norm": 0.5052831659088494, |
| "learning_rate": 6.171806450335248e-06, |
| "loss": 0.4214, |
| "step": 1208 |
| }, |
| { |
| "epoch": 1.45, |
| "grad_norm": 0.5918053967047753, |
| "learning_rate": 6.16502144203218e-06, |
| "loss": 0.4002, |
| "step": 1209 |
| }, |
| { |
| "epoch": 1.4512, |
| "grad_norm": 0.5443590614450066, |
| "learning_rate": 6.1582341644777575e-06, |
| "loss": 0.378, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.4524, |
| "grad_norm": 0.42555020008139566, |
| "learning_rate": 6.151444630892372e-06, |
| "loss": 0.3693, |
| "step": 1211 |
| }, |
| { |
| "epoch": 1.4536, |
| "grad_norm": 0.720412574975775, |
| "learning_rate": 6.144652854500806e-06, |
| "loss": 0.3938, |
| "step": 1212 |
| }, |
| { |
| "epoch": 1.4548, |
| "grad_norm": 0.4724926217608286, |
| "learning_rate": 6.1378588485322145e-06, |
| "loss": 0.3823, |
| "step": 1213 |
| }, |
| { |
| "epoch": 1.456, |
| "grad_norm": 0.5802120718021628, |
| "learning_rate": 6.131062626220094e-06, |
| "loss": 0.3461, |
| "step": 1214 |
| }, |
| { |
| "epoch": 1.4572, |
| "grad_norm": 0.5480554564104493, |
| "learning_rate": 6.124264200802259e-06, |
| "loss": 0.4336, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.4584, |
| "grad_norm": 0.6443331793045611, |
| "learning_rate": 6.117463585520813e-06, |
| "loss": 0.3841, |
| "step": 1216 |
| }, |
| { |
| "epoch": 1.4596, |
| "grad_norm": 0.711519411508646, |
| "learning_rate": 6.110660793622127e-06, |
| "loss": 0.4117, |
| "step": 1217 |
| }, |
| { |
| "epoch": 1.4607999999999999, |
| "grad_norm": 0.45866436123572735, |
| "learning_rate": 6.103855838356813e-06, |
| "loss": 0.3785, |
| "step": 1218 |
| }, |
| { |
| "epoch": 1.462, |
| "grad_norm": 0.5538669654532735, |
| "learning_rate": 6.097048732979691e-06, |
| "loss": 0.4143, |
| "step": 1219 |
| }, |
| { |
| "epoch": 1.4632, |
| "grad_norm": 0.5571894823619101, |
| "learning_rate": 6.090239490749775e-06, |
| "loss": 0.3716, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.4644, |
| "grad_norm": 0.5089942930991793, |
| "learning_rate": 6.083428124930239e-06, |
| "loss": 0.3669, |
| "step": 1221 |
| }, |
| { |
| "epoch": 1.4656, |
| "grad_norm": 0.6702683351874047, |
| "learning_rate": 6.076614648788392e-06, |
| "loss": 0.372, |
| "step": 1222 |
| }, |
| { |
| "epoch": 1.4668, |
| "grad_norm": 0.5794656181086777, |
| "learning_rate": 6.069799075595658e-06, |
| "loss": 0.3874, |
| "step": 1223 |
| }, |
| { |
| "epoch": 1.468, |
| "grad_norm": 0.5468499297531119, |
| "learning_rate": 6.062981418627539e-06, |
| "loss": 0.3573, |
| "step": 1224 |
| }, |
| { |
| "epoch": 1.4692, |
| "grad_norm": 0.5438151205713972, |
| "learning_rate": 6.0561616911636025e-06, |
| "loss": 0.3545, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.4704, |
| "grad_norm": 0.4578732396242083, |
| "learning_rate": 6.049339906487443e-06, |
| "loss": 0.3697, |
| "step": 1226 |
| }, |
| { |
| "epoch": 1.4716, |
| "grad_norm": 0.5676454535315087, |
| "learning_rate": 6.042516077886669e-06, |
| "loss": 0.3554, |
| "step": 1227 |
| }, |
| { |
| "epoch": 1.4727999999999999, |
| "grad_norm": 0.46462267233651716, |
| "learning_rate": 6.035690218652861e-06, |
| "loss": 0.4058, |
| "step": 1228 |
| }, |
| { |
| "epoch": 1.474, |
| "grad_norm": 0.63335218585527, |
| "learning_rate": 6.028862342081564e-06, |
| "loss": 0.3613, |
| "step": 1229 |
| }, |
| { |
| "epoch": 1.4752, |
| "grad_norm": 0.5392835735551683, |
| "learning_rate": 6.022032461472247e-06, |
| "loss": 0.3428, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.4764, |
| "grad_norm": 0.5238389880445473, |
| "learning_rate": 6.015200590128284e-06, |
| "loss": 0.3897, |
| "step": 1231 |
| }, |
| { |
| "epoch": 1.4776, |
| "grad_norm": 0.6032296929764723, |
| "learning_rate": 6.008366741356926e-06, |
| "loss": 0.3808, |
| "step": 1232 |
| }, |
| { |
| "epoch": 1.4788000000000001, |
| "grad_norm": 0.5117870671101838, |
| "learning_rate": 6.001530928469277e-06, |
| "loss": 0.361, |
| "step": 1233 |
| }, |
| { |
| "epoch": 1.48, |
| "grad_norm": 0.5560766032265853, |
| "learning_rate": 5.9946931647802645e-06, |
| "loss": 0.3973, |
| "step": 1234 |
| }, |
| { |
| "epoch": 1.4812, |
| "grad_norm": 0.44978913649649527, |
| "learning_rate": 5.987853463608618e-06, |
| "loss": 0.333, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.4824, |
| "grad_norm": 0.557611973248781, |
| "learning_rate": 5.981011838276841e-06, |
| "loss": 0.4082, |
| "step": 1236 |
| }, |
| { |
| "epoch": 1.4836, |
| "grad_norm": 0.4976072856197252, |
| "learning_rate": 5.974168302111183e-06, |
| "loss": 0.3798, |
| "step": 1237 |
| }, |
| { |
| "epoch": 1.4848, |
| "grad_norm": 0.5403150413190907, |
| "learning_rate": 5.967322868441616e-06, |
| "loss": 0.3709, |
| "step": 1238 |
| }, |
| { |
| "epoch": 1.486, |
| "grad_norm": 0.5377355735149714, |
| "learning_rate": 5.9604755506018105e-06, |
| "loss": 0.36, |
| "step": 1239 |
| }, |
| { |
| "epoch": 1.4872, |
| "grad_norm": 0.526622078545486, |
| "learning_rate": 5.953626361929102e-06, |
| "loss": 0.3785, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.4884, |
| "grad_norm": 0.7084647792247337, |
| "learning_rate": 5.946775315764476e-06, |
| "loss": 0.3774, |
| "step": 1241 |
| }, |
| { |
| "epoch": 1.4896, |
| "grad_norm": 0.6306590790537886, |
| "learning_rate": 5.939922425452531e-06, |
| "loss": 0.3807, |
| "step": 1242 |
| }, |
| { |
| "epoch": 1.4908000000000001, |
| "grad_norm": 0.5364226942594238, |
| "learning_rate": 5.93306770434146e-06, |
| "loss": 0.3713, |
| "step": 1243 |
| }, |
| { |
| "epoch": 1.492, |
| "grad_norm": 0.5430047001467209, |
| "learning_rate": 5.926211165783021e-06, |
| "loss": 0.4211, |
| "step": 1244 |
| }, |
| { |
| "epoch": 1.4932, |
| "grad_norm": 0.44567637661850384, |
| "learning_rate": 5.919352823132515e-06, |
| "loss": 0.3768, |
| "step": 1245 |
| }, |
| { |
| "epoch": 1.4944, |
| "grad_norm": 0.5882164379790376, |
| "learning_rate": 5.912492689748753e-06, |
| "loss": 0.3286, |
| "step": 1246 |
| }, |
| { |
| "epoch": 1.4956, |
| "grad_norm": 0.6135999835439079, |
| "learning_rate": 5.905630778994036e-06, |
| "loss": 0.4097, |
| "step": 1247 |
| }, |
| { |
| "epoch": 1.4968, |
| "grad_norm": 0.49445597722429835, |
| "learning_rate": 5.898767104234128e-06, |
| "loss": 0.3304, |
| "step": 1248 |
| }, |
| { |
| "epoch": 1.498, |
| "grad_norm": 0.7115505037101663, |
| "learning_rate": 5.891901678838227e-06, |
| "loss": 0.3749, |
| "step": 1249 |
| }, |
| { |
| "epoch": 1.4992, |
| "grad_norm": 0.5715925192085276, |
| "learning_rate": 5.88503451617894e-06, |
| "loss": 0.3844, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.5004, |
| "grad_norm": 0.5748426571859289, |
| "learning_rate": 5.878165629632262e-06, |
| "loss": 0.3686, |
| "step": 1251 |
| }, |
| { |
| "epoch": 1.5016, |
| "grad_norm": 0.4827515224905166, |
| "learning_rate": 5.8712950325775416e-06, |
| "loss": 0.3284, |
| "step": 1252 |
| }, |
| { |
| "epoch": 1.5028000000000001, |
| "grad_norm": 0.5379573591619449, |
| "learning_rate": 5.864422738397465e-06, |
| "loss": 0.3576, |
| "step": 1253 |
| }, |
| { |
| "epoch": 1.504, |
| "grad_norm": 0.5425509909802786, |
| "learning_rate": 5.857548760478015e-06, |
| "loss": 0.402, |
| "step": 1254 |
| }, |
| { |
| "epoch": 1.5051999999999999, |
| "grad_norm": 0.4720264422543957, |
| "learning_rate": 5.85067311220846e-06, |
| "loss": 0.3801, |
| "step": 1255 |
| }, |
| { |
| "epoch": 1.5064, |
| "grad_norm": 0.6050179214602931, |
| "learning_rate": 5.843795806981325e-06, |
| "loss": 0.3527, |
| "step": 1256 |
| }, |
| { |
| "epoch": 1.5076, |
| "grad_norm": 0.5509117101205769, |
| "learning_rate": 5.836916858192353e-06, |
| "loss": 0.3828, |
| "step": 1257 |
| }, |
| { |
| "epoch": 1.5088, |
| "grad_norm": 0.5304817046842542, |
| "learning_rate": 5.830036279240497e-06, |
| "loss": 0.3736, |
| "step": 1258 |
| }, |
| { |
| "epoch": 1.51, |
| "grad_norm": 0.5683967543341494, |
| "learning_rate": 5.823154083527884e-06, |
| "loss": 0.3534, |
| "step": 1259 |
| }, |
| { |
| "epoch": 1.5112, |
| "grad_norm": 0.5114190307881069, |
| "learning_rate": 5.816270284459783e-06, |
| "loss": 0.3914, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.5124, |
| "grad_norm": 0.6098036240699997, |
| "learning_rate": 5.809384895444594e-06, |
| "loss": 0.3745, |
| "step": 1261 |
| }, |
| { |
| "epoch": 1.5135999999999998, |
| "grad_norm": 0.5408809868247507, |
| "learning_rate": 5.802497929893813e-06, |
| "loss": 0.3909, |
| "step": 1262 |
| }, |
| { |
| "epoch": 1.5148000000000001, |
| "grad_norm": 0.5848579955188301, |
| "learning_rate": 5.795609401222001e-06, |
| "loss": 0.3784, |
| "step": 1263 |
| }, |
| { |
| "epoch": 1.516, |
| "grad_norm": 0.6277636788518177, |
| "learning_rate": 5.7887193228467735e-06, |
| "loss": 0.3578, |
| "step": 1264 |
| }, |
| { |
| "epoch": 1.5171999999999999, |
| "grad_norm": 0.4668198224626426, |
| "learning_rate": 5.781827708188753e-06, |
| "loss": 0.3718, |
| "step": 1265 |
| }, |
| { |
| "epoch": 1.5184, |
| "grad_norm": 0.49948272590176307, |
| "learning_rate": 5.774934570671562e-06, |
| "loss": 0.3824, |
| "step": 1266 |
| }, |
| { |
| "epoch": 1.5196, |
| "grad_norm": 0.5680796315248136, |
| "learning_rate": 5.768039923721791e-06, |
| "loss": 0.3506, |
| "step": 1267 |
| }, |
| { |
| "epoch": 1.5208, |
| "grad_norm": 0.5448127204775214, |
| "learning_rate": 5.761143780768962e-06, |
| "loss": 0.413, |
| "step": 1268 |
| }, |
| { |
| "epoch": 1.522, |
| "grad_norm": 0.420450639517775, |
| "learning_rate": 5.7542461552455165e-06, |
| "loss": 0.3798, |
| "step": 1269 |
| }, |
| { |
| "epoch": 1.5232, |
| "grad_norm": 0.6781585974401322, |
| "learning_rate": 5.747347060586787e-06, |
| "loss": 0.3766, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.5244, |
| "grad_norm": 0.5288110386339663, |
| "learning_rate": 5.740446510230959e-06, |
| "loss": 0.375, |
| "step": 1271 |
| }, |
| { |
| "epoch": 1.5255999999999998, |
| "grad_norm": 0.46475178742662154, |
| "learning_rate": 5.733544517619064e-06, |
| "loss": 0.3924, |
| "step": 1272 |
| }, |
| { |
| "epoch": 1.5268000000000002, |
| "grad_norm": 0.6725064950199741, |
| "learning_rate": 5.726641096194932e-06, |
| "loss": 0.38, |
| "step": 1273 |
| }, |
| { |
| "epoch": 1.528, |
| "grad_norm": 0.551028126958213, |
| "learning_rate": 5.719736259405182e-06, |
| "loss": 0.3854, |
| "step": 1274 |
| }, |
| { |
| "epoch": 1.5292, |
| "grad_norm": 0.5247630115155179, |
| "learning_rate": 5.7128300206991925e-06, |
| "loss": 0.3703, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.5304, |
| "grad_norm": 0.6214543397495828, |
| "learning_rate": 5.705922393529065e-06, |
| "loss": 0.3604, |
| "step": 1276 |
| }, |
| { |
| "epoch": 1.5316, |
| "grad_norm": 0.5078068129567174, |
| "learning_rate": 5.69901339134961e-06, |
| "loss": 0.3787, |
| "step": 1277 |
| }, |
| { |
| "epoch": 1.5328, |
| "grad_norm": 0.6304781199299307, |
| "learning_rate": 5.692103027618321e-06, |
| "loss": 0.3531, |
| "step": 1278 |
| }, |
| { |
| "epoch": 1.534, |
| "grad_norm": 0.5860745608692262, |
| "learning_rate": 5.685191315795331e-06, |
| "loss": 0.3994, |
| "step": 1279 |
| }, |
| { |
| "epoch": 1.5352000000000001, |
| "grad_norm": 0.4848169895854493, |
| "learning_rate": 5.678278269343411e-06, |
| "loss": 0.3657, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.5364, |
| "grad_norm": 0.529871229005269, |
| "learning_rate": 5.671363901727927e-06, |
| "loss": 0.3635, |
| "step": 1281 |
| }, |
| { |
| "epoch": 1.5375999999999999, |
| "grad_norm": 0.4786421442271292, |
| "learning_rate": 5.664448226416815e-06, |
| "loss": 0.3311, |
| "step": 1282 |
| }, |
| { |
| "epoch": 1.5388, |
| "grad_norm": 0.6114626793926188, |
| "learning_rate": 5.657531256880565e-06, |
| "loss": 0.3952, |
| "step": 1283 |
| }, |
| { |
| "epoch": 1.54, |
| "grad_norm": 0.5310963476303867, |
| "learning_rate": 5.650613006592185e-06, |
| "loss": 0.3822, |
| "step": 1284 |
| }, |
| { |
| "epoch": 1.5412, |
| "grad_norm": 0.5258123472073719, |
| "learning_rate": 5.643693489027172e-06, |
| "loss": 0.3509, |
| "step": 1285 |
| }, |
| { |
| "epoch": 1.5424, |
| "grad_norm": 0.6791198162986184, |
| "learning_rate": 5.636772717663501e-06, |
| "loss": 0.4113, |
| "step": 1286 |
| }, |
| { |
| "epoch": 1.5436, |
| "grad_norm": 0.4858757606096244, |
| "learning_rate": 5.629850705981584e-06, |
| "loss": 0.3503, |
| "step": 1287 |
| }, |
| { |
| "epoch": 1.5448, |
| "grad_norm": 0.5987150490357991, |
| "learning_rate": 5.622927467464247e-06, |
| "loss": 0.3589, |
| "step": 1288 |
| }, |
| { |
| "epoch": 1.546, |
| "grad_norm": 0.4802188661179889, |
| "learning_rate": 5.6160030155967116e-06, |
| "loss": 0.3454, |
| "step": 1289 |
| }, |
| { |
| "epoch": 1.5472000000000001, |
| "grad_norm": 0.4341244942793535, |
| "learning_rate": 5.609077363866555e-06, |
| "loss": 0.3805, |
| "step": 1290 |
| }, |
| { |
| "epoch": 1.5484, |
| "grad_norm": 0.5557467999785081, |
| "learning_rate": 5.602150525763701e-06, |
| "loss": 0.3963, |
| "step": 1291 |
| }, |
| { |
| "epoch": 1.5495999999999999, |
| "grad_norm": 0.7604236726580861, |
| "learning_rate": 5.595222514780379e-06, |
| "loss": 0.3446, |
| "step": 1292 |
| }, |
| { |
| "epoch": 1.5508, |
| "grad_norm": 0.6097142049223084, |
| "learning_rate": 5.588293344411097e-06, |
| "loss": 0.4047, |
| "step": 1293 |
| }, |
| { |
| "epoch": 1.552, |
| "grad_norm": 0.4521389519523142, |
| "learning_rate": 5.581363028152633e-06, |
| "loss": 0.366, |
| "step": 1294 |
| }, |
| { |
| "epoch": 1.5532, |
| "grad_norm": 0.5914221922011543, |
| "learning_rate": 5.574431579503991e-06, |
| "loss": 0.3948, |
| "step": 1295 |
| }, |
| { |
| "epoch": 1.5544, |
| "grad_norm": 0.5527344528002235, |
| "learning_rate": 5.5674990119663794e-06, |
| "loss": 0.3724, |
| "step": 1296 |
| }, |
| { |
| "epoch": 1.5556, |
| "grad_norm": 0.5580619134209653, |
| "learning_rate": 5.560565339043188e-06, |
| "loss": 0.3792, |
| "step": 1297 |
| }, |
| { |
| "epoch": 1.5568, |
| "grad_norm": 0.48215355759585776, |
| "learning_rate": 5.55363057423996e-06, |
| "loss": 0.4127, |
| "step": 1298 |
| }, |
| { |
| "epoch": 1.558, |
| "grad_norm": 0.5646877501201418, |
| "learning_rate": 5.546694731064364e-06, |
| "loss": 0.3939, |
| "step": 1299 |
| }, |
| { |
| "epoch": 1.5592000000000001, |
| "grad_norm": 0.6052210861484988, |
| "learning_rate": 5.5397578230261715e-06, |
| "loss": 0.3869, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.5604, |
| "grad_norm": 0.534027609323964, |
| "learning_rate": 5.532819863637223e-06, |
| "loss": 0.3497, |
| "step": 1301 |
| }, |
| { |
| "epoch": 1.5615999999999999, |
| "grad_norm": 0.626114651009411, |
| "learning_rate": 5.525880866411414e-06, |
| "loss": 0.3998, |
| "step": 1302 |
| }, |
| { |
| "epoch": 1.5628, |
| "grad_norm": 0.519083288023901, |
| "learning_rate": 5.5189408448646565e-06, |
| "loss": 0.3872, |
| "step": 1303 |
| }, |
| { |
| "epoch": 1.564, |
| "grad_norm": 0.5358073451310125, |
| "learning_rate": 5.511999812514857e-06, |
| "loss": 0.3685, |
| "step": 1304 |
| }, |
| { |
| "epoch": 1.5652, |
| "grad_norm": 0.5220660598380811, |
| "learning_rate": 5.505057782881896e-06, |
| "loss": 0.4071, |
| "step": 1305 |
| }, |
| { |
| "epoch": 1.5664, |
| "grad_norm": 0.682306549572413, |
| "learning_rate": 5.4981147694875924e-06, |
| "loss": 0.387, |
| "step": 1306 |
| }, |
| { |
| "epoch": 1.5676, |
| "grad_norm": 0.6555366532053696, |
| "learning_rate": 5.491170785855681e-06, |
| "loss": 0.3655, |
| "step": 1307 |
| }, |
| { |
| "epoch": 1.5688, |
| "grad_norm": 0.5995526776767848, |
| "learning_rate": 5.484225845511791e-06, |
| "loss": 0.3934, |
| "step": 1308 |
| }, |
| { |
| "epoch": 1.5699999999999998, |
| "grad_norm": 0.5699166744963786, |
| "learning_rate": 5.477279961983408e-06, |
| "loss": 0.3453, |
| "step": 1309 |
| }, |
| { |
| "epoch": 1.5712000000000002, |
| "grad_norm": 0.5562303554423684, |
| "learning_rate": 5.470333148799862e-06, |
| "loss": 0.3652, |
| "step": 1310 |
| }, |
| { |
| "epoch": 1.5724, |
| "grad_norm": 0.6507510125155322, |
| "learning_rate": 5.463385419492288e-06, |
| "loss": 0.3701, |
| "step": 1311 |
| }, |
| { |
| "epoch": 1.5735999999999999, |
| "grad_norm": 0.4851458447128553, |
| "learning_rate": 5.456436787593609e-06, |
| "loss": 0.3719, |
| "step": 1312 |
| }, |
| { |
| "epoch": 1.5748, |
| "grad_norm": 0.5566503858408686, |
| "learning_rate": 5.449487266638504e-06, |
| "loss": 0.3216, |
| "step": 1313 |
| }, |
| { |
| "epoch": 1.576, |
| "grad_norm": 0.5554972580250097, |
| "learning_rate": 5.442536870163386e-06, |
| "loss": 0.3979, |
| "step": 1314 |
| }, |
| { |
| "epoch": 1.5772, |
| "grad_norm": 0.5212955621171509, |
| "learning_rate": 5.43558561170637e-06, |
| "loss": 0.387, |
| "step": 1315 |
| }, |
| { |
| "epoch": 1.5784, |
| "grad_norm": 0.5507781499962543, |
| "learning_rate": 5.428633504807253e-06, |
| "loss": 0.3552, |
| "step": 1316 |
| }, |
| { |
| "epoch": 1.5796000000000001, |
| "grad_norm": 0.5329563540861513, |
| "learning_rate": 5.421680563007486e-06, |
| "loss": 0.386, |
| "step": 1317 |
| }, |
| { |
| "epoch": 1.5808, |
| "grad_norm": 0.4940099612080407, |
| "learning_rate": 5.414726799850141e-06, |
| "loss": 0.3462, |
| "step": 1318 |
| }, |
| { |
| "epoch": 1.5819999999999999, |
| "grad_norm": 0.5206713014005246, |
| "learning_rate": 5.407772228879894e-06, |
| "loss": 0.3404, |
| "step": 1319 |
| }, |
| { |
| "epoch": 1.5832000000000002, |
| "grad_norm": 0.6097320195459147, |
| "learning_rate": 5.400816863642991e-06, |
| "loss": 0.3484, |
| "step": 1320 |
| }, |
| { |
| "epoch": 1.5844, |
| "grad_norm": 0.6273168866546157, |
| "learning_rate": 5.393860717687231e-06, |
| "loss": 0.3628, |
| "step": 1321 |
| }, |
| { |
| "epoch": 1.5856, |
| "grad_norm": 0.5175627550499874, |
| "learning_rate": 5.3869038045619275e-06, |
| "loss": 0.3299, |
| "step": 1322 |
| }, |
| { |
| "epoch": 1.5868, |
| "grad_norm": 0.60322548886763, |
| "learning_rate": 5.379946137817891e-06, |
| "loss": 0.3763, |
| "step": 1323 |
| }, |
| { |
| "epoch": 1.588, |
| "grad_norm": 0.5767326804956401, |
| "learning_rate": 5.3729877310073985e-06, |
| "loss": 0.3913, |
| "step": 1324 |
| }, |
| { |
| "epoch": 1.5892, |
| "grad_norm": 0.5596287781864292, |
| "learning_rate": 5.366028597684173e-06, |
| "loss": 0.3901, |
| "step": 1325 |
| }, |
| { |
| "epoch": 1.5904, |
| "grad_norm": 0.6024156487106819, |
| "learning_rate": 5.359068751403347e-06, |
| "loss": 0.3751, |
| "step": 1326 |
| }, |
| { |
| "epoch": 1.5916000000000001, |
| "grad_norm": 0.6279038062933642, |
| "learning_rate": 5.352108205721445e-06, |
| "loss": 0.3699, |
| "step": 1327 |
| }, |
| { |
| "epoch": 1.5928, |
| "grad_norm": 0.5857494852870978, |
| "learning_rate": 5.345146974196351e-06, |
| "loss": 0.4159, |
| "step": 1328 |
| }, |
| { |
| "epoch": 1.5939999999999999, |
| "grad_norm": 0.5754425791232705, |
| "learning_rate": 5.338185070387289e-06, |
| "loss": 0.4033, |
| "step": 1329 |
| }, |
| { |
| "epoch": 1.5952, |
| "grad_norm": 0.6918790398986856, |
| "learning_rate": 5.3312225078547895e-06, |
| "loss": 0.4049, |
| "step": 1330 |
| }, |
| { |
| "epoch": 1.5964, |
| "grad_norm": 0.5294408996060997, |
| "learning_rate": 5.324259300160667e-06, |
| "loss": 0.3429, |
| "step": 1331 |
| }, |
| { |
| "epoch": 1.5976, |
| "grad_norm": 0.5030371247094101, |
| "learning_rate": 5.31729546086799e-06, |
| "loss": 0.3506, |
| "step": 1332 |
| }, |
| { |
| "epoch": 1.5988, |
| "grad_norm": 0.654689724453025, |
| "learning_rate": 5.310331003541065e-06, |
| "loss": 0.3985, |
| "step": 1333 |
| }, |
| { |
| "epoch": 1.6, |
| "grad_norm": 0.5706743127335765, |
| "learning_rate": 5.303365941745392e-06, |
| "loss": 0.3893, |
| "step": 1334 |
| }, |
| { |
| "epoch": 1.6012, |
| "grad_norm": 0.5909680552053695, |
| "learning_rate": 5.296400289047655e-06, |
| "loss": 0.3921, |
| "step": 1335 |
| }, |
| { |
| "epoch": 1.6024, |
| "grad_norm": 0.4716929907938838, |
| "learning_rate": 5.289434059015689e-06, |
| "loss": 0.386, |
| "step": 1336 |
| }, |
| { |
| "epoch": 1.6036000000000001, |
| "grad_norm": 0.5622232383432192, |
| "learning_rate": 5.28246726521845e-06, |
| "loss": 0.3646, |
| "step": 1337 |
| }, |
| { |
| "epoch": 1.6048, |
| "grad_norm": 0.5464132459639033, |
| "learning_rate": 5.275499921225994e-06, |
| "loss": 0.3588, |
| "step": 1338 |
| }, |
| { |
| "epoch": 1.6059999999999999, |
| "grad_norm": 0.5217117536373536, |
| "learning_rate": 5.26853204060945e-06, |
| "loss": 0.35, |
| "step": 1339 |
| }, |
| { |
| "epoch": 1.6072, |
| "grad_norm": 0.5463678279611617, |
| "learning_rate": 5.26156363694099e-06, |
| "loss": 0.3876, |
| "step": 1340 |
| }, |
| { |
| "epoch": 1.6084, |
| "grad_norm": 0.5414386696177665, |
| "learning_rate": 5.2545947237938055e-06, |
| "loss": 0.3581, |
| "step": 1341 |
| }, |
| { |
| "epoch": 1.6096, |
| "grad_norm": 0.6073251400646295, |
| "learning_rate": 5.247625314742083e-06, |
| "loss": 0.3593, |
| "step": 1342 |
| }, |
| { |
| "epoch": 1.6108, |
| "grad_norm": 0.5760004674528303, |
| "learning_rate": 5.240655423360969e-06, |
| "loss": 0.3964, |
| "step": 1343 |
| }, |
| { |
| "epoch": 1.612, |
| "grad_norm": 0.4860576503060455, |
| "learning_rate": 5.233685063226557e-06, |
| "loss": 0.3882, |
| "step": 1344 |
| }, |
| { |
| "epoch": 1.6132, |
| "grad_norm": 0.5384288035807419, |
| "learning_rate": 5.226714247915846e-06, |
| "loss": 0.3959, |
| "step": 1345 |
| }, |
| { |
| "epoch": 1.6143999999999998, |
| "grad_norm": 0.5693824402850227, |
| "learning_rate": 5.219742991006728e-06, |
| "loss": 0.3835, |
| "step": 1346 |
| }, |
| { |
| "epoch": 1.6156000000000001, |
| "grad_norm": 0.46937121460922504, |
| "learning_rate": 5.212771306077951e-06, |
| "loss": 0.3564, |
| "step": 1347 |
| }, |
| { |
| "epoch": 1.6168, |
| "grad_norm": 0.5617653190002595, |
| "learning_rate": 5.205799206709097e-06, |
| "loss": 0.3821, |
| "step": 1348 |
| }, |
| { |
| "epoch": 1.6179999999999999, |
| "grad_norm": 0.49040910836464563, |
| "learning_rate": 5.198826706480558e-06, |
| "loss": 0.3627, |
| "step": 1349 |
| }, |
| { |
| "epoch": 1.6192, |
| "grad_norm": 0.4856910298870825, |
| "learning_rate": 5.191853818973505e-06, |
| "loss": 0.389, |
| "step": 1350 |
| }, |
| { |
| "epoch": 1.6204, |
| "grad_norm": 0.4983582654746984, |
| "learning_rate": 5.184880557769865e-06, |
| "loss": 0.3417, |
| "step": 1351 |
| }, |
| { |
| "epoch": 1.6216, |
| "grad_norm": 0.5804818829476863, |
| "learning_rate": 5.177906936452287e-06, |
| "loss": 0.4007, |
| "step": 1352 |
| }, |
| { |
| "epoch": 1.6228, |
| "grad_norm": 0.5023000093685094, |
| "learning_rate": 5.170932968604131e-06, |
| "loss": 0.3831, |
| "step": 1353 |
| }, |
| { |
| "epoch": 1.624, |
| "grad_norm": 0.5644778366930446, |
| "learning_rate": 5.163958667809422e-06, |
| "loss": 0.3495, |
| "step": 1354 |
| }, |
| { |
| "epoch": 1.6252, |
| "grad_norm": 0.5385981194161137, |
| "learning_rate": 5.156984047652841e-06, |
| "loss": 0.3814, |
| "step": 1355 |
| }, |
| { |
| "epoch": 1.6263999999999998, |
| "grad_norm": 0.6001189538653291, |
| "learning_rate": 5.15000912171969e-06, |
| "loss": 0.4016, |
| "step": 1356 |
| }, |
| { |
| "epoch": 1.6276000000000002, |
| "grad_norm": 0.6470783016801794, |
| "learning_rate": 5.143033903595862e-06, |
| "loss": 0.4066, |
| "step": 1357 |
| }, |
| { |
| "epoch": 1.6288, |
| "grad_norm": 0.4072511431074058, |
| "learning_rate": 5.1360584068678225e-06, |
| "loss": 0.4364, |
| "step": 1358 |
| }, |
| { |
| "epoch": 1.63, |
| "grad_norm": 0.6029007981337849, |
| "learning_rate": 5.129082645122579e-06, |
| "loss": 0.3365, |
| "step": 1359 |
| }, |
| { |
| "epoch": 1.6312, |
| "grad_norm": 0.6437529016436582, |
| "learning_rate": 5.1221066319476576e-06, |
| "loss": 0.3935, |
| "step": 1360 |
| }, |
| { |
| "epoch": 1.6324, |
| "grad_norm": 0.5753512318024624, |
| "learning_rate": 5.115130380931071e-06, |
| "loss": 0.3722, |
| "step": 1361 |
| }, |
| { |
| "epoch": 1.6336, |
| "grad_norm": 0.4681832965615783, |
| "learning_rate": 5.108153905661296e-06, |
| "loss": 0.3874, |
| "step": 1362 |
| }, |
| { |
| "epoch": 1.6348, |
| "grad_norm": 0.5281516586208013, |
| "learning_rate": 5.101177219727245e-06, |
| "loss": 0.3853, |
| "step": 1363 |
| }, |
| { |
| "epoch": 1.6360000000000001, |
| "grad_norm": 0.6024168559501076, |
| "learning_rate": 5.094200336718246e-06, |
| "loss": 0.3763, |
| "step": 1364 |
| }, |
| { |
| "epoch": 1.6372, |
| "grad_norm": 0.5560166408755494, |
| "learning_rate": 5.087223270224003e-06, |
| "loss": 0.3936, |
| "step": 1365 |
| }, |
| { |
| "epoch": 1.6383999999999999, |
| "grad_norm": 0.6592848051247064, |
| "learning_rate": 5.080246033834581e-06, |
| "loss": 0.3893, |
| "step": 1366 |
| }, |
| { |
| "epoch": 1.6396, |
| "grad_norm": 0.4941210653374417, |
| "learning_rate": 5.0732686411403816e-06, |
| "loss": 0.3843, |
| "step": 1367 |
| }, |
| { |
| "epoch": 1.6408, |
| "grad_norm": 0.6710539393196384, |
| "learning_rate": 5.066291105732102e-06, |
| "loss": 0.3698, |
| "step": 1368 |
| }, |
| { |
| "epoch": 1.642, |
| "grad_norm": 0.5142664088562426, |
| "learning_rate": 5.05931344120072e-06, |
| "loss": 0.3857, |
| "step": 1369 |
| }, |
| { |
| "epoch": 1.6432, |
| "grad_norm": 0.570957765911436, |
| "learning_rate": 5.052335661137467e-06, |
| "loss": 0.3841, |
| "step": 1370 |
| }, |
| { |
| "epoch": 1.6444, |
| "grad_norm": 0.4699235231843021, |
| "learning_rate": 5.0453577791337984e-06, |
| "loss": 0.3912, |
| "step": 1371 |
| }, |
| { |
| "epoch": 1.6456, |
| "grad_norm": 0.5922972031744552, |
| "learning_rate": 5.03837980878137e-06, |
| "loss": 0.3878, |
| "step": 1372 |
| }, |
| { |
| "epoch": 1.6468, |
| "grad_norm": 0.6480156287733289, |
| "learning_rate": 5.031401763672003e-06, |
| "loss": 0.377, |
| "step": 1373 |
| }, |
| { |
| "epoch": 1.6480000000000001, |
| "grad_norm": 0.4834097534494832, |
| "learning_rate": 5.024423657397674e-06, |
| "loss": 0.377, |
| "step": 1374 |
| }, |
| { |
| "epoch": 1.6492, |
| "grad_norm": 0.5244756334982461, |
| "learning_rate": 5.017445503550471e-06, |
| "loss": 0.4217, |
| "step": 1375 |
| }, |
| { |
| "epoch": 1.6503999999999999, |
| "grad_norm": 0.5615837738343601, |
| "learning_rate": 5.010467315722578e-06, |
| "loss": 0.321, |
| "step": 1376 |
| }, |
| { |
| "epoch": 1.6516, |
| "grad_norm": 0.5615280581267553, |
| "learning_rate": 5.003489107506243e-06, |
| "loss": 0.3599, |
| "step": 1377 |
| }, |
| { |
| "epoch": 1.6528, |
| "grad_norm": 0.5122115198363286, |
| "learning_rate": 4.996510892493758e-06, |
| "loss": 0.3617, |
| "step": 1378 |
| }, |
| { |
| "epoch": 1.654, |
| "grad_norm": 0.5966845770519522, |
| "learning_rate": 4.989532684277424e-06, |
| "loss": 0.3752, |
| "step": 1379 |
| }, |
| { |
| "epoch": 1.6552, |
| "grad_norm": 0.5385391339983795, |
| "learning_rate": 4.98255449644953e-06, |
| "loss": 0.3981, |
| "step": 1380 |
| }, |
| { |
| "epoch": 1.6564, |
| "grad_norm": 0.5139696119449357, |
| "learning_rate": 4.975576342602329e-06, |
| "loss": 0.3696, |
| "step": 1381 |
| }, |
| { |
| "epoch": 1.6576, |
| "grad_norm": 0.5604633683105653, |
| "learning_rate": 4.968598236327998e-06, |
| "loss": 0.3773, |
| "step": 1382 |
| }, |
| { |
| "epoch": 1.6588, |
| "grad_norm": 0.5294822576550577, |
| "learning_rate": 4.961620191218632e-06, |
| "loss": 0.3739, |
| "step": 1383 |
| }, |
| { |
| "epoch": 1.6600000000000001, |
| "grad_norm": 0.5271341601650227, |
| "learning_rate": 4.954642220866202e-06, |
| "loss": 0.3947, |
| "step": 1384 |
| }, |
| { |
| "epoch": 1.6612, |
| "grad_norm": 0.5474409392635282, |
| "learning_rate": 4.947664338862534e-06, |
| "loss": 0.3611, |
| "step": 1385 |
| }, |
| { |
| "epoch": 1.6623999999999999, |
| "grad_norm": 0.5244053338973764, |
| "learning_rate": 4.940686558799283e-06, |
| "loss": 0.3728, |
| "step": 1386 |
| }, |
| { |
| "epoch": 1.6636, |
| "grad_norm": 0.5832859120381986, |
| "learning_rate": 4.933708894267901e-06, |
| "loss": 0.3439, |
| "step": 1387 |
| }, |
| { |
| "epoch": 1.6648, |
| "grad_norm": 0.48313733002500353, |
| "learning_rate": 4.92673135885962e-06, |
| "loss": 0.3421, |
| "step": 1388 |
| }, |
| { |
| "epoch": 1.666, |
| "grad_norm": 0.6160310969315205, |
| "learning_rate": 4.919753966165419e-06, |
| "loss": 0.3726, |
| "step": 1389 |
| }, |
| { |
| "epoch": 1.6672, |
| "grad_norm": 0.47547459852065205, |
| "learning_rate": 4.912776729775999e-06, |
| "loss": 0.3735, |
| "step": 1390 |
| }, |
| { |
| "epoch": 1.6684, |
| "grad_norm": 0.6780958925182213, |
| "learning_rate": 4.905799663281756e-06, |
| "loss": 0.4067, |
| "step": 1391 |
| }, |
| { |
| "epoch": 1.6696, |
| "grad_norm": 0.47505858645110066, |
| "learning_rate": 4.898822780272757e-06, |
| "loss": 0.3789, |
| "step": 1392 |
| }, |
| { |
| "epoch": 1.6707999999999998, |
| "grad_norm": 0.5961933662672453, |
| "learning_rate": 4.8918460943387065e-06, |
| "loss": 0.398, |
| "step": 1393 |
| }, |
| { |
| "epoch": 1.6720000000000002, |
| "grad_norm": 0.44935560844126476, |
| "learning_rate": 4.8848696190689295e-06, |
| "loss": 0.3787, |
| "step": 1394 |
| }, |
| { |
| "epoch": 1.6732, |
| "grad_norm": 1.1909559435485608, |
| "learning_rate": 4.877893368052343e-06, |
| "loss": 0.39, |
| "step": 1395 |
| }, |
| { |
| "epoch": 1.6743999999999999, |
| "grad_norm": 0.7290903045539241, |
| "learning_rate": 4.870917354877421e-06, |
| "loss": 0.3748, |
| "step": 1396 |
| }, |
| { |
| "epoch": 1.6756, |
| "grad_norm": 0.6363422553601993, |
| "learning_rate": 4.863941593132179e-06, |
| "loss": 0.3666, |
| "step": 1397 |
| }, |
| { |
| "epoch": 1.6768, |
| "grad_norm": 0.6144068562709365, |
| "learning_rate": 4.856966096404141e-06, |
| "loss": 0.365, |
| "step": 1398 |
| }, |
| { |
| "epoch": 1.678, |
| "grad_norm": 0.528262360343204, |
| "learning_rate": 4.849990878280313e-06, |
| "loss": 0.3758, |
| "step": 1399 |
| }, |
| { |
| "epoch": 1.6792, |
| "grad_norm": 0.5048194620723434, |
| "learning_rate": 4.843015952347159e-06, |
| "loss": 0.4263, |
| "step": 1400 |
| }, |
| { |
| "epoch": 1.6804000000000001, |
| "grad_norm": 0.5970202904903041, |
| "learning_rate": 4.8360413321905786e-06, |
| "loss": 0.3829, |
| "step": 1401 |
| }, |
| { |
| "epoch": 1.6816, |
| "grad_norm": 0.5466644316823482, |
| "learning_rate": 4.829067031395871e-06, |
| "loss": 0.3727, |
| "step": 1402 |
| }, |
| { |
| "epoch": 1.6827999999999999, |
| "grad_norm": 0.5151765615919326, |
| "learning_rate": 4.822093063547715e-06, |
| "loss": 0.3847, |
| "step": 1403 |
| }, |
| { |
| "epoch": 1.6840000000000002, |
| "grad_norm": 0.556770360004758, |
| "learning_rate": 4.815119442230138e-06, |
| "loss": 0.3767, |
| "step": 1404 |
| }, |
| { |
| "epoch": 1.6852, |
| "grad_norm": 0.5566673757348145, |
| "learning_rate": 4.8081461810264955e-06, |
| "loss": 0.3777, |
| "step": 1405 |
| }, |
| { |
| "epoch": 1.6864, |
| "grad_norm": 0.49635429488969113, |
| "learning_rate": 4.801173293519442e-06, |
| "loss": 0.376, |
| "step": 1406 |
| }, |
| { |
| "epoch": 1.6876, |
| "grad_norm": 0.5192887943818861, |
| "learning_rate": 4.7942007932909046e-06, |
| "loss": 0.3903, |
| "step": 1407 |
| }, |
| { |
| "epoch": 1.6888, |
| "grad_norm": 0.5653881468056294, |
| "learning_rate": 4.7872286939220516e-06, |
| "loss": 0.3811, |
| "step": 1408 |
| }, |
| { |
| "epoch": 1.69, |
| "grad_norm": 0.48983125503166525, |
| "learning_rate": 4.7802570089932746e-06, |
| "loss": 0.3838, |
| "step": 1409 |
| }, |
| { |
| "epoch": 1.6912, |
| "grad_norm": 0.6022472711625542, |
| "learning_rate": 4.773285752084154e-06, |
| "loss": 0.3712, |
| "step": 1410 |
| }, |
| { |
| "epoch": 1.6924000000000001, |
| "grad_norm": 0.4592209495815929, |
| "learning_rate": 4.766314936773445e-06, |
| "loss": 0.372, |
| "step": 1411 |
| }, |
| { |
| "epoch": 1.6936, |
| "grad_norm": 0.5988832880945443, |
| "learning_rate": 4.7593445766390315e-06, |
| "loss": 0.3655, |
| "step": 1412 |
| }, |
| { |
| "epoch": 1.6947999999999999, |
| "grad_norm": 0.45588967216207166, |
| "learning_rate": 4.752374685257919e-06, |
| "loss": 0.3643, |
| "step": 1413 |
| }, |
| { |
| "epoch": 1.696, |
| "grad_norm": 0.5556541460465428, |
| "learning_rate": 4.745405276206196e-06, |
| "loss": 0.3915, |
| "step": 1414 |
| }, |
| { |
| "epoch": 1.6972, |
| "grad_norm": 0.5012126046135608, |
| "learning_rate": 4.738436363059013e-06, |
| "loss": 0.379, |
| "step": 1415 |
| }, |
| { |
| "epoch": 1.6984, |
| "grad_norm": 0.5041937531710121, |
| "learning_rate": 4.731467959390552e-06, |
| "loss": 0.3421, |
| "step": 1416 |
| }, |
| { |
| "epoch": 1.6996, |
| "grad_norm": 0.5283604855128771, |
| "learning_rate": 4.724500078774008e-06, |
| "loss": 0.337, |
| "step": 1417 |
| }, |
| { |
| "epoch": 1.7008, |
| "grad_norm": 0.6133511645731402, |
| "learning_rate": 4.717532734781552e-06, |
| "loss": 0.3779, |
| "step": 1418 |
| }, |
| { |
| "epoch": 1.702, |
| "grad_norm": 0.507800303653185, |
| "learning_rate": 4.710565940984313e-06, |
| "loss": 0.3598, |
| "step": 1419 |
| }, |
| { |
| "epoch": 1.7032, |
| "grad_norm": 0.5426917965409874, |
| "learning_rate": 4.703599710952347e-06, |
| "loss": 0.3441, |
| "step": 1420 |
| }, |
| { |
| "epoch": 1.7044000000000001, |
| "grad_norm": 0.543149879989818, |
| "learning_rate": 4.6966340582546085e-06, |
| "loss": 0.3569, |
| "step": 1421 |
| }, |
| { |
| "epoch": 1.7056, |
| "grad_norm": 0.5079384765827253, |
| "learning_rate": 4.689668996458937e-06, |
| "loss": 0.345, |
| "step": 1422 |
| }, |
| { |
| "epoch": 1.7067999999999999, |
| "grad_norm": 0.5717820988124686, |
| "learning_rate": 4.682704539132011e-06, |
| "loss": 0.3777, |
| "step": 1423 |
| }, |
| { |
| "epoch": 1.708, |
| "grad_norm": 0.6581756912070847, |
| "learning_rate": 4.6757406998393354e-06, |
| "loss": 0.3802, |
| "step": 1424 |
| }, |
| { |
| "epoch": 1.7092, |
| "grad_norm": 0.4871198567708227, |
| "learning_rate": 4.668777492145212e-06, |
| "loss": 0.3737, |
| "step": 1425 |
| }, |
| { |
| "epoch": 1.7104, |
| "grad_norm": 0.5836916748404222, |
| "learning_rate": 4.661814929612713e-06, |
| "loss": 0.3505, |
| "step": 1426 |
| }, |
| { |
| "epoch": 1.7116, |
| "grad_norm": 0.5377148867740239, |
| "learning_rate": 4.654853025803649e-06, |
| "loss": 0.3593, |
| "step": 1427 |
| }, |
| { |
| "epoch": 1.7128, |
| "grad_norm": 0.554213026309025, |
| "learning_rate": 4.6478917942785575e-06, |
| "loss": 0.3546, |
| "step": 1428 |
| }, |
| { |
| "epoch": 1.714, |
| "grad_norm": 0.46820629569907074, |
| "learning_rate": 4.640931248596655e-06, |
| "loss": 0.3324, |
| "step": 1429 |
| }, |
| { |
| "epoch": 1.7151999999999998, |
| "grad_norm": 0.5097593929717572, |
| "learning_rate": 4.633971402315828e-06, |
| "loss": 0.3443, |
| "step": 1430 |
| }, |
| { |
| "epoch": 1.7164000000000001, |
| "grad_norm": 0.39050241505881494, |
| "learning_rate": 4.627012268992603e-06, |
| "loss": 0.3414, |
| "step": 1431 |
| }, |
| { |
| "epoch": 1.7176, |
| "grad_norm": 0.5964374941992685, |
| "learning_rate": 4.62005386218211e-06, |
| "loss": 0.3849, |
| "step": 1432 |
| }, |
| { |
| "epoch": 1.7187999999999999, |
| "grad_norm": 0.5484383135611258, |
| "learning_rate": 4.613096195438074e-06, |
| "loss": 0.4054, |
| "step": 1433 |
| }, |
| { |
| "epoch": 1.72, |
| "grad_norm": 0.6148189012721375, |
| "learning_rate": 4.60613928231277e-06, |
| "loss": 0.3755, |
| "step": 1434 |
| }, |
| { |
| "epoch": 1.7212, |
| "grad_norm": 0.5326369467247358, |
| "learning_rate": 4.59918313635701e-06, |
| "loss": 0.3271, |
| "step": 1435 |
| }, |
| { |
| "epoch": 1.7224, |
| "grad_norm": 0.5273601456958985, |
| "learning_rate": 4.592227771120108e-06, |
| "loss": 0.3817, |
| "step": 1436 |
| }, |
| { |
| "epoch": 1.7236, |
| "grad_norm": 0.4203737234869034, |
| "learning_rate": 4.585273200149859e-06, |
| "loss": 0.3974, |
| "step": 1437 |
| }, |
| { |
| "epoch": 1.7248, |
| "grad_norm": 0.4936440580649403, |
| "learning_rate": 4.578319436992515e-06, |
| "loss": 0.3354, |
| "step": 1438 |
| }, |
| { |
| "epoch": 1.726, |
| "grad_norm": 0.5130638369711625, |
| "learning_rate": 4.5713664951927475e-06, |
| "loss": 0.3403, |
| "step": 1439 |
| }, |
| { |
| "epoch": 1.7271999999999998, |
| "grad_norm": 0.6258616009702679, |
| "learning_rate": 4.5644143882936316e-06, |
| "loss": 0.3865, |
| "step": 1440 |
| }, |
| { |
| "epoch": 1.7284000000000002, |
| "grad_norm": 0.5885108055193322, |
| "learning_rate": 4.5574631298366165e-06, |
| "loss": 0.3858, |
| "step": 1441 |
| }, |
| { |
| "epoch": 1.7296, |
| "grad_norm": 0.45377890152896866, |
| "learning_rate": 4.550512733361499e-06, |
| "loss": 0.3625, |
| "step": 1442 |
| }, |
| { |
| "epoch": 1.7308, |
| "grad_norm": 0.52449993022665, |
| "learning_rate": 4.543563212406392e-06, |
| "loss": 0.3985, |
| "step": 1443 |
| }, |
| { |
| "epoch": 1.732, |
| "grad_norm": 0.5485173263452691, |
| "learning_rate": 4.536614580507714e-06, |
| "loss": 0.3235, |
| "step": 1444 |
| }, |
| { |
| "epoch": 1.7332, |
| "grad_norm": 0.5660979450438521, |
| "learning_rate": 4.52966685120014e-06, |
| "loss": 0.3796, |
| "step": 1445 |
| }, |
| { |
| "epoch": 1.7344, |
| "grad_norm": 0.538975607453001, |
| "learning_rate": 4.5227200380165925e-06, |
| "loss": 0.3837, |
| "step": 1446 |
| }, |
| { |
| "epoch": 1.7356, |
| "grad_norm": 0.5331842010157929, |
| "learning_rate": 4.51577415448821e-06, |
| "loss": 0.359, |
| "step": 1447 |
| }, |
| { |
| "epoch": 1.7368000000000001, |
| "grad_norm": 0.54645760044252, |
| "learning_rate": 4.508829214144318e-06, |
| "loss": 0.3623, |
| "step": 1448 |
| }, |
| { |
| "epoch": 1.738, |
| "grad_norm": 0.46325458435024974, |
| "learning_rate": 4.5018852305124075e-06, |
| "loss": 0.3622, |
| "step": 1449 |
| }, |
| { |
| "epoch": 1.7391999999999999, |
| "grad_norm": 0.5145168772489319, |
| "learning_rate": 4.494942217118105e-06, |
| "loss": 0.3722, |
| "step": 1450 |
| }, |
| { |
| "epoch": 1.7404, |
| "grad_norm": 0.39840795787353445, |
| "learning_rate": 4.488000187485144e-06, |
| "loss": 0.388, |
| "step": 1451 |
| }, |
| { |
| "epoch": 1.7416, |
| "grad_norm": 0.49935920190729366, |
| "learning_rate": 4.481059155135346e-06, |
| "loss": 0.3801, |
| "step": 1452 |
| }, |
| { |
| "epoch": 1.7428, |
| "grad_norm": 0.6174089378652856, |
| "learning_rate": 4.474119133588588e-06, |
| "loss": 0.3663, |
| "step": 1453 |
| }, |
| { |
| "epoch": 1.744, |
| "grad_norm": 0.5238711669514375, |
| "learning_rate": 4.4671801363627776e-06, |
| "loss": 0.3329, |
| "step": 1454 |
| }, |
| { |
| "epoch": 1.7452, |
| "grad_norm": 0.522612750946382, |
| "learning_rate": 4.460242176973829e-06, |
| "loss": 0.3578, |
| "step": 1455 |
| }, |
| { |
| "epoch": 1.7464, |
| "grad_norm": 0.4683774342022167, |
| "learning_rate": 4.453305268935637e-06, |
| "loss": 0.3571, |
| "step": 1456 |
| }, |
| { |
| "epoch": 1.7476, |
| "grad_norm": 0.5671416556577483, |
| "learning_rate": 4.446369425760042e-06, |
| "loss": 0.3724, |
| "step": 1457 |
| }, |
| { |
| "epoch": 1.7488000000000001, |
| "grad_norm": 0.5141170253368055, |
| "learning_rate": 4.439434660956814e-06, |
| "loss": 0.3694, |
| "step": 1458 |
| }, |
| { |
| "epoch": 1.75, |
| "grad_norm": 0.4756332978389349, |
| "learning_rate": 4.432500988033621e-06, |
| "loss": 0.3665, |
| "step": 1459 |
| }, |
| { |
| "epoch": 1.7511999999999999, |
| "grad_norm": 0.5755242591889931, |
| "learning_rate": 4.42556842049601e-06, |
| "loss": 0.3672, |
| "step": 1460 |
| }, |
| { |
| "epoch": 1.7524, |
| "grad_norm": 0.507611616514393, |
| "learning_rate": 4.418636971847367e-06, |
| "loss": 0.3578, |
| "step": 1461 |
| }, |
| { |
| "epoch": 1.7536, |
| "grad_norm": 0.570791682381513, |
| "learning_rate": 4.4117066555889045e-06, |
| "loss": 0.3762, |
| "step": 1462 |
| }, |
| { |
| "epoch": 1.7548, |
| "grad_norm": 0.5423061322282628, |
| "learning_rate": 4.404777485219624e-06, |
| "loss": 0.3295, |
| "step": 1463 |
| }, |
| { |
| "epoch": 1.756, |
| "grad_norm": 0.6360241213438484, |
| "learning_rate": 4.397849474236299e-06, |
| "loss": 0.3811, |
| "step": 1464 |
| }, |
| { |
| "epoch": 1.7572, |
| "grad_norm": 0.5389604778367516, |
| "learning_rate": 4.390922636133444e-06, |
| "loss": 0.39, |
| "step": 1465 |
| }, |
| { |
| "epoch": 1.7584, |
| "grad_norm": 0.6798118345646637, |
| "learning_rate": 4.38399698440329e-06, |
| "loss": 0.3853, |
| "step": 1466 |
| }, |
| { |
| "epoch": 1.7596, |
| "grad_norm": 0.53508198471987, |
| "learning_rate": 4.377072532535756e-06, |
| "loss": 0.4439, |
| "step": 1467 |
| }, |
| { |
| "epoch": 1.7608000000000001, |
| "grad_norm": 0.6128605125037492, |
| "learning_rate": 4.370149294018419e-06, |
| "loss": 0.3696, |
| "step": 1468 |
| }, |
| { |
| "epoch": 1.762, |
| "grad_norm": 0.9725373567491395, |
| "learning_rate": 4.3632272823365004e-06, |
| "loss": 0.4337, |
| "step": 1469 |
| }, |
| { |
| "epoch": 1.7631999999999999, |
| "grad_norm": 0.7151413691087543, |
| "learning_rate": 4.356306510972829e-06, |
| "loss": 0.3852, |
| "step": 1470 |
| }, |
| { |
| "epoch": 1.7644, |
| "grad_norm": 0.6052197334436037, |
| "learning_rate": 4.349386993407817e-06, |
| "loss": 0.3937, |
| "step": 1471 |
| }, |
| { |
| "epoch": 1.7656, |
| "grad_norm": 0.5187059957562955, |
| "learning_rate": 4.342468743119436e-06, |
| "loss": 0.3747, |
| "step": 1472 |
| }, |
| { |
| "epoch": 1.7668, |
| "grad_norm": 0.567039630276201, |
| "learning_rate": 4.335551773583186e-06, |
| "loss": 0.4003, |
| "step": 1473 |
| }, |
| { |
| "epoch": 1.768, |
| "grad_norm": 0.6097264557850262, |
| "learning_rate": 4.328636098272075e-06, |
| "loss": 0.353, |
| "step": 1474 |
| }, |
| { |
| "epoch": 1.7692, |
| "grad_norm": 0.5345926269351398, |
| "learning_rate": 4.32172173065659e-06, |
| "loss": 0.3525, |
| "step": 1475 |
| }, |
| { |
| "epoch": 1.7704, |
| "grad_norm": 0.4797111307565443, |
| "learning_rate": 4.31480868420467e-06, |
| "loss": 0.4078, |
| "step": 1476 |
| }, |
| { |
| "epoch": 1.7715999999999998, |
| "grad_norm": 0.7058092031192786, |
| "learning_rate": 4.307896972381681e-06, |
| "loss": 0.3951, |
| "step": 1477 |
| }, |
| { |
| "epoch": 1.7728000000000002, |
| "grad_norm": 0.5919098471895498, |
| "learning_rate": 4.3009866086503905e-06, |
| "loss": 0.3946, |
| "step": 1478 |
| }, |
| { |
| "epoch": 1.774, |
| "grad_norm": 0.544218809026354, |
| "learning_rate": 4.294077606470937e-06, |
| "loss": 0.3591, |
| "step": 1479 |
| }, |
| { |
| "epoch": 1.7752, |
| "grad_norm": 0.5234209998058871, |
| "learning_rate": 4.28716997930081e-06, |
| "loss": 0.3682, |
| "step": 1480 |
| }, |
| { |
| "epoch": 1.7764, |
| "grad_norm": 0.6673345442623971, |
| "learning_rate": 4.2802637405948175e-06, |
| "loss": 0.3723, |
| "step": 1481 |
| }, |
| { |
| "epoch": 1.7776, |
| "grad_norm": 0.5063519336134552, |
| "learning_rate": 4.273358903805069e-06, |
| "loss": 0.376, |
| "step": 1482 |
| }, |
| { |
| "epoch": 1.7788, |
| "grad_norm": 0.6427479164034344, |
| "learning_rate": 4.266455482380938e-06, |
| "loss": 0.3697, |
| "step": 1483 |
| }, |
| { |
| "epoch": 1.78, |
| "grad_norm": 0.6492453092818349, |
| "learning_rate": 4.2595534897690415e-06, |
| "loss": 0.3645, |
| "step": 1484 |
| }, |
| { |
| "epoch": 1.7812000000000001, |
| "grad_norm": 0.5198773636033195, |
| "learning_rate": 4.252652939413215e-06, |
| "loss": 0.3657, |
| "step": 1485 |
| }, |
| { |
| "epoch": 1.7824, |
| "grad_norm": 0.5506908323597158, |
| "learning_rate": 4.245753844754484e-06, |
| "loss": 0.3639, |
| "step": 1486 |
| }, |
| { |
| "epoch": 1.7835999999999999, |
| "grad_norm": 0.5937767470670442, |
| "learning_rate": 4.23885621923104e-06, |
| "loss": 0.354, |
| "step": 1487 |
| }, |
| { |
| "epoch": 1.7848000000000002, |
| "grad_norm": 0.5679193278077739, |
| "learning_rate": 4.231960076278211e-06, |
| "loss": 0.3855, |
| "step": 1488 |
| }, |
| { |
| "epoch": 1.786, |
| "grad_norm": 0.4961986169959603, |
| "learning_rate": 4.225065429328439e-06, |
| "loss": 0.3803, |
| "step": 1489 |
| }, |
| { |
| "epoch": 1.7872, |
| "grad_norm": 0.5751271293242203, |
| "learning_rate": 4.218172291811249e-06, |
| "loss": 0.3588, |
| "step": 1490 |
| }, |
| { |
| "epoch": 1.7884, |
| "grad_norm": 0.4650715545650898, |
| "learning_rate": 4.211280677153228e-06, |
| "loss": 0.3868, |
| "step": 1491 |
| }, |
| { |
| "epoch": 1.7896, |
| "grad_norm": 0.4609837338355205, |
| "learning_rate": 4.204390598777999e-06, |
| "loss": 0.4046, |
| "step": 1492 |
| }, |
| { |
| "epoch": 1.7908, |
| "grad_norm": 0.674095634826924, |
| "learning_rate": 4.1975020701061884e-06, |
| "loss": 0.3631, |
| "step": 1493 |
| }, |
| { |
| "epoch": 1.792, |
| "grad_norm": 0.5176761419170193, |
| "learning_rate": 4.190615104555407e-06, |
| "loss": 0.3636, |
| "step": 1494 |
| }, |
| { |
| "epoch": 1.7932000000000001, |
| "grad_norm": 0.6213097687855804, |
| "learning_rate": 4.1837297155402204e-06, |
| "loss": 0.3924, |
| "step": 1495 |
| }, |
| { |
| "epoch": 1.7944, |
| "grad_norm": 0.550061903460295, |
| "learning_rate": 4.17684591647212e-06, |
| "loss": 0.3697, |
| "step": 1496 |
| }, |
| { |
| "epoch": 1.7955999999999999, |
| "grad_norm": 0.5631316141718825, |
| "learning_rate": 4.1699637207595035e-06, |
| "loss": 0.3983, |
| "step": 1497 |
| }, |
| { |
| "epoch": 1.7968, |
| "grad_norm": 0.4676009983905216, |
| "learning_rate": 4.163083141807648e-06, |
| "loss": 0.3777, |
| "step": 1498 |
| }, |
| { |
| "epoch": 1.798, |
| "grad_norm": 0.5819571057130736, |
| "learning_rate": 4.156204193018677e-06, |
| "loss": 0.3556, |
| "step": 1499 |
| }, |
| { |
| "epoch": 1.7992, |
| "grad_norm": 0.525961103299972, |
| "learning_rate": 4.149326887791541e-06, |
| "loss": 0.3669, |
| "step": 1500 |
| }, |
| { |
| "epoch": 1.8004, |
| "grad_norm": 0.5537939950528783, |
| "learning_rate": 4.142451239521988e-06, |
| "loss": 0.3548, |
| "step": 1501 |
| }, |
| { |
| "epoch": 1.8016, |
| "grad_norm": 0.4036885983445749, |
| "learning_rate": 4.135577261602537e-06, |
| "loss": 0.3397, |
| "step": 1502 |
| }, |
| { |
| "epoch": 1.8028, |
| "grad_norm": 0.47808838294530726, |
| "learning_rate": 4.128704967422458e-06, |
| "loss": 0.3701, |
| "step": 1503 |
| }, |
| { |
| "epoch": 1.804, |
| "grad_norm": 0.5715714749581858, |
| "learning_rate": 4.1218343703677385e-06, |
| "loss": 0.3816, |
| "step": 1504 |
| }, |
| { |
| "epoch": 1.8052000000000001, |
| "grad_norm": 0.5827261024498809, |
| "learning_rate": 4.114965483821061e-06, |
| "loss": 0.3827, |
| "step": 1505 |
| }, |
| { |
| "epoch": 1.8064, |
| "grad_norm": 0.8838156305247361, |
| "learning_rate": 4.108098321161776e-06, |
| "loss": 0.385, |
| "step": 1506 |
| }, |
| { |
| "epoch": 1.8075999999999999, |
| "grad_norm": 0.4359612260958472, |
| "learning_rate": 4.101232895765875e-06, |
| "loss": 0.3544, |
| "step": 1507 |
| }, |
| { |
| "epoch": 1.8088, |
| "grad_norm": 0.5361847284685747, |
| "learning_rate": 4.094369221005965e-06, |
| "loss": 0.3654, |
| "step": 1508 |
| }, |
| { |
| "epoch": 1.81, |
| "grad_norm": 0.5756106529204161, |
| "learning_rate": 4.0875073102512485e-06, |
| "loss": 0.4157, |
| "step": 1509 |
| }, |
| { |
| "epoch": 1.8112, |
| "grad_norm": 0.573089376292654, |
| "learning_rate": 4.080647176867486e-06, |
| "loss": 0.3692, |
| "step": 1510 |
| }, |
| { |
| "epoch": 1.8124, |
| "grad_norm": 0.5705072972686631, |
| "learning_rate": 4.07378883421698e-06, |
| "loss": 0.3711, |
| "step": 1511 |
| }, |
| { |
| "epoch": 1.8136, |
| "grad_norm": 0.482441385829138, |
| "learning_rate": 4.066932295658543e-06, |
| "loss": 0.3568, |
| "step": 1512 |
| }, |
| { |
| "epoch": 1.8148, |
| "grad_norm": 0.4209533458740963, |
| "learning_rate": 4.06007757454747e-06, |
| "loss": 0.393, |
| "step": 1513 |
| }, |
| { |
| "epoch": 1.8159999999999998, |
| "grad_norm": 0.5236483171737902, |
| "learning_rate": 4.053224684235526e-06, |
| "loss": 0.3611, |
| "step": 1514 |
| }, |
| { |
| "epoch": 1.8172000000000001, |
| "grad_norm": 0.5351363903713013, |
| "learning_rate": 4.0463736380708986e-06, |
| "loss": 0.3706, |
| "step": 1515 |
| }, |
| { |
| "epoch": 1.8184, |
| "grad_norm": 0.5425221056948407, |
| "learning_rate": 4.039524449398191e-06, |
| "loss": 0.3601, |
| "step": 1516 |
| }, |
| { |
| "epoch": 1.8195999999999999, |
| "grad_norm": 0.5048292023028578, |
| "learning_rate": 4.032677131558386e-06, |
| "loss": 0.3854, |
| "step": 1517 |
| }, |
| { |
| "epoch": 1.8208, |
| "grad_norm": 0.4737232612699287, |
| "learning_rate": 4.025831697888817e-06, |
| "loss": 0.398, |
| "step": 1518 |
| }, |
| { |
| "epoch": 1.822, |
| "grad_norm": 0.6024946408130546, |
| "learning_rate": 4.01898816172316e-06, |
| "loss": 0.3237, |
| "step": 1519 |
| }, |
| { |
| "epoch": 1.8232, |
| "grad_norm": 0.4552328457278779, |
| "learning_rate": 4.012146536391383e-06, |
| "loss": 0.3623, |
| "step": 1520 |
| }, |
| { |
| "epoch": 1.8244, |
| "grad_norm": 0.5315194274609365, |
| "learning_rate": 4.005306835219737e-06, |
| "loss": 0.3841, |
| "step": 1521 |
| }, |
| { |
| "epoch": 1.8256000000000001, |
| "grad_norm": 0.7239466583222977, |
| "learning_rate": 3.998469071530725e-06, |
| "loss": 0.3804, |
| "step": 1522 |
| }, |
| { |
| "epoch": 1.8268, |
| "grad_norm": 0.492142871435618, |
| "learning_rate": 3.991633258643077e-06, |
| "loss": 0.3465, |
| "step": 1523 |
| }, |
| { |
| "epoch": 1.8279999999999998, |
| "grad_norm": 0.5303185304197421, |
| "learning_rate": 3.9847994098717166e-06, |
| "loss": 0.3673, |
| "step": 1524 |
| }, |
| { |
| "epoch": 1.8292000000000002, |
| "grad_norm": 0.478005077740389, |
| "learning_rate": 3.9779675385277545e-06, |
| "loss": 0.4136, |
| "step": 1525 |
| }, |
| { |
| "epoch": 1.8304, |
| "grad_norm": 0.4635938541288215, |
| "learning_rate": 3.971137657918437e-06, |
| "loss": 0.3532, |
| "step": 1526 |
| }, |
| { |
| "epoch": 1.8316, |
| "grad_norm": 0.5448930511722193, |
| "learning_rate": 3.96430978134714e-06, |
| "loss": 0.3784, |
| "step": 1527 |
| }, |
| { |
| "epoch": 1.8328, |
| "grad_norm": 0.5951035174657143, |
| "learning_rate": 3.957483922113334e-06, |
| "loss": 0.3702, |
| "step": 1528 |
| }, |
| { |
| "epoch": 1.834, |
| "grad_norm": 0.4767784439251168, |
| "learning_rate": 3.950660093512556e-06, |
| "loss": 0.4134, |
| "step": 1529 |
| }, |
| { |
| "epoch": 1.8352, |
| "grad_norm": 0.5436932780193476, |
| "learning_rate": 3.943838308836398e-06, |
| "loss": 0.346, |
| "step": 1530 |
| }, |
| { |
| "epoch": 1.8364, |
| "grad_norm": 0.5243606584297386, |
| "learning_rate": 3.937018581372462e-06, |
| "loss": 0.3653, |
| "step": 1531 |
| }, |
| { |
| "epoch": 1.8376000000000001, |
| "grad_norm": 0.4947097270033976, |
| "learning_rate": 3.9302009244043435e-06, |
| "loss": 0.3237, |
| "step": 1532 |
| }, |
| { |
| "epoch": 1.8388, |
| "grad_norm": 0.5388902364359281, |
| "learning_rate": 3.923385351211609e-06, |
| "loss": 0.3879, |
| "step": 1533 |
| }, |
| { |
| "epoch": 1.8399999999999999, |
| "grad_norm": 0.6355454986812967, |
| "learning_rate": 3.916571875069764e-06, |
| "loss": 0.3569, |
| "step": 1534 |
| }, |
| { |
| "epoch": 1.8412, |
| "grad_norm": 0.5443889556984081, |
| "learning_rate": 3.909760509250225e-06, |
| "loss": 0.3843, |
| "step": 1535 |
| }, |
| { |
| "epoch": 1.8424, |
| "grad_norm": 0.505390080729345, |
| "learning_rate": 3.902951267020311e-06, |
| "loss": 0.4259, |
| "step": 1536 |
| }, |
| { |
| "epoch": 1.8436, |
| "grad_norm": 0.5215460521041646, |
| "learning_rate": 3.896144161643189e-06, |
| "loss": 0.3681, |
| "step": 1537 |
| }, |
| { |
| "epoch": 1.8448, |
| "grad_norm": 0.5296599874059905, |
| "learning_rate": 3.8893392063778736e-06, |
| "loss": 0.3621, |
| "step": 1538 |
| }, |
| { |
| "epoch": 1.846, |
| "grad_norm": 0.5461805110697693, |
| "learning_rate": 3.882536414479189e-06, |
| "loss": 0.3366, |
| "step": 1539 |
| }, |
| { |
| "epoch": 1.8472, |
| "grad_norm": 0.5108889914041715, |
| "learning_rate": 3.8757357991977415e-06, |
| "loss": 0.3724, |
| "step": 1540 |
| }, |
| { |
| "epoch": 1.8484, |
| "grad_norm": 0.5420603294921492, |
| "learning_rate": 3.868937373779907e-06, |
| "loss": 0.3711, |
| "step": 1541 |
| }, |
| { |
| "epoch": 1.8496000000000001, |
| "grad_norm": 0.48916721532093305, |
| "learning_rate": 3.862141151467787e-06, |
| "loss": 0.3728, |
| "step": 1542 |
| }, |
| { |
| "epoch": 1.8508, |
| "grad_norm": 0.48465691505041003, |
| "learning_rate": 3.855347145499197e-06, |
| "loss": 0.3519, |
| "step": 1543 |
| }, |
| { |
| "epoch": 1.8519999999999999, |
| "grad_norm": 0.5754386422023029, |
| "learning_rate": 3.848555369107631e-06, |
| "loss": 0.421, |
| "step": 1544 |
| }, |
| { |
| "epoch": 1.8532, |
| "grad_norm": 0.5317947339959318, |
| "learning_rate": 3.841765835522242e-06, |
| "loss": 0.3663, |
| "step": 1545 |
| }, |
| { |
| "epoch": 1.8544, |
| "grad_norm": 0.4955907576641392, |
| "learning_rate": 3.83497855796782e-06, |
| "loss": 0.355, |
| "step": 1546 |
| }, |
| { |
| "epoch": 1.8556, |
| "grad_norm": 0.6458398468766117, |
| "learning_rate": 3.8281935496647526e-06, |
| "loss": 0.3743, |
| "step": 1547 |
| }, |
| { |
| "epoch": 1.8568, |
| "grad_norm": 0.46667706268297304, |
| "learning_rate": 3.821410823829011e-06, |
| "loss": 0.4003, |
| "step": 1548 |
| }, |
| { |
| "epoch": 1.858, |
| "grad_norm": 0.5520866583909263, |
| "learning_rate": 3.8146303936721197e-06, |
| "loss": 0.4399, |
| "step": 1549 |
| }, |
| { |
| "epoch": 1.8592, |
| "grad_norm": 0.5773823022211833, |
| "learning_rate": 3.8078522724011324e-06, |
| "loss": 0.3507, |
| "step": 1550 |
| }, |
| { |
| "epoch": 1.8604, |
| "grad_norm": 0.5223004050075711, |
| "learning_rate": 3.8010764732186044e-06, |
| "loss": 0.3768, |
| "step": 1551 |
| }, |
| { |
| "epoch": 1.8616000000000001, |
| "grad_norm": 0.6233856499906449, |
| "learning_rate": 3.79430300932257e-06, |
| "loss": 0.3945, |
| "step": 1552 |
| }, |
| { |
| "epoch": 1.8628, |
| "grad_norm": 0.5769446082685447, |
| "learning_rate": 3.7875318939065147e-06, |
| "loss": 0.3796, |
| "step": 1553 |
| }, |
| { |
| "epoch": 1.8639999999999999, |
| "grad_norm": 0.4976458511268623, |
| "learning_rate": 3.7807631401593455e-06, |
| "loss": 0.3901, |
| "step": 1554 |
| }, |
| { |
| "epoch": 1.8652, |
| "grad_norm": 0.5581927189349054, |
| "learning_rate": 3.773996761265373e-06, |
| "loss": 0.3695, |
| "step": 1555 |
| }, |
| { |
| "epoch": 1.8664, |
| "grad_norm": 0.5680626951405663, |
| "learning_rate": 3.767232770404281e-06, |
| "loss": 0.344, |
| "step": 1556 |
| }, |
| { |
| "epoch": 1.8676, |
| "grad_norm": 0.6465253624638309, |
| "learning_rate": 3.7604711807511034e-06, |
| "loss": 0.4288, |
| "step": 1557 |
| }, |
| { |
| "epoch": 1.8688, |
| "grad_norm": 0.5578386611760368, |
| "learning_rate": 3.753712005476197e-06, |
| "loss": 0.3603, |
| "step": 1558 |
| }, |
| { |
| "epoch": 1.87, |
| "grad_norm": 0.499801410275945, |
| "learning_rate": 3.74695525774521e-06, |
| "loss": 0.3668, |
| "step": 1559 |
| }, |
| { |
| "epoch": 1.8712, |
| "grad_norm": 0.4790306329461315, |
| "learning_rate": 3.7402009507190696e-06, |
| "loss": 0.3946, |
| "step": 1560 |
| }, |
| { |
| "epoch": 1.8723999999999998, |
| "grad_norm": 0.5682465666500219, |
| "learning_rate": 3.733449097553945e-06, |
| "loss": 0.3891, |
| "step": 1561 |
| }, |
| { |
| "epoch": 1.8736000000000002, |
| "grad_norm": 0.4774980812732164, |
| "learning_rate": 3.7266997114012265e-06, |
| "loss": 0.3472, |
| "step": 1562 |
| }, |
| { |
| "epoch": 1.8748, |
| "grad_norm": 0.4619452981736414, |
| "learning_rate": 3.7199528054075005e-06, |
| "loss": 0.3827, |
| "step": 1563 |
| }, |
| { |
| "epoch": 1.876, |
| "grad_norm": 0.5389963332916776, |
| "learning_rate": 3.713208392714523e-06, |
| "loss": 0.3717, |
| "step": 1564 |
| }, |
| { |
| "epoch": 1.8772, |
| "grad_norm": 0.5186510437822639, |
| "learning_rate": 3.7064664864591878e-06, |
| "loss": 0.3886, |
| "step": 1565 |
| }, |
| { |
| "epoch": 1.8784, |
| "grad_norm": 0.4926466183813456, |
| "learning_rate": 3.6997270997735122e-06, |
| "loss": 0.3468, |
| "step": 1566 |
| }, |
| { |
| "epoch": 1.8796, |
| "grad_norm": 0.49498331518620853, |
| "learning_rate": 3.6929902457846034e-06, |
| "loss": 0.3705, |
| "step": 1567 |
| }, |
| { |
| "epoch": 1.8808, |
| "grad_norm": 0.5425259501099292, |
| "learning_rate": 3.6862559376146388e-06, |
| "loss": 0.3747, |
| "step": 1568 |
| }, |
| { |
| "epoch": 1.8820000000000001, |
| "grad_norm": 0.5609809814213781, |
| "learning_rate": 3.6795241883808342e-06, |
| "loss": 0.3532, |
| "step": 1569 |
| }, |
| { |
| "epoch": 1.8832, |
| "grad_norm": 0.5328793062157196, |
| "learning_rate": 3.6727950111954186e-06, |
| "loss": 0.3862, |
| "step": 1570 |
| }, |
| { |
| "epoch": 1.8843999999999999, |
| "grad_norm": 0.544774371543934, |
| "learning_rate": 3.6660684191656155e-06, |
| "loss": 0.3842, |
| "step": 1571 |
| }, |
| { |
| "epoch": 1.8856000000000002, |
| "grad_norm": 0.5647936712841308, |
| "learning_rate": 3.6593444253936094e-06, |
| "loss": 0.3604, |
| "step": 1572 |
| }, |
| { |
| "epoch": 1.8868, |
| "grad_norm": 0.5021335213225804, |
| "learning_rate": 3.652623042976529e-06, |
| "loss": 0.3821, |
| "step": 1573 |
| }, |
| { |
| "epoch": 1.888, |
| "grad_norm": 0.4692959275922926, |
| "learning_rate": 3.645904285006412e-06, |
| "loss": 0.3727, |
| "step": 1574 |
| }, |
| { |
| "epoch": 1.8892, |
| "grad_norm": 0.5394960915353251, |
| "learning_rate": 3.6391881645701854e-06, |
| "loss": 0.3679, |
| "step": 1575 |
| }, |
| { |
| "epoch": 1.8904, |
| "grad_norm": 0.5215317490147278, |
| "learning_rate": 3.632474694749638e-06, |
| "loss": 0.367, |
| "step": 1576 |
| }, |
| { |
| "epoch": 1.8916, |
| "grad_norm": 0.47546096708845664, |
| "learning_rate": 3.625763888621397e-06, |
| "loss": 0.3649, |
| "step": 1577 |
| }, |
| { |
| "epoch": 1.8928, |
| "grad_norm": 0.5621601110474697, |
| "learning_rate": 3.6190557592569e-06, |
| "loss": 0.3321, |
| "step": 1578 |
| }, |
| { |
| "epoch": 1.8940000000000001, |
| "grad_norm": 0.5028797468518021, |
| "learning_rate": 3.612350319722372e-06, |
| "loss": 0.3547, |
| "step": 1579 |
| }, |
| { |
| "epoch": 1.8952, |
| "grad_norm": 0.5196444646365543, |
| "learning_rate": 3.6056475830787997e-06, |
| "loss": 0.3401, |
| "step": 1580 |
| }, |
| { |
| "epoch": 1.8963999999999999, |
| "grad_norm": 0.5638831380664279, |
| "learning_rate": 3.5989475623819025e-06, |
| "loss": 0.4, |
| "step": 1581 |
| }, |
| { |
| "epoch": 1.8976, |
| "grad_norm": 0.5338161038634314, |
| "learning_rate": 3.5922502706821094e-06, |
| "loss": 0.3743, |
| "step": 1582 |
| }, |
| { |
| "epoch": 1.8988, |
| "grad_norm": 0.5183504121139286, |
| "learning_rate": 3.585555721024535e-06, |
| "loss": 0.3736, |
| "step": 1583 |
| }, |
| { |
| "epoch": 1.9, |
| "grad_norm": 0.5441996684359627, |
| "learning_rate": 3.578863926448955e-06, |
| "loss": 0.3638, |
| "step": 1584 |
| }, |
| { |
| "epoch": 1.9012, |
| "grad_norm": 0.532009924270318, |
| "learning_rate": 3.5721748999897753e-06, |
| "loss": 0.3809, |
| "step": 1585 |
| }, |
| { |
| "epoch": 1.9024, |
| "grad_norm": 0.4888811306933541, |
| "learning_rate": 3.5654886546760125e-06, |
| "loss": 0.3885, |
| "step": 1586 |
| }, |
| { |
| "epoch": 1.9036, |
| "grad_norm": 0.5412619197000217, |
| "learning_rate": 3.558805203531263e-06, |
| "loss": 0.3502, |
| "step": 1587 |
| }, |
| { |
| "epoch": 1.9048, |
| "grad_norm": 0.5738922874471647, |
| "learning_rate": 3.5521245595736837e-06, |
| "loss": 0.3849, |
| "step": 1588 |
| }, |
| { |
| "epoch": 1.9060000000000001, |
| "grad_norm": 0.459414943658132, |
| "learning_rate": 3.5454467358159606e-06, |
| "loss": 0.3954, |
| "step": 1589 |
| }, |
| { |
| "epoch": 1.9072, |
| "grad_norm": 0.5942278277436939, |
| "learning_rate": 3.5387717452652914e-06, |
| "loss": 0.3591, |
| "step": 1590 |
| }, |
| { |
| "epoch": 1.9083999999999999, |
| "grad_norm": 0.6053235629917005, |
| "learning_rate": 3.53209960092335e-06, |
| "loss": 0.3679, |
| "step": 1591 |
| }, |
| { |
| "epoch": 1.9096, |
| "grad_norm": 0.5589122111952849, |
| "learning_rate": 3.5254303157862707e-06, |
| "loss": 0.3635, |
| "step": 1592 |
| }, |
| { |
| "epoch": 1.9108, |
| "grad_norm": 0.4688125407210059, |
| "learning_rate": 3.5187639028446136e-06, |
| "loss": 0.3652, |
| "step": 1593 |
| }, |
| { |
| "epoch": 1.912, |
| "grad_norm": 0.5608055017754532, |
| "learning_rate": 3.512100375083347e-06, |
| "loss": 0.3985, |
| "step": 1594 |
| }, |
| { |
| "epoch": 1.9132, |
| "grad_norm": 0.5716758221470768, |
| "learning_rate": 3.5054397454818224e-06, |
| "loss": 0.3888, |
| "step": 1595 |
| }, |
| { |
| "epoch": 1.9144, |
| "grad_norm": 0.5971907346915089, |
| "learning_rate": 3.498782027013742e-06, |
| "loss": 0.3564, |
| "step": 1596 |
| }, |
| { |
| "epoch": 1.9156, |
| "grad_norm": 0.43530261136331444, |
| "learning_rate": 3.4921272326471388e-06, |
| "loss": 0.3481, |
| "step": 1597 |
| }, |
| { |
| "epoch": 1.9167999999999998, |
| "grad_norm": 0.5258196952161901, |
| "learning_rate": 3.4854753753443494e-06, |
| "loss": 0.3846, |
| "step": 1598 |
| }, |
| { |
| "epoch": 1.9180000000000001, |
| "grad_norm": 0.5371980957790986, |
| "learning_rate": 3.47882646806199e-06, |
| "loss": 0.3688, |
| "step": 1599 |
| }, |
| { |
| "epoch": 1.9192, |
| "grad_norm": 0.5651026783522477, |
| "learning_rate": 3.472180523750933e-06, |
| "loss": 0.3746, |
| "step": 1600 |
| }, |
| { |
| "epoch": 1.9203999999999999, |
| "grad_norm": 0.48795009081539864, |
| "learning_rate": 3.4655375553562774e-06, |
| "loss": 0.3798, |
| "step": 1601 |
| }, |
| { |
| "epoch": 1.9216, |
| "grad_norm": 0.5500703230461617, |
| "learning_rate": 3.458897575817326e-06, |
| "loss": 0.3748, |
| "step": 1602 |
| }, |
| { |
| "epoch": 1.9228, |
| "grad_norm": 0.48505267826496845, |
| "learning_rate": 3.4522605980675593e-06, |
| "loss": 0.3722, |
| "step": 1603 |
| }, |
| { |
| "epoch": 1.924, |
| "grad_norm": 0.5497801258468328, |
| "learning_rate": 3.44562663503461e-06, |
| "loss": 0.3491, |
| "step": 1604 |
| }, |
| { |
| "epoch": 1.9252, |
| "grad_norm": 0.5234204994472285, |
| "learning_rate": 3.43899569964024e-06, |
| "loss": 0.3658, |
| "step": 1605 |
| }, |
| { |
| "epoch": 1.9264000000000001, |
| "grad_norm": 0.5696552141865382, |
| "learning_rate": 3.432367804800316e-06, |
| "loss": 0.3639, |
| "step": 1606 |
| }, |
| { |
| "epoch": 1.9276, |
| "grad_norm": 0.5257832715215263, |
| "learning_rate": 3.4257429634247783e-06, |
| "loss": 0.372, |
| "step": 1607 |
| }, |
| { |
| "epoch": 1.9287999999999998, |
| "grad_norm": 0.5737981603168226, |
| "learning_rate": 3.419121188417622e-06, |
| "loss": 0.4145, |
| "step": 1608 |
| }, |
| { |
| "epoch": 1.9300000000000002, |
| "grad_norm": 0.5764943601824868, |
| "learning_rate": 3.41250249267687e-06, |
| "loss": 0.3691, |
| "step": 1609 |
| }, |
| { |
| "epoch": 1.9312, |
| "grad_norm": 0.5491686888513345, |
| "learning_rate": 3.4058868890945425e-06, |
| "loss": 0.3662, |
| "step": 1610 |
| }, |
| { |
| "epoch": 1.9324, |
| "grad_norm": 0.6044451235894281, |
| "learning_rate": 3.3992743905566453e-06, |
| "loss": 0.3584, |
| "step": 1611 |
| }, |
| { |
| "epoch": 1.9336, |
| "grad_norm": 0.4198217864035794, |
| "learning_rate": 3.3926650099431286e-06, |
| "loss": 0.3525, |
| "step": 1612 |
| }, |
| { |
| "epoch": 1.9348, |
| "grad_norm": 0.5120087990944092, |
| "learning_rate": 3.3860587601278715e-06, |
| "loss": 0.3701, |
| "step": 1613 |
| }, |
| { |
| "epoch": 1.936, |
| "grad_norm": 0.6721308523835157, |
| "learning_rate": 3.3794556539786584e-06, |
| "loss": 0.3343, |
| "step": 1614 |
| }, |
| { |
| "epoch": 1.9372, |
| "grad_norm": 0.5970580286538677, |
| "learning_rate": 3.372855704357144e-06, |
| "loss": 0.3936, |
| "step": 1615 |
| }, |
| { |
| "epoch": 1.9384000000000001, |
| "grad_norm": 0.5137829253898193, |
| "learning_rate": 3.3662589241188382e-06, |
| "loss": 0.3556, |
| "step": 1616 |
| }, |
| { |
| "epoch": 1.9396, |
| "grad_norm": 0.531855958226858, |
| "learning_rate": 3.3596653261130806e-06, |
| "loss": 0.3589, |
| "step": 1617 |
| }, |
| { |
| "epoch": 1.9407999999999999, |
| "grad_norm": 0.5946587085895932, |
| "learning_rate": 3.3530749231830073e-06, |
| "loss": 0.329, |
| "step": 1618 |
| }, |
| { |
| "epoch": 1.942, |
| "grad_norm": 0.4686111871670008, |
| "learning_rate": 3.3464877281655335e-06, |
| "loss": 0.3747, |
| "step": 1619 |
| }, |
| { |
| "epoch": 1.9432, |
| "grad_norm": 0.5992858573364381, |
| "learning_rate": 3.339903753891326e-06, |
| "loss": 0.3973, |
| "step": 1620 |
| }, |
| { |
| "epoch": 1.9444, |
| "grad_norm": 0.5396983308324597, |
| "learning_rate": 3.333323013184773e-06, |
| "loss": 0.329, |
| "step": 1621 |
| }, |
| { |
| "epoch": 1.9456, |
| "grad_norm": 0.4964847650360278, |
| "learning_rate": 3.326745518863976e-06, |
| "loss": 0.3878, |
| "step": 1622 |
| }, |
| { |
| "epoch": 1.9468, |
| "grad_norm": 0.6389901728616871, |
| "learning_rate": 3.320171283740702e-06, |
| "loss": 0.3713, |
| "step": 1623 |
| }, |
| { |
| "epoch": 1.948, |
| "grad_norm": 0.49670306633769595, |
| "learning_rate": 3.3136003206203727e-06, |
| "loss": 0.3551, |
| "step": 1624 |
| }, |
| { |
| "epoch": 1.9492, |
| "grad_norm": 0.5582835213645627, |
| "learning_rate": 3.307032642302041e-06, |
| "loss": 0.3608, |
| "step": 1625 |
| }, |
| { |
| "epoch": 1.9504000000000001, |
| "grad_norm": 0.5196610105352837, |
| "learning_rate": 3.3004682615783524e-06, |
| "loss": 0.4153, |
| "step": 1626 |
| }, |
| { |
| "epoch": 1.9516, |
| "grad_norm": 0.5771469058077887, |
| "learning_rate": 3.2939071912355424e-06, |
| "loss": 0.4018, |
| "step": 1627 |
| }, |
| { |
| "epoch": 1.9527999999999999, |
| "grad_norm": 0.49003266342707796, |
| "learning_rate": 3.2873494440533856e-06, |
| "loss": 0.3513, |
| "step": 1628 |
| }, |
| { |
| "epoch": 1.954, |
| "grad_norm": 0.46639523883379724, |
| "learning_rate": 3.2807950328051906e-06, |
| "loss": 0.3605, |
| "step": 1629 |
| }, |
| { |
| "epoch": 1.9552, |
| "grad_norm": 0.6535942240293555, |
| "learning_rate": 3.2742439702577665e-06, |
| "loss": 0.3778, |
| "step": 1630 |
| }, |
| { |
| "epoch": 1.9564, |
| "grad_norm": 0.48640850782934464, |
| "learning_rate": 3.267696269171402e-06, |
| "loss": 0.3624, |
| "step": 1631 |
| }, |
| { |
| "epoch": 1.9576, |
| "grad_norm": 0.48986749881878144, |
| "learning_rate": 3.2611519422998308e-06, |
| "loss": 0.3612, |
| "step": 1632 |
| }, |
| { |
| "epoch": 1.9588, |
| "grad_norm": 0.48744895083408596, |
| "learning_rate": 3.254611002390227e-06, |
| "loss": 0.3887, |
| "step": 1633 |
| }, |
| { |
| "epoch": 1.96, |
| "grad_norm": 0.6344603647518389, |
| "learning_rate": 3.248073462183155e-06, |
| "loss": 0.343, |
| "step": 1634 |
| }, |
| { |
| "epoch": 1.9612, |
| "grad_norm": 0.5455560601200089, |
| "learning_rate": 3.2415393344125647e-06, |
| "loss": 0.3784, |
| "step": 1635 |
| }, |
| { |
| "epoch": 1.9624000000000001, |
| "grad_norm": 0.551024447947303, |
| "learning_rate": 3.235008631805755e-06, |
| "loss": 0.3725, |
| "step": 1636 |
| }, |
| { |
| "epoch": 1.9636, |
| "grad_norm": 0.5820297969549334, |
| "learning_rate": 3.228481367083356e-06, |
| "loss": 0.3958, |
| "step": 1637 |
| }, |
| { |
| "epoch": 1.9647999999999999, |
| "grad_norm": 0.5127265706843908, |
| "learning_rate": 3.2219575529593017e-06, |
| "loss": 0.4105, |
| "step": 1638 |
| }, |
| { |
| "epoch": 1.966, |
| "grad_norm": 0.6449585442393768, |
| "learning_rate": 3.215437202140803e-06, |
| "loss": 0.3753, |
| "step": 1639 |
| }, |
| { |
| "epoch": 1.9672, |
| "grad_norm": 0.5258365554255892, |
| "learning_rate": 3.2089203273283253e-06, |
| "loss": 0.3682, |
| "step": 1640 |
| }, |
| { |
| "epoch": 1.9684, |
| "grad_norm": 0.6624073726742211, |
| "learning_rate": 3.2024069412155632e-06, |
| "loss": 0.3743, |
| "step": 1641 |
| }, |
| { |
| "epoch": 1.9696, |
| "grad_norm": 0.4454142223170098, |
| "learning_rate": 3.1958970564894187e-06, |
| "loss": 0.3807, |
| "step": 1642 |
| }, |
| { |
| "epoch": 1.9708, |
| "grad_norm": 0.5076196288677864, |
| "learning_rate": 3.189390685829967e-06, |
| "loss": 0.3751, |
| "step": 1643 |
| }, |
| { |
| "epoch": 1.972, |
| "grad_norm": 0.6237584283328298, |
| "learning_rate": 3.182887841910448e-06, |
| "loss": 0.3547, |
| "step": 1644 |
| }, |
| { |
| "epoch": 1.9731999999999998, |
| "grad_norm": 0.49749450545562557, |
| "learning_rate": 3.1763885373972246e-06, |
| "loss": 0.3614, |
| "step": 1645 |
| }, |
| { |
| "epoch": 1.9744000000000002, |
| "grad_norm": 0.5197430897510873, |
| "learning_rate": 3.1698927849497683e-06, |
| "loss": 0.3128, |
| "step": 1646 |
| }, |
| { |
| "epoch": 1.9756, |
| "grad_norm": 0.4808955813148, |
| "learning_rate": 3.1634005972206326e-06, |
| "loss": 0.3484, |
| "step": 1647 |
| }, |
| { |
| "epoch": 1.9768, |
| "grad_norm": 0.5607402476639658, |
| "learning_rate": 3.156911986855425e-06, |
| "loss": 0.3803, |
| "step": 1648 |
| }, |
| { |
| "epoch": 1.978, |
| "grad_norm": 0.5236498380407515, |
| "learning_rate": 3.150426966492788e-06, |
| "loss": 0.3778, |
| "step": 1649 |
| }, |
| { |
| "epoch": 1.9792, |
| "grad_norm": 0.6068813245345871, |
| "learning_rate": 3.143945548764371e-06, |
| "loss": 0.367, |
| "step": 1650 |
| }, |
| { |
| "epoch": 1.9804, |
| "grad_norm": 0.6489137877474729, |
| "learning_rate": 3.137467746294803e-06, |
| "loss": 0.3533, |
| "step": 1651 |
| }, |
| { |
| "epoch": 1.9816, |
| "grad_norm": 0.5096114548543674, |
| "learning_rate": 3.130993571701674e-06, |
| "loss": 0.3736, |
| "step": 1652 |
| }, |
| { |
| "epoch": 1.9828000000000001, |
| "grad_norm": 0.5145598601879202, |
| "learning_rate": 3.124523037595506e-06, |
| "loss": 0.3733, |
| "step": 1653 |
| }, |
| { |
| "epoch": 1.984, |
| "grad_norm": 0.47189446289024184, |
| "learning_rate": 3.1180561565797323e-06, |
| "loss": 0.3892, |
| "step": 1654 |
| }, |
| { |
| "epoch": 1.9851999999999999, |
| "grad_norm": 0.6762021822264397, |
| "learning_rate": 3.1115929412506698e-06, |
| "loss": 0.3443, |
| "step": 1655 |
| }, |
| { |
| "epoch": 1.9864000000000002, |
| "grad_norm": 0.48938422415105837, |
| "learning_rate": 3.1051334041974923e-06, |
| "loss": 0.3891, |
| "step": 1656 |
| }, |
| { |
| "epoch": 1.9876, |
| "grad_norm": 0.5313458718096313, |
| "learning_rate": 3.0986775580022122e-06, |
| "loss": 0.3361, |
| "step": 1657 |
| }, |
| { |
| "epoch": 1.9888, |
| "grad_norm": 0.5134417300426585, |
| "learning_rate": 3.092225415239652e-06, |
| "loss": 0.3499, |
| "step": 1658 |
| }, |
| { |
| "epoch": 1.99, |
| "grad_norm": 0.5303924683671959, |
| "learning_rate": 3.0857769884774192e-06, |
| "loss": 0.3514, |
| "step": 1659 |
| }, |
| { |
| "epoch": 1.9912, |
| "grad_norm": 0.4818336976271982, |
| "learning_rate": 3.079332290275887e-06, |
| "loss": 0.4273, |
| "step": 1660 |
| }, |
| { |
| "epoch": 1.9924, |
| "grad_norm": 0.5734803663648053, |
| "learning_rate": 3.0728913331881638e-06, |
| "loss": 0.3691, |
| "step": 1661 |
| }, |
| { |
| "epoch": 1.9936, |
| "grad_norm": 0.5192783415445636, |
| "learning_rate": 3.0664541297600682e-06, |
| "loss": 0.3576, |
| "step": 1662 |
| }, |
| { |
| "epoch": 1.9948000000000001, |
| "grad_norm": 0.5898979211568474, |
| "learning_rate": 3.0600206925301114e-06, |
| "loss": 0.3545, |
| "step": 1663 |
| }, |
| { |
| "epoch": 1.996, |
| "grad_norm": 0.6277347416397628, |
| "learning_rate": 3.053591034029465e-06, |
| "loss": 0.4102, |
| "step": 1664 |
| }, |
| { |
| "epoch": 1.9971999999999999, |
| "grad_norm": 0.4769980692535198, |
| "learning_rate": 3.0471651667819447e-06, |
| "loss": 0.3512, |
| "step": 1665 |
| }, |
| { |
| "epoch": 1.9984, |
| "grad_norm": 0.5481061372674426, |
| "learning_rate": 3.0407431033039795e-06, |
| "loss": 0.343, |
| "step": 1666 |
| }, |
| { |
| "epoch": 1.9996, |
| "grad_norm": 0.49844107285522393, |
| "learning_rate": 3.03432485610459e-06, |
| "loss": 0.378, |
| "step": 1667 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.49844107285522393, |
| "learning_rate": 3.0279104376853592e-06, |
| "loss": 0.3941, |
| "step": 1668 |
| }, |
| { |
| "epoch": 2.0012, |
| "grad_norm": 1.0591897389977591, |
| "learning_rate": 3.0214998605404165e-06, |
| "loss": 0.297, |
| "step": 1669 |
| }, |
| { |
| "epoch": 2.0024, |
| "grad_norm": 0.5624930162629377, |
| "learning_rate": 3.0150931371564107e-06, |
| "loss": 0.332, |
| "step": 1670 |
| }, |
| { |
| "epoch": 2.0036, |
| "grad_norm": 0.5333759652290109, |
| "learning_rate": 3.0086902800124806e-06, |
| "loss": 0.3298, |
| "step": 1671 |
| }, |
| { |
| "epoch": 2.0048, |
| "grad_norm": 0.6392975587937063, |
| "learning_rate": 3.0022913015802363e-06, |
| "loss": 0.2996, |
| "step": 1672 |
| }, |
| { |
| "epoch": 2.006, |
| "grad_norm": 0.6357461041838159, |
| "learning_rate": 2.99589621432373e-06, |
| "loss": 0.3202, |
| "step": 1673 |
| }, |
| { |
| "epoch": 2.0072, |
| "grad_norm": 0.4796703489749423, |
| "learning_rate": 2.9895050306994385e-06, |
| "loss": 0.333, |
| "step": 1674 |
| }, |
| { |
| "epoch": 2.0084, |
| "grad_norm": 0.5361479466387894, |
| "learning_rate": 2.9831177631562306e-06, |
| "loss": 0.3002, |
| "step": 1675 |
| }, |
| { |
| "epoch": 2.0096, |
| "grad_norm": 0.7273485065920107, |
| "learning_rate": 2.9767344241353535e-06, |
| "loss": 0.3374, |
| "step": 1676 |
| }, |
| { |
| "epoch": 2.0108, |
| "grad_norm": 0.610498600023043, |
| "learning_rate": 2.9703550260703974e-06, |
| "loss": 0.2796, |
| "step": 1677 |
| }, |
| { |
| "epoch": 2.012, |
| "grad_norm": 0.6240915148913934, |
| "learning_rate": 2.9639795813872773e-06, |
| "loss": 0.3453, |
| "step": 1678 |
| }, |
| { |
| "epoch": 2.0132, |
| "grad_norm": 0.5664530610060867, |
| "learning_rate": 2.9576081025042068e-06, |
| "loss": 0.3245, |
| "step": 1679 |
| }, |
| { |
| "epoch": 2.0144, |
| "grad_norm": 0.5028610712097004, |
| "learning_rate": 2.9512406018316763e-06, |
| "loss": 0.3709, |
| "step": 1680 |
| }, |
| { |
| "epoch": 2.0156, |
| "grad_norm": 0.6410080913126401, |
| "learning_rate": 2.9448770917724296e-06, |
| "loss": 0.3355, |
| "step": 1681 |
| }, |
| { |
| "epoch": 2.0168, |
| "grad_norm": 0.5406313865257344, |
| "learning_rate": 2.9385175847214325e-06, |
| "loss": 0.3464, |
| "step": 1682 |
| }, |
| { |
| "epoch": 2.018, |
| "grad_norm": 0.5824184255775667, |
| "learning_rate": 2.9321620930658578e-06, |
| "loss": 0.3288, |
| "step": 1683 |
| }, |
| { |
| "epoch": 2.0192, |
| "grad_norm": 0.5210606649765747, |
| "learning_rate": 2.925810629185054e-06, |
| "loss": 0.3252, |
| "step": 1684 |
| }, |
| { |
| "epoch": 2.0204, |
| "grad_norm": 0.6983919226474768, |
| "learning_rate": 2.919463205450526e-06, |
| "loss": 0.2967, |
| "step": 1685 |
| }, |
| { |
| "epoch": 2.0216, |
| "grad_norm": 0.4058001224876323, |
| "learning_rate": 2.9131198342259065e-06, |
| "loss": 0.286, |
| "step": 1686 |
| }, |
| { |
| "epoch": 2.0228, |
| "grad_norm": 0.5599493682522717, |
| "learning_rate": 2.9067805278669425e-06, |
| "loss": 0.3139, |
| "step": 1687 |
| }, |
| { |
| "epoch": 2.024, |
| "grad_norm": 0.48325874794700446, |
| "learning_rate": 2.900445298721455e-06, |
| "loss": 0.3179, |
| "step": 1688 |
| }, |
| { |
| "epoch": 2.0252, |
| "grad_norm": 0.5193881973767888, |
| "learning_rate": 2.894114159129324e-06, |
| "loss": 0.3273, |
| "step": 1689 |
| }, |
| { |
| "epoch": 2.0264, |
| "grad_norm": 0.5058550742725104, |
| "learning_rate": 2.8877871214224694e-06, |
| "loss": 0.3554, |
| "step": 1690 |
| }, |
| { |
| "epoch": 2.0276, |
| "grad_norm": 0.6095816753403872, |
| "learning_rate": 2.881464197924814e-06, |
| "loss": 0.3258, |
| "step": 1691 |
| }, |
| { |
| "epoch": 2.0288, |
| "grad_norm": 0.6214600375192152, |
| "learning_rate": 2.875145400952274e-06, |
| "loss": 0.2933, |
| "step": 1692 |
| }, |
| { |
| "epoch": 2.03, |
| "grad_norm": 0.4814523589044724, |
| "learning_rate": 2.868830742812726e-06, |
| "loss": 0.3466, |
| "step": 1693 |
| }, |
| { |
| "epoch": 2.0312, |
| "grad_norm": 0.5850847089967712, |
| "learning_rate": 2.8625202358059806e-06, |
| "loss": 0.3137, |
| "step": 1694 |
| }, |
| { |
| "epoch": 2.0324, |
| "grad_norm": 0.5066166407228644, |
| "learning_rate": 2.8562138922237648e-06, |
| "loss": 0.3253, |
| "step": 1695 |
| }, |
| { |
| "epoch": 2.0336, |
| "grad_norm": 0.4972535078378977, |
| "learning_rate": 2.8499117243496986e-06, |
| "loss": 0.317, |
| "step": 1696 |
| }, |
| { |
| "epoch": 2.0348, |
| "grad_norm": 0.46441502204129653, |
| "learning_rate": 2.8436137444592694e-06, |
| "loss": 0.3207, |
| "step": 1697 |
| }, |
| { |
| "epoch": 2.036, |
| "grad_norm": 0.5061917060410045, |
| "learning_rate": 2.837319964819801e-06, |
| "loss": 0.3188, |
| "step": 1698 |
| }, |
| { |
| "epoch": 2.0372, |
| "grad_norm": 0.5331265053707761, |
| "learning_rate": 2.8310303976904396e-06, |
| "loss": 0.3357, |
| "step": 1699 |
| }, |
| { |
| "epoch": 2.0384, |
| "grad_norm": 0.48253564003344795, |
| "learning_rate": 2.824745055322128e-06, |
| "loss": 0.3327, |
| "step": 1700 |
| }, |
| { |
| "epoch": 2.0396, |
| "grad_norm": 0.561262170115495, |
| "learning_rate": 2.818463949957575e-06, |
| "loss": 0.3304, |
| "step": 1701 |
| }, |
| { |
| "epoch": 2.0408, |
| "grad_norm": 0.517301545564998, |
| "learning_rate": 2.8121870938312413e-06, |
| "loss": 0.325, |
| "step": 1702 |
| }, |
| { |
| "epoch": 2.042, |
| "grad_norm": 0.5013079295176773, |
| "learning_rate": 2.80591449916931e-06, |
| "loss": 0.2929, |
| "step": 1703 |
| }, |
| { |
| "epoch": 2.0432, |
| "grad_norm": 0.5906583350314853, |
| "learning_rate": 2.7996461781896624e-06, |
| "loss": 0.3455, |
| "step": 1704 |
| }, |
| { |
| "epoch": 2.0444, |
| "grad_norm": 0.5648511168991517, |
| "learning_rate": 2.7933821431018523e-06, |
| "loss": 0.3446, |
| "step": 1705 |
| }, |
| { |
| "epoch": 2.0456, |
| "grad_norm": 0.5106440892824704, |
| "learning_rate": 2.7871224061070935e-06, |
| "loss": 0.3181, |
| "step": 1706 |
| }, |
| { |
| "epoch": 2.0468, |
| "grad_norm": 0.59545898809765, |
| "learning_rate": 2.780866979398218e-06, |
| "loss": 0.3606, |
| "step": 1707 |
| }, |
| { |
| "epoch": 2.048, |
| "grad_norm": 0.5652481430453722, |
| "learning_rate": 2.77461587515967e-06, |
| "loss": 0.3228, |
| "step": 1708 |
| }, |
| { |
| "epoch": 2.0492, |
| "grad_norm": 0.4548590670825555, |
| "learning_rate": 2.7683691055674745e-06, |
| "loss": 0.3187, |
| "step": 1709 |
| }, |
| { |
| "epoch": 2.0504, |
| "grad_norm": 0.537889637661287, |
| "learning_rate": 2.7621266827892062e-06, |
| "loss": 0.3285, |
| "step": 1710 |
| }, |
| { |
| "epoch": 2.0516, |
| "grad_norm": 0.48534951318330044, |
| "learning_rate": 2.755888618983977e-06, |
| "loss": 0.3347, |
| "step": 1711 |
| }, |
| { |
| "epoch": 2.0528, |
| "grad_norm": 0.4562443608158437, |
| "learning_rate": 2.749654926302412e-06, |
| "loss": 0.3029, |
| "step": 1712 |
| }, |
| { |
| "epoch": 2.054, |
| "grad_norm": 0.5451371452941084, |
| "learning_rate": 2.743425616886615e-06, |
| "loss": 0.3251, |
| "step": 1713 |
| }, |
| { |
| "epoch": 2.0552, |
| "grad_norm": 0.5001312557188845, |
| "learning_rate": 2.737200702870157e-06, |
| "loss": 0.2942, |
| "step": 1714 |
| }, |
| { |
| "epoch": 2.0564, |
| "grad_norm": 0.5597264970045684, |
| "learning_rate": 2.7309801963780485e-06, |
| "loss": 0.3253, |
| "step": 1715 |
| }, |
| { |
| "epoch": 2.0576, |
| "grad_norm": 0.4875406465840606, |
| "learning_rate": 2.724764109526711e-06, |
| "loss": 0.3359, |
| "step": 1716 |
| }, |
| { |
| "epoch": 2.0588, |
| "grad_norm": 0.4521934668461847, |
| "learning_rate": 2.7185524544239567e-06, |
| "loss": 0.2898, |
| "step": 1717 |
| }, |
| { |
| "epoch": 2.06, |
| "grad_norm": 0.5129446705571528, |
| "learning_rate": 2.71234524316897e-06, |
| "loss": 0.3297, |
| "step": 1718 |
| }, |
| { |
| "epoch": 2.0612, |
| "grad_norm": 0.6356511930660489, |
| "learning_rate": 2.706142487852279e-06, |
| "loss": 0.3566, |
| "step": 1719 |
| }, |
| { |
| "epoch": 2.0624, |
| "grad_norm": 0.6062536311557942, |
| "learning_rate": 2.699944200555727e-06, |
| "loss": 0.3328, |
| "step": 1720 |
| }, |
| { |
| "epoch": 2.0636, |
| "grad_norm": 0.5755634538397862, |
| "learning_rate": 2.693750393352462e-06, |
| "loss": 0.3231, |
| "step": 1721 |
| }, |
| { |
| "epoch": 2.0648, |
| "grad_norm": 0.41540127708606395, |
| "learning_rate": 2.6875610783069007e-06, |
| "loss": 0.313, |
| "step": 1722 |
| }, |
| { |
| "epoch": 2.066, |
| "grad_norm": 0.5649731253952338, |
| "learning_rate": 2.681376267474707e-06, |
| "loss": 0.331, |
| "step": 1723 |
| }, |
| { |
| "epoch": 2.0672, |
| "grad_norm": 0.42456286519904973, |
| "learning_rate": 2.67519597290278e-06, |
| "loss": 0.294, |
| "step": 1724 |
| }, |
| { |
| "epoch": 2.0684, |
| "grad_norm": 0.644090239706362, |
| "learning_rate": 2.669020206629217e-06, |
| "loss": 0.345, |
| "step": 1725 |
| }, |
| { |
| "epoch": 2.0696, |
| "grad_norm": 0.6154771529796125, |
| "learning_rate": 2.6628489806832947e-06, |
| "loss": 0.3091, |
| "step": 1726 |
| }, |
| { |
| "epoch": 2.0708, |
| "grad_norm": 0.5171610309392253, |
| "learning_rate": 2.6566823070854442e-06, |
| "loss": 0.3147, |
| "step": 1727 |
| }, |
| { |
| "epoch": 2.072, |
| "grad_norm": 0.4790305699062261, |
| "learning_rate": 2.650520197847235e-06, |
| "loss": 0.3149, |
| "step": 1728 |
| }, |
| { |
| "epoch": 2.0732, |
| "grad_norm": 0.5807330206595483, |
| "learning_rate": 2.6443626649713407e-06, |
| "loss": 0.3275, |
| "step": 1729 |
| }, |
| { |
| "epoch": 2.0744, |
| "grad_norm": 0.5596362638288411, |
| "learning_rate": 2.6382097204515246e-06, |
| "loss": 0.2882, |
| "step": 1730 |
| }, |
| { |
| "epoch": 2.0756, |
| "grad_norm": 0.4542833127022029, |
| "learning_rate": 2.6320613762726123e-06, |
| "loss": 0.3388, |
| "step": 1731 |
| }, |
| { |
| "epoch": 2.0768, |
| "grad_norm": 0.6155612626173607, |
| "learning_rate": 2.625917644410467e-06, |
| "loss": 0.3177, |
| "step": 1732 |
| }, |
| { |
| "epoch": 2.078, |
| "grad_norm": 0.5610949947795919, |
| "learning_rate": 2.6197785368319663e-06, |
| "loss": 0.3191, |
| "step": 1733 |
| }, |
| { |
| "epoch": 2.0792, |
| "grad_norm": 0.5195379523395367, |
| "learning_rate": 2.613644065494985e-06, |
| "loss": 0.2999, |
| "step": 1734 |
| }, |
| { |
| "epoch": 2.0804, |
| "grad_norm": 0.5130953063709173, |
| "learning_rate": 2.6075142423483675e-06, |
| "loss": 0.3326, |
| "step": 1735 |
| }, |
| { |
| "epoch": 2.0816, |
| "grad_norm": 0.5438357579709007, |
| "learning_rate": 2.6013890793318972e-06, |
| "loss": 0.321, |
| "step": 1736 |
| }, |
| { |
| "epoch": 2.0828, |
| "grad_norm": 0.42650077102042744, |
| "learning_rate": 2.5952685883762918e-06, |
| "loss": 0.3227, |
| "step": 1737 |
| }, |
| { |
| "epoch": 2.084, |
| "grad_norm": 0.6695148645027172, |
| "learning_rate": 2.589152781403158e-06, |
| "loss": 0.3475, |
| "step": 1738 |
| }, |
| { |
| "epoch": 2.0852, |
| "grad_norm": 0.5432827453027909, |
| "learning_rate": 2.583041670324982e-06, |
| "loss": 0.3244, |
| "step": 1739 |
| }, |
| { |
| "epoch": 2.0864, |
| "grad_norm": 0.4764183872286641, |
| "learning_rate": 2.5769352670451058e-06, |
| "loss": 0.3045, |
| "step": 1740 |
| }, |
| { |
| "epoch": 2.0876, |
| "grad_norm": 0.5671290448812345, |
| "learning_rate": 2.5708335834577035e-06, |
| "loss": 0.3315, |
| "step": 1741 |
| }, |
| { |
| "epoch": 2.0888, |
| "grad_norm": 0.44226466754051197, |
| "learning_rate": 2.5647366314477473e-06, |
| "loss": 0.2882, |
| "step": 1742 |
| }, |
| { |
| "epoch": 2.09, |
| "grad_norm": 0.5551492977171031, |
| "learning_rate": 2.5586444228910036e-06, |
| "loss": 0.3308, |
| "step": 1743 |
| }, |
| { |
| "epoch": 2.0912, |
| "grad_norm": 0.5878933682398272, |
| "learning_rate": 2.5525569696539916e-06, |
| "loss": 0.3083, |
| "step": 1744 |
| }, |
| { |
| "epoch": 2.0924, |
| "grad_norm": 0.4629754288702419, |
| "learning_rate": 2.546474283593969e-06, |
| "loss": 0.3, |
| "step": 1745 |
| }, |
| { |
| "epoch": 2.0936, |
| "grad_norm": 0.49341087104009557, |
| "learning_rate": 2.540396376558912e-06, |
| "loss": 0.3259, |
| "step": 1746 |
| }, |
| { |
| "epoch": 2.0948, |
| "grad_norm": 0.5455999623520852, |
| "learning_rate": 2.5343232603874868e-06, |
| "loss": 0.3105, |
| "step": 1747 |
| }, |
| { |
| "epoch": 2.096, |
| "grad_norm": 0.5184574680106683, |
| "learning_rate": 2.5282549469090246e-06, |
| "loss": 0.3274, |
| "step": 1748 |
| }, |
| { |
| "epoch": 2.0972, |
| "grad_norm": 0.48886013939692935, |
| "learning_rate": 2.522191447943506e-06, |
| "loss": 0.3222, |
| "step": 1749 |
| }, |
| { |
| "epoch": 2.0984, |
| "grad_norm": 0.5339952790495294, |
| "learning_rate": 2.5161327753015297e-06, |
| "loss": 0.326, |
| "step": 1750 |
| }, |
| { |
| "epoch": 2.0996, |
| "grad_norm": 0.5769798704745386, |
| "learning_rate": 2.5100789407842985e-06, |
| "loss": 0.3131, |
| "step": 1751 |
| }, |
| { |
| "epoch": 2.1008, |
| "grad_norm": 0.471327477630944, |
| "learning_rate": 2.5040299561835846e-06, |
| "loss": 0.3158, |
| "step": 1752 |
| }, |
| { |
| "epoch": 2.102, |
| "grad_norm": 0.5026221605877529, |
| "learning_rate": 2.4979858332817225e-06, |
| "loss": 0.3197, |
| "step": 1753 |
| }, |
| { |
| "epoch": 2.1032, |
| "grad_norm": 0.5372165990104663, |
| "learning_rate": 2.4919465838515687e-06, |
| "loss": 0.3145, |
| "step": 1754 |
| }, |
| { |
| "epoch": 2.1044, |
| "grad_norm": 0.6641614157675277, |
| "learning_rate": 2.48591221965649e-06, |
| "loss": 0.3084, |
| "step": 1755 |
| }, |
| { |
| "epoch": 2.1056, |
| "grad_norm": 0.45435517949191667, |
| "learning_rate": 2.479882752450339e-06, |
| "loss": 0.3048, |
| "step": 1756 |
| }, |
| { |
| "epoch": 2.1068, |
| "grad_norm": 0.45788975083727507, |
| "learning_rate": 2.4738581939774303e-06, |
| "loss": 0.3236, |
| "step": 1757 |
| }, |
| { |
| "epoch": 2.108, |
| "grad_norm": 0.6053906560507565, |
| "learning_rate": 2.4678385559725125e-06, |
| "loss": 0.3097, |
| "step": 1758 |
| }, |
| { |
| "epoch": 2.1092, |
| "grad_norm": 0.46864880799396397, |
| "learning_rate": 2.4618238501607577e-06, |
| "loss": 0.2929, |
| "step": 1759 |
| }, |
| { |
| "epoch": 2.1104, |
| "grad_norm": 0.5358136543962301, |
| "learning_rate": 2.455814088257723e-06, |
| "loss": 0.3018, |
| "step": 1760 |
| }, |
| { |
| "epoch": 2.1116, |
| "grad_norm": 0.5084290417208595, |
| "learning_rate": 2.4498092819693364e-06, |
| "loss": 0.3451, |
| "step": 1761 |
| }, |
| { |
| "epoch": 2.1128, |
| "grad_norm": 0.5376300173544492, |
| "learning_rate": 2.443809442991884e-06, |
| "loss": 0.3202, |
| "step": 1762 |
| }, |
| { |
| "epoch": 2.114, |
| "grad_norm": 0.49013479713273206, |
| "learning_rate": 2.4378145830119637e-06, |
| "loss": 0.2757, |
| "step": 1763 |
| }, |
| { |
| "epoch": 2.1152, |
| "grad_norm": 0.47235239540072455, |
| "learning_rate": 2.4318247137064788e-06, |
| "loss": 0.3227, |
| "step": 1764 |
| }, |
| { |
| "epoch": 2.1164, |
| "grad_norm": 0.5754905338883051, |
| "learning_rate": 2.425839846742616e-06, |
| "loss": 0.3213, |
| "step": 1765 |
| }, |
| { |
| "epoch": 2.1176, |
| "grad_norm": 0.5328513695540388, |
| "learning_rate": 2.4198599937778138e-06, |
| "loss": 0.3453, |
| "step": 1766 |
| }, |
| { |
| "epoch": 2.1188, |
| "grad_norm": 0.49451716774508825, |
| "learning_rate": 2.4138851664597424e-06, |
| "loss": 0.3448, |
| "step": 1767 |
| }, |
| { |
| "epoch": 2.12, |
| "grad_norm": 0.5848833881296942, |
| "learning_rate": 2.407915376426293e-06, |
| "loss": 0.3209, |
| "step": 1768 |
| }, |
| { |
| "epoch": 2.1212, |
| "grad_norm": 0.4748565112996034, |
| "learning_rate": 2.401950635305535e-06, |
| "loss": 0.3322, |
| "step": 1769 |
| }, |
| { |
| "epoch": 2.1224, |
| "grad_norm": 0.5764982528591845, |
| "learning_rate": 2.395990954715705e-06, |
| "loss": 0.3169, |
| "step": 1770 |
| }, |
| { |
| "epoch": 2.1236, |
| "grad_norm": 0.5233034231797605, |
| "learning_rate": 2.390036346265188e-06, |
| "loss": 0.2888, |
| "step": 1771 |
| }, |
| { |
| "epoch": 2.1248, |
| "grad_norm": 0.4229446066382141, |
| "learning_rate": 2.3840868215524824e-06, |
| "loss": 0.3386, |
| "step": 1772 |
| }, |
| { |
| "epoch": 2.126, |
| "grad_norm": 0.49839777559135007, |
| "learning_rate": 2.378142392166191e-06, |
| "loss": 0.3024, |
| "step": 1773 |
| }, |
| { |
| "epoch": 2.1272, |
| "grad_norm": 0.5854598988817233, |
| "learning_rate": 2.3722030696849857e-06, |
| "loss": 0.3043, |
| "step": 1774 |
| }, |
| { |
| "epoch": 2.1284, |
| "grad_norm": 0.5251285279619012, |
| "learning_rate": 2.3662688656775973e-06, |
| "loss": 0.3055, |
| "step": 1775 |
| }, |
| { |
| "epoch": 2.1296, |
| "grad_norm": 0.5299135615265957, |
| "learning_rate": 2.3603397917027787e-06, |
| "loss": 0.3272, |
| "step": 1776 |
| }, |
| { |
| "epoch": 2.1308, |
| "grad_norm": 0.44624661103843993, |
| "learning_rate": 2.3544158593092986e-06, |
| "loss": 0.3231, |
| "step": 1777 |
| }, |
| { |
| "epoch": 2.132, |
| "grad_norm": 0.4238463305096519, |
| "learning_rate": 2.3484970800359087e-06, |
| "loss": 0.2789, |
| "step": 1778 |
| }, |
| { |
| "epoch": 2.1332, |
| "grad_norm": 0.49592857839548543, |
| "learning_rate": 2.34258346541132e-06, |
| "loss": 0.3207, |
| "step": 1779 |
| }, |
| { |
| "epoch": 2.1344, |
| "grad_norm": 0.5602302444148246, |
| "learning_rate": 2.3366750269541833e-06, |
| "loss": 0.2812, |
| "step": 1780 |
| }, |
| { |
| "epoch": 2.1356, |
| "grad_norm": 0.496040693843303, |
| "learning_rate": 2.3307717761730745e-06, |
| "loss": 0.3372, |
| "step": 1781 |
| }, |
| { |
| "epoch": 2.1368, |
| "grad_norm": 0.5368866392412842, |
| "learning_rate": 2.3248737245664575e-06, |
| "loss": 0.3225, |
| "step": 1782 |
| }, |
| { |
| "epoch": 2.138, |
| "grad_norm": 0.4372922000394104, |
| "learning_rate": 2.318980883622668e-06, |
| "loss": 0.3308, |
| "step": 1783 |
| }, |
| { |
| "epoch": 2.1391999999999998, |
| "grad_norm": 0.5800037494382269, |
| "learning_rate": 2.313093264819903e-06, |
| "loss": 0.3219, |
| "step": 1784 |
| }, |
| { |
| "epoch": 2.1404, |
| "grad_norm": 0.5100182459831669, |
| "learning_rate": 2.3072108796261766e-06, |
| "loss": 0.3631, |
| "step": 1785 |
| }, |
| { |
| "epoch": 2.1416, |
| "grad_norm": 0.637238814394478, |
| "learning_rate": 2.301333739499312e-06, |
| "loss": 0.2987, |
| "step": 1786 |
| }, |
| { |
| "epoch": 2.1428, |
| "grad_norm": 0.5994611543419464, |
| "learning_rate": 2.2954618558869194e-06, |
| "loss": 0.3471, |
| "step": 1787 |
| }, |
| { |
| "epoch": 2.144, |
| "grad_norm": 0.43867780095707637, |
| "learning_rate": 2.2895952402263642e-06, |
| "loss": 0.3036, |
| "step": 1788 |
| }, |
| { |
| "epoch": 2.1452, |
| "grad_norm": 0.5083667260496417, |
| "learning_rate": 2.283733903944756e-06, |
| "loss": 0.3079, |
| "step": 1789 |
| }, |
| { |
| "epoch": 2.1464, |
| "grad_norm": 0.5130542668182789, |
| "learning_rate": 2.2778778584589214e-06, |
| "loss": 0.2861, |
| "step": 1790 |
| }, |
| { |
| "epoch": 2.1476, |
| "grad_norm": 0.514032456923874, |
| "learning_rate": 2.272027115175377e-06, |
| "loss": 0.3508, |
| "step": 1791 |
| }, |
| { |
| "epoch": 2.1488, |
| "grad_norm": 0.640022764728223, |
| "learning_rate": 2.2661816854903117e-06, |
| "loss": 0.3207, |
| "step": 1792 |
| }, |
| { |
| "epoch": 2.15, |
| "grad_norm": 0.45744824436218107, |
| "learning_rate": 2.2603415807895718e-06, |
| "loss": 0.3376, |
| "step": 1793 |
| }, |
| { |
| "epoch": 2.1512000000000002, |
| "grad_norm": 0.455358581238065, |
| "learning_rate": 2.254506812448622e-06, |
| "loss": 0.2976, |
| "step": 1794 |
| }, |
| { |
| "epoch": 2.1524, |
| "grad_norm": 0.5327275147447389, |
| "learning_rate": 2.2486773918325394e-06, |
| "loss": 0.3117, |
| "step": 1795 |
| }, |
| { |
| "epoch": 2.1536, |
| "grad_norm": 0.5451791706243986, |
| "learning_rate": 2.242853330295984e-06, |
| "loss": 0.3648, |
| "step": 1796 |
| }, |
| { |
| "epoch": 2.1548, |
| "grad_norm": 0.5600203400053401, |
| "learning_rate": 2.2370346391831737e-06, |
| "loss": 0.2985, |
| "step": 1797 |
| }, |
| { |
| "epoch": 2.156, |
| "grad_norm": 0.4796107945013445, |
| "learning_rate": 2.231221329827867e-06, |
| "loss": 0.3204, |
| "step": 1798 |
| }, |
| { |
| "epoch": 2.1572, |
| "grad_norm": 0.5343319911045897, |
| "learning_rate": 2.225413413553341e-06, |
| "loss": 0.3552, |
| "step": 1799 |
| }, |
| { |
| "epoch": 2.1584, |
| "grad_norm": 0.5936500640441726, |
| "learning_rate": 2.219610901672371e-06, |
| "loss": 0.2796, |
| "step": 1800 |
| }, |
| { |
| "epoch": 2.1596, |
| "grad_norm": 0.5103626788162533, |
| "learning_rate": 2.2138138054871993e-06, |
| "loss": 0.3185, |
| "step": 1801 |
| }, |
| { |
| "epoch": 2.1608, |
| "grad_norm": 0.46686532203197323, |
| "learning_rate": 2.208022136289521e-06, |
| "loss": 0.3319, |
| "step": 1802 |
| }, |
| { |
| "epoch": 2.162, |
| "grad_norm": 0.5469832397157314, |
| "learning_rate": 2.2022359053604654e-06, |
| "loss": 0.3374, |
| "step": 1803 |
| }, |
| { |
| "epoch": 2.1632, |
| "grad_norm": 0.6159816821332904, |
| "learning_rate": 2.1964551239705604e-06, |
| "loss": 0.35, |
| "step": 1804 |
| }, |
| { |
| "epoch": 2.1644, |
| "grad_norm": 0.5124786815177615, |
| "learning_rate": 2.1906798033797276e-06, |
| "loss": 0.3526, |
| "step": 1805 |
| }, |
| { |
| "epoch": 2.1656, |
| "grad_norm": 0.5140149212193701, |
| "learning_rate": 2.1849099548372492e-06, |
| "loss": 0.273, |
| "step": 1806 |
| }, |
| { |
| "epoch": 2.1668, |
| "grad_norm": 0.5062178694235752, |
| "learning_rate": 2.179145589581747e-06, |
| "loss": 0.3373, |
| "step": 1807 |
| }, |
| { |
| "epoch": 2.168, |
| "grad_norm": 0.49234615502579876, |
| "learning_rate": 2.1733867188411606e-06, |
| "loss": 0.3294, |
| "step": 1808 |
| }, |
| { |
| "epoch": 2.1692, |
| "grad_norm": 0.48285204019246003, |
| "learning_rate": 2.167633353832734e-06, |
| "loss": 0.3167, |
| "step": 1809 |
| }, |
| { |
| "epoch": 2.1704, |
| "grad_norm": 0.653391977737391, |
| "learning_rate": 2.1618855057629804e-06, |
| "loss": 0.3188, |
| "step": 1810 |
| }, |
| { |
| "epoch": 2.1716, |
| "grad_norm": 0.568596597442444, |
| "learning_rate": 2.156143185827671e-06, |
| "loss": 0.304, |
| "step": 1811 |
| }, |
| { |
| "epoch": 2.1728, |
| "grad_norm": 0.43840364040639196, |
| "learning_rate": 2.1504064052118095e-06, |
| "loss": 0.344, |
| "step": 1812 |
| }, |
| { |
| "epoch": 2.174, |
| "grad_norm": 0.5678920344689531, |
| "learning_rate": 2.144675175089606e-06, |
| "loss": 0.3455, |
| "step": 1813 |
| }, |
| { |
| "epoch": 2.1752, |
| "grad_norm": 0.4808681831796438, |
| "learning_rate": 2.1389495066244613e-06, |
| "loss": 0.3101, |
| "step": 1814 |
| }, |
| { |
| "epoch": 2.1764, |
| "grad_norm": 0.45444323442625073, |
| "learning_rate": 2.1332294109689446e-06, |
| "loss": 0.328, |
| "step": 1815 |
| }, |
| { |
| "epoch": 2.1776, |
| "grad_norm": 0.5508433787565589, |
| "learning_rate": 2.127514899264771e-06, |
| "loss": 0.3674, |
| "step": 1816 |
| }, |
| { |
| "epoch": 2.1788, |
| "grad_norm": 0.5532777904919081, |
| "learning_rate": 2.1218059826427727e-06, |
| "loss": 0.3107, |
| "step": 1817 |
| }, |
| { |
| "epoch": 2.18, |
| "grad_norm": 0.5284704056608788, |
| "learning_rate": 2.1161026722228932e-06, |
| "loss": 0.3064, |
| "step": 1818 |
| }, |
| { |
| "epoch": 2.1812, |
| "grad_norm": 0.5138803356821838, |
| "learning_rate": 2.110404979114149e-06, |
| "loss": 0.2991, |
| "step": 1819 |
| }, |
| { |
| "epoch": 2.1824, |
| "grad_norm": 0.6064872980601652, |
| "learning_rate": 2.104712914414615e-06, |
| "loss": 0.3046, |
| "step": 1820 |
| }, |
| { |
| "epoch": 2.1836, |
| "grad_norm": 0.5686013271119306, |
| "learning_rate": 2.0990264892114067e-06, |
| "loss": 0.3063, |
| "step": 1821 |
| }, |
| { |
| "epoch": 2.1848, |
| "grad_norm": 0.504330189065762, |
| "learning_rate": 2.093345714580656e-06, |
| "loss": 0.3626, |
| "step": 1822 |
| }, |
| { |
| "epoch": 2.186, |
| "grad_norm": 0.532977215227692, |
| "learning_rate": 2.0876706015874816e-06, |
| "loss": 0.3321, |
| "step": 1823 |
| }, |
| { |
| "epoch": 2.1872, |
| "grad_norm": 0.5065964923161952, |
| "learning_rate": 2.0820011612859825e-06, |
| "loss": 0.2779, |
| "step": 1824 |
| }, |
| { |
| "epoch": 2.1884, |
| "grad_norm": 0.5214169367813709, |
| "learning_rate": 2.076337404719203e-06, |
| "loss": 0.3354, |
| "step": 1825 |
| }, |
| { |
| "epoch": 2.1896, |
| "grad_norm": 0.540875044686939, |
| "learning_rate": 2.0706793429191156e-06, |
| "loss": 0.3246, |
| "step": 1826 |
| }, |
| { |
| "epoch": 2.1908, |
| "grad_norm": 0.5290340427211486, |
| "learning_rate": 2.0650269869066048e-06, |
| "loss": 0.3128, |
| "step": 1827 |
| }, |
| { |
| "epoch": 2.192, |
| "grad_norm": 0.4212512063016084, |
| "learning_rate": 2.0593803476914407e-06, |
| "loss": 0.317, |
| "step": 1828 |
| }, |
| { |
| "epoch": 2.1932, |
| "grad_norm": 0.564012613928127, |
| "learning_rate": 2.053739436272256e-06, |
| "loss": 0.3003, |
| "step": 1829 |
| }, |
| { |
| "epoch": 2.1944, |
| "grad_norm": 0.5006215016427719, |
| "learning_rate": 2.0481042636365243e-06, |
| "loss": 0.2887, |
| "step": 1830 |
| }, |
| { |
| "epoch": 2.1955999999999998, |
| "grad_norm": 0.5975039999001123, |
| "learning_rate": 2.0424748407605468e-06, |
| "loss": 0.3312, |
| "step": 1831 |
| }, |
| { |
| "epoch": 2.1968, |
| "grad_norm": 0.419444036679051, |
| "learning_rate": 2.036851178609423e-06, |
| "loss": 0.3395, |
| "step": 1832 |
| }, |
| { |
| "epoch": 2.198, |
| "grad_norm": 0.5296721376266754, |
| "learning_rate": 2.0312332881370294e-06, |
| "loss": 0.3052, |
| "step": 1833 |
| }, |
| { |
| "epoch": 2.1992, |
| "grad_norm": 0.49181852359041606, |
| "learning_rate": 2.0256211802860044e-06, |
| "loss": 0.3139, |
| "step": 1834 |
| }, |
| { |
| "epoch": 2.2004, |
| "grad_norm": 0.43499705360276436, |
| "learning_rate": 2.0200148659877185e-06, |
| "loss": 0.3025, |
| "step": 1835 |
| }, |
| { |
| "epoch": 2.2016, |
| "grad_norm": 0.521684040854764, |
| "learning_rate": 2.014414356162258e-06, |
| "loss": 0.3216, |
| "step": 1836 |
| }, |
| { |
| "epoch": 2.2028, |
| "grad_norm": 0.5246897918039743, |
| "learning_rate": 2.0088196617184065e-06, |
| "loss": 0.3255, |
| "step": 1837 |
| }, |
| { |
| "epoch": 2.204, |
| "grad_norm": 0.6951126296040991, |
| "learning_rate": 2.00323079355362e-06, |
| "loss": 0.3179, |
| "step": 1838 |
| }, |
| { |
| "epoch": 2.2052, |
| "grad_norm": 0.4991834103517274, |
| "learning_rate": 1.997647762554e-06, |
| "loss": 0.3121, |
| "step": 1839 |
| }, |
| { |
| "epoch": 2.2064, |
| "grad_norm": 0.6435612169425046, |
| "learning_rate": 1.992070579594288e-06, |
| "loss": 0.3215, |
| "step": 1840 |
| }, |
| { |
| "epoch": 2.2076000000000002, |
| "grad_norm": 0.5550473315339096, |
| "learning_rate": 1.9864992555378256e-06, |
| "loss": 0.3068, |
| "step": 1841 |
| }, |
| { |
| "epoch": 2.2088, |
| "grad_norm": 0.519085929027261, |
| "learning_rate": 1.9809338012365438e-06, |
| "loss": 0.3147, |
| "step": 1842 |
| }, |
| { |
| "epoch": 2.21, |
| "grad_norm": 0.5398378021114274, |
| "learning_rate": 1.9753742275309456e-06, |
| "loss": 0.3109, |
| "step": 1843 |
| }, |
| { |
| "epoch": 2.2112, |
| "grad_norm": 0.496031217404602, |
| "learning_rate": 1.9698205452500772e-06, |
| "loss": 0.3309, |
| "step": 1844 |
| }, |
| { |
| "epoch": 2.2124, |
| "grad_norm": 0.5091006413823043, |
| "learning_rate": 1.9642727652115056e-06, |
| "loss": 0.3353, |
| "step": 1845 |
| }, |
| { |
| "epoch": 2.2136, |
| "grad_norm": 0.5127551039153834, |
| "learning_rate": 1.9587308982213077e-06, |
| "loss": 0.3128, |
| "step": 1846 |
| }, |
| { |
| "epoch": 2.2148, |
| "grad_norm": 0.4801485456191148, |
| "learning_rate": 1.953194955074038e-06, |
| "loss": 0.3157, |
| "step": 1847 |
| }, |
| { |
| "epoch": 2.216, |
| "grad_norm": 0.5294673058010643, |
| "learning_rate": 1.9476649465527116e-06, |
| "loss": 0.3341, |
| "step": 1848 |
| }, |
| { |
| "epoch": 2.2172, |
| "grad_norm": 0.5316094839936794, |
| "learning_rate": 1.942140883428788e-06, |
| "loss": 0.2717, |
| "step": 1849 |
| }, |
| { |
| "epoch": 2.2184, |
| "grad_norm": 0.5556388765318808, |
| "learning_rate": 1.936622776462147e-06, |
| "loss": 0.3404, |
| "step": 1850 |
| }, |
| { |
| "epoch": 2.2196, |
| "grad_norm": 0.503236246918105, |
| "learning_rate": 1.931110636401062e-06, |
| "loss": 0.3213, |
| "step": 1851 |
| }, |
| { |
| "epoch": 2.2208, |
| "grad_norm": 0.5690711638502806, |
| "learning_rate": 1.925604473982185e-06, |
| "loss": 0.3517, |
| "step": 1852 |
| }, |
| { |
| "epoch": 2.222, |
| "grad_norm": 0.5101343946978072, |
| "learning_rate": 1.9201042999305276e-06, |
| "loss": 0.3432, |
| "step": 1853 |
| }, |
| { |
| "epoch": 2.2232, |
| "grad_norm": 0.604211155077798, |
| "learning_rate": 1.914610124959437e-06, |
| "loss": 0.3373, |
| "step": 1854 |
| }, |
| { |
| "epoch": 2.2244, |
| "grad_norm": 0.4884119407257996, |
| "learning_rate": 1.9091219597705694e-06, |
| "loss": 0.32, |
| "step": 1855 |
| }, |
| { |
| "epoch": 2.2256, |
| "grad_norm": 0.47970146777377404, |
| "learning_rate": 1.9036398150538842e-06, |
| "loss": 0.2938, |
| "step": 1856 |
| }, |
| { |
| "epoch": 2.2268, |
| "grad_norm": 0.4769871704809297, |
| "learning_rate": 1.898163701487607e-06, |
| "loss": 0.3154, |
| "step": 1857 |
| }, |
| { |
| "epoch": 2.228, |
| "grad_norm": 0.5381238546223847, |
| "learning_rate": 1.8926936297382148e-06, |
| "loss": 0.3169, |
| "step": 1858 |
| }, |
| { |
| "epoch": 2.2292, |
| "grad_norm": 0.3847813549375159, |
| "learning_rate": 1.8872296104604255e-06, |
| "loss": 0.3497, |
| "step": 1859 |
| }, |
| { |
| "epoch": 2.2304, |
| "grad_norm": 0.5054743631613623, |
| "learning_rate": 1.8817716542971593e-06, |
| "loss": 0.3585, |
| "step": 1860 |
| }, |
| { |
| "epoch": 2.2316, |
| "grad_norm": 0.5095273965254722, |
| "learning_rate": 1.8763197718795262e-06, |
| "loss": 0.3253, |
| "step": 1861 |
| }, |
| { |
| "epoch": 2.2328, |
| "grad_norm": 0.5372747138212405, |
| "learning_rate": 1.8708739738268133e-06, |
| "loss": 0.3198, |
| "step": 1862 |
| }, |
| { |
| "epoch": 2.234, |
| "grad_norm": 0.4273289084664602, |
| "learning_rate": 1.865434270746449e-06, |
| "loss": 0.299, |
| "step": 1863 |
| }, |
| { |
| "epoch": 2.2352, |
| "grad_norm": 0.5057411248655699, |
| "learning_rate": 1.8600006732339892e-06, |
| "loss": 0.3223, |
| "step": 1864 |
| }, |
| { |
| "epoch": 2.2364, |
| "grad_norm": 0.5329286312340235, |
| "learning_rate": 1.8545731918731074e-06, |
| "loss": 0.3095, |
| "step": 1865 |
| }, |
| { |
| "epoch": 2.2376, |
| "grad_norm": 0.42512261208802066, |
| "learning_rate": 1.8491518372355538e-06, |
| "loss": 0.3344, |
| "step": 1866 |
| }, |
| { |
| "epoch": 2.2388, |
| "grad_norm": 0.5144880443849901, |
| "learning_rate": 1.8437366198811463e-06, |
| "loss": 0.3155, |
| "step": 1867 |
| }, |
| { |
| "epoch": 2.24, |
| "grad_norm": 0.546562593617825, |
| "learning_rate": 1.838327550357753e-06, |
| "loss": 0.3274, |
| "step": 1868 |
| }, |
| { |
| "epoch": 2.2412, |
| "grad_norm": 0.46168655023050265, |
| "learning_rate": 1.8329246392012622e-06, |
| "loss": 0.3095, |
| "step": 1869 |
| }, |
| { |
| "epoch": 2.2424, |
| "grad_norm": 0.5873141909299401, |
| "learning_rate": 1.8275278969355714e-06, |
| "loss": 0.3193, |
| "step": 1870 |
| }, |
| { |
| "epoch": 2.2436, |
| "grad_norm": 0.5126551069055825, |
| "learning_rate": 1.8221373340725568e-06, |
| "loss": 0.332, |
| "step": 1871 |
| }, |
| { |
| "epoch": 2.2448, |
| "grad_norm": 0.473975809996108, |
| "learning_rate": 1.8167529611120648e-06, |
| "loss": 0.3331, |
| "step": 1872 |
| }, |
| { |
| "epoch": 2.246, |
| "grad_norm": 0.5973549893034205, |
| "learning_rate": 1.811374788541878e-06, |
| "loss": 0.3499, |
| "step": 1873 |
| }, |
| { |
| "epoch": 2.2472, |
| "grad_norm": 0.4937805794383019, |
| "learning_rate": 1.8060028268377088e-06, |
| "loss": 0.3591, |
| "step": 1874 |
| }, |
| { |
| "epoch": 2.2484, |
| "grad_norm": 0.5520047912990497, |
| "learning_rate": 1.8006370864631644e-06, |
| "loss": 0.3091, |
| "step": 1875 |
| }, |
| { |
| "epoch": 2.2496, |
| "grad_norm": 0.5280603840791102, |
| "learning_rate": 1.7952775778697418e-06, |
| "loss": 0.3822, |
| "step": 1876 |
| }, |
| { |
| "epoch": 2.2508, |
| "grad_norm": 0.4747984575549771, |
| "learning_rate": 1.7899243114967918e-06, |
| "loss": 0.3414, |
| "step": 1877 |
| }, |
| { |
| "epoch": 2.252, |
| "grad_norm": 0.5008025728171192, |
| "learning_rate": 1.7845772977715148e-06, |
| "loss": 0.3217, |
| "step": 1878 |
| }, |
| { |
| "epoch": 2.2532, |
| "grad_norm": 0.5460878686864432, |
| "learning_rate": 1.7792365471089252e-06, |
| "loss": 0.3116, |
| "step": 1879 |
| }, |
| { |
| "epoch": 2.2544, |
| "grad_norm": 0.4706181673801985, |
| "learning_rate": 1.773902069911838e-06, |
| "loss": 0.3054, |
| "step": 1880 |
| }, |
| { |
| "epoch": 2.2556, |
| "grad_norm": 0.5659208409724967, |
| "learning_rate": 1.7685738765708576e-06, |
| "loss": 0.3284, |
| "step": 1881 |
| }, |
| { |
| "epoch": 2.2568, |
| "grad_norm": 0.5914803959669342, |
| "learning_rate": 1.7632519774643391e-06, |
| "loss": 0.3247, |
| "step": 1882 |
| }, |
| { |
| "epoch": 2.258, |
| "grad_norm": 0.4904554286879897, |
| "learning_rate": 1.7579363829583794e-06, |
| "loss": 0.3047, |
| "step": 1883 |
| }, |
| { |
| "epoch": 2.2592, |
| "grad_norm": 0.5227748841136812, |
| "learning_rate": 1.7526271034067993e-06, |
| "loss": 0.3024, |
| "step": 1884 |
| }, |
| { |
| "epoch": 2.2604, |
| "grad_norm": 0.5318646011263115, |
| "learning_rate": 1.7473241491511139e-06, |
| "loss": 0.3072, |
| "step": 1885 |
| }, |
| { |
| "epoch": 2.2616, |
| "grad_norm": 0.5068337164319224, |
| "learning_rate": 1.7420275305205214e-06, |
| "loss": 0.3037, |
| "step": 1886 |
| }, |
| { |
| "epoch": 2.2628, |
| "grad_norm": 0.6220942329692476, |
| "learning_rate": 1.7367372578318797e-06, |
| "loss": 0.3223, |
| "step": 1887 |
| }, |
| { |
| "epoch": 2.2640000000000002, |
| "grad_norm": 0.5531572497923152, |
| "learning_rate": 1.7314533413896833e-06, |
| "loss": 0.3059, |
| "step": 1888 |
| }, |
| { |
| "epoch": 2.2652, |
| "grad_norm": 0.5635377817414549, |
| "learning_rate": 1.7261757914860456e-06, |
| "loss": 0.3399, |
| "step": 1889 |
| }, |
| { |
| "epoch": 2.2664, |
| "grad_norm": 0.5515908215511565, |
| "learning_rate": 1.720904618400684e-06, |
| "loss": 0.3133, |
| "step": 1890 |
| }, |
| { |
| "epoch": 2.2676, |
| "grad_norm": 0.4711116326295099, |
| "learning_rate": 1.7156398324008871e-06, |
| "loss": 0.3488, |
| "step": 1891 |
| }, |
| { |
| "epoch": 2.2688, |
| "grad_norm": 0.5169505814378155, |
| "learning_rate": 1.7103814437415105e-06, |
| "loss": 0.3119, |
| "step": 1892 |
| }, |
| { |
| "epoch": 2.27, |
| "grad_norm": 0.4808802832107604, |
| "learning_rate": 1.7051294626649462e-06, |
| "loss": 0.2876, |
| "step": 1893 |
| }, |
| { |
| "epoch": 2.2712, |
| "grad_norm": 0.4965800820258115, |
| "learning_rate": 1.6998838994011041e-06, |
| "loss": 0.3081, |
| "step": 1894 |
| }, |
| { |
| "epoch": 2.2724, |
| "grad_norm": 0.5765108347910463, |
| "learning_rate": 1.6946447641673907e-06, |
| "loss": 0.2977, |
| "step": 1895 |
| }, |
| { |
| "epoch": 2.2736, |
| "grad_norm": 0.5747764419760193, |
| "learning_rate": 1.6894120671686986e-06, |
| "loss": 0.3082, |
| "step": 1896 |
| }, |
| { |
| "epoch": 2.2748, |
| "grad_norm": 0.4979801826768883, |
| "learning_rate": 1.6841858185973775e-06, |
| "loss": 0.307, |
| "step": 1897 |
| }, |
| { |
| "epoch": 2.276, |
| "grad_norm": 0.5668296391608482, |
| "learning_rate": 1.6789660286332132e-06, |
| "loss": 0.349, |
| "step": 1898 |
| }, |
| { |
| "epoch": 2.2772, |
| "grad_norm": 0.5273497007825191, |
| "learning_rate": 1.6737527074434135e-06, |
| "loss": 0.3298, |
| "step": 1899 |
| }, |
| { |
| "epoch": 2.2784, |
| "grad_norm": 0.5205699456340483, |
| "learning_rate": 1.6685458651825892e-06, |
| "loss": 0.3269, |
| "step": 1900 |
| }, |
| { |
| "epoch": 2.2796, |
| "grad_norm": 0.5603073017723463, |
| "learning_rate": 1.6633455119927256e-06, |
| "loss": 0.3002, |
| "step": 1901 |
| }, |
| { |
| "epoch": 2.2808, |
| "grad_norm": 0.49085075677697326, |
| "learning_rate": 1.658151658003172e-06, |
| "loss": 0.3231, |
| "step": 1902 |
| }, |
| { |
| "epoch": 2.282, |
| "grad_norm": 0.47995130663114505, |
| "learning_rate": 1.6529643133306212e-06, |
| "loss": 0.3055, |
| "step": 1903 |
| }, |
| { |
| "epoch": 2.2832, |
| "grad_norm": 0.4808506099960846, |
| "learning_rate": 1.647783488079081e-06, |
| "loss": 0.3307, |
| "step": 1904 |
| }, |
| { |
| "epoch": 2.2843999999999998, |
| "grad_norm": 0.6235221866733798, |
| "learning_rate": 1.6426091923398619e-06, |
| "loss": 0.3389, |
| "step": 1905 |
| }, |
| { |
| "epoch": 2.2856, |
| "grad_norm": 0.47445141356388904, |
| "learning_rate": 1.6374414361915613e-06, |
| "loss": 0.2944, |
| "step": 1906 |
| }, |
| { |
| "epoch": 2.2868, |
| "grad_norm": 0.4774585184186103, |
| "learning_rate": 1.6322802297000306e-06, |
| "loss": 0.3438, |
| "step": 1907 |
| }, |
| { |
| "epoch": 2.288, |
| "grad_norm": 0.5375942289912291, |
| "learning_rate": 1.6271255829183702e-06, |
| "loss": 0.3348, |
| "step": 1908 |
| }, |
| { |
| "epoch": 2.2892, |
| "grad_norm": 0.40086256227082395, |
| "learning_rate": 1.6219775058869019e-06, |
| "loss": 0.3448, |
| "step": 1909 |
| }, |
| { |
| "epoch": 2.2904, |
| "grad_norm": 0.5172335820787956, |
| "learning_rate": 1.6168360086331498e-06, |
| "loss": 0.3233, |
| "step": 1910 |
| }, |
| { |
| "epoch": 2.2916, |
| "grad_norm": 0.5207471934020876, |
| "learning_rate": 1.6117011011718188e-06, |
| "loss": 0.3255, |
| "step": 1911 |
| }, |
| { |
| "epoch": 2.2928, |
| "grad_norm": 0.4878094924131688, |
| "learning_rate": 1.6065727935047837e-06, |
| "loss": 0.3286, |
| "step": 1912 |
| }, |
| { |
| "epoch": 2.294, |
| "grad_norm": 0.5950357137095884, |
| "learning_rate": 1.6014510956210632e-06, |
| "loss": 0.3283, |
| "step": 1913 |
| }, |
| { |
| "epoch": 2.2952, |
| "grad_norm": 0.48086928299992526, |
| "learning_rate": 1.5963360174967956e-06, |
| "loss": 0.3503, |
| "step": 1914 |
| }, |
| { |
| "epoch": 2.2964, |
| "grad_norm": 0.5892110607046107, |
| "learning_rate": 1.5912275690952339e-06, |
| "loss": 0.3245, |
| "step": 1915 |
| }, |
| { |
| "epoch": 2.2976, |
| "grad_norm": 0.5028560710697326, |
| "learning_rate": 1.5861257603667106e-06, |
| "loss": 0.3065, |
| "step": 1916 |
| }, |
| { |
| "epoch": 2.2988, |
| "grad_norm": 0.5189757571086189, |
| "learning_rate": 1.581030601248626e-06, |
| "loss": 0.3135, |
| "step": 1917 |
| }, |
| { |
| "epoch": 2.3, |
| "grad_norm": 0.4491773427040121, |
| "learning_rate": 1.5759421016654314e-06, |
| "loss": 0.318, |
| "step": 1918 |
| }, |
| { |
| "epoch": 2.3012, |
| "grad_norm": 0.4874058563272879, |
| "learning_rate": 1.570860271528607e-06, |
| "loss": 0.3001, |
| "step": 1919 |
| }, |
| { |
| "epoch": 2.3024, |
| "grad_norm": 0.4852758364086213, |
| "learning_rate": 1.5657851207366359e-06, |
| "loss": 0.3135, |
| "step": 1920 |
| }, |
| { |
| "epoch": 2.3036, |
| "grad_norm": 0.5759656835583162, |
| "learning_rate": 1.5607166591749995e-06, |
| "loss": 0.3452, |
| "step": 1921 |
| }, |
| { |
| "epoch": 2.3048, |
| "grad_norm": 0.5207395067658368, |
| "learning_rate": 1.555654896716144e-06, |
| "loss": 0.32, |
| "step": 1922 |
| }, |
| { |
| "epoch": 2.306, |
| "grad_norm": 0.452747411117712, |
| "learning_rate": 1.5505998432194658e-06, |
| "loss": 0.3405, |
| "step": 1923 |
| }, |
| { |
| "epoch": 2.3072, |
| "grad_norm": 0.5412198304899934, |
| "learning_rate": 1.5455515085312984e-06, |
| "loss": 0.3322, |
| "step": 1924 |
| }, |
| { |
| "epoch": 2.3084, |
| "grad_norm": 0.49544095265814697, |
| "learning_rate": 1.5405099024848874e-06, |
| "loss": 0.3124, |
| "step": 1925 |
| }, |
| { |
| "epoch": 2.3096, |
| "grad_norm": 0.5183316214030841, |
| "learning_rate": 1.5354750349003694e-06, |
| "loss": 0.3495, |
| "step": 1926 |
| }, |
| { |
| "epoch": 2.3108, |
| "grad_norm": 0.5463151196073748, |
| "learning_rate": 1.5304469155847556e-06, |
| "loss": 0.3434, |
| "step": 1927 |
| }, |
| { |
| "epoch": 2.312, |
| "grad_norm": 0.5516143512305192, |
| "learning_rate": 1.5254255543319168e-06, |
| "loss": 0.3168, |
| "step": 1928 |
| }, |
| { |
| "epoch": 2.3132, |
| "grad_norm": 0.45573917604459735, |
| "learning_rate": 1.5204109609225553e-06, |
| "loss": 0.3195, |
| "step": 1929 |
| }, |
| { |
| "epoch": 2.3144, |
| "grad_norm": 0.5469056634720862, |
| "learning_rate": 1.5154031451241952e-06, |
| "loss": 0.3411, |
| "step": 1930 |
| }, |
| { |
| "epoch": 2.3156, |
| "grad_norm": 0.5158849784855729, |
| "learning_rate": 1.5104021166911582e-06, |
| "loss": 0.3165, |
| "step": 1931 |
| }, |
| { |
| "epoch": 2.3168, |
| "grad_norm": 0.5544014080581716, |
| "learning_rate": 1.5054078853645432e-06, |
| "loss": 0.3089, |
| "step": 1932 |
| }, |
| { |
| "epoch": 2.318, |
| "grad_norm": 0.4524564910062869, |
| "learning_rate": 1.5004204608722088e-06, |
| "loss": 0.3152, |
| "step": 1933 |
| }, |
| { |
| "epoch": 2.3192, |
| "grad_norm": 0.502269133931021, |
| "learning_rate": 1.495439852928759e-06, |
| "loss": 0.295, |
| "step": 1934 |
| }, |
| { |
| "epoch": 2.3204000000000002, |
| "grad_norm": 0.4800443904019531, |
| "learning_rate": 1.4904660712355207e-06, |
| "loss": 0.3223, |
| "step": 1935 |
| }, |
| { |
| "epoch": 2.3216, |
| "grad_norm": 0.4711817206662842, |
| "learning_rate": 1.4854991254805179e-06, |
| "loss": 0.3182, |
| "step": 1936 |
| }, |
| { |
| "epoch": 2.3228, |
| "grad_norm": 0.5204593299602398, |
| "learning_rate": 1.4805390253384683e-06, |
| "loss": 0.3432, |
| "step": 1937 |
| }, |
| { |
| "epoch": 2.324, |
| "grad_norm": 0.5634364961018137, |
| "learning_rate": 1.4755857804707485e-06, |
| "loss": 0.3003, |
| "step": 1938 |
| }, |
| { |
| "epoch": 2.3252, |
| "grad_norm": 0.4498885389670753, |
| "learning_rate": 1.4706394005253838e-06, |
| "loss": 0.3033, |
| "step": 1939 |
| }, |
| { |
| "epoch": 2.3264, |
| "grad_norm": 0.5581196261806362, |
| "learning_rate": 1.465699895137031e-06, |
| "loss": 0.3033, |
| "step": 1940 |
| }, |
| { |
| "epoch": 2.3276, |
| "grad_norm": 0.4408958316492097, |
| "learning_rate": 1.4607672739269552e-06, |
| "loss": 0.3073, |
| "step": 1941 |
| }, |
| { |
| "epoch": 2.3288, |
| "grad_norm": 0.6240652317829313, |
| "learning_rate": 1.455841546503009e-06, |
| "loss": 0.3215, |
| "step": 1942 |
| }, |
| { |
| "epoch": 2.33, |
| "grad_norm": 0.5492997699864376, |
| "learning_rate": 1.450922722459623e-06, |
| "loss": 0.3279, |
| "step": 1943 |
| }, |
| { |
| "epoch": 2.3312, |
| "grad_norm": 0.49666520199305403, |
| "learning_rate": 1.446010811377776e-06, |
| "loss": 0.332, |
| "step": 1944 |
| }, |
| { |
| "epoch": 2.3324, |
| "grad_norm": 0.5916923146968442, |
| "learning_rate": 1.4411058228249824e-06, |
| "loss": 0.3188, |
| "step": 1945 |
| }, |
| { |
| "epoch": 2.3336, |
| "grad_norm": 0.5692144923101109, |
| "learning_rate": 1.4362077663552754e-06, |
| "loss": 0.3205, |
| "step": 1946 |
| }, |
| { |
| "epoch": 2.3348, |
| "grad_norm": 0.5244770882576342, |
| "learning_rate": 1.4313166515091863e-06, |
| "loss": 0.3567, |
| "step": 1947 |
| }, |
| { |
| "epoch": 2.336, |
| "grad_norm": 0.4985866219770014, |
| "learning_rate": 1.4264324878137204e-06, |
| "loss": 0.2955, |
| "step": 1948 |
| }, |
| { |
| "epoch": 2.3372, |
| "grad_norm": 0.5124823393836703, |
| "learning_rate": 1.421555284782349e-06, |
| "loss": 0.3044, |
| "step": 1949 |
| }, |
| { |
| "epoch": 2.3384, |
| "grad_norm": 0.5408319376191771, |
| "learning_rate": 1.4166850519149794e-06, |
| "loss": 0.3171, |
| "step": 1950 |
| }, |
| { |
| "epoch": 2.3396, |
| "grad_norm": 0.5398033571111165, |
| "learning_rate": 1.41182179869795e-06, |
| "loss": 0.3416, |
| "step": 1951 |
| }, |
| { |
| "epoch": 2.3407999999999998, |
| "grad_norm": 0.7408763803046092, |
| "learning_rate": 1.406965534603995e-06, |
| "loss": 0.2988, |
| "step": 1952 |
| }, |
| { |
| "epoch": 2.342, |
| "grad_norm": 0.48957085058491534, |
| "learning_rate": 1.4021162690922441e-06, |
| "loss": 0.2904, |
| "step": 1953 |
| }, |
| { |
| "epoch": 2.3432, |
| "grad_norm": 0.4800797062711741, |
| "learning_rate": 1.397274011608189e-06, |
| "loss": 0.3394, |
| "step": 1954 |
| }, |
| { |
| "epoch": 2.3444, |
| "grad_norm": 0.504636425789252, |
| "learning_rate": 1.3924387715836706e-06, |
| "loss": 0.3306, |
| "step": 1955 |
| }, |
| { |
| "epoch": 2.3456, |
| "grad_norm": 0.5273255459333012, |
| "learning_rate": 1.3876105584368653e-06, |
| "loss": 0.3111, |
| "step": 1956 |
| }, |
| { |
| "epoch": 2.3468, |
| "grad_norm": 0.46242283582128896, |
| "learning_rate": 1.3827893815722614e-06, |
| "loss": 0.3308, |
| "step": 1957 |
| }, |
| { |
| "epoch": 2.348, |
| "grad_norm": 0.5268676502741816, |
| "learning_rate": 1.3779752503806375e-06, |
| "loss": 0.3434, |
| "step": 1958 |
| }, |
| { |
| "epoch": 2.3492, |
| "grad_norm": 0.5821097945236201, |
| "learning_rate": 1.3731681742390558e-06, |
| "loss": 0.3084, |
| "step": 1959 |
| }, |
| { |
| "epoch": 2.3504, |
| "grad_norm": 0.5469587875056272, |
| "learning_rate": 1.368368162510829e-06, |
| "loss": 0.302, |
| "step": 1960 |
| }, |
| { |
| "epoch": 2.3516, |
| "grad_norm": 0.4634982197304083, |
| "learning_rate": 1.363575224545512e-06, |
| "loss": 0.3182, |
| "step": 1961 |
| }, |
| { |
| "epoch": 2.3528000000000002, |
| "grad_norm": 0.5274355185700257, |
| "learning_rate": 1.3587893696788868e-06, |
| "loss": 0.2801, |
| "step": 1962 |
| }, |
| { |
| "epoch": 2.354, |
| "grad_norm": 0.41061222873885617, |
| "learning_rate": 1.3540106072329323e-06, |
| "loss": 0.3271, |
| "step": 1963 |
| }, |
| { |
| "epoch": 2.3552, |
| "grad_norm": 0.5577455688560952, |
| "learning_rate": 1.349238946515813e-06, |
| "loss": 0.3324, |
| "step": 1964 |
| }, |
| { |
| "epoch": 2.3564, |
| "grad_norm": 0.5086040856439964, |
| "learning_rate": 1.344474396821865e-06, |
| "loss": 0.3383, |
| "step": 1965 |
| }, |
| { |
| "epoch": 2.3576, |
| "grad_norm": 0.45513590551982486, |
| "learning_rate": 1.3397169674315668e-06, |
| "loss": 0.3029, |
| "step": 1966 |
| }, |
| { |
| "epoch": 2.3588, |
| "grad_norm": 0.5032377873503844, |
| "learning_rate": 1.3349666676115358e-06, |
| "loss": 0.3161, |
| "step": 1967 |
| }, |
| { |
| "epoch": 2.36, |
| "grad_norm": 0.5429418662908604, |
| "learning_rate": 1.3302235066144948e-06, |
| "loss": 0.3173, |
| "step": 1968 |
| }, |
| { |
| "epoch": 2.3612, |
| "grad_norm": 0.582433350815691, |
| "learning_rate": 1.3254874936792672e-06, |
| "loss": 0.3203, |
| "step": 1969 |
| }, |
| { |
| "epoch": 2.3624, |
| "grad_norm": 0.5061141475840918, |
| "learning_rate": 1.3207586380307486e-06, |
| "loss": 0.3133, |
| "step": 1970 |
| }, |
| { |
| "epoch": 2.3636, |
| "grad_norm": 0.38935634371222433, |
| "learning_rate": 1.3160369488798984e-06, |
| "loss": 0.289, |
| "step": 1971 |
| }, |
| { |
| "epoch": 2.3648, |
| "grad_norm": 0.49040182272898925, |
| "learning_rate": 1.3113224354237113e-06, |
| "loss": 0.2872, |
| "step": 1972 |
| }, |
| { |
| "epoch": 2.366, |
| "grad_norm": 0.49662477301690094, |
| "learning_rate": 1.306615106845211e-06, |
| "loss": 0.3028, |
| "step": 1973 |
| }, |
| { |
| "epoch": 2.3672, |
| "grad_norm": 0.5614718485198186, |
| "learning_rate": 1.30191497231342e-06, |
| "loss": 0.2845, |
| "step": 1974 |
| }, |
| { |
| "epoch": 2.3684, |
| "grad_norm": 0.5474441188846769, |
| "learning_rate": 1.2972220409833552e-06, |
| "loss": 0.3182, |
| "step": 1975 |
| }, |
| { |
| "epoch": 2.3696, |
| "grad_norm": 0.44863953590190264, |
| "learning_rate": 1.2925363219959958e-06, |
| "loss": 0.3233, |
| "step": 1976 |
| }, |
| { |
| "epoch": 2.3708, |
| "grad_norm": 0.5859727221445215, |
| "learning_rate": 1.2878578244782775e-06, |
| "loss": 0.338, |
| "step": 1977 |
| }, |
| { |
| "epoch": 2.372, |
| "grad_norm": 0.518472285200413, |
| "learning_rate": 1.2831865575430702e-06, |
| "loss": 0.3103, |
| "step": 1978 |
| }, |
| { |
| "epoch": 2.3731999999999998, |
| "grad_norm": 0.4486954376925882, |
| "learning_rate": 1.2785225302891568e-06, |
| "loss": 0.3036, |
| "step": 1979 |
| }, |
| { |
| "epoch": 2.3744, |
| "grad_norm": 0.5332520800656768, |
| "learning_rate": 1.2738657518012188e-06, |
| "loss": 0.3455, |
| "step": 1980 |
| }, |
| { |
| "epoch": 2.3756, |
| "grad_norm": 0.5180503448026015, |
| "learning_rate": 1.2692162311498219e-06, |
| "loss": 0.3315, |
| "step": 1981 |
| }, |
| { |
| "epoch": 2.3768000000000002, |
| "grad_norm": 0.5503669228226378, |
| "learning_rate": 1.2645739773913911e-06, |
| "loss": 0.3106, |
| "step": 1982 |
| }, |
| { |
| "epoch": 2.378, |
| "grad_norm": 0.5572704229738278, |
| "learning_rate": 1.259938999568196e-06, |
| "loss": 0.3235, |
| "step": 1983 |
| }, |
| { |
| "epoch": 2.3792, |
| "grad_norm": 0.49223457619635097, |
| "learning_rate": 1.2553113067083417e-06, |
| "loss": 0.323, |
| "step": 1984 |
| }, |
| { |
| "epoch": 2.3804, |
| "grad_norm": 0.5298160573023049, |
| "learning_rate": 1.2506909078257357e-06, |
| "loss": 0.3113, |
| "step": 1985 |
| }, |
| { |
| "epoch": 2.3816, |
| "grad_norm": 0.4739126163553427, |
| "learning_rate": 1.2460778119200778e-06, |
| "loss": 0.3063, |
| "step": 1986 |
| }, |
| { |
| "epoch": 2.3828, |
| "grad_norm": 0.5756760439695214, |
| "learning_rate": 1.24147202797685e-06, |
| "loss": 0.29, |
| "step": 1987 |
| }, |
| { |
| "epoch": 2.384, |
| "grad_norm": 0.47841470066730546, |
| "learning_rate": 1.236873564967284e-06, |
| "loss": 0.3334, |
| "step": 1988 |
| }, |
| { |
| "epoch": 2.3852, |
| "grad_norm": 0.5372964333839603, |
| "learning_rate": 1.2322824318483568e-06, |
| "loss": 0.3182, |
| "step": 1989 |
| }, |
| { |
| "epoch": 2.3864, |
| "grad_norm": 0.5360611358567483, |
| "learning_rate": 1.227698637562768e-06, |
| "loss": 0.3028, |
| "step": 1990 |
| }, |
| { |
| "epoch": 2.3876, |
| "grad_norm": 0.546051270415823, |
| "learning_rate": 1.2231221910389196e-06, |
| "loss": 0.3365, |
| "step": 1991 |
| }, |
| { |
| "epoch": 2.3888, |
| "grad_norm": 0.5355902634690551, |
| "learning_rate": 1.2185531011909008e-06, |
| "loss": 0.3134, |
| "step": 1992 |
| }, |
| { |
| "epoch": 2.39, |
| "grad_norm": 0.5667030431688215, |
| "learning_rate": 1.2139913769184757e-06, |
| "loss": 0.3551, |
| "step": 1993 |
| }, |
| { |
| "epoch": 2.3912, |
| "grad_norm": 0.5586223985170625, |
| "learning_rate": 1.2094370271070599e-06, |
| "loss": 0.2898, |
| "step": 1994 |
| }, |
| { |
| "epoch": 2.3924, |
| "grad_norm": 0.49938487817710525, |
| "learning_rate": 1.2048900606277036e-06, |
| "loss": 0.3374, |
| "step": 1995 |
| }, |
| { |
| "epoch": 2.3936, |
| "grad_norm": 0.5172811558524574, |
| "learning_rate": 1.2003504863370746e-06, |
| "loss": 0.3239, |
| "step": 1996 |
| }, |
| { |
| "epoch": 2.3948, |
| "grad_norm": 0.4448576698489582, |
| "learning_rate": 1.195818313077447e-06, |
| "loss": 0.2964, |
| "step": 1997 |
| }, |
| { |
| "epoch": 2.396, |
| "grad_norm": 0.5067804987087742, |
| "learning_rate": 1.1912935496766719e-06, |
| "loss": 0.3149, |
| "step": 1998 |
| }, |
| { |
| "epoch": 2.3971999999999998, |
| "grad_norm": 0.5289972726643701, |
| "learning_rate": 1.186776204948173e-06, |
| "loss": 0.3197, |
| "step": 1999 |
| }, |
| { |
| "epoch": 2.3984, |
| "grad_norm": 0.48058445208828016, |
| "learning_rate": 1.182266287690924e-06, |
| "loss": 0.3183, |
| "step": 2000 |
| }, |
| { |
| "epoch": 2.3996, |
| "grad_norm": 0.5330873826333208, |
| "learning_rate": 1.177763806689427e-06, |
| "loss": 0.2727, |
| "step": 2001 |
| }, |
| { |
| "epoch": 2.4008, |
| "grad_norm": 0.4467421647008868, |
| "learning_rate": 1.173268770713701e-06, |
| "loss": 0.3235, |
| "step": 2002 |
| }, |
| { |
| "epoch": 2.402, |
| "grad_norm": 0.5189215617155751, |
| "learning_rate": 1.1687811885192662e-06, |
| "loss": 0.3345, |
| "step": 2003 |
| }, |
| { |
| "epoch": 2.4032, |
| "grad_norm": 0.5236509042131071, |
| "learning_rate": 1.16430106884712e-06, |
| "loss": 0.2923, |
| "step": 2004 |
| }, |
| { |
| "epoch": 2.4044, |
| "grad_norm": 0.4174001768905615, |
| "learning_rate": 1.159828420423728e-06, |
| "loss": 0.3329, |
| "step": 2005 |
| }, |
| { |
| "epoch": 2.4056, |
| "grad_norm": 0.48556976851826156, |
| "learning_rate": 1.1553632519610025e-06, |
| "loss": 0.3181, |
| "step": 2006 |
| }, |
| { |
| "epoch": 2.4068, |
| "grad_norm": 0.5333581260392112, |
| "learning_rate": 1.1509055721562839e-06, |
| "loss": 0.3558, |
| "step": 2007 |
| }, |
| { |
| "epoch": 2.408, |
| "grad_norm": 0.5504819445821775, |
| "learning_rate": 1.1464553896923264e-06, |
| "loss": 0.3366, |
| "step": 2008 |
| }, |
| { |
| "epoch": 2.4092000000000002, |
| "grad_norm": 0.6575472559330863, |
| "learning_rate": 1.1420127132372839e-06, |
| "loss": 0.3023, |
| "step": 2009 |
| }, |
| { |
| "epoch": 2.4104, |
| "grad_norm": 0.5224772296793633, |
| "learning_rate": 1.1375775514446846e-06, |
| "loss": 0.3487, |
| "step": 2010 |
| }, |
| { |
| "epoch": 2.4116, |
| "grad_norm": 0.5362750622696038, |
| "learning_rate": 1.1331499129534252e-06, |
| "loss": 0.3985, |
| "step": 2011 |
| }, |
| { |
| "epoch": 2.4128, |
| "grad_norm": 0.6480867835273206, |
| "learning_rate": 1.128729806387746e-06, |
| "loss": 0.3078, |
| "step": 2012 |
| }, |
| { |
| "epoch": 2.414, |
| "grad_norm": 0.510972758166725, |
| "learning_rate": 1.124317240357216e-06, |
| "loss": 0.374, |
| "step": 2013 |
| }, |
| { |
| "epoch": 2.4152, |
| "grad_norm": 0.4353416689772134, |
| "learning_rate": 1.119912223456715e-06, |
| "loss": 0.3434, |
| "step": 2014 |
| }, |
| { |
| "epoch": 2.4164, |
| "grad_norm": 0.4992041891223879, |
| "learning_rate": 1.1155147642664217e-06, |
| "loss": 0.2941, |
| "step": 2015 |
| }, |
| { |
| "epoch": 2.4176, |
| "grad_norm": 0.4981271993131891, |
| "learning_rate": 1.1111248713517935e-06, |
| "loss": 0.3399, |
| "step": 2016 |
| }, |
| { |
| "epoch": 2.4188, |
| "grad_norm": 0.5315501345593295, |
| "learning_rate": 1.1067425532635463e-06, |
| "loss": 0.3059, |
| "step": 2017 |
| }, |
| { |
| "epoch": 2.42, |
| "grad_norm": 0.5272127156791281, |
| "learning_rate": 1.1023678185376474e-06, |
| "loss": 0.3252, |
| "step": 2018 |
| }, |
| { |
| "epoch": 2.4212, |
| "grad_norm": 0.5509389015682687, |
| "learning_rate": 1.0980006756952882e-06, |
| "loss": 0.3637, |
| "step": 2019 |
| }, |
| { |
| "epoch": 2.4224, |
| "grad_norm": 0.5577993072539054, |
| "learning_rate": 1.0936411332428732e-06, |
| "loss": 0.2761, |
| "step": 2020 |
| }, |
| { |
| "epoch": 2.4236, |
| "grad_norm": 0.47620274555170333, |
| "learning_rate": 1.089289199672004e-06, |
| "loss": 0.3058, |
| "step": 2021 |
| }, |
| { |
| "epoch": 2.4248, |
| "grad_norm": 0.4872285887781436, |
| "learning_rate": 1.084944883459464e-06, |
| "loss": 0.3357, |
| "step": 2022 |
| }, |
| { |
| "epoch": 2.426, |
| "grad_norm": 0.6092064550019151, |
| "learning_rate": 1.0806081930671947e-06, |
| "loss": 0.3375, |
| "step": 2023 |
| }, |
| { |
| "epoch": 2.4272, |
| "grad_norm": 0.45105808930779195, |
| "learning_rate": 1.0762791369422838e-06, |
| "loss": 0.3343, |
| "step": 2024 |
| }, |
| { |
| "epoch": 2.4284, |
| "grad_norm": 0.5673808224464912, |
| "learning_rate": 1.0719577235169537e-06, |
| "loss": 0.3471, |
| "step": 2025 |
| }, |
| { |
| "epoch": 2.4295999999999998, |
| "grad_norm": 0.39845689327313377, |
| "learning_rate": 1.0676439612085353e-06, |
| "loss": 0.2865, |
| "step": 2026 |
| }, |
| { |
| "epoch": 2.4308, |
| "grad_norm": 0.4922044208177292, |
| "learning_rate": 1.0633378584194593e-06, |
| "loss": 0.3436, |
| "step": 2027 |
| }, |
| { |
| "epoch": 2.432, |
| "grad_norm": 0.48469669953570305, |
| "learning_rate": 1.059039423537237e-06, |
| "loss": 0.3211, |
| "step": 2028 |
| }, |
| { |
| "epoch": 2.4332, |
| "grad_norm": 0.47164823939079553, |
| "learning_rate": 1.054748664934443e-06, |
| "loss": 0.3072, |
| "step": 2029 |
| }, |
| { |
| "epoch": 2.4344, |
| "grad_norm": 0.5096766564767695, |
| "learning_rate": 1.0504655909686978e-06, |
| "loss": 0.2921, |
| "step": 2030 |
| }, |
| { |
| "epoch": 2.4356, |
| "grad_norm": 0.5908518033772256, |
| "learning_rate": 1.0461902099826577e-06, |
| "loss": 0.2869, |
| "step": 2031 |
| }, |
| { |
| "epoch": 2.4368, |
| "grad_norm": 0.434773091611653, |
| "learning_rate": 1.0419225303039943e-06, |
| "loss": 0.3247, |
| "step": 2032 |
| }, |
| { |
| "epoch": 2.438, |
| "grad_norm": 0.46572147262123414, |
| "learning_rate": 1.0376625602453733e-06, |
| "loss": 0.2892, |
| "step": 2033 |
| }, |
| { |
| "epoch": 2.4392, |
| "grad_norm": 0.46387952563383317, |
| "learning_rate": 1.0334103081044504e-06, |
| "loss": 0.2989, |
| "step": 2034 |
| }, |
| { |
| "epoch": 2.4404, |
| "grad_norm": 0.431710967434786, |
| "learning_rate": 1.0291657821638435e-06, |
| "loss": 0.3158, |
| "step": 2035 |
| }, |
| { |
| "epoch": 2.4416, |
| "grad_norm": 0.5109671398031306, |
| "learning_rate": 1.024928990691121e-06, |
| "loss": 0.3335, |
| "step": 2036 |
| }, |
| { |
| "epoch": 2.4428, |
| "grad_norm": 0.5032323659686736, |
| "learning_rate": 1.0206999419387881e-06, |
| "loss": 0.3047, |
| "step": 2037 |
| }, |
| { |
| "epoch": 2.444, |
| "grad_norm": 0.6420893900320955, |
| "learning_rate": 1.0164786441442698e-06, |
| "loss": 0.3465, |
| "step": 2038 |
| }, |
| { |
| "epoch": 2.4452, |
| "grad_norm": 0.45454109064614695, |
| "learning_rate": 1.0122651055298898e-06, |
| "loss": 0.3011, |
| "step": 2039 |
| }, |
| { |
| "epoch": 2.4464, |
| "grad_norm": 0.5729786871162398, |
| "learning_rate": 1.0080593343028621e-06, |
| "loss": 0.32, |
| "step": 2040 |
| }, |
| { |
| "epoch": 2.4476, |
| "grad_norm": 0.44189500277710014, |
| "learning_rate": 1.0038613386552687e-06, |
| "loss": 0.345, |
| "step": 2041 |
| }, |
| { |
| "epoch": 2.4488, |
| "grad_norm": 0.5957486603332705, |
| "learning_rate": 9.996711267640451e-07, |
| "loss": 0.3576, |
| "step": 2042 |
| }, |
| { |
| "epoch": 2.45, |
| "grad_norm": 0.43553588307583163, |
| "learning_rate": 9.95488706790969e-07, |
| "loss": 0.2917, |
| "step": 2043 |
| }, |
| { |
| "epoch": 2.4512, |
| "grad_norm": 0.4703442233300642, |
| "learning_rate": 9.913140868826405e-07, |
| "loss": 0.2682, |
| "step": 2044 |
| }, |
| { |
| "epoch": 2.4524, |
| "grad_norm": 0.5954017228399653, |
| "learning_rate": 9.871472751704625e-07, |
| "loss": 0.3111, |
| "step": 2045 |
| }, |
| { |
| "epoch": 2.4536, |
| "grad_norm": 0.4958235772248686, |
| "learning_rate": 9.829882797706336e-07, |
| "loss": 0.3283, |
| "step": 2046 |
| }, |
| { |
| "epoch": 2.4548, |
| "grad_norm": 0.5337507765326563, |
| "learning_rate": 9.788371087841236e-07, |
| "loss": 0.3177, |
| "step": 2047 |
| }, |
| { |
| "epoch": 2.456, |
| "grad_norm": 0.45094450039509193, |
| "learning_rate": 9.74693770296667e-07, |
| "loss": 0.3107, |
| "step": 2048 |
| }, |
| { |
| "epoch": 2.4572, |
| "grad_norm": 0.47640902644388333, |
| "learning_rate": 9.705582723787348e-07, |
| "loss": 0.3105, |
| "step": 2049 |
| }, |
| { |
| "epoch": 2.4584, |
| "grad_norm": 0.5551540280130849, |
| "learning_rate": 9.664306230855342e-07, |
| "loss": 0.3121, |
| "step": 2050 |
| }, |
| { |
| "epoch": 2.4596, |
| "grad_norm": 0.5296141106730923, |
| "learning_rate": 9.623108304569783e-07, |
| "loss": 0.3084, |
| "step": 2051 |
| }, |
| { |
| "epoch": 2.4608, |
| "grad_norm": 0.4503517127076986, |
| "learning_rate": 9.58198902517678e-07, |
| "loss": 0.2762, |
| "step": 2052 |
| }, |
| { |
| "epoch": 2.462, |
| "grad_norm": 0.49176682948014766, |
| "learning_rate": 9.540948472769278e-07, |
| "loss": 0.3189, |
| "step": 2053 |
| }, |
| { |
| "epoch": 2.4632, |
| "grad_norm": 0.5479259942541452, |
| "learning_rate": 9.499986727286869e-07, |
| "loss": 0.334, |
| "step": 2054 |
| }, |
| { |
| "epoch": 2.4644, |
| "grad_norm": 0.48701373761857364, |
| "learning_rate": 9.459103868515618e-07, |
| "loss": 0.2987, |
| "step": 2055 |
| }, |
| { |
| "epoch": 2.4656000000000002, |
| "grad_norm": 0.5904824064092714, |
| "learning_rate": 9.418299976087964e-07, |
| "loss": 0.314, |
| "step": 2056 |
| }, |
| { |
| "epoch": 2.4668, |
| "grad_norm": 0.5119778019908812, |
| "learning_rate": 9.377575129482513e-07, |
| "loss": 0.299, |
| "step": 2057 |
| }, |
| { |
| "epoch": 2.468, |
| "grad_norm": 0.5037314727759037, |
| "learning_rate": 9.336929408023887e-07, |
| "loss": 0.3288, |
| "step": 2058 |
| }, |
| { |
| "epoch": 2.4692, |
| "grad_norm": 0.45996754162030723, |
| "learning_rate": 9.29636289088266e-07, |
| "loss": 0.3327, |
| "step": 2059 |
| }, |
| { |
| "epoch": 2.4704, |
| "grad_norm": 0.5661953382786764, |
| "learning_rate": 9.255875657075053e-07, |
| "loss": 0.3223, |
| "step": 2060 |
| }, |
| { |
| "epoch": 2.4716, |
| "grad_norm": 0.39589800407491577, |
| "learning_rate": 9.215467785462873e-07, |
| "loss": 0.3084, |
| "step": 2061 |
| }, |
| { |
| "epoch": 2.4728, |
| "grad_norm": 0.4707799591645335, |
| "learning_rate": 9.175139354753382e-07, |
| "loss": 0.3077, |
| "step": 2062 |
| }, |
| { |
| "epoch": 2.474, |
| "grad_norm": 0.5658672639000812, |
| "learning_rate": 9.134890443499068e-07, |
| "loss": 0.3339, |
| "step": 2063 |
| }, |
| { |
| "epoch": 2.4752, |
| "grad_norm": 0.5072807402522693, |
| "learning_rate": 9.094721130097517e-07, |
| "loss": 0.3212, |
| "step": 2064 |
| }, |
| { |
| "epoch": 2.4764, |
| "grad_norm": 0.5364063819378891, |
| "learning_rate": 9.054631492791344e-07, |
| "loss": 0.3138, |
| "step": 2065 |
| }, |
| { |
| "epoch": 2.4776, |
| "grad_norm": 0.7490696104573091, |
| "learning_rate": 9.014621609667896e-07, |
| "loss": 0.343, |
| "step": 2066 |
| }, |
| { |
| "epoch": 2.4788, |
| "grad_norm": 0.5640758366462366, |
| "learning_rate": 8.974691558659187e-07, |
| "loss": 0.3108, |
| "step": 2067 |
| }, |
| { |
| "epoch": 2.48, |
| "grad_norm": 0.5379307007062987, |
| "learning_rate": 8.934841417541767e-07, |
| "loss": 0.3131, |
| "step": 2068 |
| }, |
| { |
| "epoch": 2.4812, |
| "grad_norm": 0.6205649567389456, |
| "learning_rate": 8.8950712639365e-07, |
| "loss": 0.3468, |
| "step": 2069 |
| }, |
| { |
| "epoch": 2.4824, |
| "grad_norm": 0.4531821748984028, |
| "learning_rate": 8.855381175308475e-07, |
| "loss": 0.326, |
| "step": 2070 |
| }, |
| { |
| "epoch": 2.4836, |
| "grad_norm": 0.5276488030607475, |
| "learning_rate": 8.815771228966796e-07, |
| "loss": 0.3581, |
| "step": 2071 |
| }, |
| { |
| "epoch": 2.4848, |
| "grad_norm": 0.5484540605204118, |
| "learning_rate": 8.776241502064508e-07, |
| "loss": 0.3439, |
| "step": 2072 |
| }, |
| { |
| "epoch": 2.4859999999999998, |
| "grad_norm": 0.5539951976069447, |
| "learning_rate": 8.736792071598355e-07, |
| "loss": 0.3129, |
| "step": 2073 |
| }, |
| { |
| "epoch": 2.4872, |
| "grad_norm": 0.43894121359141824, |
| "learning_rate": 8.697423014408718e-07, |
| "loss": 0.3137, |
| "step": 2074 |
| }, |
| { |
| "epoch": 2.4884, |
| "grad_norm": 0.500242458968423, |
| "learning_rate": 8.658134407179419e-07, |
| "loss": 0.2873, |
| "step": 2075 |
| }, |
| { |
| "epoch": 2.4896, |
| "grad_norm": 0.430036684418572, |
| "learning_rate": 8.61892632643756e-07, |
| "loss": 0.3149, |
| "step": 2076 |
| }, |
| { |
| "epoch": 2.4908, |
| "grad_norm": 0.5133263436266973, |
| "learning_rate": 8.579798848553389e-07, |
| "loss": 0.3572, |
| "step": 2077 |
| }, |
| { |
| "epoch": 2.492, |
| "grad_norm": 0.48514597886695165, |
| "learning_rate": 8.540752049740181e-07, |
| "loss": 0.3061, |
| "step": 2078 |
| }, |
| { |
| "epoch": 2.4932, |
| "grad_norm": 0.56251703182634, |
| "learning_rate": 8.501786006054047e-07, |
| "loss": 0.3312, |
| "step": 2079 |
| }, |
| { |
| "epoch": 2.4944, |
| "grad_norm": 0.5474216931160197, |
| "learning_rate": 8.462900793393775e-07, |
| "loss": 0.3163, |
| "step": 2080 |
| }, |
| { |
| "epoch": 2.4956, |
| "grad_norm": 0.43448486787857776, |
| "learning_rate": 8.424096487500777e-07, |
| "loss": 0.3229, |
| "step": 2081 |
| }, |
| { |
| "epoch": 2.4968, |
| "grad_norm": 0.47897062850634076, |
| "learning_rate": 8.385373163958821e-07, |
| "loss": 0.3588, |
| "step": 2082 |
| }, |
| { |
| "epoch": 2.498, |
| "grad_norm": 0.47095621511292696, |
| "learning_rate": 8.346730898193928e-07, |
| "loss": 0.3344, |
| "step": 2083 |
| }, |
| { |
| "epoch": 2.4992, |
| "grad_norm": 0.43858239978498487, |
| "learning_rate": 8.308169765474278e-07, |
| "loss": 0.343, |
| "step": 2084 |
| }, |
| { |
| "epoch": 2.5004, |
| "grad_norm": 0.5939243267036994, |
| "learning_rate": 8.269689840909967e-07, |
| "loss": 0.3472, |
| "step": 2085 |
| }, |
| { |
| "epoch": 2.5016, |
| "grad_norm": 0.5357699409881242, |
| "learning_rate": 8.231291199452956e-07, |
| "loss": 0.3001, |
| "step": 2086 |
| }, |
| { |
| "epoch": 2.5028, |
| "grad_norm": 0.5250204364175717, |
| "learning_rate": 8.192973915896868e-07, |
| "loss": 0.3625, |
| "step": 2087 |
| }, |
| { |
| "epoch": 2.504, |
| "grad_norm": 0.4470480707133538, |
| "learning_rate": 8.154738064876843e-07, |
| "loss": 0.3383, |
| "step": 2088 |
| }, |
| { |
| "epoch": 2.5052, |
| "grad_norm": 0.5450729961924072, |
| "learning_rate": 8.116583720869398e-07, |
| "loss": 0.3155, |
| "step": 2089 |
| }, |
| { |
| "epoch": 2.5064, |
| "grad_norm": 0.47490875408006045, |
| "learning_rate": 8.078510958192337e-07, |
| "loss": 0.2995, |
| "step": 2090 |
| }, |
| { |
| "epoch": 2.5076, |
| "grad_norm": 0.5372639556416108, |
| "learning_rate": 8.040519851004492e-07, |
| "loss": 0.3129, |
| "step": 2091 |
| }, |
| { |
| "epoch": 2.5088, |
| "grad_norm": 0.46359050540817753, |
| "learning_rate": 8.002610473305688e-07, |
| "loss": 0.3292, |
| "step": 2092 |
| }, |
| { |
| "epoch": 2.51, |
| "grad_norm": 0.4796323163330126, |
| "learning_rate": 7.964782898936569e-07, |
| "loss": 0.3354, |
| "step": 2093 |
| }, |
| { |
| "epoch": 2.5112, |
| "grad_norm": 0.5208612693160106, |
| "learning_rate": 7.927037201578397e-07, |
| "loss": 0.3309, |
| "step": 2094 |
| }, |
| { |
| "epoch": 2.5124, |
| "grad_norm": 0.47838743726112654, |
| "learning_rate": 7.889373454752964e-07, |
| "loss": 0.3281, |
| "step": 2095 |
| }, |
| { |
| "epoch": 2.5136, |
| "grad_norm": 0.5320773076821897, |
| "learning_rate": 7.851791731822461e-07, |
| "loss": 0.3542, |
| "step": 2096 |
| }, |
| { |
| "epoch": 2.5148, |
| "grad_norm": 0.49702028419375943, |
| "learning_rate": 7.814292105989308e-07, |
| "loss": 0.3168, |
| "step": 2097 |
| }, |
| { |
| "epoch": 2.516, |
| "grad_norm": 0.5788902384801881, |
| "learning_rate": 7.776874650295984e-07, |
| "loss": 0.3037, |
| "step": 2098 |
| }, |
| { |
| "epoch": 2.5172, |
| "grad_norm": 0.43347682146682476, |
| "learning_rate": 7.739539437624933e-07, |
| "loss": 0.3132, |
| "step": 2099 |
| }, |
| { |
| "epoch": 2.5183999999999997, |
| "grad_norm": 0.5138448794184438, |
| "learning_rate": 7.702286540698417e-07, |
| "loss": 0.3049, |
| "step": 2100 |
| }, |
| { |
| "epoch": 2.5196, |
| "grad_norm": 0.47716781325402874, |
| "learning_rate": 7.665116032078346e-07, |
| "loss": 0.2931, |
| "step": 2101 |
| }, |
| { |
| "epoch": 2.5208, |
| "grad_norm": 0.3987044585261873, |
| "learning_rate": 7.628027984166153e-07, |
| "loss": 0.344, |
| "step": 2102 |
| }, |
| { |
| "epoch": 2.5220000000000002, |
| "grad_norm": 0.6009158876364648, |
| "learning_rate": 7.591022469202675e-07, |
| "loss": 0.3317, |
| "step": 2103 |
| }, |
| { |
| "epoch": 2.5232, |
| "grad_norm": 0.5084696612857849, |
| "learning_rate": 7.554099559267964e-07, |
| "loss": 0.298, |
| "step": 2104 |
| }, |
| { |
| "epoch": 2.5244, |
| "grad_norm": 0.5239081316589891, |
| "learning_rate": 7.517259326281157e-07, |
| "loss": 0.3244, |
| "step": 2105 |
| }, |
| { |
| "epoch": 2.5256, |
| "grad_norm": 0.4583710076541313, |
| "learning_rate": 7.480501842000404e-07, |
| "loss": 0.2939, |
| "step": 2106 |
| }, |
| { |
| "epoch": 2.5268, |
| "grad_norm": 0.4979847876868833, |
| "learning_rate": 7.443827178022628e-07, |
| "loss": 0.3132, |
| "step": 2107 |
| }, |
| { |
| "epoch": 2.528, |
| "grad_norm": 0.5304581631546377, |
| "learning_rate": 7.407235405783453e-07, |
| "loss": 0.287, |
| "step": 2108 |
| }, |
| { |
| "epoch": 2.5292, |
| "grad_norm": 0.5560997292562039, |
| "learning_rate": 7.370726596557059e-07, |
| "loss": 0.3329, |
| "step": 2109 |
| }, |
| { |
| "epoch": 2.5304, |
| "grad_norm": 0.5419001893194091, |
| "learning_rate": 7.334300821455998e-07, |
| "loss": 0.3251, |
| "step": 2110 |
| }, |
| { |
| "epoch": 2.5316, |
| "grad_norm": 0.4762163668078043, |
| "learning_rate": 7.297958151431094e-07, |
| "loss": 0.313, |
| "step": 2111 |
| }, |
| { |
| "epoch": 2.5328, |
| "grad_norm": 0.5471030007881764, |
| "learning_rate": 7.26169865727131e-07, |
| "loss": 0.3096, |
| "step": 2112 |
| }, |
| { |
| "epoch": 2.534, |
| "grad_norm": 0.4859920497046962, |
| "learning_rate": 7.225522409603608e-07, |
| "loss": 0.307, |
| "step": 2113 |
| }, |
| { |
| "epoch": 2.5352, |
| "grad_norm": 0.46802572251033253, |
| "learning_rate": 7.189429478892762e-07, |
| "loss": 0.3221, |
| "step": 2114 |
| }, |
| { |
| "epoch": 2.5364, |
| "grad_norm": 0.5019363613852679, |
| "learning_rate": 7.153419935441303e-07, |
| "loss": 0.3353, |
| "step": 2115 |
| }, |
| { |
| "epoch": 2.5376, |
| "grad_norm": 0.5306887115275352, |
| "learning_rate": 7.117493849389306e-07, |
| "loss": 0.3121, |
| "step": 2116 |
| }, |
| { |
| "epoch": 2.5388, |
| "grad_norm": 0.4171241804844174, |
| "learning_rate": 7.081651290714287e-07, |
| "loss": 0.3156, |
| "step": 2117 |
| }, |
| { |
| "epoch": 2.54, |
| "grad_norm": 0.5238443225363039, |
| "learning_rate": 7.045892329231086e-07, |
| "loss": 0.3228, |
| "step": 2118 |
| }, |
| { |
| "epoch": 2.5412, |
| "grad_norm": 0.5814868599249048, |
| "learning_rate": 7.010217034591721e-07, |
| "loss": 0.3493, |
| "step": 2119 |
| }, |
| { |
| "epoch": 2.5423999999999998, |
| "grad_norm": 0.5437026734195148, |
| "learning_rate": 6.974625476285191e-07, |
| "loss": 0.3385, |
| "step": 2120 |
| }, |
| { |
| "epoch": 2.5436, |
| "grad_norm": 0.6691095297956318, |
| "learning_rate": 6.93911772363745e-07, |
| "loss": 0.3557, |
| "step": 2121 |
| }, |
| { |
| "epoch": 2.5448, |
| "grad_norm": 0.5019767886763458, |
| "learning_rate": 6.903693845811176e-07, |
| "loss": 0.3436, |
| "step": 2122 |
| }, |
| { |
| "epoch": 2.5460000000000003, |
| "grad_norm": 0.48356310492003735, |
| "learning_rate": 6.86835391180567e-07, |
| "loss": 0.3358, |
| "step": 2123 |
| }, |
| { |
| "epoch": 2.5472, |
| "grad_norm": 0.5094244777532211, |
| "learning_rate": 6.833097990456761e-07, |
| "loss": 0.3039, |
| "step": 2124 |
| }, |
| { |
| "epoch": 2.5484, |
| "grad_norm": 0.459217234761272, |
| "learning_rate": 6.797926150436618e-07, |
| "loss": 0.2956, |
| "step": 2125 |
| }, |
| { |
| "epoch": 2.5496, |
| "grad_norm": 0.4472215759109841, |
| "learning_rate": 6.762838460253629e-07, |
| "loss": 0.2976, |
| "step": 2126 |
| }, |
| { |
| "epoch": 2.5507999999999997, |
| "grad_norm": 0.5277980736149517, |
| "learning_rate": 6.727834988252258e-07, |
| "loss": 0.326, |
| "step": 2127 |
| }, |
| { |
| "epoch": 2.552, |
| "grad_norm": 0.5853107231529425, |
| "learning_rate": 6.692915802612965e-07, |
| "loss": 0.278, |
| "step": 2128 |
| }, |
| { |
| "epoch": 2.5532, |
| "grad_norm": 0.43077774106221967, |
| "learning_rate": 6.658080971352026e-07, |
| "loss": 0.3112, |
| "step": 2129 |
| }, |
| { |
| "epoch": 2.5544000000000002, |
| "grad_norm": 0.5099625444460486, |
| "learning_rate": 6.623330562321378e-07, |
| "loss": 0.3486, |
| "step": 2130 |
| }, |
| { |
| "epoch": 2.5556, |
| "grad_norm": 0.5749327138836842, |
| "learning_rate": 6.588664643208559e-07, |
| "loss": 0.3064, |
| "step": 2131 |
| }, |
| { |
| "epoch": 2.5568, |
| "grad_norm": 0.47739858914285005, |
| "learning_rate": 6.554083281536516e-07, |
| "loss": 0.3224, |
| "step": 2132 |
| }, |
| { |
| "epoch": 2.558, |
| "grad_norm": 0.5140359142056293, |
| "learning_rate": 6.519586544663481e-07, |
| "loss": 0.2938, |
| "step": 2133 |
| }, |
| { |
| "epoch": 2.5592, |
| "grad_norm": 0.456904859506897, |
| "learning_rate": 6.485174499782876e-07, |
| "loss": 0.3425, |
| "step": 2134 |
| }, |
| { |
| "epoch": 2.5604, |
| "grad_norm": 0.5122662847914252, |
| "learning_rate": 6.450847213923162e-07, |
| "loss": 0.3162, |
| "step": 2135 |
| }, |
| { |
| "epoch": 2.5616, |
| "grad_norm": 0.5141807606724946, |
| "learning_rate": 6.416604753947675e-07, |
| "loss": 0.3396, |
| "step": 2136 |
| }, |
| { |
| "epoch": 2.5628, |
| "grad_norm": 0.5342014748603048, |
| "learning_rate": 6.382447186554553e-07, |
| "loss": 0.3093, |
| "step": 2137 |
| }, |
| { |
| "epoch": 2.564, |
| "grad_norm": 0.4997597150358832, |
| "learning_rate": 6.348374578276567e-07, |
| "loss": 0.3159, |
| "step": 2138 |
| }, |
| { |
| "epoch": 2.5652, |
| "grad_norm": 0.5884891445466817, |
| "learning_rate": 6.314386995480987e-07, |
| "loss": 0.3148, |
| "step": 2139 |
| }, |
| { |
| "epoch": 2.5664, |
| "grad_norm": 0.5441867009250919, |
| "learning_rate": 6.280484504369505e-07, |
| "loss": 0.3186, |
| "step": 2140 |
| }, |
| { |
| "epoch": 2.5676, |
| "grad_norm": 0.4516493539423153, |
| "learning_rate": 6.246667170978049e-07, |
| "loss": 0.3612, |
| "step": 2141 |
| }, |
| { |
| "epoch": 2.5688, |
| "grad_norm": 0.5084051121542065, |
| "learning_rate": 6.212935061176667e-07, |
| "loss": 0.2953, |
| "step": 2142 |
| }, |
| { |
| "epoch": 2.57, |
| "grad_norm": 0.5877296146705292, |
| "learning_rate": 6.179288240669429e-07, |
| "loss": 0.3559, |
| "step": 2143 |
| }, |
| { |
| "epoch": 2.5712, |
| "grad_norm": 0.5558192685394937, |
| "learning_rate": 6.14572677499426e-07, |
| "loss": 0.3191, |
| "step": 2144 |
| }, |
| { |
| "epoch": 2.5724, |
| "grad_norm": 0.5012538516660345, |
| "learning_rate": 6.112250729522823e-07, |
| "loss": 0.2866, |
| "step": 2145 |
| }, |
| { |
| "epoch": 2.5736, |
| "grad_norm": 0.5217614598335951, |
| "learning_rate": 6.078860169460416e-07, |
| "loss": 0.2985, |
| "step": 2146 |
| }, |
| { |
| "epoch": 2.5747999999999998, |
| "grad_norm": 0.4496361197136127, |
| "learning_rate": 6.045555159845828e-07, |
| "loss": 0.3082, |
| "step": 2147 |
| }, |
| { |
| "epoch": 2.576, |
| "grad_norm": 0.5288177340496564, |
| "learning_rate": 6.012335765551186e-07, |
| "loss": 0.3197, |
| "step": 2148 |
| }, |
| { |
| "epoch": 2.5772, |
| "grad_norm": 0.5440414858731661, |
| "learning_rate": 5.979202051281891e-07, |
| "loss": 0.351, |
| "step": 2149 |
| }, |
| { |
| "epoch": 2.5784000000000002, |
| "grad_norm": 0.5361965970453445, |
| "learning_rate": 5.946154081576411e-07, |
| "loss": 0.3013, |
| "step": 2150 |
| }, |
| { |
| "epoch": 2.5796, |
| "grad_norm": 0.4975790271997051, |
| "learning_rate": 5.913191920806244e-07, |
| "loss": 0.3108, |
| "step": 2151 |
| }, |
| { |
| "epoch": 2.5808, |
| "grad_norm": 0.6761947196756368, |
| "learning_rate": 5.880315633175704e-07, |
| "loss": 0.3161, |
| "step": 2152 |
| }, |
| { |
| "epoch": 2.582, |
| "grad_norm": 0.5129758578390546, |
| "learning_rate": 5.847525282721883e-07, |
| "loss": 0.3235, |
| "step": 2153 |
| }, |
| { |
| "epoch": 2.5832, |
| "grad_norm": 0.5057608403560111, |
| "learning_rate": 5.814820933314446e-07, |
| "loss": 0.3091, |
| "step": 2154 |
| }, |
| { |
| "epoch": 2.5844, |
| "grad_norm": 0.5027515285537514, |
| "learning_rate": 5.78220264865555e-07, |
| "loss": 0.3229, |
| "step": 2155 |
| }, |
| { |
| "epoch": 2.5856, |
| "grad_norm": 0.4960339355194685, |
| "learning_rate": 5.749670492279757e-07, |
| "loss": 0.3336, |
| "step": 2156 |
| }, |
| { |
| "epoch": 2.5868, |
| "grad_norm": 0.5695628614608653, |
| "learning_rate": 5.717224527553811e-07, |
| "loss": 0.3282, |
| "step": 2157 |
| }, |
| { |
| "epoch": 2.588, |
| "grad_norm": 0.45658740631386857, |
| "learning_rate": 5.684864817676583e-07, |
| "loss": 0.3212, |
| "step": 2158 |
| }, |
| { |
| "epoch": 2.5892, |
| "grad_norm": 0.4714820843044785, |
| "learning_rate": 5.65259142567896e-07, |
| "loss": 0.3287, |
| "step": 2159 |
| }, |
| { |
| "epoch": 2.5904, |
| "grad_norm": 0.4845192136259272, |
| "learning_rate": 5.620404414423674e-07, |
| "loss": 0.3383, |
| "step": 2160 |
| }, |
| { |
| "epoch": 2.5916, |
| "grad_norm": 0.5099050915537449, |
| "learning_rate": 5.588303846605187e-07, |
| "loss": 0.3468, |
| "step": 2161 |
| }, |
| { |
| "epoch": 2.5928, |
| "grad_norm": 0.5348537812273993, |
| "learning_rate": 5.556289784749653e-07, |
| "loss": 0.2881, |
| "step": 2162 |
| }, |
| { |
| "epoch": 2.594, |
| "grad_norm": 0.48728381677988253, |
| "learning_rate": 5.524362291214652e-07, |
| "loss": 0.2991, |
| "step": 2163 |
| }, |
| { |
| "epoch": 2.5952, |
| "grad_norm": 0.46169914830110426, |
| "learning_rate": 5.492521428189179e-07, |
| "loss": 0.3198, |
| "step": 2164 |
| }, |
| { |
| "epoch": 2.5964, |
| "grad_norm": 0.4922362789271303, |
| "learning_rate": 5.460767257693489e-07, |
| "loss": 0.2959, |
| "step": 2165 |
| }, |
| { |
| "epoch": 2.5976, |
| "grad_norm": 0.6088704547745182, |
| "learning_rate": 5.429099841578966e-07, |
| "loss": 0.3211, |
| "step": 2166 |
| }, |
| { |
| "epoch": 2.5987999999999998, |
| "grad_norm": 0.5110850313521915, |
| "learning_rate": 5.397519241528026e-07, |
| "loss": 0.3377, |
| "step": 2167 |
| }, |
| { |
| "epoch": 2.6, |
| "grad_norm": 0.5724649889513147, |
| "learning_rate": 5.366025519053958e-07, |
| "loss": 0.3144, |
| "step": 2168 |
| }, |
| { |
| "epoch": 2.6012, |
| "grad_norm": 0.4709112775705695, |
| "learning_rate": 5.334618735500868e-07, |
| "loss": 0.3076, |
| "step": 2169 |
| }, |
| { |
| "epoch": 2.6024000000000003, |
| "grad_norm": 0.5228928468545195, |
| "learning_rate": 5.303298952043473e-07, |
| "loss": 0.3291, |
| "step": 2170 |
| }, |
| { |
| "epoch": 2.6036, |
| "grad_norm": 0.5740694244625357, |
| "learning_rate": 5.272066229687078e-07, |
| "loss": 0.3356, |
| "step": 2171 |
| }, |
| { |
| "epoch": 2.6048, |
| "grad_norm": 0.4470914388889558, |
| "learning_rate": 5.24092062926736e-07, |
| "loss": 0.3396, |
| "step": 2172 |
| }, |
| { |
| "epoch": 2.606, |
| "grad_norm": 0.5051172194547626, |
| "learning_rate": 5.209862211450351e-07, |
| "loss": 0.3434, |
| "step": 2173 |
| }, |
| { |
| "epoch": 2.6071999999999997, |
| "grad_norm": 0.49743096722575747, |
| "learning_rate": 5.17889103673222e-07, |
| "loss": 0.314, |
| "step": 2174 |
| }, |
| { |
| "epoch": 2.6084, |
| "grad_norm": 0.42926620449426334, |
| "learning_rate": 5.148007165439234e-07, |
| "loss": 0.3041, |
| "step": 2175 |
| }, |
| { |
| "epoch": 2.6096, |
| "grad_norm": 0.5297863445354456, |
| "learning_rate": 5.117210657727589e-07, |
| "loss": 0.3153, |
| "step": 2176 |
| }, |
| { |
| "epoch": 2.6108000000000002, |
| "grad_norm": 0.5751473879165507, |
| "learning_rate": 5.086501573583302e-07, |
| "loss": 0.3164, |
| "step": 2177 |
| }, |
| { |
| "epoch": 2.612, |
| "grad_norm": 0.49512626673277416, |
| "learning_rate": 5.055879972822164e-07, |
| "loss": 0.309, |
| "step": 2178 |
| }, |
| { |
| "epoch": 2.6132, |
| "grad_norm": 0.45728478789306254, |
| "learning_rate": 5.025345915089497e-07, |
| "loss": 0.3019, |
| "step": 2179 |
| }, |
| { |
| "epoch": 2.6144, |
| "grad_norm": 0.6727936896653166, |
| "learning_rate": 4.994899459860125e-07, |
| "loss": 0.3238, |
| "step": 2180 |
| }, |
| { |
| "epoch": 2.6156, |
| "grad_norm": 0.5716215767054986, |
| "learning_rate": 4.964540666438261e-07, |
| "loss": 0.2843, |
| "step": 2181 |
| }, |
| { |
| "epoch": 2.6168, |
| "grad_norm": 0.5098447409370542, |
| "learning_rate": 4.934269593957336e-07, |
| "loss": 0.3484, |
| "step": 2182 |
| }, |
| { |
| "epoch": 2.618, |
| "grad_norm": 0.5093018487176464, |
| "learning_rate": 4.90408630137994e-07, |
| "loss": 0.3431, |
| "step": 2183 |
| }, |
| { |
| "epoch": 2.6192, |
| "grad_norm": 0.4840635587660347, |
| "learning_rate": 4.873990847497684e-07, |
| "loss": 0.3126, |
| "step": 2184 |
| }, |
| { |
| "epoch": 2.6204, |
| "grad_norm": 0.5082242061141022, |
| "learning_rate": 4.843983290931064e-07, |
| "loss": 0.2969, |
| "step": 2185 |
| }, |
| { |
| "epoch": 2.6216, |
| "grad_norm": 0.530976128386319, |
| "learning_rate": 4.814063690129378e-07, |
| "loss": 0.343, |
| "step": 2186 |
| }, |
| { |
| "epoch": 2.6228, |
| "grad_norm": 0.6016390612143504, |
| "learning_rate": 4.784232103370617e-07, |
| "loss": 0.3294, |
| "step": 2187 |
| }, |
| { |
| "epoch": 2.624, |
| "grad_norm": 0.556223638359762, |
| "learning_rate": 4.7544885887613136e-07, |
| "loss": 0.3379, |
| "step": 2188 |
| }, |
| { |
| "epoch": 2.6252, |
| "grad_norm": 0.54268125037195, |
| "learning_rate": 4.724833204236462e-07, |
| "loss": 0.3331, |
| "step": 2189 |
| }, |
| { |
| "epoch": 2.6264, |
| "grad_norm": 0.48122211227489187, |
| "learning_rate": 4.695266007559407e-07, |
| "loss": 0.3071, |
| "step": 2190 |
| }, |
| { |
| "epoch": 2.6276, |
| "grad_norm": 0.45238477752729517, |
| "learning_rate": 4.6657870563217076e-07, |
| "loss": 0.3218, |
| "step": 2191 |
| }, |
| { |
| "epoch": 2.6288, |
| "grad_norm": 0.5379409443968398, |
| "learning_rate": 4.6363964079430166e-07, |
| "loss": 0.3314, |
| "step": 2192 |
| }, |
| { |
| "epoch": 2.63, |
| "grad_norm": 0.49881724473585415, |
| "learning_rate": 4.6070941196710186e-07, |
| "loss": 0.318, |
| "step": 2193 |
| }, |
| { |
| "epoch": 2.6311999999999998, |
| "grad_norm": 0.5073400286081319, |
| "learning_rate": 4.5778802485812956e-07, |
| "loss": 0.3141, |
| "step": 2194 |
| }, |
| { |
| "epoch": 2.6324, |
| "grad_norm": 0.4494142417800747, |
| "learning_rate": 4.548754851577175e-07, |
| "loss": 0.2828, |
| "step": 2195 |
| }, |
| { |
| "epoch": 2.6336, |
| "grad_norm": 0.4241794577035244, |
| "learning_rate": 4.5197179853896654e-07, |
| "loss": 0.2998, |
| "step": 2196 |
| }, |
| { |
| "epoch": 2.6348000000000003, |
| "grad_norm": 0.5113141807236002, |
| "learning_rate": 4.4907697065773523e-07, |
| "loss": 0.3286, |
| "step": 2197 |
| }, |
| { |
| "epoch": 2.636, |
| "grad_norm": 0.5055727152160692, |
| "learning_rate": 4.4619100715262374e-07, |
| "loss": 0.3036, |
| "step": 2198 |
| }, |
| { |
| "epoch": 2.6372, |
| "grad_norm": 0.44911584465721455, |
| "learning_rate": 4.4331391364496934e-07, |
| "loss": 0.3037, |
| "step": 2199 |
| }, |
| { |
| "epoch": 2.6384, |
| "grad_norm": 0.45094950311107046, |
| "learning_rate": 4.404456957388309e-07, |
| "loss": 0.3316, |
| "step": 2200 |
| }, |
| { |
| "epoch": 2.6395999999999997, |
| "grad_norm": 0.41455331969132403, |
| "learning_rate": 4.375863590209778e-07, |
| "loss": 0.323, |
| "step": 2201 |
| }, |
| { |
| "epoch": 2.6408, |
| "grad_norm": 0.43075888630028597, |
| "learning_rate": 4.3473590906088046e-07, |
| "loss": 0.3196, |
| "step": 2202 |
| }, |
| { |
| "epoch": 2.642, |
| "grad_norm": 0.5686147750139869, |
| "learning_rate": 4.3189435141070324e-07, |
| "loss": 0.3349, |
| "step": 2203 |
| }, |
| { |
| "epoch": 2.6432, |
| "grad_norm": 0.5689753507315711, |
| "learning_rate": 4.2906169160528424e-07, |
| "loss": 0.3011, |
| "step": 2204 |
| }, |
| { |
| "epoch": 2.6444, |
| "grad_norm": 0.5472134263806879, |
| "learning_rate": 4.262379351621354e-07, |
| "loss": 0.2809, |
| "step": 2205 |
| }, |
| { |
| "epoch": 2.6456, |
| "grad_norm": 0.5637048283007405, |
| "learning_rate": 4.2342308758142437e-07, |
| "loss": 0.2791, |
| "step": 2206 |
| }, |
| { |
| "epoch": 2.6468, |
| "grad_norm": 0.5038051098339148, |
| "learning_rate": 4.2061715434596475e-07, |
| "loss": 0.3214, |
| "step": 2207 |
| }, |
| { |
| "epoch": 2.648, |
| "grad_norm": 0.5582257157785934, |
| "learning_rate": 4.1782014092120735e-07, |
| "loss": 0.3465, |
| "step": 2208 |
| }, |
| { |
| "epoch": 2.6492, |
| "grad_norm": 0.4816975269500765, |
| "learning_rate": 4.150320527552304e-07, |
| "loss": 0.3351, |
| "step": 2209 |
| }, |
| { |
| "epoch": 2.6504, |
| "grad_norm": 0.6144407863119755, |
| "learning_rate": 4.122528952787258e-07, |
| "loss": 0.303, |
| "step": 2210 |
| }, |
| { |
| "epoch": 2.6516, |
| "grad_norm": 0.4690793878545897, |
| "learning_rate": 4.0948267390498953e-07, |
| "loss": 0.295, |
| "step": 2211 |
| }, |
| { |
| "epoch": 2.6528, |
| "grad_norm": 0.45771893866529245, |
| "learning_rate": 4.067213940299136e-07, |
| "loss": 0.2892, |
| "step": 2212 |
| }, |
| { |
| "epoch": 2.654, |
| "grad_norm": 0.5107864430619176, |
| "learning_rate": 4.0396906103197244e-07, |
| "loss": 0.3355, |
| "step": 2213 |
| }, |
| { |
| "epoch": 2.6552, |
| "grad_norm": 0.48459788169311224, |
| "learning_rate": 4.01225680272212e-07, |
| "loss": 0.3564, |
| "step": 2214 |
| }, |
| { |
| "epoch": 2.6564, |
| "grad_norm": 0.5223513258810136, |
| "learning_rate": 3.984912570942434e-07, |
| "loss": 0.3575, |
| "step": 2215 |
| }, |
| { |
| "epoch": 2.6576, |
| "grad_norm": 0.4748513216901385, |
| "learning_rate": 3.9576579682423066e-07, |
| "loss": 0.31, |
| "step": 2216 |
| }, |
| { |
| "epoch": 2.6588000000000003, |
| "grad_norm": 0.49092324085793265, |
| "learning_rate": 3.930493047708761e-07, |
| "loss": 0.3273, |
| "step": 2217 |
| }, |
| { |
| "epoch": 2.66, |
| "grad_norm": 0.5086589663869817, |
| "learning_rate": 3.903417862254172e-07, |
| "loss": 0.3037, |
| "step": 2218 |
| }, |
| { |
| "epoch": 2.6612, |
| "grad_norm": 0.5944392528245183, |
| "learning_rate": 3.876432464616103e-07, |
| "loss": 0.3102, |
| "step": 2219 |
| }, |
| { |
| "epoch": 2.6624, |
| "grad_norm": 0.43997132818776513, |
| "learning_rate": 3.8495369073572266e-07, |
| "loss": 0.2987, |
| "step": 2220 |
| }, |
| { |
| "epoch": 2.6635999999999997, |
| "grad_norm": 0.4479899231191285, |
| "learning_rate": 3.822731242865235e-07, |
| "loss": 0.307, |
| "step": 2221 |
| }, |
| { |
| "epoch": 2.6648, |
| "grad_norm": 0.49879617773556645, |
| "learning_rate": 3.7960155233527364e-07, |
| "loss": 0.2972, |
| "step": 2222 |
| }, |
| { |
| "epoch": 2.666, |
| "grad_norm": 0.4096782707041188, |
| "learning_rate": 3.7693898008571205e-07, |
| "loss": 0.3173, |
| "step": 2223 |
| }, |
| { |
| "epoch": 2.6672000000000002, |
| "grad_norm": 0.5370525570342256, |
| "learning_rate": 3.742854127240464e-07, |
| "loss": 0.3189, |
| "step": 2224 |
| }, |
| { |
| "epoch": 2.6684, |
| "grad_norm": 0.6088932928091623, |
| "learning_rate": 3.7164085541894937e-07, |
| "loss": 0.3435, |
| "step": 2225 |
| }, |
| { |
| "epoch": 2.6696, |
| "grad_norm": 0.5113014285512854, |
| "learning_rate": 3.690053133215399e-07, |
| "loss": 0.2883, |
| "step": 2226 |
| }, |
| { |
| "epoch": 2.6708, |
| "grad_norm": 0.5238135484660226, |
| "learning_rate": 3.663787915653777e-07, |
| "loss": 0.3635, |
| "step": 2227 |
| }, |
| { |
| "epoch": 2.672, |
| "grad_norm": 0.4478739081822928, |
| "learning_rate": 3.6376129526645376e-07, |
| "loss": 0.347, |
| "step": 2228 |
| }, |
| { |
| "epoch": 2.6732, |
| "grad_norm": 0.443318474124737, |
| "learning_rate": 3.6115282952317807e-07, |
| "loss": 0.3447, |
| "step": 2229 |
| }, |
| { |
| "epoch": 2.6744, |
| "grad_norm": 0.5298718770182731, |
| "learning_rate": 3.5855339941636867e-07, |
| "loss": 0.2945, |
| "step": 2230 |
| }, |
| { |
| "epoch": 2.6756, |
| "grad_norm": 0.5045580008727675, |
| "learning_rate": 3.5596301000924815e-07, |
| "loss": 0.3105, |
| "step": 2231 |
| }, |
| { |
| "epoch": 2.6768, |
| "grad_norm": 0.5605568435967557, |
| "learning_rate": 3.533816663474271e-07, |
| "loss": 0.297, |
| "step": 2232 |
| }, |
| { |
| "epoch": 2.678, |
| "grad_norm": 0.5631677023915352, |
| "learning_rate": 3.508093734588952e-07, |
| "loss": 0.3222, |
| "step": 2233 |
| }, |
| { |
| "epoch": 2.6792, |
| "grad_norm": 0.49016251569886216, |
| "learning_rate": 3.482461363540163e-07, |
| "loss": 0.2997, |
| "step": 2234 |
| }, |
| { |
| "epoch": 2.6804, |
| "grad_norm": 0.570641230845762, |
| "learning_rate": 3.456919600255126e-07, |
| "loss": 0.3159, |
| "step": 2235 |
| }, |
| { |
| "epoch": 2.6816, |
| "grad_norm": 0.5818993208808508, |
| "learning_rate": 3.4314684944845747e-07, |
| "loss": 0.2889, |
| "step": 2236 |
| }, |
| { |
| "epoch": 2.6828, |
| "grad_norm": 0.4895819240155071, |
| "learning_rate": 3.406108095802668e-07, |
| "loss": 0.3207, |
| "step": 2237 |
| }, |
| { |
| "epoch": 2.684, |
| "grad_norm": 0.4467165658918849, |
| "learning_rate": 3.3808384536068997e-07, |
| "loss": 0.3007, |
| "step": 2238 |
| }, |
| { |
| "epoch": 2.6852, |
| "grad_norm": 0.44346210912182843, |
| "learning_rate": 3.3556596171179455e-07, |
| "loss": 0.3345, |
| "step": 2239 |
| }, |
| { |
| "epoch": 2.6864, |
| "grad_norm": 0.4527063516873698, |
| "learning_rate": 3.3305716353796537e-07, |
| "loss": 0.3217, |
| "step": 2240 |
| }, |
| { |
| "epoch": 2.6875999999999998, |
| "grad_norm": 0.591329001450134, |
| "learning_rate": 3.305574557258867e-07, |
| "loss": 0.308, |
| "step": 2241 |
| }, |
| { |
| "epoch": 2.6888, |
| "grad_norm": 0.5253134786452384, |
| "learning_rate": 3.2806684314453774e-07, |
| "loss": 0.2952, |
| "step": 2242 |
| }, |
| { |
| "epoch": 2.69, |
| "grad_norm": 0.5211911228534106, |
| "learning_rate": 3.255853306451823e-07, |
| "loss": 0.3346, |
| "step": 2243 |
| }, |
| { |
| "epoch": 2.6912000000000003, |
| "grad_norm": 0.4965699355946691, |
| "learning_rate": 3.2311292306135944e-07, |
| "loss": 0.2861, |
| "step": 2244 |
| }, |
| { |
| "epoch": 2.6924, |
| "grad_norm": 0.4527295772355629, |
| "learning_rate": 3.2064962520887146e-07, |
| "loss": 0.3208, |
| "step": 2245 |
| }, |
| { |
| "epoch": 2.6936, |
| "grad_norm": 0.7191617099605793, |
| "learning_rate": 3.18195441885778e-07, |
| "loss": 0.3842, |
| "step": 2246 |
| }, |
| { |
| "epoch": 2.6948, |
| "grad_norm": 0.4660456167342279, |
| "learning_rate": 3.157503778723847e-07, |
| "loss": 0.3154, |
| "step": 2247 |
| }, |
| { |
| "epoch": 2.6959999999999997, |
| "grad_norm": 0.4581700106444103, |
| "learning_rate": 3.1331443793123585e-07, |
| "loss": 0.316, |
| "step": 2248 |
| }, |
| { |
| "epoch": 2.6972, |
| "grad_norm": 0.4910588872563203, |
| "learning_rate": 3.108876268071009e-07, |
| "loss": 0.3142, |
| "step": 2249 |
| }, |
| { |
| "epoch": 2.6984, |
| "grad_norm": 0.4636219243535772, |
| "learning_rate": 3.0846994922697104e-07, |
| "loss": 0.347, |
| "step": 2250 |
| }, |
| { |
| "epoch": 2.6996, |
| "grad_norm": 0.5034421859299472, |
| "learning_rate": 3.060614099000442e-07, |
| "loss": 0.3028, |
| "step": 2251 |
| }, |
| { |
| "epoch": 2.7008, |
| "grad_norm": 0.4912027303121192, |
| "learning_rate": 3.0366201351771983e-07, |
| "loss": 0.3035, |
| "step": 2252 |
| }, |
| { |
| "epoch": 2.702, |
| "grad_norm": 0.4533175883327802, |
| "learning_rate": 3.0127176475359065e-07, |
| "loss": 0.3072, |
| "step": 2253 |
| }, |
| { |
| "epoch": 2.7032, |
| "grad_norm": 0.5440528983007439, |
| "learning_rate": 2.988906682634285e-07, |
| "loss": 0.3108, |
| "step": 2254 |
| }, |
| { |
| "epoch": 2.7044, |
| "grad_norm": 0.4642521095472613, |
| "learning_rate": 2.965187286851784e-07, |
| "loss": 0.3107, |
| "step": 2255 |
| }, |
| { |
| "epoch": 2.7056, |
| "grad_norm": 0.5045589770404426, |
| "learning_rate": 2.941559506389513e-07, |
| "loss": 0.322, |
| "step": 2256 |
| }, |
| { |
| "epoch": 2.7068, |
| "grad_norm": 0.45835153688747926, |
| "learning_rate": 2.9180233872701247e-07, |
| "loss": 0.2856, |
| "step": 2257 |
| }, |
| { |
| "epoch": 2.708, |
| "grad_norm": 0.7022876521237986, |
| "learning_rate": 2.894578975337703e-07, |
| "loss": 0.3423, |
| "step": 2258 |
| }, |
| { |
| "epoch": 2.7092, |
| "grad_norm": 0.48858267358117635, |
| "learning_rate": 2.8712263162577636e-07, |
| "loss": 0.3113, |
| "step": 2259 |
| }, |
| { |
| "epoch": 2.7104, |
| "grad_norm": 0.5026542424606422, |
| "learning_rate": 2.8479654555170546e-07, |
| "loss": 0.3298, |
| "step": 2260 |
| }, |
| { |
| "epoch": 2.7116, |
| "grad_norm": 0.4570485297158199, |
| "learning_rate": 2.8247964384235214e-07, |
| "loss": 0.2936, |
| "step": 2261 |
| }, |
| { |
| "epoch": 2.7128, |
| "grad_norm": 0.5256313228851077, |
| "learning_rate": 2.8017193101062377e-07, |
| "loss": 0.2875, |
| "step": 2262 |
| }, |
| { |
| "epoch": 2.714, |
| "grad_norm": 0.5508455978649375, |
| "learning_rate": 2.778734115515269e-07, |
| "loss": 0.3706, |
| "step": 2263 |
| }, |
| { |
| "epoch": 2.7152, |
| "grad_norm": 0.6383900095161533, |
| "learning_rate": 2.755840899421636e-07, |
| "loss": 0.32, |
| "step": 2264 |
| }, |
| { |
| "epoch": 2.7164, |
| "grad_norm": 0.5310429090489197, |
| "learning_rate": 2.7330397064171787e-07, |
| "loss": 0.3448, |
| "step": 2265 |
| }, |
| { |
| "epoch": 2.7176, |
| "grad_norm": 0.5272757528343431, |
| "learning_rate": 2.7103305809145106e-07, |
| "loss": 0.321, |
| "step": 2266 |
| }, |
| { |
| "epoch": 2.7188, |
| "grad_norm": 0.48216498913473155, |
| "learning_rate": 2.687713567146899e-07, |
| "loss": 0.3286, |
| "step": 2267 |
| }, |
| { |
| "epoch": 2.7199999999999998, |
| "grad_norm": 0.5025142667561859, |
| "learning_rate": 2.665188709168215e-07, |
| "loss": 0.303, |
| "step": 2268 |
| }, |
| { |
| "epoch": 2.7212, |
| "grad_norm": 0.47857628655183854, |
| "learning_rate": 2.642756050852796e-07, |
| "loss": 0.2973, |
| "step": 2269 |
| }, |
| { |
| "epoch": 2.7224, |
| "grad_norm": 0.6016413542553345, |
| "learning_rate": 2.620415635895429e-07, |
| "loss": 0.3043, |
| "step": 2270 |
| }, |
| { |
| "epoch": 2.7236000000000002, |
| "grad_norm": 0.5393265361428493, |
| "learning_rate": 2.5981675078111835e-07, |
| "loss": 0.2915, |
| "step": 2271 |
| }, |
| { |
| "epoch": 2.7248, |
| "grad_norm": 0.41790215327308255, |
| "learning_rate": 2.5760117099354163e-07, |
| "loss": 0.3091, |
| "step": 2272 |
| }, |
| { |
| "epoch": 2.726, |
| "grad_norm": 0.4533408612597167, |
| "learning_rate": 2.5539482854236076e-07, |
| "loss": 0.3261, |
| "step": 2273 |
| }, |
| { |
| "epoch": 2.7272, |
| "grad_norm": 0.5594124000073417, |
| "learning_rate": 2.531977277251324e-07, |
| "loss": 0.3247, |
| "step": 2274 |
| }, |
| { |
| "epoch": 2.7284, |
| "grad_norm": 0.4721945408146146, |
| "learning_rate": 2.510098728214133e-07, |
| "loss": 0.3185, |
| "step": 2275 |
| }, |
| { |
| "epoch": 2.7296, |
| "grad_norm": 0.4774126492232413, |
| "learning_rate": 2.488312680927485e-07, |
| "loss": 0.3366, |
| "step": 2276 |
| }, |
| { |
| "epoch": 2.7308, |
| "grad_norm": 0.43569948109138307, |
| "learning_rate": 2.466619177826668e-07, |
| "loss": 0.3292, |
| "step": 2277 |
| }, |
| { |
| "epoch": 2.732, |
| "grad_norm": 0.4611430979498518, |
| "learning_rate": 2.4450182611667096e-07, |
| "loss": 0.2826, |
| "step": 2278 |
| }, |
| { |
| "epoch": 2.7332, |
| "grad_norm": 0.5292796289010596, |
| "learning_rate": 2.423509973022292e-07, |
| "loss": 0.3274, |
| "step": 2279 |
| }, |
| { |
| "epoch": 2.7344, |
| "grad_norm": 0.4888164532692469, |
| "learning_rate": 2.4020943552876706e-07, |
| "loss": 0.3298, |
| "step": 2280 |
| }, |
| { |
| "epoch": 2.7356, |
| "grad_norm": 0.5713745001080968, |
| "learning_rate": 2.3807714496766165e-07, |
| "loss": 0.3049, |
| "step": 2281 |
| }, |
| { |
| "epoch": 2.7368, |
| "grad_norm": 0.478309599150092, |
| "learning_rate": 2.3595412977222897e-07, |
| "loss": 0.3028, |
| "step": 2282 |
| }, |
| { |
| "epoch": 2.738, |
| "grad_norm": 0.5217959378930856, |
| "learning_rate": 2.3384039407771896e-07, |
| "loss": 0.2874, |
| "step": 2283 |
| }, |
| { |
| "epoch": 2.7392, |
| "grad_norm": 0.44818891342230366, |
| "learning_rate": 2.317359420013071e-07, |
| "loss": 0.2947, |
| "step": 2284 |
| }, |
| { |
| "epoch": 2.7404, |
| "grad_norm": 0.617465212236017, |
| "learning_rate": 2.2964077764208615e-07, |
| "loss": 0.3485, |
| "step": 2285 |
| }, |
| { |
| "epoch": 2.7416, |
| "grad_norm": 0.5941416952735622, |
| "learning_rate": 2.2755490508105716e-07, |
| "loss": 0.322, |
| "step": 2286 |
| }, |
| { |
| "epoch": 2.7428, |
| "grad_norm": 0.42877695545329736, |
| "learning_rate": 2.254783283811246e-07, |
| "loss": 0.3339, |
| "step": 2287 |
| }, |
| { |
| "epoch": 2.7439999999999998, |
| "grad_norm": 0.5975641375977941, |
| "learning_rate": 2.2341105158708408e-07, |
| "loss": 0.3219, |
| "step": 2288 |
| }, |
| { |
| "epoch": 2.7452, |
| "grad_norm": 0.5071505659295147, |
| "learning_rate": 2.2135307872561628e-07, |
| "loss": 0.3069, |
| "step": 2289 |
| }, |
| { |
| "epoch": 2.7464, |
| "grad_norm": 0.47164314091927906, |
| "learning_rate": 2.1930441380528243e-07, |
| "loss": 0.33, |
| "step": 2290 |
| }, |
| { |
| "epoch": 2.7476000000000003, |
| "grad_norm": 0.5271951285578713, |
| "learning_rate": 2.172650608165111e-07, |
| "loss": 0.3162, |
| "step": 2291 |
| }, |
| { |
| "epoch": 2.7488, |
| "grad_norm": 0.4448132879912682, |
| "learning_rate": 2.1523502373159367e-07, |
| "loss": 0.3224, |
| "step": 2292 |
| }, |
| { |
| "epoch": 2.75, |
| "grad_norm": 0.46226885551679747, |
| "learning_rate": 2.1321430650467546e-07, |
| "loss": 0.2991, |
| "step": 2293 |
| }, |
| { |
| "epoch": 2.7512, |
| "grad_norm": 0.5606191502800733, |
| "learning_rate": 2.112029130717491e-07, |
| "loss": 0.3334, |
| "step": 2294 |
| }, |
| { |
| "epoch": 2.7523999999999997, |
| "grad_norm": 0.5385580493671908, |
| "learning_rate": 2.092008473506446e-07, |
| "loss": 0.3071, |
| "step": 2295 |
| }, |
| { |
| "epoch": 2.7536, |
| "grad_norm": 0.45254035994089326, |
| "learning_rate": 2.072081132410253e-07, |
| "loss": 0.3224, |
| "step": 2296 |
| }, |
| { |
| "epoch": 2.7548, |
| "grad_norm": 0.49436523245287795, |
| "learning_rate": 2.0522471462437798e-07, |
| "loss": 0.3204, |
| "step": 2297 |
| }, |
| { |
| "epoch": 2.7560000000000002, |
| "grad_norm": 0.5125106319731159, |
| "learning_rate": 2.0325065536400456e-07, |
| "loss": 0.3419, |
| "step": 2298 |
| }, |
| { |
| "epoch": 2.7572, |
| "grad_norm": 0.48631037797606286, |
| "learning_rate": 2.0128593930501427e-07, |
| "loss": 0.2907, |
| "step": 2299 |
| }, |
| { |
| "epoch": 2.7584, |
| "grad_norm": 0.5155305501751236, |
| "learning_rate": 1.9933057027432147e-07, |
| "loss": 0.3314, |
| "step": 2300 |
| }, |
| { |
| "epoch": 2.7596, |
| "grad_norm": 0.5173988364156298, |
| "learning_rate": 1.9738455208063055e-07, |
| "loss": 0.3238, |
| "step": 2301 |
| }, |
| { |
| "epoch": 2.7608, |
| "grad_norm": 0.4803287920371691, |
| "learning_rate": 1.9544788851443342e-07, |
| "loss": 0.3081, |
| "step": 2302 |
| }, |
| { |
| "epoch": 2.762, |
| "grad_norm": 0.41561006841126175, |
| "learning_rate": 1.9352058334800195e-07, |
| "loss": 0.3041, |
| "step": 2303 |
| }, |
| { |
| "epoch": 2.7632, |
| "grad_norm": 0.43105371379486856, |
| "learning_rate": 1.9160264033537824e-07, |
| "loss": 0.304, |
| "step": 2304 |
| }, |
| { |
| "epoch": 2.7644, |
| "grad_norm": 0.6000309915607545, |
| "learning_rate": 1.8969406321236727e-07, |
| "loss": 0.3206, |
| "step": 2305 |
| }, |
| { |
| "epoch": 2.7656, |
| "grad_norm": 0.5067718716132091, |
| "learning_rate": 1.8779485569653422e-07, |
| "loss": 0.3411, |
| "step": 2306 |
| }, |
| { |
| "epoch": 2.7668, |
| "grad_norm": 0.5875424125802807, |
| "learning_rate": 1.859050214871927e-07, |
| "loss": 0.3384, |
| "step": 2307 |
| }, |
| { |
| "epoch": 2.768, |
| "grad_norm": 0.4612732891476999, |
| "learning_rate": 1.8402456426539706e-07, |
| "loss": 0.2919, |
| "step": 2308 |
| }, |
| { |
| "epoch": 2.7692, |
| "grad_norm": 0.5583069625711948, |
| "learning_rate": 1.8215348769393904e-07, |
| "loss": 0.3176, |
| "step": 2309 |
| }, |
| { |
| "epoch": 2.7704, |
| "grad_norm": 0.5530704471556647, |
| "learning_rate": 1.8029179541733833e-07, |
| "loss": 0.3048, |
| "step": 2310 |
| }, |
| { |
| "epoch": 2.7716, |
| "grad_norm": 0.5222524590044774, |
| "learning_rate": 1.7843949106183368e-07, |
| "loss": 0.34, |
| "step": 2311 |
| }, |
| { |
| "epoch": 2.7728, |
| "grad_norm": 0.44633840212479664, |
| "learning_rate": 1.7659657823538067e-07, |
| "loss": 0.3181, |
| "step": 2312 |
| }, |
| { |
| "epoch": 2.774, |
| "grad_norm": 0.4671319697242977, |
| "learning_rate": 1.747630605276407e-07, |
| "loss": 0.3485, |
| "step": 2313 |
| }, |
| { |
| "epoch": 2.7752, |
| "grad_norm": 0.4397303650706271, |
| "learning_rate": 1.7293894150997414e-07, |
| "loss": 0.3032, |
| "step": 2314 |
| }, |
| { |
| "epoch": 2.7763999999999998, |
| "grad_norm": 0.5338150922235967, |
| "learning_rate": 1.711242247354372e-07, |
| "loss": 0.2934, |
| "step": 2315 |
| }, |
| { |
| "epoch": 2.7776, |
| "grad_norm": 0.4409628237565337, |
| "learning_rate": 1.6931891373876852e-07, |
| "loss": 0.3235, |
| "step": 2316 |
| }, |
| { |
| "epoch": 2.7788, |
| "grad_norm": 0.5834993749132399, |
| "learning_rate": 1.6752301203638854e-07, |
| "loss": 0.3328, |
| "step": 2317 |
| }, |
| { |
| "epoch": 2.7800000000000002, |
| "grad_norm": 0.5345933957862313, |
| "learning_rate": 1.657365231263891e-07, |
| "loss": 0.3268, |
| "step": 2318 |
| }, |
| { |
| "epoch": 2.7812, |
| "grad_norm": 0.4421569088458628, |
| "learning_rate": 1.6395945048852947e-07, |
| "loss": 0.3335, |
| "step": 2319 |
| }, |
| { |
| "epoch": 2.7824, |
| "grad_norm": 0.5036343755740437, |
| "learning_rate": 1.6219179758422465e-07, |
| "loss": 0.2935, |
| "step": 2320 |
| }, |
| { |
| "epoch": 2.7836, |
| "grad_norm": 0.4498029958875161, |
| "learning_rate": 1.6043356785654273e-07, |
| "loss": 0.3304, |
| "step": 2321 |
| }, |
| { |
| "epoch": 2.7848, |
| "grad_norm": 0.44670818158992454, |
| "learning_rate": 1.5868476473019922e-07, |
| "loss": 0.3237, |
| "step": 2322 |
| }, |
| { |
| "epoch": 2.786, |
| "grad_norm": 0.5601966592047272, |
| "learning_rate": 1.5694539161154598e-07, |
| "loss": 0.334, |
| "step": 2323 |
| }, |
| { |
| "epoch": 2.7872, |
| "grad_norm": 0.6091250972218841, |
| "learning_rate": 1.5521545188856734e-07, |
| "loss": 0.357, |
| "step": 2324 |
| }, |
| { |
| "epoch": 2.7884, |
| "grad_norm": 0.5192268009841875, |
| "learning_rate": 1.5349494893087514e-07, |
| "loss": 0.3098, |
| "step": 2325 |
| }, |
| { |
| "epoch": 2.7896, |
| "grad_norm": 0.446567573109484, |
| "learning_rate": 1.517838860896964e-07, |
| "loss": 0.3099, |
| "step": 2326 |
| }, |
| { |
| "epoch": 2.7908, |
| "grad_norm": 0.5095120428916953, |
| "learning_rate": 1.500822666978735e-07, |
| "loss": 0.3343, |
| "step": 2327 |
| }, |
| { |
| "epoch": 2.792, |
| "grad_norm": 0.48823817262313274, |
| "learning_rate": 1.4839009406985295e-07, |
| "loss": 0.3166, |
| "step": 2328 |
| }, |
| { |
| "epoch": 2.7932, |
| "grad_norm": 0.49617621664603856, |
| "learning_rate": 1.4670737150168257e-07, |
| "loss": 0.3257, |
| "step": 2329 |
| }, |
| { |
| "epoch": 2.7944, |
| "grad_norm": 0.6698791671979474, |
| "learning_rate": 1.4503410227100057e-07, |
| "loss": 0.3605, |
| "step": 2330 |
| }, |
| { |
| "epoch": 2.7956, |
| "grad_norm": 0.5626220748746263, |
| "learning_rate": 1.433702896370348e-07, |
| "loss": 0.3195, |
| "step": 2331 |
| }, |
| { |
| "epoch": 2.7968, |
| "grad_norm": 0.4228810671781407, |
| "learning_rate": 1.417159368405907e-07, |
| "loss": 0.3549, |
| "step": 2332 |
| }, |
| { |
| "epoch": 2.798, |
| "grad_norm": 0.5091371276500047, |
| "learning_rate": 1.4007104710404838e-07, |
| "loss": 0.3254, |
| "step": 2333 |
| }, |
| { |
| "epoch": 2.7992, |
| "grad_norm": 0.451129778887307, |
| "learning_rate": 1.384356236313572e-07, |
| "loss": 0.3034, |
| "step": 2334 |
| }, |
| { |
| "epoch": 2.8004, |
| "grad_norm": 0.5403192642000542, |
| "learning_rate": 1.3680966960802623e-07, |
| "loss": 0.3344, |
| "step": 2335 |
| }, |
| { |
| "epoch": 2.8016, |
| "grad_norm": 0.4428088174158261, |
| "learning_rate": 1.3519318820111983e-07, |
| "loss": 0.3385, |
| "step": 2336 |
| }, |
| { |
| "epoch": 2.8028, |
| "grad_norm": 0.464586004978293, |
| "learning_rate": 1.3358618255925214e-07, |
| "loss": 0.3036, |
| "step": 2337 |
| }, |
| { |
| "epoch": 2.8040000000000003, |
| "grad_norm": 0.43094893731579953, |
| "learning_rate": 1.3198865581258046e-07, |
| "loss": 0.3176, |
| "step": 2338 |
| }, |
| { |
| "epoch": 2.8052, |
| "grad_norm": 0.4410368975259025, |
| "learning_rate": 1.3040061107279679e-07, |
| "loss": 0.3685, |
| "step": 2339 |
| }, |
| { |
| "epoch": 2.8064, |
| "grad_norm": 0.5689204693655754, |
| "learning_rate": 1.2882205143312676e-07, |
| "loss": 0.309, |
| "step": 2340 |
| }, |
| { |
| "epoch": 2.8076, |
| "grad_norm": 0.5638340896622204, |
| "learning_rate": 1.272529799683192e-07, |
| "loss": 0.3798, |
| "step": 2341 |
| }, |
| { |
| "epoch": 2.8087999999999997, |
| "grad_norm": 0.5782087446727083, |
| "learning_rate": 1.2569339973464155e-07, |
| "loss": 0.3366, |
| "step": 2342 |
| }, |
| { |
| "epoch": 2.81, |
| "grad_norm": 0.499291269992539, |
| "learning_rate": 1.2414331376987555e-07, |
| "loss": 0.3406, |
| "step": 2343 |
| }, |
| { |
| "epoch": 2.8112, |
| "grad_norm": 0.4415314530087141, |
| "learning_rate": 1.2260272509330707e-07, |
| "loss": 0.3048, |
| "step": 2344 |
| }, |
| { |
| "epoch": 2.8124000000000002, |
| "grad_norm": 0.4904567272284453, |
| "learning_rate": 1.2107163670572574e-07, |
| "loss": 0.3074, |
| "step": 2345 |
| }, |
| { |
| "epoch": 2.8136, |
| "grad_norm": 0.4415238455362824, |
| "learning_rate": 1.195500515894149e-07, |
| "loss": 0.3227, |
| "step": 2346 |
| }, |
| { |
| "epoch": 2.8148, |
| "grad_norm": 0.5376510127249622, |
| "learning_rate": 1.1803797270814765e-07, |
| "loss": 0.3177, |
| "step": 2347 |
| }, |
| { |
| "epoch": 2.816, |
| "grad_norm": 0.4527948890890255, |
| "learning_rate": 1.165354030071808e-07, |
| "loss": 0.3297, |
| "step": 2348 |
| }, |
| { |
| "epoch": 2.8172, |
| "grad_norm": 0.4646289088914466, |
| "learning_rate": 1.1504234541324765e-07, |
| "loss": 0.3037, |
| "step": 2349 |
| }, |
| { |
| "epoch": 2.8184, |
| "grad_norm": 0.5920759085126542, |
| "learning_rate": 1.1355880283455523e-07, |
| "loss": 0.3043, |
| "step": 2350 |
| }, |
| { |
| "epoch": 2.8196, |
| "grad_norm": 0.4049496950901706, |
| "learning_rate": 1.1208477816077756e-07, |
| "loss": 0.3159, |
| "step": 2351 |
| }, |
| { |
| "epoch": 2.8208, |
| "grad_norm": 0.48697259926343966, |
| "learning_rate": 1.1062027426304744e-07, |
| "loss": 0.3032, |
| "step": 2352 |
| }, |
| { |
| "epoch": 2.822, |
| "grad_norm": 0.4821516548964115, |
| "learning_rate": 1.091652939939547e-07, |
| "loss": 0.3165, |
| "step": 2353 |
| }, |
| { |
| "epoch": 2.8232, |
| "grad_norm": 0.5198051132803483, |
| "learning_rate": 1.0771984018753733e-07, |
| "loss": 0.3152, |
| "step": 2354 |
| }, |
| { |
| "epoch": 2.8244, |
| "grad_norm": 0.4585361524417428, |
| "learning_rate": 1.0628391565927765e-07, |
| "loss": 0.3115, |
| "step": 2355 |
| }, |
| { |
| "epoch": 2.8256, |
| "grad_norm": 0.4859294764163727, |
| "learning_rate": 1.0485752320609944e-07, |
| "loss": 0.3138, |
| "step": 2356 |
| }, |
| { |
| "epoch": 2.8268, |
| "grad_norm": 0.4775270643408919, |
| "learning_rate": 1.0344066560635635e-07, |
| "loss": 0.3279, |
| "step": 2357 |
| }, |
| { |
| "epoch": 2.828, |
| "grad_norm": 0.4647411286051379, |
| "learning_rate": 1.0203334561983025e-07, |
| "loss": 0.3003, |
| "step": 2358 |
| }, |
| { |
| "epoch": 2.8292, |
| "grad_norm": 0.4844392351864473, |
| "learning_rate": 1.0063556598772839e-07, |
| "loss": 0.3155, |
| "step": 2359 |
| }, |
| { |
| "epoch": 2.8304, |
| "grad_norm": 0.605604539251082, |
| "learning_rate": 9.924732943267068e-08, |
| "loss": 0.3248, |
| "step": 2360 |
| }, |
| { |
| "epoch": 2.8316, |
| "grad_norm": 0.5032824623072444, |
| "learning_rate": 9.7868638658693e-08, |
| "loss": 0.3094, |
| "step": 2361 |
| }, |
| { |
| "epoch": 2.8327999999999998, |
| "grad_norm": 0.499371598929128, |
| "learning_rate": 9.6499496351235e-08, |
| "loss": 0.286, |
| "step": 2362 |
| }, |
| { |
| "epoch": 2.834, |
| "grad_norm": 0.5373737210650458, |
| "learning_rate": 9.513990517713955e-08, |
| "loss": 0.3104, |
| "step": 2363 |
| }, |
| { |
| "epoch": 2.8352, |
| "grad_norm": 0.4510168069438221, |
| "learning_rate": 9.378986778464327e-08, |
| "loss": 0.3522, |
| "step": 2364 |
| }, |
| { |
| "epoch": 2.8364000000000003, |
| "grad_norm": 0.4934286179082433, |
| "learning_rate": 9.244938680337656e-08, |
| "loss": 0.2772, |
| "step": 2365 |
| }, |
| { |
| "epoch": 2.8376, |
| "grad_norm": 0.45055787521651935, |
| "learning_rate": 9.111846484435361e-08, |
| "loss": 0.3121, |
| "step": 2366 |
| }, |
| { |
| "epoch": 2.8388, |
| "grad_norm": 0.48992248863843435, |
| "learning_rate": 8.979710449997014e-08, |
| "loss": 0.3474, |
| "step": 2367 |
| }, |
| { |
| "epoch": 2.84, |
| "grad_norm": 0.5360710799868117, |
| "learning_rate": 8.848530834399683e-08, |
| "loss": 0.2979, |
| "step": 2368 |
| }, |
| { |
| "epoch": 2.8411999999999997, |
| "grad_norm": 0.5385843863790583, |
| "learning_rate": 8.718307893157696e-08, |
| "loss": 0.3061, |
| "step": 2369 |
| }, |
| { |
| "epoch": 2.8424, |
| "grad_norm": 0.4817960470762318, |
| "learning_rate": 8.589041879921711e-08, |
| "loss": 0.329, |
| "step": 2370 |
| }, |
| { |
| "epoch": 2.8436, |
| "grad_norm": 0.5981766823358456, |
| "learning_rate": 8.460733046478653e-08, |
| "loss": 0.3174, |
| "step": 2371 |
| }, |
| { |
| "epoch": 2.8448, |
| "grad_norm": 0.4538696419025796, |
| "learning_rate": 8.333381642750882e-08, |
| "loss": 0.3161, |
| "step": 2372 |
| }, |
| { |
| "epoch": 2.846, |
| "grad_norm": 0.505003014137601, |
| "learning_rate": 8.206987916796027e-08, |
| "loss": 0.3087, |
| "step": 2373 |
| }, |
| { |
| "epoch": 2.8472, |
| "grad_norm": 0.42719544721616665, |
| "learning_rate": 8.081552114806101e-08, |
| "loss": 0.323, |
| "step": 2374 |
| }, |
| { |
| "epoch": 2.8484, |
| "grad_norm": 0.4898473682925847, |
| "learning_rate": 7.957074481107551e-08, |
| "loss": 0.3049, |
| "step": 2375 |
| }, |
| { |
| "epoch": 2.8496, |
| "grad_norm": 0.43235343770639867, |
| "learning_rate": 7.833555258160208e-08, |
| "loss": 0.2999, |
| "step": 2376 |
| }, |
| { |
| "epoch": 2.8508, |
| "grad_norm": 0.510727061012366, |
| "learning_rate": 7.710994686557172e-08, |
| "loss": 0.2989, |
| "step": 2377 |
| }, |
| { |
| "epoch": 2.852, |
| "grad_norm": 0.5846787914082995, |
| "learning_rate": 7.589393005024482e-08, |
| "loss": 0.3349, |
| "step": 2378 |
| }, |
| { |
| "epoch": 2.8532, |
| "grad_norm": 0.49638533272937807, |
| "learning_rate": 7.468750450420114e-08, |
| "loss": 0.3358, |
| "step": 2379 |
| }, |
| { |
| "epoch": 2.8544, |
| "grad_norm": 0.501970908690895, |
| "learning_rate": 7.349067257733989e-08, |
| "loss": 0.2965, |
| "step": 2380 |
| }, |
| { |
| "epoch": 2.8556, |
| "grad_norm": 0.4181659544610093, |
| "learning_rate": 7.230343660087402e-08, |
| "loss": 0.2911, |
| "step": 2381 |
| }, |
| { |
| "epoch": 2.8568, |
| "grad_norm": 0.4714363547285763, |
| "learning_rate": 7.11257988873243e-08, |
| "loss": 0.3049, |
| "step": 2382 |
| }, |
| { |
| "epoch": 2.858, |
| "grad_norm": 0.4457501963987575, |
| "learning_rate": 6.995776173051583e-08, |
| "loss": 0.3196, |
| "step": 2383 |
| }, |
| { |
| "epoch": 2.8592, |
| "grad_norm": 0.6019596004834906, |
| "learning_rate": 6.879932740557538e-08, |
| "loss": 0.2975, |
| "step": 2384 |
| }, |
| { |
| "epoch": 2.8604000000000003, |
| "grad_norm": 0.54148366874017, |
| "learning_rate": 6.7650498168923e-08, |
| "loss": 0.3173, |
| "step": 2385 |
| }, |
| { |
| "epoch": 2.8616, |
| "grad_norm": 0.4357959192650805, |
| "learning_rate": 6.651127625827037e-08, |
| "loss": 0.3565, |
| "step": 2386 |
| }, |
| { |
| "epoch": 2.8628, |
| "grad_norm": 0.5406915990260817, |
| "learning_rate": 6.538166389261635e-08, |
| "loss": 0.3188, |
| "step": 2387 |
| }, |
| { |
| "epoch": 2.864, |
| "grad_norm": 0.43937973467304026, |
| "learning_rate": 6.426166327224148e-08, |
| "loss": 0.3133, |
| "step": 2388 |
| }, |
| { |
| "epoch": 2.8651999999999997, |
| "grad_norm": 0.43446611545236996, |
| "learning_rate": 6.315127657870513e-08, |
| "loss": 0.3116, |
| "step": 2389 |
| }, |
| { |
| "epoch": 2.8664, |
| "grad_norm": 0.5007274102404864, |
| "learning_rate": 6.205050597483997e-08, |
| "loss": 0.3516, |
| "step": 2390 |
| }, |
| { |
| "epoch": 2.8676, |
| "grad_norm": 0.5235249201937633, |
| "learning_rate": 6.095935360474814e-08, |
| "loss": 0.3195, |
| "step": 2391 |
| }, |
| { |
| "epoch": 2.8688000000000002, |
| "grad_norm": 0.4295429354477048, |
| "learning_rate": 5.98778215937973e-08, |
| "loss": 0.3198, |
| "step": 2392 |
| }, |
| { |
| "epoch": 2.87, |
| "grad_norm": 0.43532397576400567, |
| "learning_rate": 5.880591204861674e-08, |
| "loss": 0.3065, |
| "step": 2393 |
| }, |
| { |
| "epoch": 2.8712, |
| "grad_norm": 0.45294078857264364, |
| "learning_rate": 5.7743627057092463e-08, |
| "loss": 0.3134, |
| "step": 2394 |
| }, |
| { |
| "epoch": 2.8724, |
| "grad_norm": 0.5769728088585871, |
| "learning_rate": 5.669096868836377e-08, |
| "loss": 0.293, |
| "step": 2395 |
| }, |
| { |
| "epoch": 2.8736, |
| "grad_norm": 0.5111669242404633, |
| "learning_rate": 5.564793899281884e-08, |
| "loss": 0.322, |
| "step": 2396 |
| }, |
| { |
| "epoch": 2.8748, |
| "grad_norm": 0.48760200574794743, |
| "learning_rate": 5.461454000209199e-08, |
| "loss": 0.2901, |
| "step": 2397 |
| }, |
| { |
| "epoch": 2.876, |
| "grad_norm": 0.6298759028578274, |
| "learning_rate": 5.3590773729056965e-08, |
| "loss": 0.2865, |
| "step": 2398 |
| }, |
| { |
| "epoch": 2.8772, |
| "grad_norm": 0.4076819774768756, |
| "learning_rate": 5.257664216782532e-08, |
| "loss": 0.3175, |
| "step": 2399 |
| }, |
| { |
| "epoch": 2.8784, |
| "grad_norm": 0.5230996839581467, |
| "learning_rate": 5.157214729374305e-08, |
| "loss": 0.3008, |
| "step": 2400 |
| }, |
| { |
| "epoch": 2.8796, |
| "grad_norm": 0.4980150312966219, |
| "learning_rate": 5.057729106338505e-08, |
| "loss": 0.3097, |
| "step": 2401 |
| }, |
| { |
| "epoch": 2.8808, |
| "grad_norm": 0.6269231740041662, |
| "learning_rate": 4.959207541455013e-08, |
| "loss": 0.301, |
| "step": 2402 |
| }, |
| { |
| "epoch": 2.882, |
| "grad_norm": 0.5509563063387615, |
| "learning_rate": 4.8616502266261026e-08, |
| "loss": 0.3222, |
| "step": 2403 |
| }, |
| { |
| "epoch": 2.8832, |
| "grad_norm": 0.4777034613305951, |
| "learning_rate": 4.765057351875879e-08, |
| "loss": 0.3455, |
| "step": 2404 |
| }, |
| { |
| "epoch": 2.8844, |
| "grad_norm": 0.6229122328651276, |
| "learning_rate": 4.6694291053496766e-08, |
| "loss": 0.3427, |
| "step": 2405 |
| }, |
| { |
| "epoch": 2.8856, |
| "grad_norm": 0.4363010937071919, |
| "learning_rate": 4.5747656733142184e-08, |
| "loss": 0.3079, |
| "step": 2406 |
| }, |
| { |
| "epoch": 2.8868, |
| "grad_norm": 0.5164317618387799, |
| "learning_rate": 4.481067240156678e-08, |
| "loss": 0.3577, |
| "step": 2407 |
| }, |
| { |
| "epoch": 2.888, |
| "grad_norm": 0.5310453003938295, |
| "learning_rate": 4.3883339883846186e-08, |
| "loss": 0.3092, |
| "step": 2408 |
| }, |
| { |
| "epoch": 2.8891999999999998, |
| "grad_norm": 0.47591299716717905, |
| "learning_rate": 4.296566098625776e-08, |
| "loss": 0.289, |
| "step": 2409 |
| }, |
| { |
| "epoch": 2.8904, |
| "grad_norm": 0.4057762248455579, |
| "learning_rate": 4.2057637496273896e-08, |
| "loss": 0.3078, |
| "step": 2410 |
| }, |
| { |
| "epoch": 2.8916, |
| "grad_norm": 0.5170309933348114, |
| "learning_rate": 4.115927118256036e-08, |
| "loss": 0.3055, |
| "step": 2411 |
| }, |
| { |
| "epoch": 2.8928000000000003, |
| "grad_norm": 0.5785863146042344, |
| "learning_rate": 4.02705637949724e-08, |
| "loss": 0.2945, |
| "step": 2412 |
| }, |
| { |
| "epoch": 2.894, |
| "grad_norm": 0.5280703827425812, |
| "learning_rate": 3.939151706455146e-08, |
| "loss": 0.3231, |
| "step": 2413 |
| }, |
| { |
| "epoch": 2.8952, |
| "grad_norm": 0.48114304647455475, |
| "learning_rate": 3.8522132703521784e-08, |
| "loss": 0.3291, |
| "step": 2414 |
| }, |
| { |
| "epoch": 2.8964, |
| "grad_norm": 0.6179466081930308, |
| "learning_rate": 3.7662412405286567e-08, |
| "loss": 0.2838, |
| "step": 2415 |
| }, |
| { |
| "epoch": 2.8975999999999997, |
| "grad_norm": 0.553574524175213, |
| "learning_rate": 3.6812357844427385e-08, |
| "loss": 0.3424, |
| "step": 2416 |
| }, |
| { |
| "epoch": 2.8988, |
| "grad_norm": 0.476372373712313, |
| "learning_rate": 3.597197067669533e-08, |
| "loss": 0.2967, |
| "step": 2417 |
| }, |
| { |
| "epoch": 2.9, |
| "grad_norm": 0.4589191775819069, |
| "learning_rate": 3.514125253901324e-08, |
| "loss": 0.289, |
| "step": 2418 |
| }, |
| { |
| "epoch": 2.9012000000000002, |
| "grad_norm": 0.5025480183641806, |
| "learning_rate": 3.432020504947064e-08, |
| "loss": 0.3634, |
| "step": 2419 |
| }, |
| { |
| "epoch": 2.9024, |
| "grad_norm": 0.5378007119550171, |
| "learning_rate": 3.350882980731884e-08, |
| "loss": 0.3292, |
| "step": 2420 |
| }, |
| { |
| "epoch": 2.9036, |
| "grad_norm": 0.5132483847780795, |
| "learning_rate": 3.27071283929703e-08, |
| "loss": 0.2929, |
| "step": 2421 |
| }, |
| { |
| "epoch": 2.9048, |
| "grad_norm": 0.5017330891104314, |
| "learning_rate": 3.191510236799589e-08, |
| "loss": 0.3086, |
| "step": 2422 |
| }, |
| { |
| "epoch": 2.906, |
| "grad_norm": 0.6054142245943596, |
| "learning_rate": 3.113275327511767e-08, |
| "loss": 0.301, |
| "step": 2423 |
| }, |
| { |
| "epoch": 2.9072, |
| "grad_norm": 0.5009794571004275, |
| "learning_rate": 3.0360082638211666e-08, |
| "loss": 0.3447, |
| "step": 2424 |
| }, |
| { |
| "epoch": 2.9084, |
| "grad_norm": 0.4911659041607946, |
| "learning_rate": 2.959709196229954e-08, |
| "loss": 0.2892, |
| "step": 2425 |
| }, |
| { |
| "epoch": 2.9096, |
| "grad_norm": 0.4597986261621221, |
| "learning_rate": 2.8843782733549706e-08, |
| "loss": 0.3198, |
| "step": 2426 |
| }, |
| { |
| "epoch": 2.9108, |
| "grad_norm": 0.5628532674537106, |
| "learning_rate": 2.8100156419272885e-08, |
| "loss": 0.2947, |
| "step": 2427 |
| }, |
| { |
| "epoch": 2.912, |
| "grad_norm": 0.4282700586751546, |
| "learning_rate": 2.7366214467919318e-08, |
| "loss": 0.3177, |
| "step": 2428 |
| }, |
| { |
| "epoch": 2.9132, |
| "grad_norm": 0.4618771875772092, |
| "learning_rate": 2.664195830907379e-08, |
| "loss": 0.319, |
| "step": 2429 |
| }, |
| { |
| "epoch": 2.9144, |
| "grad_norm": 0.5395700391467462, |
| "learning_rate": 2.5927389353457842e-08, |
| "loss": 0.3321, |
| "step": 2430 |
| }, |
| { |
| "epoch": 2.9156, |
| "grad_norm": 0.463777655214732, |
| "learning_rate": 2.5222508992922e-08, |
| "loss": 0.3089, |
| "step": 2431 |
| }, |
| { |
| "epoch": 2.9168, |
| "grad_norm": 0.5016833316461257, |
| "learning_rate": 2.4527318600446324e-08, |
| "loss": 0.3221, |
| "step": 2432 |
| }, |
| { |
| "epoch": 2.918, |
| "grad_norm": 0.658339893908631, |
| "learning_rate": 2.3841819530135424e-08, |
| "loss": 0.2819, |
| "step": 2433 |
| }, |
| { |
| "epoch": 2.9192, |
| "grad_norm": 0.5588249527983352, |
| "learning_rate": 2.3166013117218998e-08, |
| "loss": 0.3116, |
| "step": 2434 |
| }, |
| { |
| "epoch": 2.9204, |
| "grad_norm": 0.5610785433460539, |
| "learning_rate": 2.249990067804464e-08, |
| "loss": 0.3453, |
| "step": 2435 |
| }, |
| { |
| "epoch": 2.9215999999999998, |
| "grad_norm": 0.5814747100855794, |
| "learning_rate": 2.1843483510080032e-08, |
| "loss": 0.3171, |
| "step": 2436 |
| }, |
| { |
| "epoch": 2.9228, |
| "grad_norm": 0.44736991991141273, |
| "learning_rate": 2.119676289190631e-08, |
| "loss": 0.3127, |
| "step": 2437 |
| }, |
| { |
| "epoch": 2.924, |
| "grad_norm": 0.43175452101413864, |
| "learning_rate": 2.0559740083219147e-08, |
| "loss": 0.2804, |
| "step": 2438 |
| }, |
| { |
| "epoch": 2.9252000000000002, |
| "grad_norm": 0.4487740244420664, |
| "learning_rate": 1.9932416324823235e-08, |
| "loss": 0.3243, |
| "step": 2439 |
| }, |
| { |
| "epoch": 2.9264, |
| "grad_norm": 0.5800148752493892, |
| "learning_rate": 1.93147928386328e-08, |
| "loss": 0.3212, |
| "step": 2440 |
| }, |
| { |
| "epoch": 2.9276, |
| "grad_norm": 0.47301517237011714, |
| "learning_rate": 1.8706870827666646e-08, |
| "loss": 0.3203, |
| "step": 2441 |
| }, |
| { |
| "epoch": 2.9288, |
| "grad_norm": 0.47724066371088975, |
| "learning_rate": 1.8108651476046457e-08, |
| "loss": 0.3107, |
| "step": 2442 |
| }, |
| { |
| "epoch": 2.93, |
| "grad_norm": 0.4757237945362611, |
| "learning_rate": 1.7520135948996263e-08, |
| "loss": 0.3048, |
| "step": 2443 |
| }, |
| { |
| "epoch": 2.9312, |
| "grad_norm": 0.49605027064755813, |
| "learning_rate": 1.6941325392837437e-08, |
| "loss": 0.2888, |
| "step": 2444 |
| }, |
| { |
| "epoch": 2.9324, |
| "grad_norm": 0.5628634219339166, |
| "learning_rate": 1.6372220934988693e-08, |
| "loss": 0.3292, |
| "step": 2445 |
| }, |
| { |
| "epoch": 2.9336, |
| "grad_norm": 0.5974348122702033, |
| "learning_rate": 1.5812823683962198e-08, |
| "loss": 0.3527, |
| "step": 2446 |
| }, |
| { |
| "epoch": 2.9348, |
| "grad_norm": 0.42311587911880083, |
| "learning_rate": 1.5263134729363582e-08, |
| "loss": 0.3057, |
| "step": 2447 |
| }, |
| { |
| "epoch": 2.936, |
| "grad_norm": 0.4424085149778813, |
| "learning_rate": 1.472315514188749e-08, |
| "loss": 0.3196, |
| "step": 2448 |
| }, |
| { |
| "epoch": 2.9372, |
| "grad_norm": 0.5206514274509301, |
| "learning_rate": 1.4192885973315918e-08, |
| "loss": 0.3227, |
| "step": 2449 |
| }, |
| { |
| "epoch": 2.9384, |
| "grad_norm": 0.38928868836839525, |
| "learning_rate": 1.3672328256518208e-08, |
| "loss": 0.2786, |
| "step": 2450 |
| }, |
| { |
| "epoch": 2.9396, |
| "grad_norm": 0.49822036281929044, |
| "learning_rate": 1.3161483005446618e-08, |
| "loss": 0.3139, |
| "step": 2451 |
| }, |
| { |
| "epoch": 2.9408, |
| "grad_norm": 0.5862817429516328, |
| "learning_rate": 1.2660351215135203e-08, |
| "loss": 0.3477, |
| "step": 2452 |
| }, |
| { |
| "epoch": 2.942, |
| "grad_norm": 0.41117931769988564, |
| "learning_rate": 1.2168933861698151e-08, |
| "loss": 0.2959, |
| "step": 2453 |
| }, |
| { |
| "epoch": 2.9432, |
| "grad_norm": 0.5331845701928941, |
| "learning_rate": 1.1687231902328122e-08, |
| "loss": 0.329, |
| "step": 2454 |
| }, |
| { |
| "epoch": 2.9444, |
| "grad_norm": 0.4131289314624644, |
| "learning_rate": 1.1215246275292913e-08, |
| "loss": 0.3148, |
| "step": 2455 |
| }, |
| { |
| "epoch": 2.9455999999999998, |
| "grad_norm": 0.5709882695339584, |
| "learning_rate": 1.0752977899936013e-08, |
| "loss": 0.3669, |
| "step": 2456 |
| }, |
| { |
| "epoch": 2.9468, |
| "grad_norm": 0.5383496373093002, |
| "learning_rate": 1.0300427676672164e-08, |
| "loss": 0.3207, |
| "step": 2457 |
| }, |
| { |
| "epoch": 2.948, |
| "grad_norm": 0.5036985474975768, |
| "learning_rate": 9.85759648698792e-09, |
| "loss": 0.3518, |
| "step": 2458 |
| }, |
| { |
| "epoch": 2.9492000000000003, |
| "grad_norm": 0.48604138385546897, |
| "learning_rate": 9.42448519343775e-09, |
| "loss": 0.3003, |
| "step": 2459 |
| }, |
| { |
| "epoch": 2.9504, |
| "grad_norm": 0.46063161763653415, |
| "learning_rate": 9.001094639645158e-09, |
| "loss": 0.3145, |
| "step": 2460 |
| }, |
| { |
| "epoch": 2.9516, |
| "grad_norm": 0.44888976548593773, |
| "learning_rate": 8.587425650297688e-09, |
| "loss": 0.3134, |
| "step": 2461 |
| }, |
| { |
| "epoch": 2.9528, |
| "grad_norm": 0.5443734200582069, |
| "learning_rate": 8.183479031148022e-09, |
| "loss": 0.3055, |
| "step": 2462 |
| }, |
| { |
| "epoch": 2.9539999999999997, |
| "grad_norm": 0.46207920486485327, |
| "learning_rate": 7.789255569011223e-09, |
| "loss": 0.3055, |
| "step": 2463 |
| }, |
| { |
| "epoch": 2.9552, |
| "grad_norm": 0.5590105689639684, |
| "learning_rate": 7.40475603176416e-09, |
| "loss": 0.3282, |
| "step": 2464 |
| }, |
| { |
| "epoch": 2.9564, |
| "grad_norm": 0.4728662799676889, |
| "learning_rate": 7.029981168341082e-09, |
| "loss": 0.3237, |
| "step": 2465 |
| }, |
| { |
| "epoch": 2.9576000000000002, |
| "grad_norm": 0.6122492473730149, |
| "learning_rate": 6.664931708736943e-09, |
| "loss": 0.3634, |
| "step": 2466 |
| }, |
| { |
| "epoch": 2.9588, |
| "grad_norm": 0.48871890909775306, |
| "learning_rate": 6.309608364001296e-09, |
| "loss": 0.3274, |
| "step": 2467 |
| }, |
| { |
| "epoch": 2.96, |
| "grad_norm": 0.4736736566376079, |
| "learning_rate": 5.9640118262399575e-09, |
| "loss": 0.3647, |
| "step": 2468 |
| }, |
| { |
| "epoch": 2.9612, |
| "grad_norm": 0.7702392956065972, |
| "learning_rate": 5.628142768613343e-09, |
| "loss": 0.3343, |
| "step": 2469 |
| }, |
| { |
| "epoch": 2.9624, |
| "grad_norm": 0.5110995784521956, |
| "learning_rate": 5.302001845333138e-09, |
| "loss": 0.3469, |
| "step": 2470 |
| }, |
| { |
| "epoch": 2.9636, |
| "grad_norm": 0.6271313615816718, |
| "learning_rate": 4.9855896916634065e-09, |
| "loss": 0.3011, |
| "step": 2471 |
| }, |
| { |
| "epoch": 2.9648, |
| "grad_norm": 0.5049024790218767, |
| "learning_rate": 4.678906923916704e-09, |
| "loss": 0.3349, |
| "step": 2472 |
| }, |
| { |
| "epoch": 2.966, |
| "grad_norm": 0.5083737036919284, |
| "learning_rate": 4.381954139457411e-09, |
| "loss": 0.3188, |
| "step": 2473 |
| }, |
| { |
| "epoch": 2.9672, |
| "grad_norm": 0.4439220103812538, |
| "learning_rate": 4.094731916693962e-09, |
| "loss": 0.303, |
| "step": 2474 |
| }, |
| { |
| "epoch": 2.9684, |
| "grad_norm": 0.5349117810636819, |
| "learning_rate": 3.817240815084944e-09, |
| "loss": 0.3106, |
| "step": 2475 |
| }, |
| { |
| "epoch": 2.9696, |
| "grad_norm": 0.4796397451321217, |
| "learning_rate": 3.5494813751324466e-09, |
| "loss": 0.3208, |
| "step": 2476 |
| }, |
| { |
| "epoch": 2.9708, |
| "grad_norm": 0.49657074475370644, |
| "learning_rate": 3.291454118383164e-09, |
| "loss": 0.3001, |
| "step": 2477 |
| }, |
| { |
| "epoch": 2.972, |
| "grad_norm": 0.45887033574992925, |
| "learning_rate": 3.043159547427843e-09, |
| "loss": 0.297, |
| "step": 2478 |
| }, |
| { |
| "epoch": 2.9732, |
| "grad_norm": 0.4654686233837408, |
| "learning_rate": 2.804598145899062e-09, |
| "loss": 0.3224, |
| "step": 2479 |
| }, |
| { |
| "epoch": 2.9744, |
| "grad_norm": 0.5123518073183415, |
| "learning_rate": 2.575770378472342e-09, |
| "loss": 0.3298, |
| "step": 2480 |
| }, |
| { |
| "epoch": 2.9756, |
| "grad_norm": 0.5616833089679418, |
| "learning_rate": 2.3566766908622586e-09, |
| "loss": 0.3039, |
| "step": 2481 |
| }, |
| { |
| "epoch": 2.9768, |
| "grad_norm": 0.5781294294873015, |
| "learning_rate": 2.1473175098229993e-09, |
| "loss": 0.3141, |
| "step": 2482 |
| }, |
| { |
| "epoch": 2.9779999999999998, |
| "grad_norm": 0.45192639710917176, |
| "learning_rate": 1.9476932431500286e-09, |
| "loss": 0.3087, |
| "step": 2483 |
| }, |
| { |
| "epoch": 2.9792, |
| "grad_norm": 0.42422671390989036, |
| "learning_rate": 1.7578042796739803e-09, |
| "loss": 0.3399, |
| "step": 2484 |
| }, |
| { |
| "epoch": 2.9804, |
| "grad_norm": 0.4340043445979178, |
| "learning_rate": 1.5776509892645453e-09, |
| "loss": 0.3088, |
| "step": 2485 |
| }, |
| { |
| "epoch": 2.9816000000000003, |
| "grad_norm": 0.4425405316221765, |
| "learning_rate": 1.4072337228282496e-09, |
| "loss": 0.3216, |
| "step": 2486 |
| }, |
| { |
| "epoch": 2.9828, |
| "grad_norm": 0.4987650680285554, |
| "learning_rate": 1.2465528123073445e-09, |
| "loss": 0.2955, |
| "step": 2487 |
| }, |
| { |
| "epoch": 2.984, |
| "grad_norm": 0.43270212291705107, |
| "learning_rate": 1.0956085706781416e-09, |
| "loss": 0.3315, |
| "step": 2488 |
| }, |
| { |
| "epoch": 2.9852, |
| "grad_norm": 0.5028265173730337, |
| "learning_rate": 9.54401291953788e-10, |
| "loss": 0.3101, |
| "step": 2489 |
| }, |
| { |
| "epoch": 2.9864, |
| "grad_norm": 0.5165685400479998, |
| "learning_rate": 8.229312511803811e-10, |
| "loss": 0.2875, |
| "step": 2490 |
| }, |
| { |
| "epoch": 2.9876, |
| "grad_norm": 0.5184185637940487, |
| "learning_rate": 7.011987044369673e-10, |
| "loss": 0.3225, |
| "step": 2491 |
| }, |
| { |
| "epoch": 2.9888, |
| "grad_norm": 0.6063108943109075, |
| "learning_rate": 5.892038888377638e-10, |
| "loss": 0.3134, |
| "step": 2492 |
| }, |
| { |
| "epoch": 2.99, |
| "grad_norm": 0.4981802545627052, |
| "learning_rate": 4.869470225277174e-10, |
| "loss": 0.3149, |
| "step": 2493 |
| }, |
| { |
| "epoch": 2.9912, |
| "grad_norm": 0.47237623616462293, |
| "learning_rate": 3.9442830468472414e-10, |
| "loss": 0.3044, |
| "step": 2494 |
| }, |
| { |
| "epoch": 2.9924, |
| "grad_norm": 0.5143523787824885, |
| "learning_rate": 3.1164791551907545e-10, |
| "loss": 0.3246, |
| "step": 2495 |
| }, |
| { |
| "epoch": 2.9936, |
| "grad_norm": 0.44405248487108334, |
| "learning_rate": 2.386060162717918e-10, |
| "loss": 0.2675, |
| "step": 2496 |
| }, |
| { |
| "epoch": 2.9948, |
| "grad_norm": 0.4952936205896277, |
| "learning_rate": 1.7530274921462308e-10, |
| "loss": 0.3095, |
| "step": 2497 |
| }, |
| { |
| "epoch": 2.996, |
| "grad_norm": 0.5220038590190723, |
| "learning_rate": 1.21738237651714e-10, |
| "loss": 0.2753, |
| "step": 2498 |
| }, |
| { |
| "epoch": 2.9972, |
| "grad_norm": 0.5220070903249548, |
| "learning_rate": 7.791258591682837e-11, |
| "loss": 0.3656, |
| "step": 2499 |
| }, |
| { |
| "epoch": 2.9984, |
| "grad_norm": 0.51190349865152, |
| "learning_rate": 4.382587937445948e-11, |
| "loss": 0.3388, |
| "step": 2500 |
| }, |
| { |
| "epoch": 2.9996, |
| "grad_norm": 0.4599694574699737, |
| "learning_rate": 1.947818441927485e-11, |
| "loss": 0.317, |
| "step": 2501 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.9361995803938117, |
| "learning_rate": 4.8695484761163145e-12, |
| "loss": 0.374, |
| "step": 2502 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 2502, |
| "total_flos": 898946847588352.0, |
| "train_loss": 0.38813807606268275, |
| "train_runtime": 64430.8646, |
| "train_samples_per_second": 1.862, |
| "train_steps_per_second": 0.039 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2502, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 4000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 898946847588352.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
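
The block above is the complete saved trainer state for this run (2502 optimizer steps over 3 epochs, logged every step). As a reading aid only, the short Python sketch below loads such a dump and prints a few headline figures; it is not part of the saved state, and the filename "trainer_state.json" is an assumption (the usual name the Hugging Face Trainer gives this file inside a checkpoint directory), so adjust the path to wherever this JSON actually lives.

import json

# Minimal sketch, assuming the JSON above is saved as "trainer_state.json".
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry "loss"; the single closing summary entry carries "train_loss".
per_step = [e for e in state["log_history"] if "loss" in e]
summary = next(e for e in state["log_history"] if "train_loss" in e)

print(f"logged steps       : {len(per_step)} / {state['max_steps']}")
print(f"first / last loss  : {per_step[0]['loss']:.4f} / {per_step[-1]['loss']:.4f}")
print(f"mean train loss    : {summary['train_loss']:.4f}")
print(f"runtime            : {summary['train_runtime'] / 3600:.1f} h")
print(f"throughput         : {summary['train_samples_per_second']:.3f} samples/s")
print(f"peak learning rate : {max(e['learning_rate'] for e in per_step):.3e}")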