| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.9995872884853487, |
| "eval_steps": 500, |
| "global_step": 1211, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0008254230293025176, |
| "grad_norm": 24.685088561067097, |
| "learning_rate": 8.19672131147541e-08, |
| "loss": 1.4397, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.004127115146512587, |
| "grad_norm": 23.945153463040807, |
| "learning_rate": 4.0983606557377047e-07, |
| "loss": 1.4292, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.008254230293025175, |
| "grad_norm": 8.201914286275175, |
| "learning_rate": 8.196721311475409e-07, |
| "loss": 1.3321, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.012381345439537762, |
| "grad_norm": 11.180188226746075, |
| "learning_rate": 1.2295081967213116e-06, |
| "loss": 1.1764, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.01650846058605035, |
| "grad_norm": 3.0006985745643777, |
| "learning_rate": 1.6393442622950819e-06, |
| "loss": 1.0311, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.020635575732562937, |
| "grad_norm": 3.0231996454961116, |
| "learning_rate": 2.0491803278688526e-06, |
| "loss": 0.9824, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.024762690879075525, |
| "grad_norm": 2.394417208637789, |
| "learning_rate": 2.459016393442623e-06, |
| "loss": 0.9428, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.028889806025588115, |
| "grad_norm": 2.243762354492006, |
| "learning_rate": 2.8688524590163937e-06, |
| "loss": 0.929, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.0330169211721007, |
| "grad_norm": 2.31711751154106, |
| "learning_rate": 3.2786885245901638e-06, |
| "loss": 0.9036, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.03714403631861329, |
| "grad_norm": 2.717403904295213, |
| "learning_rate": 3.6885245901639347e-06, |
| "loss": 0.8884, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.041271151465125874, |
| "grad_norm": 2.274524441450386, |
| "learning_rate": 4.098360655737705e-06, |
| "loss": 0.885, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.04539826661163846, |
| "grad_norm": 2.457481290190043, |
| "learning_rate": 4.508196721311476e-06, |
| "loss": 0.8721, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.04952538175815105, |
| "grad_norm": 2.3198474831023175, |
| "learning_rate": 4.918032786885246e-06, |
| "loss": 0.8735, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.053652496904663644, |
| "grad_norm": 2.2677876278066753, |
| "learning_rate": 5.327868852459017e-06, |
| "loss": 0.8592, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.05777961205117623, |
| "grad_norm": 2.334132338411567, |
| "learning_rate": 5.737704918032787e-06, |
| "loss": 0.8573, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.06190672719768882, |
| "grad_norm": 2.3114481733901213, |
| "learning_rate": 6.147540983606558e-06, |
| "loss": 0.8511, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.0660338423442014, |
| "grad_norm": 2.445435314197215, |
| "learning_rate": 6.5573770491803276e-06, |
| "loss": 0.8427, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.070160957490714, |
| "grad_norm": 2.3320755043408483, |
| "learning_rate": 6.967213114754099e-06, |
| "loss": 0.8494, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.07428807263722657, |
| "grad_norm": 2.4947952053454916, |
| "learning_rate": 7.3770491803278695e-06, |
| "loss": 0.8334, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.07841518778373917, |
| "grad_norm": 2.4255315812672666, |
| "learning_rate": 7.786885245901639e-06, |
| "loss": 0.8292, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.08254230293025175, |
| "grad_norm": 2.5980903528360586, |
| "learning_rate": 8.19672131147541e-06, |
| "loss": 0.8182, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.08666941807676434, |
| "grad_norm": 2.557969631480717, |
| "learning_rate": 8.60655737704918e-06, |
| "loss": 0.83, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.09079653322327692, |
| "grad_norm": 2.3958016117230363, |
| "learning_rate": 9.016393442622952e-06, |
| "loss": 0.8132, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.09492364836978952, |
| "grad_norm": 2.47094805804508, |
| "learning_rate": 9.426229508196723e-06, |
| "loss": 0.8058, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.0990507635163021, |
| "grad_norm": 2.2173686833192234, |
| "learning_rate": 9.836065573770493e-06, |
| "loss": 0.8129, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.10317787866281469, |
| "grad_norm": 2.442299703694065, |
| "learning_rate": 9.999812749151968e-06, |
| "loss": 0.8043, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.10730499380932729, |
| "grad_norm": 2.70020809980298, |
| "learning_rate": 9.998668489204265e-06, |
| "loss": 0.8005, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.11143210895583987, |
| "grad_norm": 2.4881429411766014, |
| "learning_rate": 9.99648423533726e-06, |
| "loss": 0.7993, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.11555922410235246, |
| "grad_norm": 2.3569754221609953, |
| "learning_rate": 9.993260441994116e-06, |
| "loss": 0.7966, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.11968633924886504, |
| "grad_norm": 2.455448628360433, |
| "learning_rate": 9.988997779898544e-06, |
| "loss": 0.8019, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.12381345439537764, |
| "grad_norm": 2.5006306104937623, |
| "learning_rate": 9.983697135915253e-06, |
| "loss": 0.7921, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.12794056954189023, |
| "grad_norm": 2.5688606957093594, |
| "learning_rate": 9.977359612865424e-06, |
| "loss": 0.7825, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.1320676846884028, |
| "grad_norm": 2.416695522058028, |
| "learning_rate": 9.96998652929728e-06, |
| "loss": 0.7809, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.1361947998349154, |
| "grad_norm": 2.2779935869735732, |
| "learning_rate": 9.961579419211741e-06, |
| "loss": 0.7767, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.140321914981428, |
| "grad_norm": 2.405259985341037, |
| "learning_rate": 9.952140031743282e-06, |
| "loss": 0.7696, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.14444903012794058, |
| "grad_norm": 2.2359965979949186, |
| "learning_rate": 9.941670330796007e-06, |
| "loss": 0.7571, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.14857614527445315, |
| "grad_norm": 2.123914969069781, |
| "learning_rate": 9.930172494635058e-06, |
| "loss": 0.7545, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.15270326042096574, |
| "grad_norm": 2.3685141262823057, |
| "learning_rate": 9.917648915433413e-06, |
| "loss": 0.7577, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.15683037556747834, |
| "grad_norm": 2.288582295953061, |
| "learning_rate": 9.904102198774188e-06, |
| "loss": 0.752, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.16095749071399093, |
| "grad_norm": 2.237056392660144, |
| "learning_rate": 9.889535163108537e-06, |
| "loss": 0.7439, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.1650846058605035, |
| "grad_norm": 2.4463435064141312, |
| "learning_rate": 9.873950839169248e-06, |
| "loss": 0.7302, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.1692117210070161, |
| "grad_norm": 2.255727307108107, |
| "learning_rate": 9.857352469340204e-06, |
| "loss": 0.7332, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.17333883615352869, |
| "grad_norm": 2.315889036709674, |
| "learning_rate": 9.839743506981783e-06, |
| "loss": 0.7334, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.17746595130004128, |
| "grad_norm": 2.4721326878211363, |
| "learning_rate": 9.821127615712365e-06, |
| "loss": 0.7184, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.18159306644655385, |
| "grad_norm": 2.4415976276779277, |
| "learning_rate": 9.801508668646117e-06, |
| "loss": 0.7149, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.18572018159306644, |
| "grad_norm": 2.412349551631901, |
| "learning_rate": 9.780890747587163e-06, |
| "loss": 0.713, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.18984729673957904, |
| "grad_norm": 2.515848530060018, |
| "learning_rate": 9.759278142180348e-06, |
| "loss": 0.7172, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.19397441188609163, |
| "grad_norm": 2.55352651790107, |
| "learning_rate": 9.736675349018768e-06, |
| "loss": 0.7042, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.1981015270326042, |
| "grad_norm": 2.2895880245761515, |
| "learning_rate": 9.713087070708225e-06, |
| "loss": 0.7109, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.2022286421791168, |
| "grad_norm": 2.3671724385797566, |
| "learning_rate": 9.688518214888836e-06, |
| "loss": 0.7001, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.20635575732562939, |
| "grad_norm": 2.3617892749786074, |
| "learning_rate": 9.662973893213976e-06, |
| "loss": 0.6979, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.21048287247214198, |
| "grad_norm": 2.254544557833255, |
| "learning_rate": 9.636459420286779e-06, |
| "loss": 0.6914, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.21460998761865457, |
| "grad_norm": 2.219547321736457, |
| "learning_rate": 9.60898031255441e-06, |
| "loss": 0.6964, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.21873710276516714, |
| "grad_norm": 2.211511469572542, |
| "learning_rate": 9.580542287160348e-06, |
| "loss": 0.6695, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.22286421791167974, |
| "grad_norm": 2.5041061805226787, |
| "learning_rate": 9.551151260754907e-06, |
| "loss": 0.6864, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.22699133305819233, |
| "grad_norm": 2.3511386929231097, |
| "learning_rate": 9.520813348264252e-06, |
| "loss": 0.6809, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.23111844820470492, |
| "grad_norm": 2.960097513048209, |
| "learning_rate": 9.489534861618168e-06, |
| "loss": 0.6555, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.2352455633512175, |
| "grad_norm": 2.2082387429502712, |
| "learning_rate": 9.45732230843683e-06, |
| "loss": 0.6568, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.23937267849773008, |
| "grad_norm": 2.151410227639591, |
| "learning_rate": 9.424182390676872e-06, |
| "loss": 0.6645, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.24349979364424268, |
| "grad_norm": 2.2209736960884334, |
| "learning_rate": 9.39012200323701e-06, |
| "loss": 0.6705, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.24762690879075527, |
| "grad_norm": 2.414842272352699, |
| "learning_rate": 9.355148232523538e-06, |
| "loss": 0.6496, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.25175402393726787, |
| "grad_norm": 2.5739956654811236, |
| "learning_rate": 9.319268354975958e-06, |
| "loss": 0.657, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.25588113908378046, |
| "grad_norm": 2.618163978298313, |
| "learning_rate": 9.282489835553106e-06, |
| "loss": 0.6496, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.260008254230293, |
| "grad_norm": 2.281248176869453, |
| "learning_rate": 9.244820326180013e-06, |
| "loss": 0.6563, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.2641353693768056, |
| "grad_norm": 2.297254363064407, |
| "learning_rate": 9.206267664155906e-06, |
| "loss": 0.6432, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.2682624845233182, |
| "grad_norm": 2.3343424077594306, |
| "learning_rate": 9.166839870523627e-06, |
| "loss": 0.6352, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.2723895996698308, |
| "grad_norm": 2.3001895352129904, |
| "learning_rate": 9.126545148400807e-06, |
| "loss": 0.6346, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.2765167148163434, |
| "grad_norm": 2.335491244315102, |
| "learning_rate": 9.085391881273182e-06, |
| "loss": 0.6417, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.280643829962856, |
| "grad_norm": 2.8711945935898986, |
| "learning_rate": 9.04338863125038e-06, |
| "loss": 0.6203, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.28477094510936857, |
| "grad_norm": 2.4481124095885964, |
| "learning_rate": 9.00054413728452e-06, |
| "loss": 0.6253, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.28889806025588116, |
| "grad_norm": 2.208108640856046, |
| "learning_rate": 8.956867313352055e-06, |
| "loss": 0.6246, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.2930251754023937, |
| "grad_norm": 2.2861750757235995, |
| "learning_rate": 8.912367246599175e-06, |
| "loss": 0.6152, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.2971522905489063, |
| "grad_norm": 2.548396902849064, |
| "learning_rate": 8.867053195451182e-06, |
| "loss": 0.6262, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.3012794056954189, |
| "grad_norm": 2.305963742549307, |
| "learning_rate": 8.820934587686247e-06, |
| "loss": 0.6151, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.3054065208419315, |
| "grad_norm": 2.4126232699277157, |
| "learning_rate": 8.77402101847391e-06, |
| "loss": 0.6071, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.3095336359884441, |
| "grad_norm": 2.2092911857967867, |
| "learning_rate": 8.726322248378775e-06, |
| "loss": 0.6144, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.3136607511349567, |
| "grad_norm": 2.2414757536483125, |
| "learning_rate": 8.677848201329775e-06, |
| "loss": 0.5971, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.31778786628146927, |
| "grad_norm": 2.1301302453717423, |
| "learning_rate": 8.628608962555467e-06, |
| "loss": 0.6171, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.32191498142798186, |
| "grad_norm": 2.1138762246320373, |
| "learning_rate": 8.578614776485744e-06, |
| "loss": 0.5913, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.32604209657449446, |
| "grad_norm": 2.2822356894428277, |
| "learning_rate": 8.527876044620453e-06, |
| "loss": 0.5967, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.330169211721007, |
| "grad_norm": 2.241849162079401, |
| "learning_rate": 8.47640332336531e-06, |
| "loss": 0.6042, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.3342963268675196, |
| "grad_norm": 2.2294939629300097, |
| "learning_rate": 8.424207321835598e-06, |
| "loss": 0.5824, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.3384234420140322, |
| "grad_norm": 2.2031216423678543, |
| "learning_rate": 8.371298899628091e-06, |
| "loss": 0.5778, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.3425505571605448, |
| "grad_norm": 2.33426131831025, |
| "learning_rate": 8.317689064561671e-06, |
| "loss": 0.5846, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.34667767230705737, |
| "grad_norm": 2.3437558474733895, |
| "learning_rate": 8.263388970387102e-06, |
| "loss": 0.5752, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.35080478745356997, |
| "grad_norm": 2.194788845362847, |
| "learning_rate": 8.20840991446645e-06, |
| "loss": 0.5796, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.35493190260008256, |
| "grad_norm": 2.3039527642284776, |
| "learning_rate": 8.152763335422612e-06, |
| "loss": 0.5768, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.35905901774659515, |
| "grad_norm": 2.271949627752587, |
| "learning_rate": 8.096460810759473e-06, |
| "loss": 0.5745, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.3631861328931077, |
| "grad_norm": 2.299369126451729, |
| "learning_rate": 8.03951405445314e-06, |
| "loss": 0.5629, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.3673132480396203, |
| "grad_norm": 2.701487019844598, |
| "learning_rate": 7.98193491451483e-06, |
| "loss": 0.5682, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.3714403631861329, |
| "grad_norm": 2.329511707999701, |
| "learning_rate": 7.923735370525809e-06, |
| "loss": 0.5741, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.3755674783326455, |
| "grad_norm": 2.2666378605955155, |
| "learning_rate": 7.864927531145012e-06, |
| "loss": 0.5613, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.37969459347915807, |
| "grad_norm": 2.44812863797748, |
| "learning_rate": 7.805523631589775e-06, |
| "loss": 0.5654, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.38382170862567067, |
| "grad_norm": 2.346335768826544, |
| "learning_rate": 7.745536031090252e-06, |
| "loss": 0.5577, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.38794882377218326, |
| "grad_norm": 2.340018514786721, |
| "learning_rate": 7.684977210318024e-06, |
| "loss": 0.5575, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.39207593891869585, |
| "grad_norm": 2.250185737483932, |
| "learning_rate": 7.623859768789441e-06, |
| "loss": 0.5456, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.3962030540652084, |
| "grad_norm": 2.1535843417391938, |
| "learning_rate": 7.5621964222442455e-06, |
| "loss": 0.5504, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.400330169211721, |
| "grad_norm": 2.279014812989976, |
| "learning_rate": 7.500000000000001e-06, |
| "loss": 0.5418, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.4044572843582336, |
| "grad_norm": 2.248120170776638, |
| "learning_rate": 7.437283442282903e-06, |
| "loss": 0.5299, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.4085843995047462, |
| "grad_norm": 2.1290912998771443, |
| "learning_rate": 7.374059797535516e-06, |
| "loss": 0.5382, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.41271151465125877, |
| "grad_norm": 2.2318811155096325, |
| "learning_rate": 7.310342219701981e-06, |
| "loss": 0.5386, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.41683862979777137, |
| "grad_norm": 2.0172840744038445, |
| "learning_rate": 7.246143965491288e-06, |
| "loss": 0.532, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.42096574494428396, |
| "grad_norm": 2.0888969433680953, |
| "learning_rate": 7.181478391619162e-06, |
| "loss": 0.5205, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.42509286009079655, |
| "grad_norm": 2.15061141359648, |
| "learning_rate": 7.11635895202914e-06, |
| "loss": 0.5311, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.42921997523730915, |
| "grad_norm": 2.2686115013098704, |
| "learning_rate": 7.050799195093421e-06, |
| "loss": 0.5301, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.4333470903838217, |
| "grad_norm": 2.341272332235732, |
| "learning_rate": 6.984812760794078e-06, |
| "loss": 0.5362, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.4374742055303343, |
| "grad_norm": 2.166891534024358, |
| "learning_rate": 6.918413377885193e-06, |
| "loss": 0.5293, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.4416013206768469, |
| "grad_norm": 2.2412320223529134, |
| "learning_rate": 6.851614861036533e-06, |
| "loss": 0.5283, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.44572843582335947, |
| "grad_norm": 2.2162169091736166, |
| "learning_rate": 6.78443110795936e-06, |
| "loss": 0.5187, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.44985555096987206, |
| "grad_norm": 2.1687285736037447, |
| "learning_rate": 6.716876096514944e-06, |
| "loss": 0.5208, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.45398266611638466, |
| "grad_norm": 2.1587915614775297, |
| "learning_rate": 6.648963881806411e-06, |
| "loss": 0.5058, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.45810978126289725, |
| "grad_norm": 2.05653346463343, |
| "learning_rate": 6.580708593254526e-06, |
| "loss": 0.5103, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.46223689640940985, |
| "grad_norm": 2.1531395501548642, |
| "learning_rate": 6.512124431658006e-06, |
| "loss": 0.5103, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.4663640115559224, |
| "grad_norm": 2.176090356825283, |
| "learning_rate": 6.443225666238976e-06, |
| "loss": 0.5062, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.470491126702435, |
| "grad_norm": 2.209786359023677, |
| "learning_rate": 6.37402663167421e-06, |
| "loss": 0.5027, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.4746182418489476, |
| "grad_norm": 2.056272842336906, |
| "learning_rate": 6.304541725112734e-06, |
| "loss": 0.5056, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.47874535699546017, |
| "grad_norm": 2.162812572951533, |
| "learning_rate": 6.234785403180438e-06, |
| "loss": 0.4928, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.48287247214197276, |
| "grad_norm": 2.211681052073838, |
| "learning_rate": 6.164772178972321e-06, |
| "loss": 0.4938, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.48699958728848536, |
| "grad_norm": 2.0594553618227613, |
| "learning_rate": 6.094516619032975e-06, |
| "loss": 0.5024, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.49112670243499795, |
| "grad_norm": 2.1196964005402803, |
| "learning_rate": 6.024033340325954e-06, |
| "loss": 0.4877, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.49525381758151055, |
| "grad_norm": 2.0731282875409787, |
| "learning_rate": 5.953337007192659e-06, |
| "loss": 0.4903, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.4993809327280231, |
| "grad_norm": 2.072327686896433, |
| "learning_rate": 5.882442328301356e-06, |
| "loss": 0.4814, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.5035080478745357, |
| "grad_norm": 2.118695720658721, |
| "learning_rate": 5.811364053586973e-06, |
| "loss": 0.4766, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.5076351630210483, |
| "grad_norm": 2.119885860879392, |
| "learning_rate": 5.740116971182322e-06, |
| "loss": 0.4777, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.5117622781675609, |
| "grad_norm": 2.073375813482556, |
| "learning_rate": 5.668715904341365e-06, |
| "loss": 0.4734, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.5158893933140735, |
| "grad_norm": 2.0709791028178572, |
| "learning_rate": 5.5971757083551625e-06, |
| "loss": 0.4707, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.520016508460586, |
| "grad_norm": 2.0492205452436396, |
| "learning_rate": 5.525511267461186e-06, |
| "loss": 0.4736, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.5241436236070987, |
| "grad_norm": 2.0319046135786953, |
| "learning_rate": 5.453737491746572e-06, |
| "loss": 0.4713, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.5282707387536112, |
| "grad_norm": 2.0892666680641327, |
| "learning_rate": 5.381869314046031e-06, |
| "loss": 0.4764, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.5323978539001238, |
| "grad_norm": 2.078568518806719, |
| "learning_rate": 5.3099216868349965e-06, |
| "loss": 0.4671, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.5365249690466364, |
| "grad_norm": 2.0410694490990724, |
| "learning_rate": 5.237909579118713e-06, |
| "loss": 0.4778, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.540652084193149, |
| "grad_norm": 2.0761769744117444, |
| "learning_rate": 5.165847973317854e-06, |
| "loss": 0.4678, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.5447791993396616, |
| "grad_norm": 2.130099579144156, |
| "learning_rate": 5.093751862151388e-06, |
| "loss": 0.4649, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.5489063144861742, |
| "grad_norm": 2.078379103493414, |
| "learning_rate": 5.021636245517261e-06, |
| "loss": 0.4732, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.5530334296326868, |
| "grad_norm": 2.0058829746015507, |
| "learning_rate": 4.9495161273716174e-06, |
| "loss": 0.4566, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.5571605447791993, |
| "grad_norm": 2.22400636300283, |
| "learning_rate": 4.877406512607159e-06, |
| "loss": 0.4572, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.561287659925712, |
| "grad_norm": 2.115628462513928, |
| "learning_rate": 4.805322403931312e-06, |
| "loss": 0.4599, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.5654147750722245, |
| "grad_norm": 2.0907681450444353, |
| "learning_rate": 4.73327879874486e-06, |
| "loss": 0.4647, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.5695418902187371, |
| "grad_norm": 2.203495989612946, |
| "learning_rate": 4.661290686021661e-06, |
| "loss": 0.4631, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.5736690053652497, |
| "grad_norm": 2.0602084077093235, |
| "learning_rate": 4.589373043190137e-06, |
| "loss": 0.4596, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.5777961205117623, |
| "grad_norm": 2.08958806455361, |
| "learning_rate": 4.517540833017152e-06, |
| "loss": 0.4481, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.5819232356582749, |
| "grad_norm": 2.0835614126353112, |
| "learning_rate": 4.445809000494945e-06, |
| "loss": 0.4457, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.5860503508047874, |
| "grad_norm": 2.1121573908487057, |
| "learning_rate": 4.374192469731771e-06, |
| "loss": 0.4485, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.5901774659513, |
| "grad_norm": 2.0348765238388307, |
| "learning_rate": 4.302706140846864e-06, |
| "loss": 0.4531, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.5943045810978126, |
| "grad_norm": 2.097167725695452, |
| "learning_rate": 4.231364886870417e-06, |
| "loss": 0.432, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.5984316962443252, |
| "grad_norm": 2.035634145828901, |
| "learning_rate": 4.160183550649176e-06, |
| "loss": 0.4542, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.6025588113908378, |
| "grad_norm": 2.083465204457464, |
| "learning_rate": 4.089176941758332e-06, |
| "loss": 0.4373, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.6066859265373504, |
| "grad_norm": 2.302682902657171, |
| "learning_rate": 4.018359833420323e-06, |
| "loss": 0.4355, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.610813041683863, |
| "grad_norm": 2.1209322204861167, |
| "learning_rate": 3.9477469594311975e-06, |
| "loss": 0.4382, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.6149401568303756, |
| "grad_norm": 2.1130799454748694, |
| "learning_rate": 3.8773530110952e-06, |
| "loss": 0.4303, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.6190672719768882, |
| "grad_norm": 2.067330224712638, |
| "learning_rate": 3.807192634168168e-06, |
| "loss": 0.4282, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.6231943871234007, |
| "grad_norm": 2.121507686505271, |
| "learning_rate": 3.7372804258104367e-06, |
| "loss": 0.4398, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.6273215022699133, |
| "grad_norm": 2.081955763632376, |
| "learning_rate": 3.667630931549826e-06, |
| "loss": 0.4321, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.6314486174164259, |
| "grad_norm": 2.0671194356029936, |
| "learning_rate": 3.598258642255387e-06, |
| "loss": 0.4297, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.6355757325629385, |
| "grad_norm": 2.0745096928290274, |
| "learning_rate": 3.529177991122519e-06, |
| "loss": 0.4291, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.6397028477094511, |
| "grad_norm": 2.04047679569905, |
| "learning_rate": 3.460403350670077e-06, |
| "loss": 0.4234, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.6438299628559637, |
| "grad_norm": 2.0184632129343254, |
| "learning_rate": 3.3919490297501167e-06, |
| "loss": 0.4322, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.6479570780024763, |
| "grad_norm": 2.0467555064446983, |
| "learning_rate": 3.3238292705708675e-06, |
| "loss": 0.423, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.6520841931489889, |
| "grad_norm": 2.15202224763677, |
| "learning_rate": 3.256058245733592e-06, |
| "loss": 0.4189, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.6562113082955014, |
| "grad_norm": 1.9966574895688705, |
| "learning_rate": 3.1886500552839105e-06, |
| "loss": 0.4199, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.660338423442014, |
| "grad_norm": 2.0485705146360753, |
| "learning_rate": 3.121618723778225e-06, |
| "loss": 0.4106, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.6644655385885266, |
| "grad_norm": 1.939157035039348, |
| "learning_rate": 3.054978197365861e-06, |
| "loss": 0.4164, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.6685926537350392, |
| "grad_norm": 1.9248864355584618, |
| "learning_rate": 2.9887423408875056e-06, |
| "loss": 0.4082, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.6727197688815518, |
| "grad_norm": 2.0698662946290423, |
| "learning_rate": 2.9229249349905686e-06, |
| "loss": 0.415, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.6768468840280644, |
| "grad_norm": 2.1520358829070094, |
| "learning_rate": 2.8575396732620673e-06, |
| "loss": 0.4003, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.680973999174577, |
| "grad_norm": 1.9676814885778169, |
| "learning_rate": 2.7926001593796114e-06, |
| "loss": 0.4173, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.6851011143210896, |
| "grad_norm": 2.0886545446660363, |
| "learning_rate": 2.728119904281105e-06, |
| "loss": 0.4174, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.6892282294676021, |
| "grad_norm": 2.1021768729202637, |
| "learning_rate": 2.6641123233537395e-06, |
| "loss": 0.4009, |
| "step": 835 |
| }, |
| { |
| "epoch": 0.6933553446141147, |
| "grad_norm": 2.164281085927058, |
| "learning_rate": 2.6005907336428615e-06, |
| "loss": 0.4017, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.6974824597606273, |
| "grad_norm": 2.092014051056356, |
| "learning_rate": 2.537568351081311e-06, |
| "loss": 0.4046, |
| "step": 845 |
| }, |
| { |
| "epoch": 0.7016095749071399, |
| "grad_norm": 1.9368454491782257, |
| "learning_rate": 2.4750582877397933e-06, |
| "loss": 0.4073, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.7057366900536525, |
| "grad_norm": 2.019204414322489, |
| "learning_rate": 2.4130735490988457e-06, |
| "loss": 0.4019, |
| "step": 855 |
| }, |
| { |
| "epoch": 0.7098638052001651, |
| "grad_norm": 1.9404031254190708, |
| "learning_rate": 2.3516270313430085e-06, |
| "loss": 0.3965, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.7139909203466777, |
| "grad_norm": 1.9803275733019916, |
| "learning_rate": 2.2907315186777057e-06, |
| "loss": 0.4009, |
| "step": 865 |
| }, |
| { |
| "epoch": 0.7181180354931903, |
| "grad_norm": 1.9105419735279907, |
| "learning_rate": 2.230399680669449e-06, |
| "loss": 0.3925, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.7222451506397028, |
| "grad_norm": 2.11701521254791, |
| "learning_rate": 2.170644069609876e-06, |
| "loss": 0.3878, |
| "step": 875 |
| }, |
| { |
| "epoch": 0.7263722657862154, |
| "grad_norm": 2.009828108641354, |
| "learning_rate": 2.1114771179041894e-06, |
| "loss": 0.3939, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.730499380932728, |
| "grad_norm": 2.0525311494906417, |
| "learning_rate": 2.052911135484551e-06, |
| "loss": 0.3983, |
| "step": 885 |
| }, |
| { |
| "epoch": 0.7346264960792406, |
| "grad_norm": 2.0024645682646214, |
| "learning_rate": 1.9949583072489455e-06, |
| "loss": 0.3991, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.7387536112257532, |
| "grad_norm": 1.983708310096234, |
| "learning_rate": 1.9376306905260604e-06, |
| "loss": 0.3908, |
| "step": 895 |
| }, |
| { |
| "epoch": 0.7428807263722658, |
| "grad_norm": 1.9201326193885593, |
| "learning_rate": 1.8809402125667064e-06, |
| "loss": 0.3894, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.7470078415187784, |
| "grad_norm": 2.0461993333856068, |
| "learning_rate": 1.8248986680623077e-06, |
| "loss": 0.3845, |
| "step": 905 |
| }, |
| { |
| "epoch": 0.751134956665291, |
| "grad_norm": 1.9895671478806258, |
| "learning_rate": 1.7695177166909672e-06, |
| "loss": 0.3924, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.7552620718118036, |
| "grad_norm": 2.057619939386755, |
| "learning_rate": 1.7148088806916114e-06, |
| "loss": 0.3895, |
| "step": 915 |
| }, |
| { |
| "epoch": 0.7593891869583161, |
| "grad_norm": 2.0151906155411354, |
| "learning_rate": 1.6607835424667578e-06, |
| "loss": 0.3865, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.7635163021048287, |
| "grad_norm": 1.925068927133916, |
| "learning_rate": 1.6074529422143398e-06, |
| "loss": 0.3852, |
| "step": 925 |
| }, |
| { |
| "epoch": 0.7676434172513413, |
| "grad_norm": 2.0136796449917256, |
| "learning_rate": 1.554828175589151e-06, |
| "loss": 0.3787, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.7717705323978539, |
| "grad_norm": 1.9559990017194175, |
| "learning_rate": 1.5029201913943425e-06, |
| "loss": 0.3796, |
| "step": 935 |
| }, |
| { |
| "epoch": 0.7758976475443665, |
| "grad_norm": 2.1521890680893585, |
| "learning_rate": 1.4517397893034718e-06, |
| "loss": 0.3842, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.7800247626908791, |
| "grad_norm": 2.0029729095922213, |
| "learning_rate": 1.4012976176135978e-06, |
| "loss": 0.393, |
| "step": 945 |
| }, |
| { |
| "epoch": 0.7841518778373917, |
| "grad_norm": 2.021018024866405, |
| "learning_rate": 1.35160417102985e-06, |
| "loss": 0.3873, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.7882789929839042, |
| "grad_norm": 1.9835697101743557, |
| "learning_rate": 1.3026697884819644e-06, |
| "loss": 0.3861, |
| "step": 955 |
| }, |
| { |
| "epoch": 0.7924061081304168, |
| "grad_norm": 1.9340723815291019, |
| "learning_rate": 1.25450465097322e-06, |
| "loss": 0.3903, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.7965332232769294, |
| "grad_norm": 2.1191758392362035, |
| "learning_rate": 1.207118779462248e-06, |
| "loss": 0.3749, |
| "step": 965 |
| }, |
| { |
| "epoch": 0.800660338423442, |
| "grad_norm": 2.132380601853603, |
| "learning_rate": 1.160522032778123e-06, |
| "loss": 0.3782, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.8047874535699546, |
| "grad_norm": 1.9708201131489755, |
| "learning_rate": 1.1147241055691909e-06, |
| "loss": 0.3705, |
| "step": 975 |
| }, |
| { |
| "epoch": 0.8089145687164672, |
| "grad_norm": 2.01028167948197, |
| "learning_rate": 1.0697345262860638e-06, |
| "loss": 0.3742, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.8130416838629798, |
| "grad_norm": 1.9991004397644925, |
| "learning_rate": 1.0255626551991765e-06, |
| "loss": 0.3806, |
| "step": 985 |
| }, |
| { |
| "epoch": 0.8171687990094924, |
| "grad_norm": 1.9398586585837028, |
| "learning_rate": 9.822176824513524e-07, |
| "loss": 0.3731, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.821295914156005, |
| "grad_norm": 1.96994225142757, |
| "learning_rate": 9.397086261457511e-07, |
| "loss": 0.3776, |
| "step": 995 |
| }, |
| { |
| "epoch": 0.8254230293025175, |
| "grad_norm": 2.003600819188701, |
| "learning_rate": 8.980443304696107e-07, |
| "loss": 0.3819, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.8295501444490301, |
| "grad_norm": 2.0351798859032484, |
| "learning_rate": 8.572334638541857e-07, |
| "loss": 0.3774, |
| "step": 1005 |
| }, |
| { |
| "epoch": 0.8336772595955427, |
| "grad_norm": 2.1430516379993403, |
| "learning_rate": 8.172845171712379e-07, |
| "loss": 0.3737, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.8378043747420553, |
| "grad_norm": 2.03671544389429, |
| "learning_rate": 7.782058019664773e-07, |
| "loss": 0.3709, |
| "step": 1015 |
| }, |
| { |
| "epoch": 0.8419314898885679, |
| "grad_norm": 2.014237343029339, |
| "learning_rate": 7.40005448730306e-07, |
| "loss": 0.3717, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.8460586050350805, |
| "grad_norm": 2.001733960692795, |
| "learning_rate": 7.026914052062433e-07, |
| "loss": 0.378, |
| "step": 1025 |
| }, |
| { |
| "epoch": 0.8501857201815931, |
| "grad_norm": 2.0314797478672864, |
| "learning_rate": 6.662714347373589e-07, |
| "loss": 0.3718, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.8543128353281056, |
| "grad_norm": 2.0398788512991985, |
| "learning_rate": 6.307531146510754e-07, |
| "loss": 0.3802, |
| "step": 1035 |
| }, |
| { |
| "epoch": 0.8584399504746183, |
| "grad_norm": 2.0401593697279616, |
| "learning_rate": 5.961438346826792e-07, |
| "loss": 0.3723, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.8625670656211308, |
| "grad_norm": 2.105641702439893, |
| "learning_rate": 5.6245079543785e-07, |
| "loss": 0.3674, |
| "step": 1045 |
| }, |
| { |
| "epoch": 0.8666941807676434, |
| "grad_norm": 2.0475120583137962, |
| "learning_rate": 5.296810068945474e-07, |
| "loss": 0.3767, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.870821295914156, |
| "grad_norm": 2.037325684321563, |
| "learning_rate": 4.97841286944557e-07, |
| "loss": 0.3717, |
| "step": 1055 |
| }, |
| { |
| "epoch": 0.8749484110606686, |
| "grad_norm": 1.8826839605258934, |
| "learning_rate": 4.6693825997499274e-07, |
| "loss": 0.366, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.8790755262071812, |
| "grad_norm": 2.1270152173670294, |
| "learning_rate": 4.3697835549007474e-07, |
| "loss": 0.3657, |
| "step": 1065 |
| }, |
| { |
| "epoch": 0.8832026413536938, |
| "grad_norm": 1.986884523560406, |
| "learning_rate": 4.0796780677343606e-07, |
| "loss": 0.3687, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.8873297565002064, |
| "grad_norm": 2.0992498730608453, |
| "learning_rate": 3.799126495912686e-07, |
| "loss": 0.3638, |
| "step": 1075 |
| }, |
| { |
| "epoch": 0.8914568716467189, |
| "grad_norm": 2.028752109036286, |
| "learning_rate": 3.528187209365486e-07, |
| "loss": 0.3699, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.8955839867932315, |
| "grad_norm": 2.0346601882201707, |
| "learning_rate": 3.26691657814634e-07, |
| "loss": 0.3703, |
| "step": 1085 |
| }, |
| { |
| "epoch": 0.8997111019397441, |
| "grad_norm": 2.0217798718878717, |
| "learning_rate": 3.015368960704584e-07, |
| "loss": 0.3604, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.9038382170862567, |
| "grad_norm": 1.9484068285089462, |
| "learning_rate": 2.7735966925757807e-07, |
| "loss": 0.3582, |
| "step": 1095 |
| }, |
| { |
| "epoch": 0.9079653322327693, |
| "grad_norm": 1.9903170219755175, |
| "learning_rate": 2.5416500754931294e-07, |
| "loss": 0.3641, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.9120924473792819, |
| "grad_norm": 2.025285970039861, |
| "learning_rate": 2.3195773669219767e-07, |
| "loss": 0.3651, |
| "step": 1105 |
| }, |
| { |
| "epoch": 0.9162195625257945, |
| "grad_norm": 1.9954116267972222, |
| "learning_rate": 2.1074247700196337e-07, |
| "loss": 0.3665, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.920346677672307, |
| "grad_norm": 2.0132907410321588, |
| "learning_rate": 1.905236424022633e-07, |
| "loss": 0.3586, |
| "step": 1115 |
| }, |
| { |
| "epoch": 0.9244737928188197, |
| "grad_norm": 1.9966146901368165, |
| "learning_rate": 1.7130543950633405e-07, |
| "loss": 0.3588, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.9286009079653322, |
| "grad_norm": 2.0300570451418314, |
| "learning_rate": 1.5309186674179633e-07, |
| "loss": 0.3634, |
| "step": 1125 |
| }, |
| { |
| "epoch": 0.9327280231118448, |
| "grad_norm": 2.1175207366778146, |
| "learning_rate": 1.358867135187636e-07, |
| "loss": 0.3734, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.9368551382583574, |
| "grad_norm": 2.0296632257291956, |
| "learning_rate": 1.1969355944143834e-07, |
| "loss": 0.3632, |
| "step": 1135 |
| }, |
| { |
| "epoch": 0.94098225340487, |
| "grad_norm": 1.9716530471763118, |
| "learning_rate": 1.0451577356336118e-07, |
| "loss": 0.3556, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.9451093685513826, |
| "grad_norm": 2.079524156836045, |
| "learning_rate": 9.035651368646647e-08, |
| "loss": 0.3627, |
| "step": 1145 |
| }, |
| { |
| "epoch": 0.9492364836978952, |
| "grad_norm": 2.063707259064606, |
| "learning_rate": 7.721872570408684e-08, |
| "loss": 0.3633, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.9533635988444078, |
| "grad_norm": 2.007674392778877, |
| "learning_rate": 6.510514298804838e-08, |
| "loss": 0.3622, |
| "step": 1155 |
| }, |
| { |
| "epoch": 0.9574907139909203, |
| "grad_norm": 2.0337463475612534, |
| "learning_rate": 5.401828581997948e-08, |
| "loss": 0.3576, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.961617829137433, |
| "grad_norm": 1.994551313475225, |
| "learning_rate": 4.396046086695915e-08, |
| "loss": 0.3569, |
| "step": 1165 |
| }, |
| { |
| "epoch": 0.9657449442839455, |
| "grad_norm": 2.016111248082697, |
| "learning_rate": 3.4933760701602595e-08, |
| "loss": 0.3578, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.9698720594304581, |
| "grad_norm": 2.061506062561967, |
| "learning_rate": 2.6940063366693303e-08, |
| "loss": 0.3642, |
| "step": 1175 |
| }, |
| { |
| "epoch": 0.9739991745769707, |
| "grad_norm": 1.9380243614271475, |
| "learning_rate": 1.998103198444845e-08, |
| "loss": 0.3682, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.9781262897234833, |
| "grad_norm": 2.0523022237333506, |
| "learning_rate": 1.405811441049898e-08, |
| "loss": 0.3558, |
| "step": 1185 |
| }, |
| { |
| "epoch": 0.9822534048699959, |
| "grad_norm": 1.9847092252520235, |
| "learning_rate": 9.1725429326589e-09, |
| "loss": 0.3513, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.9863805200165084, |
| "grad_norm": 1.9944372302390396, |
| "learning_rate": 5.325334014539829e-09, |
| "loss": 0.3682, |
| "step": 1195 |
| }, |
| { |
| "epoch": 0.9905076351630211, |
| "grad_norm": 2.0557893820556847, |
| "learning_rate": 2.5172880840745873e-09, |
| "loss": 0.3642, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.9946347503095336, |
| "grad_norm": 1.9769612777056, |
| "learning_rate": 7.48989366980979e-10, |
| "loss": 0.368, |
| "step": 1205 |
| }, |
| { |
| "epoch": 0.9987618654560462, |
| "grad_norm": 2.1452580734926343, |
| "learning_rate": 2.080576521568123e-11, |
| "loss": 0.3657, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.9995872884853487, |
| "eval_loss": 0.3757357895374298, |
| "eval_runtime": 0.963, |
| "eval_samples_per_second": 3.115, |
| "eval_steps_per_second": 1.038, |
| "step": 1211 |
| }, |
| { |
| "epoch": 0.9995872884853487, |
| "step": 1211, |
| "total_flos": 253506418114560.0, |
| "train_loss": 0.5435279840826496, |
| "train_runtime": 29545.3245, |
| "train_samples_per_second": 1.312, |
| "train_steps_per_second": 0.041 |
| } |
| ], |
| "logging_steps": 5, |
| "max_steps": 1211, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 1, |
| "save_steps": 100, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 253506418114560.0, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |