{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.968152866242038,
  "eval_steps": 500,
  "global_step": 390,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012738853503184714,
      "grad_norm": 3.7423072661703385,
      "learning_rate": 1.0256410256410257e-06,
      "loss": 0.972,
      "step": 1
    },
    {
      "epoch": 0.025477707006369428,
      "grad_norm": 4.014331193631274,
      "learning_rate": 2.0512820512820513e-06,
      "loss": 0.9843,
      "step": 2
    },
    {
      "epoch": 0.03821656050955414,
      "grad_norm": 3.8392264390505235,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 0.9844,
      "step": 3
    },
    {
      "epoch": 0.050955414012738856,
      "grad_norm": 3.4206021200245513,
      "learning_rate": 4.102564102564103e-06,
      "loss": 0.9535,
      "step": 4
    },
    {
      "epoch": 0.06369426751592357,
      "grad_norm": 2.844532001391515,
      "learning_rate": 5.128205128205128e-06,
      "loss": 0.9244,
      "step": 5
    },
    {
      "epoch": 0.07643312101910828,
      "grad_norm": 2.019106639258673,
      "learning_rate": 6.153846153846155e-06,
      "loss": 0.8528,
      "step": 6
    },
    {
      "epoch": 0.08917197452229299,
      "grad_norm": 1.57361042148724,
      "learning_rate": 7.17948717948718e-06,
      "loss": 0.8202,
      "step": 7
    },
    {
      "epoch": 0.10191082802547771,
      "grad_norm": 1.3225717537131316,
      "learning_rate": 8.205128205128205e-06,
      "loss": 0.806,
      "step": 8
    },
    {
      "epoch": 0.11464968152866242,
      "grad_norm": 1.297213830671725,
      "learning_rate": 9.230769230769232e-06,
      "loss": 0.8505,
      "step": 9
    },
    {
      "epoch": 0.12738853503184713,
      "grad_norm": 1.924658811896663,
      "learning_rate": 1.0256410256410256e-05,
      "loss": 0.7536,
      "step": 10
    },
    {
      "epoch": 0.14012738853503184,
      "grad_norm": 1.7407165339891915,
      "learning_rate": 1.1282051282051283e-05,
      "loss": 0.7524,
      "step": 11
    },
    {
      "epoch": 0.15286624203821655,
      "grad_norm": 1.3558527366110018,
      "learning_rate": 1.230769230769231e-05,
      "loss": 0.7382,
      "step": 12
    },
    {
      "epoch": 0.16560509554140126,
      "grad_norm": 1.0929459173306966,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.695,
      "step": 13
    },
    {
      "epoch": 0.17834394904458598,
      "grad_norm": 1.2749157546624197,
      "learning_rate": 1.435897435897436e-05,
      "loss": 0.7111,
      "step": 14
    },
    {
      "epoch": 0.1910828025477707,
      "grad_norm": 1.0998533610546595,
      "learning_rate": 1.5384615384615387e-05,
      "loss": 0.7091,
      "step": 15
    },
    {
      "epoch": 0.20382165605095542,
      "grad_norm": 0.913903420689088,
      "learning_rate": 1.641025641025641e-05,
      "loss": 0.6703,
      "step": 16
    },
    {
      "epoch": 0.21656050955414013,
      "grad_norm": 0.7661216217896927,
      "learning_rate": 1.7435897435897438e-05,
      "loss": 0.6948,
      "step": 17
    },
    {
      "epoch": 0.22929936305732485,
      "grad_norm": 0.7437430749983936,
      "learning_rate": 1.8461538461538465e-05,
      "loss": 0.6176,
      "step": 18
    },
    {
      "epoch": 0.24203821656050956,
      "grad_norm": 0.7446006831365574,
      "learning_rate": 1.9487179487179488e-05,
      "loss": 0.6591,
      "step": 19
    },
    {
      "epoch": 0.25477707006369427,
      "grad_norm": 0.6792039966401283,
      "learning_rate": 2.0512820512820512e-05,
      "loss": 0.6359,
      "step": 20
    },
    {
      "epoch": 0.267515923566879,
      "grad_norm": 0.5849626006077434,
      "learning_rate": 2.153846153846154e-05,
      "loss": 0.6271,
      "step": 21
    },
    {
      "epoch": 0.2802547770700637,
      "grad_norm": 0.6919741782724808,
      "learning_rate": 2.2564102564102566e-05,
      "loss": 0.665,
      "step": 22
    },
    {
      "epoch": 0.2929936305732484,
      "grad_norm": 0.7570356242077189,
      "learning_rate": 2.3589743589743593e-05,
      "loss": 0.6547,
      "step": 23
    },
    {
      "epoch": 0.3057324840764331,
      "grad_norm": 0.6189824489932229,
      "learning_rate": 2.461538461538462e-05,
      "loss": 0.6146,
      "step": 24
    },
    {
      "epoch": 0.3184713375796178,
      "grad_norm": 0.5416486550934623,
      "learning_rate": 2.5641025641025646e-05,
      "loss": 0.6467,
      "step": 25
    },
    {
      "epoch": 0.33121019108280253,
      "grad_norm": 0.6011316533516919,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.6231,
      "step": 26
    },
    {
      "epoch": 0.34394904458598724,
      "grad_norm": 0.5744339851945378,
      "learning_rate": 2.7692307692307694e-05,
      "loss": 0.5921,
      "step": 27
    },
    {
      "epoch": 0.35668789808917195,
      "grad_norm": 0.5427273841273952,
      "learning_rate": 2.871794871794872e-05,
      "loss": 0.5984,
      "step": 28
    },
    {
      "epoch": 0.36942675159235666,
      "grad_norm": 0.6587706620684388,
      "learning_rate": 2.9743589743589747e-05,
      "loss": 0.6258,
      "step": 29
    },
    {
      "epoch": 0.3821656050955414,
      "grad_norm": 0.5241407477072185,
      "learning_rate": 3.0769230769230774e-05,
      "loss": 0.6057,
      "step": 30
    },
    {
      "epoch": 0.39490445859872614,
      "grad_norm": 0.6221622861877016,
      "learning_rate": 3.1794871794871795e-05,
      "loss": 0.6028,
      "step": 31
    },
    {
      "epoch": 0.40764331210191085,
      "grad_norm": 0.5873239727341781,
      "learning_rate": 3.282051282051282e-05,
      "loss": 0.5984,
      "step": 32
    },
    {
      "epoch": 0.42038216560509556,
      "grad_norm": 0.5306206208481558,
      "learning_rate": 3.384615384615385e-05,
      "loss": 0.559,
      "step": 33
    },
    {
      "epoch": 0.43312101910828027,
      "grad_norm": 0.5582669312616446,
      "learning_rate": 3.4871794871794875e-05,
      "loss": 0.6149,
      "step": 34
    },
    {
      "epoch": 0.445859872611465,
      "grad_norm": 0.5000854344725365,
      "learning_rate": 3.58974358974359e-05,
      "loss": 0.6189,
      "step": 35
    },
    {
      "epoch": 0.4585987261146497,
      "grad_norm": 0.527484918197263,
      "learning_rate": 3.692307692307693e-05,
      "loss": 0.6085,
      "step": 36
    },
    {
      "epoch": 0.4713375796178344,
      "grad_norm": 0.48032262139418297,
      "learning_rate": 3.794871794871795e-05,
      "loss": 0.5931,
      "step": 37
    },
    {
      "epoch": 0.4840764331210191,
      "grad_norm": 0.4279530947525605,
      "learning_rate": 3.8974358974358976e-05,
      "loss": 0.5781,
      "step": 38
    },
    {
      "epoch": 0.4968152866242038,
      "grad_norm": 0.4900464691732963,
      "learning_rate": 4e-05,
      "loss": 0.6013,
      "step": 39
    },
    {
      "epoch": 0.5095541401273885,
      "grad_norm": 0.4882711712476194,
      "learning_rate": 3.9999198907597046e-05,
      "loss": 0.5778,
      "step": 40
    },
    {
      "epoch": 0.5222929936305732,
      "grad_norm": 0.4368372090967015,
      "learning_rate": 3.9996795694563096e-05,
      "loss": 0.567,
      "step": 41
    },
    {
      "epoch": 0.535031847133758,
      "grad_norm": 0.46003529609029364,
      "learning_rate": 3.999279055341771e-05,
      "loss": 0.5919,
      "step": 42
    },
    {
      "epoch": 0.5477707006369427,
      "grad_norm": 0.433443291072034,
      "learning_rate": 3.998718380500971e-05,
      "loss": 0.5608,
      "step": 43
    },
    {
      "epoch": 0.5605095541401274,
      "grad_norm": 0.5579094629535033,
      "learning_rate": 3.997997589849145e-05,
      "loss": 0.5919,
      "step": 44
    },
    {
      "epoch": 0.5732484076433121,
      "grad_norm": 0.5002127081158247,
      "learning_rate": 3.9971167411282835e-05,
      "loss": 0.6202,
      "step": 45
    },
    {
      "epoch": 0.5859872611464968,
      "grad_norm": 0.45513028054084476,
      "learning_rate": 3.99607590490251e-05,
      "loss": 0.583,
      "step": 46
    },
    {
      "epoch": 0.5987261146496815,
      "grad_norm": 0.49480268233907687,
      "learning_rate": 3.9948751645524235e-05,
      "loss": 0.6031,
      "step": 47
    },
    {
      "epoch": 0.6114649681528662,
      "grad_norm": 0.4676370814108011,
      "learning_rate": 3.9935146162684206e-05,
      "loss": 0.5774,
      "step": 48
    },
    {
      "epoch": 0.6242038216560509,
      "grad_norm": 0.42161416955688974,
      "learning_rate": 3.9919943690429906e-05,
      "loss": 0.5836,
      "step": 49
    },
    {
      "epoch": 0.6369426751592356,
      "grad_norm": 0.4758290404818582,
      "learning_rate": 3.9903145446619837e-05,
      "loss": 0.5961,
      "step": 50
    },
    {
      "epoch": 0.6496815286624203,
      "grad_norm": 0.4465799608039247,
      "learning_rate": 3.9884752776948564e-05,
      "loss": 0.5712,
      "step": 51
    },
    {
      "epoch": 0.6624203821656051,
      "grad_norm": 0.4783306849945249,
      "learning_rate": 3.9864767154838864e-05,
      "loss": 0.5781,
      "step": 52
    },
    {
      "epoch": 0.6751592356687898,
      "grad_norm": 0.47408070441336125,
      "learning_rate": 3.9843190181323744e-05,
      "loss": 0.5631,
      "step": 53
    },
    {
      "epoch": 0.6878980891719745,
      "grad_norm": 0.44160447990297685,
      "learning_rate": 3.982002358491817e-05,
      "loss": 0.5793,
      "step": 54
    },
    {
      "epoch": 0.7006369426751592,
      "grad_norm": 0.5440809599078149,
      "learning_rate": 3.979526922148058e-05,
      "loss": 0.5739,
      "step": 55
    },
    {
      "epoch": 0.7133757961783439,
      "grad_norm": 0.4030353591762494,
      "learning_rate": 3.9768929074064206e-05,
      "loss": 0.5731,
      "step": 56
    },
    {
      "epoch": 0.7261146496815286,
      "grad_norm": 0.47204667115023985,
      "learning_rate": 3.9741005252758255e-05,
      "loss": 0.5752,
      "step": 57
    },
    {
      "epoch": 0.7388535031847133,
      "grad_norm": 0.3924967377535464,
      "learning_rate": 3.971149999451886e-05,
      "loss": 0.5723,
      "step": 58
    },
    {
      "epoch": 0.7515923566878981,
      "grad_norm": 0.5036651859656847,
      "learning_rate": 3.9680415662989806e-05,
      "loss": 0.5816,
      "step": 59
    },
    {
      "epoch": 0.7643312101910829,
      "grad_norm": 0.44083451558702347,
      "learning_rate": 3.9647754748313294e-05,
      "loss": 0.5878,
      "step": 60
    },
    {
      "epoch": 0.7770700636942676,
      "grad_norm": 0.4848776951212216,
      "learning_rate": 3.96135198669304e-05,
      "loss": 0.5733,
      "step": 61
    },
    {
      "epoch": 0.7898089171974523,
      "grad_norm": 0.4525502560691572,
      "learning_rate": 3.957771376137144e-05,
      "loss": 0.5558,
      "step": 62
    },
    {
      "epoch": 0.802547770700637,
      "grad_norm": 0.4368149673012582,
      "learning_rate": 3.954033930003634e-05,
      "loss": 0.5766,
      "step": 63
    },
    {
      "epoch": 0.8152866242038217,
      "grad_norm": 0.4530850844628041,
      "learning_rate": 3.9501399476964806e-05,
      "loss": 0.567,
      "step": 64
    },
    {
      "epoch": 0.8280254777070064,
      "grad_norm": 0.4520029723511962,
      "learning_rate": 3.946089741159648e-05,
      "loss": 0.5747,
      "step": 65
    },
    {
      "epoch": 0.8407643312101911,
      "grad_norm": 0.4458792601241768,
      "learning_rate": 3.9418836348521045e-05,
      "loss": 0.5567,
      "step": 66
    },
    {
      "epoch": 0.8535031847133758,
      "grad_norm": 0.49824351969919184,
      "learning_rate": 3.937521965721831e-05,
      "loss": 0.5707,
      "step": 67
    },
    {
      "epoch": 0.8662420382165605,
      "grad_norm": 0.5483292524184726,
      "learning_rate": 3.933005083178828e-05,
      "loss": 0.5947,
      "step": 68
    },
    {
      "epoch": 0.8789808917197452,
      "grad_norm": 0.48089214203643743,
      "learning_rate": 3.928333349067125e-05,
      "loss": 0.5389,
      "step": 69
    },
    {
      "epoch": 0.89171974522293,
      "grad_norm": 0.49454234464079616,
      "learning_rate": 3.923507137635792e-05,
      "loss": 0.5396,
      "step": 70
    },
    {
      "epoch": 0.9044585987261147,
      "grad_norm": 0.43160503319375604,
      "learning_rate": 3.9185268355089606e-05,
      "loss": 0.5714,
      "step": 71
    },
    {
      "epoch": 0.9171974522292994,
      "grad_norm": 0.4419560432757935,
      "learning_rate": 3.913392841654851e-05,
      "loss": 0.5623,
      "step": 72
    },
    {
      "epoch": 0.9299363057324841,
      "grad_norm": 0.44484601437802485,
      "learning_rate": 3.9081055673538093e-05,
      "loss": 0.5606,
      "step": 73
    },
    {
      "epoch": 0.9426751592356688,
      "grad_norm": 0.4085811628730557,
      "learning_rate": 3.902665436165364e-05,
      "loss": 0.5457,
      "step": 74
    },
    {
      "epoch": 0.9554140127388535,
      "grad_norm": 0.4101366746857519,
      "learning_rate": 3.897072883894291e-05,
      "loss": 0.6062,
      "step": 75
    },
    {
      "epoch": 0.9681528662420382,
      "grad_norm": 0.4532752791503924,
      "learning_rate": 3.8913283585557054e-05,
      "loss": 0.5861,
      "step": 76
    },
    {
      "epoch": 0.9808917197452229,
      "grad_norm": 0.3783522323934715,
      "learning_rate": 3.885432320339167e-05,
      "loss": 0.5515,
      "step": 77
    },
    {
      "epoch": 0.9936305732484076,
      "grad_norm": 0.4477233989258556,
      "learning_rate": 3.879385241571817e-05,
      "loss": 0.5707,
      "step": 78
    },
    {
      "epoch": 1.0063694267515924,
      "grad_norm": 0.4629761435051509,
      "learning_rate": 3.873187606680543e-05,
      "loss": 0.4782,
      "step": 79
    },
    {
      "epoch": 1.019108280254777,
      "grad_norm": 0.4840535828792894,
      "learning_rate": 3.866839912153168e-05,
      "loss": 0.418,
      "step": 80
    },
    {
      "epoch": 1.0318471337579618,
      "grad_norm": 0.5036326924866047,
      "learning_rate": 3.860342666498677e-05,
      "loss": 0.3971,
      "step": 81
    },
    {
      "epoch": 1.0445859872611465,
      "grad_norm": 0.6228426093815846,
      "learning_rate": 3.853696390206484e-05,
      "loss": 0.3766,
      "step": 82
    },
    {
      "epoch": 1.0573248407643312,
      "grad_norm": 0.5561176865138412,
      "learning_rate": 3.846901615704734e-05,
      "loss": 0.3907,
      "step": 83
    },
    {
      "epoch": 1.070063694267516,
      "grad_norm": 0.5545736824358007,
      "learning_rate": 3.839958887317649e-05,
      "loss": 0.4015,
      "step": 84
    },
    {
      "epoch": 1.0828025477707006,
      "grad_norm": 0.5837935204920232,
      "learning_rate": 3.832868761221926e-05,
      "loss": 0.4234,
      "step": 85
    },
    {
      "epoch": 1.0955414012738853,
      "grad_norm": 0.492326405924025,
      "learning_rate": 3.825631805402182e-05,
      "loss": 0.3696,
      "step": 86
    },
    {
      "epoch": 1.10828025477707,
      "grad_norm": 0.5209834439614793,
      "learning_rate": 3.818248599605448e-05,
      "loss": 0.4019,
      "step": 87
    },
    {
      "epoch": 1.1210191082802548,
      "grad_norm": 0.4885566025553211,
      "learning_rate": 3.810719735294731e-05,
      "loss": 0.3943,
      "step": 88
    },
    {
      "epoch": 1.1337579617834395,
      "grad_norm": 0.47450364241586784,
      "learning_rate": 3.8030458156016326e-05,
      "loss": 0.3907,
      "step": 89
    },
    {
      "epoch": 1.1464968152866242,
      "grad_norm": 0.4445266649179879,
      "learning_rate": 3.795227455278029e-05,
      "loss": 0.411,
      "step": 90
    },
    {
      "epoch": 1.1592356687898089,
      "grad_norm": 0.46006224511432137,
      "learning_rate": 3.787265280646825e-05,
      "loss": 0.3926,
      "step": 91
    },
    {
      "epoch": 1.1719745222929936,
      "grad_norm": 0.4270153987174869,
      "learning_rate": 3.7791599295517825e-05,
      "loss": 0.3655,
      "step": 92
    },
    {
      "epoch": 1.1847133757961783,
      "grad_norm": 0.4878817140154042,
      "learning_rate": 3.7709120513064196e-05,
      "loss": 0.3798,
      "step": 93
    },
    {
      "epoch": 1.197452229299363,
      "grad_norm": 0.46867955444134074,
      "learning_rate": 3.762522306641998e-05,
      "loss": 0.3631,
      "step": 94
    },
    {
      "epoch": 1.2101910828025477,
      "grad_norm": 0.4419027945938377,
      "learning_rate": 3.7539913676545874e-05,
      "loss": 0.3884,
      "step": 95
    },
    {
      "epoch": 1.2229299363057324,
      "grad_norm": 0.40849750356307674,
      "learning_rate": 3.745319917751229e-05,
      "loss": 0.3933,
      "step": 96
    },
    {
      "epoch": 1.2356687898089171,
      "grad_norm": 0.5655828636490249,
      "learning_rate": 3.736508651595188e-05,
      "loss": 0.3992,
      "step": 97
    },
    {
      "epoch": 1.2484076433121019,
      "grad_norm": 0.4781160371870933,
      "learning_rate": 3.727558275050301e-05,
      "loss": 0.4138,
      "step": 98
    },
    {
      "epoch": 1.2611464968152866,
      "grad_norm": 0.4213212240808343,
      "learning_rate": 3.718469505124434e-05,
      "loss": 0.3848,
      "step": 99
    },
    {
      "epoch": 1.2738853503184713,
      "grad_norm": 0.4274617938587094,
      "learning_rate": 3.709243069912041e-05,
      "loss": 0.3763,
      "step": 100
    },
    {
      "epoch": 1.286624203821656,
      "grad_norm": 0.4220383272407581,
      "learning_rate": 3.699879708535838e-05,
      "loss": 0.378,
      "step": 101
    },
    {
      "epoch": 1.2993630573248407,
      "grad_norm": 0.4171375820693096,
      "learning_rate": 3.69038017108759e-05,
      "loss": 0.41,
      "step": 102
    },
    {
      "epoch": 1.3121019108280254,
      "grad_norm": 0.4382305912659545,
      "learning_rate": 3.680745218568026e-05,
      "loss": 0.4059,
      "step": 103
    },
    {
      "epoch": 1.3248407643312101,
      "grad_norm": 0.4165670382998542,
      "learning_rate": 3.6709756228258735e-05,
      "loss": 0.3844,
      "step": 104
    },
    {
      "epoch": 1.3375796178343948,
      "grad_norm": 0.4406466854897885,
      "learning_rate": 3.6610721664960236e-05,
      "loss": 0.4006,
      "step": 105
    },
    {
      "epoch": 1.3503184713375795,
      "grad_norm": 0.4481397864298152,
      "learning_rate": 3.65103564293684e-05,
      "loss": 0.384,
      "step": 106
    },
    {
      "epoch": 1.3630573248407643,
      "grad_norm": 0.43576569305511864,
      "learning_rate": 3.640866856166601e-05,
      "loss": 0.3816,
      "step": 107
    },
    {
      "epoch": 1.3757961783439492,
      "grad_norm": 0.41729038843610433,
      "learning_rate": 3.6305666207990886e-05,
      "loss": 0.417,
      "step": 108
    },
    {
      "epoch": 1.388535031847134,
      "grad_norm": 0.4880218086268087,
      "learning_rate": 3.6201357619783336e-05,
      "loss": 0.4033,
      "step": 109
    },
    {
      "epoch": 1.4012738853503186,
      "grad_norm": 0.41047644449018167,
      "learning_rate": 3.609575115312511e-05,
      "loss": 0.3797,
      "step": 110
    },
    {
      "epoch": 1.4140127388535033,
      "grad_norm": 0.4822480986437211,
      "learning_rate": 3.598885526807003e-05,
      "loss": 0.4076,
      "step": 111
    },
    {
      "epoch": 1.426751592356688,
      "grad_norm": 0.4459969963783435,
      "learning_rate": 3.5880678527966224e-05,
      "loss": 0.384,
      "step": 112
    },
    {
      "epoch": 1.4394904458598727,
      "grad_norm": 0.4108885261851228,
      "learning_rate": 3.577122959877017e-05,
      "loss": 0.3775,
      "step": 113
    },
    {
      "epoch": 1.4522292993630574,
      "grad_norm": 0.4264177407871251,
      "learning_rate": 3.566051724835245e-05,
      "loss": 0.3723,
      "step": 114
    },
    {
      "epoch": 1.4649681528662422,
      "grad_norm": 0.46254534003867953,
      "learning_rate": 3.554855034579532e-05,
      "loss": 0.3905,
      "step": 115
    },
    {
      "epoch": 1.4777070063694269,
      "grad_norm": 0.43159939406328157,
      "learning_rate": 3.5435337860682304e-05,
      "loss": 0.3968,
      "step": 116
    },
    {
      "epoch": 1.4904458598726116,
      "grad_norm": 0.40378318420157067,
      "learning_rate": 3.532088886237956e-05,
      "loss": 0.3986,
      "step": 117
    },
    {
      "epoch": 1.5031847133757963,
      "grad_norm": 0.4010856819942095,
      "learning_rate": 3.520521251930941e-05,
      "loss": 0.4018,
      "step": 118
    },
    {
      "epoch": 1.515923566878981,
      "grad_norm": 0.3953489181151893,
      "learning_rate": 3.5088318098215805e-05,
      "loss": 0.3877,
      "step": 119
    },
    {
      "epoch": 1.5286624203821657,
      "grad_norm": 0.4360231716627973,
      "learning_rate": 3.497021496342203e-05,
      "loss": 0.3619,
      "step": 120
    },
    {
      "epoch": 1.5414012738853504,
      "grad_norm": 0.42883040333822336,
      "learning_rate": 3.485091257608047e-05,
      "loss": 0.3917,
      "step": 121
    },
    {
      "epoch": 1.5541401273885351,
      "grad_norm": 0.3949540394409347,
      "learning_rate": 3.473042049341474e-05,
      "loss": 0.3795,
      "step": 122
    },
    {
      "epoch": 1.5668789808917198,
      "grad_norm": 0.4613117397583663,
      "learning_rate": 3.4608748367954064e-05,
      "loss": 0.3996,
      "step": 123
    },
    {
      "epoch": 1.5796178343949046,
      "grad_norm": 0.3602038452810683,
      "learning_rate": 3.4485905946759965e-05,
      "loss": 0.3716,
      "step": 124
    },
    {
      "epoch": 1.5923566878980893,
      "grad_norm": 0.3931259570104483,
      "learning_rate": 3.4361903070645484e-05,
      "loss": 0.3847,
      "step": 125
    },
    {
      "epoch": 1.605095541401274,
      "grad_norm": 0.4433362238715447,
      "learning_rate": 3.423674967338681e-05,
      "loss": 0.3824,
      "step": 126
    },
    {
      "epoch": 1.6178343949044587,
      "grad_norm": 0.3897182981017782,
      "learning_rate": 3.411045578092754e-05,
      "loss": 0.382,
      "step": 127
    },
    {
      "epoch": 1.6305732484076434,
      "grad_norm": 0.39580662710002257,
      "learning_rate": 3.398303151057543e-05,
      "loss": 0.3872,
      "step": 128
    },
    {
      "epoch": 1.643312101910828,
      "grad_norm": 0.36389859578115363,
      "learning_rate": 3.385448707019199e-05,
      "loss": 0.393,
      "step": 129
    },
    {
      "epoch": 1.6560509554140128,
      "grad_norm": 0.3760822336320154,
      "learning_rate": 3.372483275737468e-05,
      "loss": 0.4038,
      "step": 130
    },
    {
      "epoch": 1.6687898089171975,
      "grad_norm": 0.40124915655150517,
      "learning_rate": 3.359407895863199e-05,
      "loss": 0.3828,
      "step": 131
    },
    {
      "epoch": 1.6815286624203822,
      "grad_norm": 0.3492681667679969,
      "learning_rate": 3.34622361485514e-05,
      "loss": 0.3988,
      "step": 132
    },
    {
      "epoch": 1.694267515923567,
      "grad_norm": 0.4359639458909479,
      "learning_rate": 3.332931488896029e-05,
      "loss": 0.4123,
      "step": 133
    },
    {
      "epoch": 1.7070063694267517,
      "grad_norm": 0.35765716673817444,
      "learning_rate": 3.319532582807977e-05,
      "loss": 0.4025,
      "step": 134
    },
    {
      "epoch": 1.7197452229299364,
      "grad_norm": 0.4181343414020294,
      "learning_rate": 3.30602796996717e-05,
      "loss": 0.3988,
      "step": 135
    },
    {
      "epoch": 1.732484076433121,
      "grad_norm": 0.3671754645675639,
      "learning_rate": 3.2924187322178865e-05,
      "loss": 0.3866,
      "step": 136
    },
    {
      "epoch": 1.7452229299363058,
      "grad_norm": 0.40706707942452913,
      "learning_rate": 3.278705959785821e-05,
      "loss": 0.4071,
      "step": 137
    },
    {
      "epoch": 1.7579617834394905,
      "grad_norm": 0.40132966074561627,
      "learning_rate": 3.2648907511907544e-05,
      "loss": 0.3915,
      "step": 138
    },
    {
      "epoch": 1.7707006369426752,
      "grad_norm": 0.356092810738192,
      "learning_rate": 3.250974213158555e-05,
      "loss": 0.3881,
      "step": 139
    },
    {
      "epoch": 1.78343949044586,
      "grad_norm": 0.4088495881297229,
      "learning_rate": 3.23695746053251e-05,
      "loss": 0.3956,
      "step": 140
    },
    {
      "epoch": 1.7961783439490446,
      "grad_norm": 0.3568174986894459,
      "learning_rate": 3.222841616184025e-05,
      "loss": 0.4264,
      "step": 141
    },
    {
      "epoch": 1.8089171974522293,
      "grad_norm": 0.3828743980988108,
      "learning_rate": 3.208627810922665e-05,
      "loss": 0.3934,
      "step": 142
    },
    {
      "epoch": 1.821656050955414,
      "grad_norm": 0.40695432826523725,
      "learning_rate": 3.194317183405573e-05,
      "loss": 0.3538,
      "step": 143
    },
    {
      "epoch": 1.8343949044585988,
      "grad_norm": 0.34980415470555404,
      "learning_rate": 3.1799108800462466e-05,
      "loss": 0.3751,
      "step": 144
    },
    {
      "epoch": 1.8471337579617835,
      "grad_norm": 0.4171552280293834,
      "learning_rate": 3.1654100549227024e-05,
      "loss": 0.3823,
      "step": 145
    },
    {
      "epoch": 1.8598726114649682,
      "grad_norm": 0.3856839605835017,
      "learning_rate": 3.1508158696850275e-05,
      "loss": 0.3732,
      "step": 146
    },
    {
      "epoch": 1.872611464968153,
      "grad_norm": 0.3534737799953714,
      "learning_rate": 3.136129493462312e-05,
      "loss": 0.4172,
      "step": 147
    },
    {
      "epoch": 1.8853503184713376,
      "grad_norm": 0.40251325811058225,
      "learning_rate": 3.121352102768998e-05,
      "loss": 0.3775,
      "step": 148
    },
    {
      "epoch": 1.8980891719745223,
      "grad_norm": 0.39323479639785724,
      "learning_rate": 3.106484881410628e-05,
      "loss": 0.3929,
      "step": 149
    },
    {
      "epoch": 1.910828025477707,
      "grad_norm": 0.3948424894576429,
      "learning_rate": 3.091529020389009e-05,
      "loss": 0.4098,
      "step": 150
    },
    {
      "epoch": 1.9235668789808917,
      "grad_norm": 0.3798104391658232,
      "learning_rate": 3.076485717806808e-05,
      "loss": 0.3865,
      "step": 151
    },
    {
      "epoch": 1.9363057324840764,
      "grad_norm": 0.42664388139031734,
      "learning_rate": 3.061356178771564e-05,
      "loss": 0.4049,
      "step": 152
    },
    {
      "epoch": 1.9490445859872612,
      "grad_norm": 0.38754913155372867,
      "learning_rate": 3.0461416152991555e-05,
      "loss": 0.3863,
      "step": 153
    },
    {
      "epoch": 1.9617834394904459,
      "grad_norm": 0.41477873550604283,
      "learning_rate": 3.0308432462167045e-05,
      "loss": 0.3887,
      "step": 154
    },
    {
      "epoch": 1.9745222929936306,
      "grad_norm": 0.4140640968780079,
      "learning_rate": 3.015462297064936e-05,
      "loss": 0.3936,
      "step": 155
    },
    {
      "epoch": 1.9872611464968153,
      "grad_norm": 0.38731104462679833,
      "learning_rate": 3.0000000000000004e-05,
      "loss": 0.383,
      "step": 156
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.3728294872032267,
      "learning_rate": 2.98445759369477e-05,
      "loss": 0.3533,
      "step": 157
    },
    {
      "epoch": 2.0127388535031847,
      "grad_norm": 0.5326083329193402,
      "learning_rate": 2.9688363232396056e-05,
      "loss": 0.2746,
      "step": 158
    },
    {
      "epoch": 2.0254777070063694,
      "grad_norm": 0.4241037663468422,
      "learning_rate": 2.9531374400426158e-05,
      "loss": 0.2451,
      "step": 159
    },
    {
      "epoch": 2.038216560509554,
      "grad_norm": 0.5238076547115884,
      "learning_rate": 2.9373622017294075e-05,
      "loss": 0.2426,
      "step": 160
    },
    {
      "epoch": 2.050955414012739,
      "grad_norm": 0.43750634458426324,
      "learning_rate": 2.9215118720423375e-05,
      "loss": 0.2461,
      "step": 161
    },
    {
      "epoch": 2.0636942675159236,
      "grad_norm": 0.5426569564960375,
      "learning_rate": 2.9055877207392752e-05,
      "loss": 0.2396,
      "step": 162
    },
    {
      "epoch": 2.0764331210191083,
      "grad_norm": 0.4603442806084417,
      "learning_rate": 2.8895910234918828e-05,
      "loss": 0.2423,
      "step": 163
    },
    {
      "epoch": 2.089171974522293,
      "grad_norm": 0.4783694185949688,
      "learning_rate": 2.873523061783426e-05,
      "loss": 0.2522,
      "step": 164
    },
    {
      "epoch": 2.1019108280254777,
      "grad_norm": 0.4155346213843112,
      "learning_rate": 2.8573851228061084e-05,
      "loss": 0.2384,
      "step": 165
    },
    {
      "epoch": 2.1146496815286624,
      "grad_norm": 0.4032696957971972,
      "learning_rate": 2.8411784993579633e-05,
      "loss": 0.2505,
      "step": 166
    },
    {
      "epoch": 2.127388535031847,
      "grad_norm": 0.3898038039651499,
      "learning_rate": 2.8249044897392814e-05,
      "loss": 0.2701,
      "step": 167
    },
    {
      "epoch": 2.140127388535032,
      "grad_norm": 0.40575399978730575,
      "learning_rate": 2.80856439764861e-05,
      "loss": 0.2305,
      "step": 168
    },
    {
      "epoch": 2.1528662420382165,
      "grad_norm": 0.37004856559051524,
      "learning_rate": 2.792159532078314e-05,
      "loss": 0.2451,
      "step": 169
    },
    {
      "epoch": 2.1656050955414012,
      "grad_norm": 0.39808666444917457,
      "learning_rate": 2.77569120720971e-05,
      "loss": 0.258,
      "step": 170
    },
    {
      "epoch": 2.178343949044586,
      "grad_norm": 0.36955071620464847,
      "learning_rate": 2.7591607423077932e-05,
      "loss": 0.2292,
      "step": 171
    },
    {
      "epoch": 2.1910828025477707,
      "grad_norm": 0.336518551638344,
      "learning_rate": 2.7425694616155474e-05,
      "loss": 0.2477,
      "step": 172
    },
    {
      "epoch": 2.2038216560509554,
      "grad_norm": 0.4140957079330811,
      "learning_rate": 2.7259186942478656e-05,
      "loss": 0.2255,
      "step": 173
    },
    {
      "epoch": 2.21656050955414,
      "grad_norm": 0.32308874630904505,
      "learning_rate": 2.7092097740850712e-05,
      "loss": 0.237,
      "step": 174
    },
    {
      "epoch": 2.229299363057325,
      "grad_norm": 0.38917894801947195,
      "learning_rate": 2.692444039666066e-05,
      "loss": 0.2514,
      "step": 175
    },
    {
      "epoch": 2.2420382165605095,
      "grad_norm": 0.32626408176829913,
      "learning_rate": 2.6756228340810946e-05,
      "loss": 0.2317,
      "step": 176
    },
    {
      "epoch": 2.254777070063694,
      "grad_norm": 0.34013561380822094,
      "learning_rate": 2.6587475048641596e-05,
      "loss": 0.2553,
      "step": 177
    },
    {
      "epoch": 2.267515923566879,
      "grad_norm": 0.37726290374312765,
      "learning_rate": 2.6418194038850634e-05,
      "loss": 0.2438,
      "step": 178
    },
    {
      "epoch": 2.2802547770700636,
      "grad_norm": 0.3524165115081538,
      "learning_rate": 2.624839887241115e-05,
      "loss": 0.231,
      "step": 179
    },
    {
      "epoch": 2.2929936305732483,
      "grad_norm": 0.32629627151791846,
      "learning_rate": 2.607810315148494e-05,
      "loss": 0.2278,
      "step": 180
    },
    {
      "epoch": 2.305732484076433,
      "grad_norm": 0.41583661942751143,
      "learning_rate": 2.5907320518332827e-05,
      "loss": 0.242,
      "step": 181
    },
    {
      "epoch": 2.3184713375796178,
      "grad_norm": 0.30326537675872595,
      "learning_rate": 2.5736064654221808e-05,
      "loss": 0.2488,
      "step": 182
    },
    {
      "epoch": 2.3312101910828025,
      "grad_norm": 0.3469983421489293,
      "learning_rate": 2.5564349278329056e-05,
      "loss": 0.2512,
      "step": 183
    },
    {
      "epoch": 2.343949044585987,
      "grad_norm": 0.34347379957192603,
      "learning_rate": 2.539218814664288e-05,
      "loss": 0.2477,
      "step": 184
    },
    {
      "epoch": 2.356687898089172,
      "grad_norm": 0.28641537582695287,
      "learning_rate": 2.521959505086075e-05,
      "loss": 0.2362,
      "step": 185
    },
    {
      "epoch": 2.3694267515923566,
      "grad_norm": 0.3690167657528722,
      "learning_rate": 2.5046583817284437e-05,
      "loss": 0.2587,
      "step": 186
    },
    {
      "epoch": 2.3821656050955413,
      "grad_norm": 0.2961653899116515,
      "learning_rate": 2.487316830571244e-05,
      "loss": 0.2311,
      "step": 187
    },
    {
      "epoch": 2.394904458598726,
      "grad_norm": 0.29521846638552274,
      "learning_rate": 2.4699362408329646e-05,
      "loss": 0.2327,
      "step": 188
    },
    {
      "epoch": 2.4076433121019107,
      "grad_norm": 0.29770180340093805,
      "learning_rate": 2.4525180048594452e-05,
      "loss": 0.2544,
      "step": 189
    },
    {
      "epoch": 2.4203821656050954,
      "grad_norm": 0.2913325444490102,
      "learning_rate": 2.435063518012335e-05,
      "loss": 0.2247,
      "step": 190
    },
    {
      "epoch": 2.43312101910828,
      "grad_norm": 0.3384396420749506,
      "learning_rate": 2.4175741785573177e-05,
      "loss": 0.2379,
      "step": 191
    },
    {
      "epoch": 2.445859872611465,
      "grad_norm": 0.30159462621181515,
      "learning_rate": 2.4000513875520892e-05,
      "loss": 0.2152,
      "step": 192
    },
    {
      "epoch": 2.4585987261146496,
      "grad_norm": 0.29567483557148394,
      "learning_rate": 2.3824965487341247e-05,
      "loss": 0.2402,
      "step": 193
    },
    {
      "epoch": 2.4713375796178343,
      "grad_norm": 0.2769989858820714,
      "learning_rate": 2.3649110684082258e-05,
      "loss": 0.2597,
      "step": 194
    },
    {
      "epoch": 2.484076433121019,
      "grad_norm": 0.29336394029366775,
      "learning_rate": 2.3472963553338614e-05,
      "loss": 0.2273,
      "step": 195
    },
    {
      "epoch": 2.4968152866242037,
      "grad_norm": 0.2838756230989672,
      "learning_rate": 2.3296538206123134e-05,
      "loss": 0.2389,
      "step": 196
    },
    {
      "epoch": 2.5095541401273884,
      "grad_norm": 0.2626418361521103,
      "learning_rate": 2.311984877573636e-05,
      "loss": 0.2352,
      "step": 197
    },
    {
      "epoch": 2.522292993630573,
      "grad_norm": 0.26718958075760085,
      "learning_rate": 2.2942909416634326e-05,
      "loss": 0.2357,
      "step": 198
    },
    {
      "epoch": 2.535031847133758,
      "grad_norm": 0.2845956555682719,
      "learning_rate": 2.2765734303294666e-05,
      "loss": 0.2371,
      "step": 199
    },
    {
      "epoch": 2.5477707006369426,
      "grad_norm": 0.2640776304490595,
      "learning_rate": 2.2588337629081107e-05,
      "loss": 0.2384,
      "step": 200
    },
    {
      "epoch": 2.5605095541401273,
      "grad_norm": 0.2848898116027439,
      "learning_rate": 2.2410733605106462e-05,
      "loss": 0.2217,
      "step": 201
    },
    {
      "epoch": 2.573248407643312,
      "grad_norm": 0.2812237619598167,
      "learning_rate": 2.2232936459094158e-05,
      "loss": 0.2403,
      "step": 202
    },
    {
      "epoch": 2.5859872611464967,
      "grad_norm": 0.29409588083070315,
      "learning_rate": 2.205496043423849e-05,
      "loss": 0.2266,
      "step": 203
    },
    {
      "epoch": 2.5987261146496814,
      "grad_norm": 0.32256548006968067,
      "learning_rate": 2.1876819788063586e-05,
      "loss": 0.2547,
      "step": 204
    },
    {
      "epoch": 2.611464968152866,
      "grad_norm": 0.32874439082160606,
      "learning_rate": 2.16985287912813e-05,
      "loss": 0.2261,
      "step": 205
    },
    {
      "epoch": 2.624203821656051,
      "grad_norm": 0.3266191143621023,
      "learning_rate": 2.1520101726647922e-05,
      "loss": 0.2321,
      "step": 206
    },
    {
      "epoch": 2.6369426751592355,
      "grad_norm": 0.30563025473541944,
      "learning_rate": 2.1341552887820048e-05,
      "loss": 0.2394,
      "step": 207
    },
    {
      "epoch": 2.6496815286624202,
      "grad_norm": 0.2869377241233248,
      "learning_rate": 2.1162896578209517e-05,
      "loss": 0.2409,
      "step": 208
    },
    {
      "epoch": 2.662420382165605,
      "grad_norm": 0.30715126599292863,
      "learning_rate": 2.0984147109837564e-05,
      "loss": 0.2423,
      "step": 209
    },
    {
      "epoch": 2.6751592356687897,
      "grad_norm": 0.2894859890668604,
      "learning_rate": 2.0805318802188307e-05,
      "loss": 0.264,
      "step": 210
    },
    {
      "epoch": 2.6878980891719744,
      "grad_norm": 0.28673853671855953,
      "learning_rate": 2.0626425981061608e-05,
      "loss": 0.2632,
      "step": 211
    },
    {
      "epoch": 2.700636942675159,
      "grad_norm": 0.26547959114709113,
      "learning_rate": 2.0447482977425465e-05,
      "loss": 0.2165,
      "step": 212
    },
    {
      "epoch": 2.713375796178344,
      "grad_norm": 0.29689259712489163,
      "learning_rate": 2.0268504126267952e-05,
      "loss": 0.254,
      "step": 213
    },
    {
      "epoch": 2.7261146496815285,
      "grad_norm": 0.30794580549232115,
      "learning_rate": 2.008950376544887e-05,
      "loss": 0.234,
      "step": 214
    },
    {
      "epoch": 2.738853503184713,
      "grad_norm": 0.29029164891637815,
      "learning_rate": 1.9910496234551132e-05,
      "loss": 0.2423,
      "step": 215
    },
    {
      "epoch": 2.7515923566878984,
      "grad_norm": 0.28445043266611614,
      "learning_rate": 1.9731495873732055e-05,
      "loss": 0.2399,
      "step": 216
    },
    {
      "epoch": 2.7643312101910826,
      "grad_norm": 0.28463685628918173,
      "learning_rate": 1.9552517022574542e-05,
      "loss": 0.2246,
      "step": 217
    },
    {
      "epoch": 2.777070063694268,
      "grad_norm": 0.29242221813409025,
      "learning_rate": 1.93735740189384e-05,
      "loss": 0.2276,
      "step": 218
    },
    {
      "epoch": 2.789808917197452,
      "grad_norm": 0.309031276466736,
      "learning_rate": 1.9194681197811703e-05,
      "loss": 0.2522,
      "step": 219
    },
    {
      "epoch": 2.802547770700637,
      "grad_norm": 0.2769431867934569,
      "learning_rate": 1.901585289016244e-05,
      "loss": 0.2247,
      "step": 220
    },
    {
      "epoch": 2.8152866242038215,
      "grad_norm": 0.27968185341237434,
      "learning_rate": 1.8837103421790486e-05,
      "loss": 0.2227,
      "step": 221
    },
    {
      "epoch": 2.8280254777070066,
      "grad_norm": 0.2888083036658392,
      "learning_rate": 1.8658447112179952e-05,
      "loss": 0.2374,
      "step": 222
    },
    {
      "epoch": 2.840764331210191,
      "grad_norm": 0.28582364336230653,
      "learning_rate": 1.8479898273352084e-05,
      "loss": 0.2536,
      "step": 223
    },
    {
      "epoch": 2.853503184713376,
      "grad_norm": 0.2935452815809231,
      "learning_rate": 1.83014712087187e-05,
      "loss": 0.2181,
      "step": 224
    },
    {
      "epoch": 2.8662420382165603,
      "grad_norm": 0.30167338921175085,
      "learning_rate": 1.8123180211936417e-05,
      "loss": 0.2279,
      "step": 225
    },
    {
      "epoch": 2.8789808917197455,
      "grad_norm": 0.28313988031191023,
      "learning_rate": 1.794503956576152e-05,
      "loss": 0.2136,
      "step": 226
    },
    {
      "epoch": 2.8917197452229297,
      "grad_norm": 0.3036257204617132,
      "learning_rate": 1.776706354090585e-05,
      "loss": 0.2431,
      "step": 227
    },
    {
      "epoch": 2.904458598726115,
      "grad_norm": 0.28848979346617437,
      "learning_rate": 1.758926639489354e-05,
      "loss": 0.2423,
      "step": 228
    },
    {
      "epoch": 2.917197452229299,
      "grad_norm": 0.272764590445302,
      "learning_rate": 1.7411662370918893e-05,
      "loss": 0.2301,
      "step": 229
    },
    {
      "epoch": 2.9299363057324843,
      "grad_norm": 0.3047366111218703,
      "learning_rate": 1.7234265696705344e-05,
      "loss": 0.2548,
      "step": 230
    },
    {
      "epoch": 2.9426751592356686,
      "grad_norm": 0.29141657758684564,
      "learning_rate": 1.7057090583365678e-05,
      "loss": 0.2363,
      "step": 231
    },
    {
      "epoch": 2.9554140127388537,
      "grad_norm": 0.2905701392836334,
      "learning_rate": 1.6880151224263646e-05,
      "loss": 0.2429,
      "step": 232
    },
    {
      "epoch": 2.968152866242038,
      "grad_norm": 0.3055726799006245,
      "learning_rate": 1.6703461793876876e-05,
      "loss": 0.241,
      "step": 233
    },
    {
      "epoch": 2.980891719745223,
      "grad_norm": 0.27702453651096914,
      "learning_rate": 1.6527036446661396e-05,
      "loss": 0.2785,
      "step": 234
    },
    {
      "epoch": 2.9936305732484074,
      "grad_norm": 0.2958171896040813,
      "learning_rate": 1.635088931591775e-05,
      "loss": 0.2494,
      "step": 235
    },
    {
      "epoch": 3.0063694267515926,
      "grad_norm": 0.3339252283259285,
      "learning_rate": 1.6175034512658753e-05,
      "loss": 0.1814,
      "step": 236
    },
    {
      "epoch": 3.0191082802547773,
      "grad_norm": 0.33323829415209344,
      "learning_rate": 1.5999486124479115e-05,
      "loss": 0.1519,
      "step": 237
    },
    {
      "epoch": 3.031847133757962,
      "grad_norm": 0.263725808295815,
      "learning_rate": 1.5824258214426833e-05,
      "loss": 0.1756,
      "step": 238
    },
    {
      "epoch": 3.0445859872611467,
      "grad_norm": 0.3263063861580799,
      "learning_rate": 1.5649364819876655e-05,
      "loss": 0.1681,
      "step": 239
    },
    {
      "epoch": 3.0573248407643314,
      "grad_norm": 0.3332704699662475,
      "learning_rate": 1.547481995140556e-05,
      "loss": 0.1533,
      "step": 240
    },
    {
      "epoch": 3.070063694267516,
      "grad_norm": 0.3290607170386027,
      "learning_rate": 1.5300637591670357e-05,
      "loss": 0.1491,
      "step": 241
    },
    {
      "epoch": 3.082802547770701,
      "grad_norm": 0.27626642274366475,
      "learning_rate": 1.5126831694287564e-05,
      "loss": 0.1386,
      "step": 242
    },
    {
      "epoch": 3.0955414012738856,
      "grad_norm": 0.24383948766661087,
      "learning_rate": 1.4953416182715566e-05,
      "loss": 0.1454,
      "step": 243
    },
    {
      "epoch": 3.1082802547770703,
      "grad_norm": 0.2565144051732309,
      "learning_rate": 1.478040494913926e-05,
      "loss": 0.1297,
      "step": 244
    },
    {
      "epoch": 3.121019108280255,
      "grad_norm": 0.2854715023231579,
      "learning_rate": 1.460781185335713e-05,
      "loss": 0.1611,
      "step": 245
    },
    {
      "epoch": 3.1337579617834397,
      "grad_norm": 0.23688489896659726,
      "learning_rate": 1.443565072167095e-05,
      "loss": 0.1537,
      "step": 246
    },
    {
      "epoch": 3.1464968152866244,
      "grad_norm": 0.23498017729946113,
      "learning_rate": 1.4263935345778202e-05,
      "loss": 0.128,
      "step": 247
    },
    {
      "epoch": 3.159235668789809,
      "grad_norm": 0.25210838232322463,
      "learning_rate": 1.409267948166718e-05,
      "loss": 0.1529,
      "step": 248
    },
    {
      "epoch": 3.171974522292994,
      "grad_norm": 0.23817238492456902,
      "learning_rate": 1.3921896848515064e-05,
      "loss": 0.1431,
      "step": 249
    },
    {
      "epoch": 3.1847133757961785,
      "grad_norm": 0.23878818674795252,
      "learning_rate": 1.3751601127588849e-05,
      "loss": 0.1511,
      "step": 250
    },
    {
      "epoch": 3.1974522292993632,
      "grad_norm": 0.2253169671801177,
      "learning_rate": 1.3581805961149371e-05,
      "loss": 0.171,
      "step": 251
    },
    {
      "epoch": 3.210191082802548,
      "grad_norm": 0.2399433284877313,
      "learning_rate": 1.341252495135841e-05,
      "loss": 0.125,
      "step": 252
    },
    {
      "epoch": 3.2229299363057327,
      "grad_norm": 0.2482037509566062,
      "learning_rate": 1.324377165918906e-05,
      "loss": 0.1332,
      "step": 253
    },
    {
      "epoch": 3.2356687898089174,
      "grad_norm": 0.21581244708702438,
      "learning_rate": 1.3075559603339354e-05,
      "loss": 0.1479,
      "step": 254
    },
    {
      "epoch": 3.248407643312102,
      "grad_norm": 0.2075414978318476,
      "learning_rate": 1.2907902259149287e-05,
      "loss": 0.1289,
      "step": 255
    },
    {
      "epoch": 3.261146496815287,
      "grad_norm": 0.24464034758512393,
      "learning_rate": 1.274081305752135e-05,
      "loss": 0.1505,
      "step": 256
    },
    {
      "epoch": 3.2738853503184715,
      "grad_norm": 0.23054729756063358,
      "learning_rate": 1.2574305383844528e-05,
      "loss": 0.1534,
      "step": 257
    },
    {
      "epoch": 3.286624203821656,
      "grad_norm": 0.2020043798081996,
      "learning_rate": 1.2408392576922075e-05,
      "loss": 0.1381,
      "step": 258
    },
    {
      "epoch": 3.299363057324841,
      "grad_norm": 0.22799906894946498,
      "learning_rate": 1.2243087927902905e-05,
      "loss": 0.1719,
      "step": 259
    },
    {
      "epoch": 3.3121019108280256,
      "grad_norm": 0.24533082757636485,
      "learning_rate": 1.2078404679216864e-05,
      "loss": 0.1609,
      "step": 260
    },
    {
      "epoch": 3.3248407643312103,
      "grad_norm": 0.20714305709401984,
      "learning_rate": 1.1914356023513904e-05,
      "loss": 0.1179,
      "step": 261
    },
    {
      "epoch": 3.337579617834395,
      "grad_norm": 0.21668069968026532,
      "learning_rate": 1.1750955102607193e-05,
      "loss": 0.1647,
      "step": 262
    },
    {
      "epoch": 3.3503184713375798,
      "grad_norm": 0.2184496195596042,
      "learning_rate": 1.1588215006420374e-05,
      "loss": 0.1553,
      "step": 263
    },
    {
      "epoch": 3.3630573248407645,
      "grad_norm": 0.20337547245152646,
      "learning_rate": 1.1426148771938915e-05,
      "loss": 0.1315,
      "step": 264
    },
    {
      "epoch": 3.375796178343949,
      "grad_norm": 0.2037566957772752,
      "learning_rate": 1.1264769382165748e-05,
      "loss": 0.1539,
      "step": 265
    },
    {
      "epoch": 3.388535031847134,
      "grad_norm": 0.20032474440683493,
      "learning_rate": 1.110408976508118e-05,
      "loss": 0.1215,
      "step": 266
    },
    {
      "epoch": 3.4012738853503186,
      "grad_norm": 0.2136164552846967,
      "learning_rate": 1.094412279260726e-05,
      "loss": 0.1428,
      "step": 267
    },
    {
      "epoch": 3.4140127388535033,
      "grad_norm": 0.21177053858403921,
      "learning_rate": 1.0784881279576635e-05,
      "loss": 0.1595,
      "step": 268
    },
    {
      "epoch": 3.426751592356688,
      "grad_norm": 0.2029892437448442,
      "learning_rate": 1.0626377982705929e-05,
      "loss": 0.1383,
      "step": 269
    },
    {
      "epoch": 3.4394904458598727,
      "grad_norm": 0.20919896892050535,
      "learning_rate": 1.0468625599573842e-05,
      "loss": 0.1424,
      "step": 270
    },
    {
      "epoch": 3.4522292993630574,
      "grad_norm": 0.2070233782186885,
      "learning_rate": 1.0311636767603952e-05,
      "loss": 0.1467,
      "step": 271
    },
    {
      "epoch": 3.464968152866242,
      "grad_norm": 0.19702950998811397,
      "learning_rate": 1.0155424063052306e-05,
      "loss": 0.1341,
      "step": 272
    },
    {
      "epoch": 3.477707006369427,
      "grad_norm": 0.2063307034689398,
      "learning_rate": 1.0000000000000006e-05,
      "loss": 0.1333,
      "step": 273
    },
    {
      "epoch": 3.4904458598726116,
      "grad_norm": 0.20719963685656465,
      "learning_rate": 9.84537702935065e-06,
      "loss": 0.1388,
      "step": 274
    },
    {
      "epoch": 3.5031847133757963,
      "grad_norm": 0.215076053264252,
      "learning_rate": 9.691567537832964e-06,
      "loss": 0.1382,
      "step": 275
    },
    {
      "epoch": 3.515923566878981,
      "grad_norm": 0.2088643606927074,
      "learning_rate": 9.538583847008452e-06,
      "loss": 0.138,
      "step": 276
    },
    {
      "epoch": 3.5286624203821657,
      "grad_norm": 0.19464590100491783,
      "learning_rate": 9.386438212284372e-06,
      "loss": 0.1602,
      "step": 277
    },
    {
      "epoch": 3.5414012738853504,
      "grad_norm": 0.19748474073166788,
      "learning_rate": 9.235142821931928e-06,
      "loss": 0.1205,
      "step": 278
    },
    {
      "epoch": 3.554140127388535,
      "grad_norm": 0.20462175920146233,
      "learning_rate": 9.084709796109907e-06,
      "loss": 0.1243,
      "step": 279
    },
    {
      "epoch": 3.56687898089172,
      "grad_norm": 0.19781750645119578,
      "learning_rate": 8.93515118589373e-06,
      "loss": 0.1184,
      "step": 280
    },
    {
      "epoch": 3.5796178343949046,
      "grad_norm": 0.1932570190163277,
      "learning_rate": 8.786478972310023e-06,
      "loss": 0.1402,
      "step": 281
    },
    {
      "epoch": 3.5923566878980893,
      "grad_norm": 0.2130457950220486,
      "learning_rate": 8.638705065376887e-06,
      "loss": 0.1572,
      "step": 282
    },
    {
      "epoch": 3.605095541401274,
      "grad_norm": 0.21977855143398284,
      "learning_rate": 8.491841303149728e-06,
      "loss": 0.1666,
      "step": 283
    },
    {
      "epoch": 3.6178343949044587,
      "grad_norm": 0.19809617527946968,
      "learning_rate": 8.345899450772975e-06,
      "loss": 0.1423,
      "step": 284
    },
    {
      "epoch": 3.6305732484076434,
      "grad_norm": 0.18690090576844234,
      "learning_rate": 8.200891199537549e-06,
      "loss": 0.1364,
      "step": 285
    },
    {
      "epoch": 3.643312101910828,
      "grad_norm": 0.18446815748564052,
      "learning_rate": 8.056828165944282e-06,
      "loss": 0.1276,
      "step": 286
    },
    {
      "epoch": 3.656050955414013,
      "grad_norm": 0.19914110063591683,
      "learning_rate": 7.913721890773354e-06,
      "loss": 0.1452,
      "step": 287
    },
    {
      "epoch": 3.6687898089171975,
      "grad_norm": 0.19784197594445632,
      "learning_rate": 7.771583838159756e-06,
      "loss": 0.1393,
      "step": 288
    },
    {
      "epoch": 3.6815286624203822,
      "grad_norm": 0.18779371172725068,
      "learning_rate": 7.630425394674903e-06,
      "loss": 0.1486,
      "step": 289
    },
    {
      "epoch": 3.694267515923567,
      "grad_norm": 0.19091202103101299,
      "learning_rate": 7.49025786841445e-06,
      "loss": 0.1532,
      "step": 290
    },
    {
      "epoch": 3.7070063694267517,
      "grad_norm": 0.19572576915772363,
      "learning_rate": 7.3510924880924575e-06,
      "loss": 0.1498,
      "step": 291
    },
    {
      "epoch": 3.7197452229299364,
      "grad_norm": 0.19383886127171826,
      "learning_rate": 7.212940402141808e-06,
      "loss": 0.1243,
      "step": 292
    },
    {
      "epoch": 3.732484076433121,
      "grad_norm": 0.18951075996073852,
      "learning_rate": 7.075812677821145e-06,
      "loss": 0.1408,
      "step": 293
    },
    {
      "epoch": 3.745222929936306,
      "grad_norm": 0.18725185654705204,
      "learning_rate": 6.939720300328303e-06,
      "loss": 0.1445,
      "step": 294
    },
    {
      "epoch": 3.7579617834394905,
      "grad_norm": 0.1923854581483771,
      "learning_rate": 6.8046741719202385e-06,
      "loss": 0.1322,
      "step": 295
    },
    {
      "epoch": 3.770700636942675,
      "grad_norm": 0.19857273274849133,
      "learning_rate": 6.67068511103971e-06,
      "loss": 0.1472,
      "step": 296
    },
    {
      "epoch": 3.78343949044586,
      "grad_norm": 0.18958112392058588,
      "learning_rate": 6.537763851448593e-06,
      "loss": 0.1188,
      "step": 297
    },
    {
      "epoch": 3.7961783439490446,
      "grad_norm": 0.1754562981627312,
      "learning_rate": 6.4059210413680175e-06,
      "loss": 0.1352,
      "step": 298
    },
    {
      "epoch": 3.8089171974522293,
      "grad_norm": 0.1827948964704896,
      "learning_rate": 6.275167242625331e-06,
      "loss": 0.1343,
      "step": 299
    },
    {
      "epoch": 3.821656050955414,
      "grad_norm": 0.17640473509943577,
      "learning_rate": 6.145512929808013e-06,
      "loss": 0.1463,
      "step": 300
    },
    {
      "epoch": 3.8343949044585988,
      "grad_norm": 0.19352956970296584,
      "learning_rate": 6.016968489424572e-06,
      "loss": 0.136,
      "step": 301
    },
    {
      "epoch": 3.8471337579617835,
      "grad_norm": 0.17765227219483207,
      "learning_rate": 5.889544219072465e-06,
      "loss": 0.1371,
      "step": 302
    },
    {
      "epoch": 3.859872611464968,
      "grad_norm": 0.17378510781197484,
      "learning_rate": 5.7632503266131925e-06,
      "loss": 0.1508,
      "step": 303
    },
    {
      "epoch": 3.872611464968153,
      "grad_norm": 0.1754866346893611,
      "learning_rate": 5.638096929354522e-06,
      "loss": 0.1371,
      "step": 304
    },
    {
      "epoch": 3.8853503184713376,
      "grad_norm": 0.17990543007311735,
      "learning_rate": 5.514094053240035e-06,
      "loss": 0.1504,
      "step": 305
    },
    {
      "epoch": 3.8980891719745223,
      "grad_norm": 0.1767314352665911,
      "learning_rate": 5.39125163204594e-06,
      "loss": 0.1384,
      "step": 306
    },
    {
      "epoch": 3.910828025477707,
      "grad_norm": 0.18388289790416432,
      "learning_rate": 5.269579506585259e-06,
      "loss": 0.125,
      "step": 307
    },
    {
      "epoch": 3.9235668789808917,
      "grad_norm": 0.18709650841448822,
      "learning_rate": 5.149087423919541e-06,
      "loss": 0.1719,
      "step": 308
    },
    {
      "epoch": 3.9363057324840764,
      "grad_norm": 0.1765027020500491,
      "learning_rate": 5.029785036577976e-06,
      "loss": 0.1365,
      "step": 309
    },
    {
      "epoch": 3.949044585987261,
      "grad_norm": 0.1740407642246799,
      "learning_rate": 4.911681901784198e-06,
      "loss": 0.1455,
      "step": 310
    },
    {
      "epoch": 3.961783439490446,
      "grad_norm": 0.17955395869877427,
      "learning_rate": 4.794787480690597e-06,
      "loss": 0.135,
      "step": 311
    },
    {
      "epoch": 3.9745222929936306,
      "grad_norm": 0.18507428221903918,
      "learning_rate": 4.679111137620442e-06,
      "loss": 0.1575,
      "step": 312
    },
    {
      "epoch": 3.9872611464968153,
      "grad_norm": 0.1805033770468359,
      "learning_rate": 4.5646621393177e-06,
      "loss": 0.1388,
      "step": 313
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.16048437681749295,
      "learning_rate": 4.451449654204685e-06,
      "loss": 0.1153,
      "step": 314
    },
    {
      "epoch": 4.012738853503185,
      "grad_norm": 0.24959599645461245,
      "learning_rate": 4.339482751647557e-06,
      "loss": 0.1103,
      "step": 315
    },
    {
      "epoch": 4.025477707006369,
      "grad_norm": 0.23271125680775137,
      "learning_rate": 4.228770401229824e-06,
      "loss": 0.1186,
      "step": 316
    },
    {
      "epoch": 4.038216560509555,
      "grad_norm": 0.2122069247359907,
      "learning_rate": 4.119321472033779e-06,
      "loss": 0.1008,
      "step": 317
    },
    {
      "epoch": 4.050955414012739,
      "grad_norm": 0.15556832545642946,
      "learning_rate": 4.011144731929981e-06,
      "loss": 0.0879,
      "step": 318
    },
    {
      "epoch": 4.063694267515924,
      "grad_norm": 0.13068949666043236,
      "learning_rate": 3.904248846874894e-06,
      "loss": 0.101,
      "step": 319
    },
    {
      "epoch": 4.076433121019108,
      "grad_norm": 0.15988306601505795,
      "learning_rate": 3.7986423802166705e-06,
      "loss": 0.1042,
      "step": 320
    },
    {
      "epoch": 4.089171974522293,
      "grad_norm": 0.2008005962776904,
      "learning_rate": 3.694333792009115e-06,
      "loss": 0.1186,
      "step": 321
    },
    {
      "epoch": 4.101910828025478,
      "grad_norm": 0.22309601017298253,
      "learning_rate": 3.5913314383339937e-06,
      "loss": 0.1079,
      "step": 322
    },
    {
      "epoch": 4.114649681528663,
      "grad_norm": 0.2171458668604706,
      "learning_rate": 3.4896435706316e-06,
      "loss": 0.0992,
      "step": 323
    },
    {
      "epoch": 4.127388535031847,
      "grad_norm": 0.1754223239205233,
      "learning_rate": 3.3892783350397675e-06,
      "loss": 0.0966,
      "step": 324
    },
    {
      "epoch": 4.140127388535032,
      "grad_norm": 0.17536902204340468,
      "learning_rate": 3.290243771741275e-06,
      "loss": 0.1002,
      "step": 325
    },
    {
      "epoch": 4.1528662420382165,
      "grad_norm": 0.1349302126131413,
      "learning_rate": 3.1925478143197418e-06,
      "loss": 0.0864,
      "step": 326
    },
    {
      "epoch": 4.165605095541402,
      "grad_norm": 0.14115081640236687,
      "learning_rate": 3.0961982891241083e-06,
      "loss": 0.1042,
      "step": 327
    },
    {
      "epoch": 4.178343949044586,
      "grad_norm": 0.14152556061179694,
      "learning_rate": 3.001202914641628e-06,
      "loss": 0.0832,
      "step": 328
    },
    {
      "epoch": 4.191082802547771,
      "grad_norm": 0.14396584823928577,
      "learning_rate": 2.907569300879596e-06,
      "loss": 0.0863,
      "step": 329
    },
    {
      "epoch": 4.203821656050955,
      "grad_norm": 0.13656039310896373,
      "learning_rate": 2.815304948755664e-06,
      "loss": 0.105,
      "step": 330
    },
    {
      "epoch": 4.2165605095541405,
      "grad_norm": 0.14707618001361536,
      "learning_rate": 2.7244172494969978e-06,
      "loss": 0.1121,
      "step": 331
    },
    {
      "epoch": 4.229299363057325,
      "grad_norm": 0.14952406400214932,
      "learning_rate": 2.6349134840481294e-06,
      "loss": 0.1039,
      "step": 332
    },
    {
      "epoch": 4.24203821656051,
      "grad_norm": 0.45773988775828156,
      "learning_rate": 2.546800822487714e-06,
      "loss": 0.1006,
      "step": 333
    },
    {
      "epoch": 4.254777070063694,
      "grad_norm": 0.14086482905242464,
      "learning_rate": 2.4600863234541338e-06,
      "loss": 0.103,
      "step": 334
    },
    {
      "epoch": 4.267515923566879,
      "grad_norm": 0.13406472253081728,
      "learning_rate": 2.374776933580025e-06,
      "loss": 0.1102,
      "step": 335
    },
    {
      "epoch": 4.280254777070064,
      "grad_norm": 0.14550219594207334,
      "learning_rate": 2.2908794869358044e-06,
      "loss": 0.1125,
      "step": 336
    },
    {
      "epoch": 4.292993630573249,
      "grad_norm": 0.1347484077586462,
      "learning_rate": 2.2084007044821764e-06,
      "loss": 0.0835,
      "step": 337
    },
    {
      "epoch": 4.305732484076433,
      "grad_norm": 0.1290805497722182,
      "learning_rate": 2.127347193531757e-06,
      "loss": 0.1041,
      "step": 338
    },
    {
      "epoch": 4.318471337579618,
      "grad_norm": 0.13309082164534528,
      "learning_rate": 2.0477254472197237e-06,
      "loss": 0.0967,
      "step": 339
    },
    {
      "epoch": 4.3312101910828025,
      "grad_norm": 0.13761067925193232,
      "learning_rate": 1.96954184398368e-06,
      "loss": 0.1113,
      "step": 340
    },
    {
      "epoch": 4.343949044585988,
      "grad_norm": 0.13572266975580224,
      "learning_rate": 1.8928026470526917e-06,
      "loss": 0.1053,
      "step": 341
    },
    {
      "epoch": 4.356687898089172,
      "grad_norm": 0.12800656352242193,
      "learning_rate": 1.817514003945524e-06,
      "loss": 0.0867,
      "step": 342
    },
    {
      "epoch": 4.369426751592357,
      "grad_norm": 0.13496217012761974,
      "learning_rate": 1.743681945978184e-06,
      "loss": 0.11,
      "step": 343
    },
    {
      "epoch": 4.382165605095541,
      "grad_norm": 0.13765332230964036,
      "learning_rate": 1.6713123877807413e-06,
      "loss": 0.1133,
      "step": 344
    },
    {
      "epoch": 4.3949044585987265,
      "grad_norm": 0.13352458324587824,
      "learning_rate": 1.6004111268235156e-06,
      "loss": 0.1058,
      "step": 345
    },
    {
      "epoch": 4.407643312101911,
      "grad_norm": 0.13191561231330995,
      "learning_rate": 1.5309838429526714e-06,
      "loss": 0.1051,
      "step": 346
    },
    {
      "epoch": 4.420382165605096,
      "grad_norm": 0.12490688145220193,
      "learning_rate": 1.4630360979351644e-06,
      "loss": 0.09,
      "step": 347
    },
    {
      "epoch": 4.43312101910828,
      "grad_norm": 0.15947738459761718,
      "learning_rate": 1.396573335013236e-06,
      "loss": 0.0964,
      "step": 348
    },
    {
      "epoch": 4.445859872611465,
      "grad_norm": 0.12096509710405506,
      "learning_rate": 1.3316008784683265e-06,
      "loss": 0.0907,
      "step": 349
    },
    {
      "epoch": 4.45859872611465,
      "grad_norm": 0.12188980792759932,
      "learning_rate": 1.2681239331945695e-06,
      "loss": 0.1016,
      "step": 350
    },
    {
      "epoch": 4.471337579617835,
      "grad_norm": 0.13559724068658183,
      "learning_rate": 1.2061475842818337e-06,
      "loss": 0.1072,
      "step": 351
    },
    {
      "epoch": 4.484076433121019,
      "grad_norm": 0.12292648232545614,
      "learning_rate": 1.1456767966083393e-06,
      "loss": 0.1087,
      "step": 352
    },
    {
      "epoch": 4.496815286624204,
      "grad_norm": 0.1334907116310344,
      "learning_rate": 1.086716414442952e-06,
      "loss": 0.1161,
      "step": 353
    },
    {
      "epoch": 4.509554140127388,
      "grad_norm": 0.13271118963324863,
      "learning_rate": 1.0292711610570904e-06,
      "loss": 0.0958,
      "step": 354
    },
    {
      "epoch": 4.522292993630574,
      "grad_norm": 0.13680838872833267,
      "learning_rate": 9.733456383463658e-07,
      "loss": 0.0943,
      "step": 355
    },
    {
      "epoch": 4.535031847133758,
      "grad_norm": 0.13801489368931033,
      "learning_rate": 9.189443264619102e-07,
      "loss": 0.123,
      "step": 356
    },
    {
      "epoch": 4.547770700636943,
      "grad_norm": 0.12529844924959202,
      "learning_rate": 8.660715834514977e-07,
      "loss": 0.0975,
      "step": 357
    },
    {
      "epoch": 4.560509554140127,
      "grad_norm": 0.12938877893813736,
      "learning_rate": 8.147316449103959e-07,
      "loss": 0.0918,
      "step": 358
    },
    {
      "epoch": 4.573248407643312,
      "grad_norm": 0.12115317481092167,
      "learning_rate": 7.649286236420806e-07,
      "loss": 0.1135,
      "step": 359
    },
    {
      "epoch": 4.585987261146497,
      "grad_norm": 0.11833791739810552,
      "learning_rate": 7.166665093287539e-07,
      "loss": 0.0854,
      "step": 360
    },
    {
      "epoch": 4.598726114649682,
      "grad_norm": 0.12705837653951255,
      "learning_rate": 6.69949168211721e-07,
      "loss": 0.1109,
      "step": 361
    },
    {
      "epoch": 4.611464968152866,
      "grad_norm": 0.12508614962340267,
      "learning_rate": 6.247803427816945e-07,
      "loss": 0.0974,
      "step": 362
    },
    {
      "epoch": 4.624203821656051,
      "grad_norm": 0.1452236579744733,
      "learning_rate": 5.811636514789598e-07,
      "loss": 0.1157,
      "step": 363
    },
    {
      "epoch": 4.6369426751592355,
      "grad_norm": 0.1300660477914448,
      "learning_rate": 5.391025884035239e-07,
      "loss": 0.0915,
      "step": 364
    },
    {
      "epoch": 4.649681528662421,
      "grad_norm": 0.1200540678678256,
      "learning_rate": 4.986005230351954e-07,
      "loss": 0.0872,
      "step": 365
    },
    {
      "epoch": 4.662420382165605,
      "grad_norm": 0.11487997298035525,
      "learning_rate": 4.5966069996365993e-07,
      "loss": 0.1165,
      "step": 366
    },
    {
      "epoch": 4.67515923566879,
      "grad_norm": 0.139543063544079,
      "learning_rate": 4.22286238628562e-07,
      "loss": 0.1053,
      "step": 367
    },
    {
      "epoch": 4.687898089171974,
      "grad_norm": 0.12478098586523038,
      "learning_rate": 3.8648013306960664e-07,
      "loss": 0.0858,
      "step": 368
    },
    {
      "epoch": 4.7006369426751595,
      "grad_norm": 0.1314853226678359,
      "learning_rate": 3.522452516867048e-07,
      "loss": 0.1179,
      "step": 369
    },
    {
      "epoch": 4.713375796178344,
      "grad_norm": 0.1561600576660889,
| "learning_rate": 3.1958433701019697e-07, |
| "loss": 0.0977, |
| "step": 370 |
| }, |
| { |
| "epoch": 4.726114649681529, |
| "grad_norm": 0.1275347994190016, |
| "learning_rate": 2.8850000548115155e-07, |
| "loss": 0.112, |
| "step": 371 |
| }, |
| { |
| "epoch": 4.738853503184713, |
| "grad_norm": 0.12188252873510248, |
| "learning_rate": 2.5899474724174313e-07, |
| "loss": 0.0862, |
| "step": 372 |
| }, |
| { |
| "epoch": 4.751592356687898, |
| "grad_norm": 0.1318771763835626, |
| "learning_rate": 2.3107092593579905e-07, |
| "loss": 0.1142, |
| "step": 373 |
| }, |
| { |
| "epoch": 4.764331210191083, |
| "grad_norm": 0.14220929835983984, |
| "learning_rate": 2.0473077851942858e-07, |
| "loss": 0.1118, |
| "step": 374 |
| }, |
| { |
| "epoch": 4.777070063694268, |
| "grad_norm": 0.1219858566908112, |
| "learning_rate": 1.799764150818306e-07, |
| "loss": 0.1084, |
| "step": 375 |
| }, |
| { |
| "epoch": 4.789808917197452, |
| "grad_norm": 0.12566174956929266, |
| "learning_rate": 1.5680981867625566e-07, |
| "loss": 0.1077, |
| "step": 376 |
| }, |
| { |
| "epoch": 4.802547770700637, |
| "grad_norm": 0.12253664048339573, |
| "learning_rate": 1.3523284516113955e-07, |
| "loss": 0.0899, |
| "step": 377 |
| }, |
| { |
| "epoch": 4.8152866242038215, |
| "grad_norm": 0.12142275297990943, |
| "learning_rate": 1.1524722305144231e-07, |
| "loss": 0.0956, |
| "step": 378 |
| }, |
| { |
| "epoch": 4.828025477707007, |
| "grad_norm": 0.1235160540744103, |
| "learning_rate": 9.685455338016347e-08, |
| "loss": 0.1077, |
| "step": 379 |
| }, |
| { |
| "epoch": 4.840764331210191, |
| "grad_norm": 0.12467941962401048, |
| "learning_rate": 8.005630957010014e-08, |
| "loss": 0.0793, |
| "step": 380 |
| }, |
| { |
| "epoch": 4.853503184713376, |
| "grad_norm": 0.20795298981735666, |
| "learning_rate": 6.485383731580142e-08, |
| "loss": 0.1045, |
| "step": 381 |
| }, |
| { |
| "epoch": 4.86624203821656, |
| "grad_norm": 0.12799721551039098, |
| "learning_rate": 5.1248354475768034e-08, |
| "loss": 0.101, |
| "step": 382 |
| }, |
| { |
| "epoch": 4.8789808917197455, |
| "grad_norm": 0.1258631802102174, |
| "learning_rate": 3.924095097489922e-08, |
| "loss": 0.0944, |
| "step": 383 |
| }, |
| { |
| "epoch": 4.89171974522293, |
| "grad_norm": 0.12812579528294077, |
| "learning_rate": 2.8832588717164766e-08, |
| "loss": 0.1053, |
| "step": 384 |
| }, |
| { |
| "epoch": 4.904458598726115, |
| "grad_norm": 0.12751773731121074, |
| "learning_rate": 2.0024101508555604e-08, |
| "loss": 0.0982, |
| "step": 385 |
| }, |
| { |
| "epoch": 4.917197452229299, |
| "grad_norm": 0.13129186435975, |
| "learning_rate": 1.281619499029274e-08, |
| "loss": 0.1091, |
| "step": 386 |
| }, |
| { |
| "epoch": 4.929936305732484, |
| "grad_norm": 0.12192936592060051, |
| "learning_rate": 7.209446582292501e-09, |
| "loss": 0.1065, |
| "step": 387 |
| }, |
| { |
| "epoch": 4.942675159235669, |
| "grad_norm": 0.13025711875394239, |
| "learning_rate": 3.2043054369057523e-09, |
| "loss": 0.1076, |
| "step": 388 |
| }, |
| { |
| "epoch": 4.955414012738854, |
| "grad_norm": 0.11678098849828318, |
| "learning_rate": 8.010924029533406e-10, |
| "loss": 0.1073, |
| "step": 389 |
| }, |
| { |
| "epoch": 4.968152866242038, |
| "grad_norm": 0.12263643856967697, |
| "learning_rate": 0.0, |
| "loss": 0.1206, |
| "step": 390 |
| }, |
| { |
| "epoch": 4.968152866242038, |
| "step": 390, |
| "total_flos": 4.6317288847048704e+17, |
| "train_loss": 0.30433785139750213, |
| "train_runtime": 26201.4476, |
| "train_samples_per_second": 1.908, |
| "train_steps_per_second": 0.015 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 390, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 4.6317288847048704e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |