{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9990426041168023,
  "eval_steps": 500,
  "global_step": 348,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0057443752991862135,
      "grad_norm": 3.1296188831329346,
      "learning_rate": 2.8571428571428575e-07,
      "loss": 1.9888,
      "step": 1
    },
    {
      "epoch": 0.011488750598372427,
      "grad_norm": 3.5464422702789307,
      "learning_rate": 5.714285714285715e-07,
      "loss": 2.3002,
      "step": 2
    },
    {
      "epoch": 0.01723312589755864,
      "grad_norm": 3.974191904067993,
      "learning_rate": 8.571428571428572e-07,
      "loss": 2.4141,
      "step": 3
    },
    {
      "epoch": 0.022977501196744854,
      "grad_norm": 3.334446907043457,
      "learning_rate": 1.142857142857143e-06,
      "loss": 2.112,
      "step": 4
    },
    {
      "epoch": 0.028721876495931067,
      "grad_norm": 3.4912259578704834,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 2.2242,
      "step": 5
    },
    {
      "epoch": 0.03446625179511728,
      "grad_norm": 2.455101490020752,
      "learning_rate": 1.7142857142857145e-06,
      "loss": 1.6981,
      "step": 6
    },
    {
      "epoch": 0.040210627094303494,
      "grad_norm": 3.2974066734313965,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 2.12,
      "step": 7
    },
    {
      "epoch": 0.04595500239348971,
      "grad_norm": 4.005867004394531,
      "learning_rate": 2.285714285714286e-06,
      "loss": 2.4186,
      "step": 8
    },
    {
      "epoch": 0.05169937769267592,
      "grad_norm": 2.707257032394409,
      "learning_rate": 2.571428571428571e-06,
      "loss": 1.8714,
      "step": 9
    },
    {
      "epoch": 0.057443752991862135,
      "grad_norm": 3.6040773391723633,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 2.3221,
      "step": 10
    },
    {
      "epoch": 0.06318812829104835,
      "grad_norm": 4.2597432136535645,
      "learning_rate": 3.142857142857143e-06,
      "loss": 2.3577,
      "step": 11
    },
    {
      "epoch": 0.06893250359023456,
      "grad_norm": 3.510695695877075,
      "learning_rate": 3.428571428571429e-06,
      "loss": 2.2429,
      "step": 12
    },
    {
      "epoch": 0.07467687888942078,
      "grad_norm": 3.6239755153656006,
      "learning_rate": 3.7142857142857146e-06,
      "loss": 2.4518,
      "step": 13
    },
    {
      "epoch": 0.08042125418860699,
      "grad_norm": 3.055544137954712,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.2224,
      "step": 14
    },
    {
      "epoch": 0.0861656294877932,
      "grad_norm": 3.3157715797424316,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 2.5445,
      "step": 15
    },
    {
      "epoch": 0.09191000478697942,
      "grad_norm": 3.588505744934082,
      "learning_rate": 4.571428571428572e-06,
      "loss": 2.4997,
      "step": 16
    },
    {
      "epoch": 0.09765438008616563,
      "grad_norm": 2.1126317977905273,
      "learning_rate": 4.857142857142858e-06,
      "loss": 2.025,
      "step": 17
    },
    {
      "epoch": 0.10339875538535184,
      "grad_norm": 2.28147029876709,
      "learning_rate": 5.142857142857142e-06,
      "loss": 2.3097,
      "step": 18
    },
    {
      "epoch": 0.10914313068453806,
      "grad_norm": 1.7922435998916626,
      "learning_rate": 5.428571428571429e-06,
      "loss": 1.8681,
      "step": 19
    },
    {
      "epoch": 0.11488750598372427,
      "grad_norm": 1.936302900314331,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 2.0389,
      "step": 20
    },
    {
      "epoch": 0.12063188128291048,
      "grad_norm": 2.097909927368164,
      "learning_rate": 6e-06,
      "loss": 2.253,
      "step": 21
    },
    {
      "epoch": 0.1263762565820967,
      "grad_norm": 1.6822686195373535,
      "learning_rate": 6.285714285714286e-06,
      "loss": 1.924,
      "step": 22
    },
    {
      "epoch": 0.13212063188128292,
      "grad_norm": 1.2600412368774414,
      "learning_rate": 6.571428571428572e-06,
      "loss": 1.5142,
      "step": 23
    },
    {
      "epoch": 0.13786500718046912,
      "grad_norm": 1.5162692070007324,
      "learning_rate": 6.857142857142858e-06,
      "loss": 1.7855,
      "step": 24
    },
    {
      "epoch": 0.14360938247965535,
      "grad_norm": 1.4696098566055298,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 1.7904,
      "step": 25
    },
    {
      "epoch": 0.14935375777884155,
      "grad_norm": 1.2161146402359009,
      "learning_rate": 7.428571428571429e-06,
      "loss": 1.5942,
      "step": 26
    },
    {
      "epoch": 0.15509813307802778,
      "grad_norm": 1.3255800008773804,
      "learning_rate": 7.714285714285716e-06,
      "loss": 1.9058,
      "step": 27
    },
    {
      "epoch": 0.16084250837721398,
      "grad_norm": 1.1959238052368164,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.6797,
      "step": 28
    },
    {
      "epoch": 0.1665868836764002,
      "grad_norm": 1.0977487564086914,
      "learning_rate": 8.285714285714287e-06,
      "loss": 1.7939,
      "step": 29
    },
    {
      "epoch": 0.1723312589755864,
      "grad_norm": 1.1315933465957642,
      "learning_rate": 8.571428571428571e-06,
      "loss": 1.8744,
      "step": 30
    },
    {
      "epoch": 0.17807563427477263,
      "grad_norm": 0.8908856511116028,
      "learning_rate": 8.857142857142858e-06,
      "loss": 1.5113,
      "step": 31
    },
    {
      "epoch": 0.18382000957395883,
      "grad_norm": 0.7249335050582886,
      "learning_rate": 9.142857142857144e-06,
      "loss": 1.2153,
      "step": 32
    },
    {
      "epoch": 0.18956438487314506,
      "grad_norm": 0.8455479741096497,
      "learning_rate": 9.42857142857143e-06,
      "loss": 1.4893,
      "step": 33
    },
    {
      "epoch": 0.19530876017233126,
      "grad_norm": 0.8091306090354919,
      "learning_rate": 9.714285714285715e-06,
      "loss": 1.4885,
      "step": 34
    },
    {
      "epoch": 0.20105313547151749,
      "grad_norm": 0.8945720195770264,
      "learning_rate": 1e-05,
      "loss": 1.6195,
      "step": 35
    },
    {
      "epoch": 0.20679751077070369,
      "grad_norm": 0.8610361814498901,
      "learning_rate": 9.999748146823376e-06,
      "loss": 1.5602,
      "step": 36
    },
    {
      "epoch": 0.2125418860698899,
      "grad_norm": 0.7537204027175903,
      "learning_rate": 9.99899261266551e-06,
      "loss": 1.4256,
      "step": 37
    },
    {
      "epoch": 0.2182862613690761,
      "grad_norm": 0.77887362241745,
      "learning_rate": 9.997733473639876e-06,
      "loss": 1.4408,
      "step": 38
    },
    {
      "epoch": 0.22403063666826234,
      "grad_norm": 0.7763990163803101,
      "learning_rate": 9.995970856593739e-06,
      "loss": 1.5273,
      "step": 39
    },
    {
      "epoch": 0.22977501196744854,
      "grad_norm": 0.6893869638442993,
      "learning_rate": 9.993704939095376e-06,
      "loss": 1.335,
      "step": 40
    },
    {
      "epoch": 0.23551938726663477,
      "grad_norm": 0.6279287338256836,
      "learning_rate": 9.9909359494162e-06,
      "loss": 1.4053,
      "step": 41
    },
    {
      "epoch": 0.24126376256582097,
      "grad_norm": 0.6119675636291504,
      "learning_rate": 9.987664166507749e-06,
      "loss": 1.2891,
      "step": 42
    },
    {
      "epoch": 0.2470081378650072,
      "grad_norm": 0.5897510051727295,
      "learning_rate": 9.983889919973586e-06,
      "loss": 1.33,
      "step": 43
    },
    {
      "epoch": 0.2527525131641934,
      "grad_norm": 0.7033454179763794,
      "learning_rate": 9.979613590036108e-06,
      "loss": 1.4738,
      "step": 44
    },
    {
      "epoch": 0.2584968884633796,
      "grad_norm": 0.6067219376564026,
      "learning_rate": 9.974835607498224e-06,
      "loss": 1.2997,
      "step": 45
    },
    {
      "epoch": 0.26424126376256585,
      "grad_norm": 0.5663418173789978,
      "learning_rate": 9.969556453699966e-06,
      "loss": 1.2687,
      "step": 46
    },
    {
      "epoch": 0.26998563906175205,
      "grad_norm": 0.5423160195350647,
      "learning_rate": 9.963776660469996e-06,
      "loss": 1.1939,
      "step": 47
    },
    {
      "epoch": 0.27573001436093825,
      "grad_norm": 0.5493931770324707,
      "learning_rate": 9.957496810072027e-06,
      "loss": 1.2178,
      "step": 48
    },
    {
      "epoch": 0.28147438966012445,
      "grad_norm": 0.5915349721908569,
      "learning_rate": 9.95071753514617e-06,
      "loss": 1.3606,
      "step": 49
    },
    {
      "epoch": 0.2872187649593107,
      "grad_norm": 0.5388178825378418,
      "learning_rate": 9.943439518645193e-06,
      "loss": 1.1925,
      "step": 50
    },
    {
      "epoch": 0.2929631402584969,
      "grad_norm": 0.48433274030685425,
      "learning_rate": 9.935663493765726e-06,
      "loss": 1.1238,
      "step": 51
    },
    {
      "epoch": 0.2987075155576831,
      "grad_norm": 0.5093696713447571,
      "learning_rate": 9.9273902438744e-06,
      "loss": 1.2862,
      "step": 52
    },
    {
      "epoch": 0.3044518908568693,
      "grad_norm": 0.4982711672782898,
      "learning_rate": 9.918620602428916e-06,
      "loss": 1.1305,
      "step": 53
    },
    {
      "epoch": 0.31019626615605556,
      "grad_norm": 0.4642084836959839,
      "learning_rate": 9.909355452894098e-06,
      "loss": 1.0996,
      "step": 54
    },
    {
      "epoch": 0.31594064145524176,
      "grad_norm": 0.4599153399467468,
      "learning_rate": 9.899595728652883e-06,
      "loss": 1.1156,
      "step": 55
    },
    {
      "epoch": 0.32168501675442795,
      "grad_norm": 0.3916165828704834,
      "learning_rate": 9.889342412912296e-06,
      "loss": 1.0515,
      "step": 56
    },
    {
      "epoch": 0.32742939205361415,
      "grad_norm": 0.5593394637107849,
      "learning_rate": 9.878596538604388e-06,
      "loss": 1.2509,
      "step": 57
    },
    {
      "epoch": 0.3331737673528004,
      "grad_norm": 0.38805869221687317,
      "learning_rate": 9.867359188282193e-06,
      "loss": 1.0666,
      "step": 58
    },
    {
      "epoch": 0.3389181426519866,
      "grad_norm": 0.3749019503593445,
      "learning_rate": 9.855631494010661e-06,
      "loss": 1.0027,
      "step": 59
    },
    {
      "epoch": 0.3446625179511728,
      "grad_norm": 0.3772816061973572,
      "learning_rate": 9.843414637252615e-06,
      "loss": 1.0227,
      "step": 60
    },
    {
      "epoch": 0.350406893250359,
      "grad_norm": 0.4442100524902344,
      "learning_rate": 9.830709848749727e-06,
      "loss": 1.1194,
      "step": 61
    },
    {
      "epoch": 0.35615126854954526,
      "grad_norm": 0.4326518774032593,
      "learning_rate": 9.817518408398536e-06,
      "loss": 1.1089,
      "step": 62
    },
    {
      "epoch": 0.36189564384873146,
      "grad_norm": 0.39790305495262146,
      "learning_rate": 9.803841645121505e-06,
      "loss": 1.0984,
      "step": 63
    },
    {
      "epoch": 0.36764001914791766,
      "grad_norm": 0.334896445274353,
      "learning_rate": 9.78968093673314e-06,
      "loss": 0.9764,
      "step": 64
    },
    {
      "epoch": 0.37338439444710386,
      "grad_norm": 0.39737918972969055,
      "learning_rate": 9.775037709801206e-06,
      "loss": 1.0547,
      "step": 65
    },
    {
      "epoch": 0.3791287697462901,
      "grad_norm": 0.3748026490211487,
      "learning_rate": 9.759913439502982e-06,
      "loss": 1.029,
      "step": 66
    },
    {
      "epoch": 0.3848731450454763,
      "grad_norm": 0.41916579008102417,
      "learning_rate": 9.74430964947668e-06,
      "loss": 0.8899,
      "step": 67
    },
    {
      "epoch": 0.3906175203446625,
      "grad_norm": 0.46989861130714417,
      "learning_rate": 9.728227911667934e-06,
      "loss": 1.0217,
      "step": 68
    },
    {
      "epoch": 0.3963618956438487,
      "grad_norm": 0.4857204854488373,
      "learning_rate": 9.711669846171443e-06,
      "loss": 1.1333,
      "step": 69
    },
    {
      "epoch": 0.40210627094303497,
      "grad_norm": 0.32028084993362427,
      "learning_rate": 9.694637121067764e-06,
      "loss": 0.8897,
      "step": 70
    },
    {
      "epoch": 0.40785064624222117,
      "grad_norm": 0.3537141978740692,
      "learning_rate": 9.677131452255272e-06,
      "loss": 1.0079,
      "step": 71
    },
    {
      "epoch": 0.41359502154140737,
      "grad_norm": 0.3317051827907562,
      "learning_rate": 9.659154603277283e-06,
      "loss": 0.9405,
      "step": 72
    },
    {
      "epoch": 0.41933939684059357,
      "grad_norm": 0.3143337070941925,
      "learning_rate": 9.640708385144403e-06,
      "loss": 0.9392,
      "step": 73
    },
    {
      "epoch": 0.4250837721397798,
      "grad_norm": 0.3233448266983032,
      "learning_rate": 9.62179465615209e-06,
      "loss": 0.956,
      "step": 74
    },
    {
      "epoch": 0.430828147438966,
      "grad_norm": 0.3637179434299469,
      "learning_rate": 9.602415321693434e-06,
      "loss": 1.0286,
      "step": 75
    },
    {
      "epoch": 0.4365725227381522,
      "grad_norm": 0.33391502499580383,
      "learning_rate": 9.582572334067213e-06,
      "loss": 0.9355,
      "step": 76
    },
    {
      "epoch": 0.4423168980373384,
      "grad_norm": 0.3349672555923462,
      "learning_rate": 9.562267692281212e-06,
      "loss": 1.0103,
      "step": 77
    },
    {
      "epoch": 0.4480612733365247,
      "grad_norm": 0.3291616439819336,
      "learning_rate": 9.541503441850844e-06,
      "loss": 0.9434,
      "step": 78
    },
    {
      "epoch": 0.4538056486357109,
      "grad_norm": 0.34670519828796387,
      "learning_rate": 9.520281674593084e-06,
      "loss": 0.961,
      "step": 79
    },
    {
      "epoch": 0.4595500239348971,
      "grad_norm": 0.3279688358306885,
      "learning_rate": 9.498604528415731e-06,
      "loss": 0.9716,
      "step": 80
    },
    {
      "epoch": 0.4652943992340833,
      "grad_norm": 0.2821006774902344,
      "learning_rate": 9.476474187102033e-06,
      "loss": 0.8712,
      "step": 81
    },
    {
      "epoch": 0.47103877453326953,
      "grad_norm": 0.3655773401260376,
      "learning_rate": 9.453892880090696e-06,
      "loss": 0.9632,
      "step": 82
    },
    {
      "epoch": 0.47678314983245573,
      "grad_norm": 0.3721848428249359,
      "learning_rate": 9.430862882251279e-06,
      "loss": 0.9442,
      "step": 83
    },
    {
      "epoch": 0.48252752513164193,
      "grad_norm": 0.34458214044570923,
      "learning_rate": 9.40738651365503e-06,
      "loss": 0.9808,
      "step": 84
    },
    {
      "epoch": 0.48827190043082813,
      "grad_norm": 0.368142694234848,
      "learning_rate": 9.38346613934115e-06,
      "loss": 0.9352,
      "step": 85
    },
    {
      "epoch": 0.4940162757300144,
      "grad_norm": 0.3380177319049835,
      "learning_rate": 9.359104169078541e-06,
      "loss": 0.9109,
      "step": 86
    },
    {
      "epoch": 0.4997606510292006,
      "grad_norm": 0.34367069602012634,
      "learning_rate": 9.334303057123044e-06,
      "loss": 0.9675,
      "step": 87
    },
    {
      "epoch": 0.5055050263283868,
      "grad_norm": 0.33642375469207764,
      "learning_rate": 9.309065301970193e-06,
      "loss": 0.9452,
      "step": 88
    },
    {
      "epoch": 0.511249401627573,
      "grad_norm": 0.2975543439388275,
      "learning_rate": 9.283393446103506e-06,
      "loss": 0.9015,
      "step": 89
    },
    {
      "epoch": 0.5169937769267592,
      "grad_norm": 0.3781251311302185,
      "learning_rate": 9.257290075738365e-06,
      "loss": 0.9957,
      "step": 90
    },
    {
      "epoch": 0.5227381522259454,
      "grad_norm": 0.4667631685733795,
      "learning_rate": 9.23075782056147e-06,
      "loss": 0.8759,
      "step": 91
    },
    {
      "epoch": 0.5284825275251317,
      "grad_norm": 0.3043239712715149,
      "learning_rate": 9.20379935346592e-06,
      "loss": 0.9159,
      "step": 92
    },
    {
      "epoch": 0.5342269028243178,
      "grad_norm": 0.2928166091442108,
      "learning_rate": 9.176417390281944e-06,
      "loss": 0.8616,
      "step": 93
    },
    {
      "epoch": 0.5399712781235041,
      "grad_norm": 0.27756333351135254,
      "learning_rate": 9.148614689503307e-06,
      "loss": 0.8966,
      "step": 94
    },
    {
      "epoch": 0.5457156534226902,
      "grad_norm": 0.29098498821258545,
      "learning_rate": 9.120394052009412e-06,
      "loss": 0.8998,
      "step": 95
    },
    {
      "epoch": 0.5514600287218765,
      "grad_norm": 0.3178156614303589,
      "learning_rate": 9.091758320783139e-06,
      "loss": 0.9143,
      "step": 96
    },
    {
      "epoch": 0.5572044040210627,
      "grad_norm": 0.31461015343666077,
      "learning_rate": 9.062710380624439e-06,
      "loss": 0.871,
      "step": 97
    },
    {
      "epoch": 0.5629487793202489,
      "grad_norm": 0.3227941393852234,
      "learning_rate": 9.033253157859715e-06,
      "loss": 0.9499,
      "step": 98
    },
    {
      "epoch": 0.5686931546194351,
      "grad_norm": 0.2516985833644867,
      "learning_rate": 9.003389620047012e-06,
      "loss": 0.8377,
      "step": 99
    },
    {
      "epoch": 0.5744375299186214,
      "grad_norm": 0.24640558660030365,
      "learning_rate": 8.973122775677078e-06,
      "loss": 0.8193,
      "step": 100
    },
    {
      "epoch": 0.5801819052178075,
      "grad_norm": 0.2885333299636841,
      "learning_rate": 8.942455673870278e-06,
      "loss": 0.9239,
      "step": 101
    },
    {
      "epoch": 0.5859262805169938,
      "grad_norm": 0.27385029196739197,
      "learning_rate": 8.91139140406941e-06,
      "loss": 0.8668,
      "step": 102
    },
    {
      "epoch": 0.59167065581618,
      "grad_norm": 0.2626950144767761,
      "learning_rate": 8.879933095728485e-06,
      "loss": 0.8701,
      "step": 103
    },
    {
      "epoch": 0.5974150311153662,
      "grad_norm": 0.3030356466770172,
      "learning_rate": 8.848083917997463e-06,
      "loss": 0.8487,
      "step": 104
    },
    {
      "epoch": 0.6031594064145525,
      "grad_norm": 0.2605777382850647,
      "learning_rate": 8.815847079402972e-06,
      "loss": 0.848,
      "step": 105
    },
    {
      "epoch": 0.6089037817137386,
      "grad_norm": 0.25984668731689453,
      "learning_rate": 8.783225827525098e-06,
      "loss": 0.8525,
      "step": 106
    },
    {
      "epoch": 0.6146481570129249,
      "grad_norm": 0.2546154260635376,
      "learning_rate": 8.750223448670204e-06,
      "loss": 0.8418,
      "step": 107
    },
    {
      "epoch": 0.6203925323121111,
      "grad_norm": 0.3208469748497009,
      "learning_rate": 8.716843267539868e-06,
      "loss": 0.904,
      "step": 108
    },
    {
      "epoch": 0.6261369076112973,
      "grad_norm": 0.24169251322746277,
      "learning_rate": 8.683088646895955e-06,
      "loss": 0.8059,
      "step": 109
    },
    {
      "epoch": 0.6318812829104835,
      "grad_norm": 0.2879669964313507,
      "learning_rate": 8.648962987221837e-06,
      "loss": 0.9361,
      "step": 110
    },
    {
      "epoch": 0.6376256582096697,
      "grad_norm": 0.23367050290107727,
      "learning_rate": 8.614469726379833e-06,
      "loss": 0.8732,
      "step": 111
    },
    {
      "epoch": 0.6433700335088559,
      "grad_norm": 0.26886099576950073,
      "learning_rate": 8.579612339264867e-06,
      "loss": 0.9007,
      "step": 112
    },
    {
      "epoch": 0.6491144088080422,
      "grad_norm": 0.25653254985809326,
      "learning_rate": 8.544394337454409e-06,
      "loss": 0.8145,
      "step": 113
    },
    {
      "epoch": 0.6548587841072283,
      "grad_norm": 0.3096544146537781,
      "learning_rate": 8.508819268854713e-06,
      "loss": 0.8772,
      "step": 114
    },
    {
      "epoch": 0.6606031594064146,
      "grad_norm": 0.24701237678527832,
      "learning_rate": 8.472890717343391e-06,
      "loss": 0.8373,
      "step": 115
    },
    {
      "epoch": 0.6663475347056008,
      "grad_norm": 0.25847581028938293,
      "learning_rate": 8.436612302408376e-06,
      "loss": 0.807,
      "step": 116
    },
    {
      "epoch": 0.672091910004787,
      "grad_norm": 0.2492266595363617,
      "learning_rate": 8.399987678783285e-06,
      "loss": 0.841,
      "step": 117
    },
    {
      "epoch": 0.6778362853039732,
      "grad_norm": 0.27713751792907715,
      "learning_rate": 8.36302053607924e-06,
      "loss": 0.8967,
      "step": 118
    },
    {
      "epoch": 0.6835806606031594,
      "grad_norm": 0.3271845877170563,
      "learning_rate": 8.325714598413169e-06,
      "loss": 0.934,
      "step": 119
    },
    {
      "epoch": 0.6893250359023456,
      "grad_norm": 0.2868073284626007,
      "learning_rate": 8.288073624032634e-06,
      "loss": 0.833,
      "step": 120
    },
    {
      "epoch": 0.6950694112015319,
      "grad_norm": 0.3758274018764496,
      "learning_rate": 8.250101404937223e-06,
      "loss": 0.9077,
      "step": 121
    },
    {
      "epoch": 0.700813786500718,
      "grad_norm": 0.2217041254043579,
      "learning_rate": 8.211801766496537e-06,
      "loss": 0.7942,
      "step": 122
    },
    {
      "epoch": 0.7065581617999043,
      "grad_norm": 0.2789182662963867,
      "learning_rate": 8.17317856706482e-06,
      "loss": 0.8027,
      "step": 123
    },
    {
      "epoch": 0.7123025370990905,
      "grad_norm": 0.2704261839389801,
      "learning_rate": 8.13423569759226e-06,
      "loss": 0.8983,
      "step": 124
    },
    {
      "epoch": 0.7180469123982767,
      "grad_norm": 0.2627686560153961,
      "learning_rate": 8.094977081233006e-06,
      "loss": 0.9179,
      "step": 125
    },
    {
      "epoch": 0.7237912876974629,
      "grad_norm": 0.23620633780956268,
      "learning_rate": 8.055406672949957e-06,
      "loss": 0.7996,
      "step": 126
    },
    {
      "epoch": 0.7295356629966491,
      "grad_norm": 0.22714516520500183,
      "learning_rate": 8.015528459116321e-06,
      "loss": 0.7817,
      "step": 127
    },
    {
      "epoch": 0.7352800382958353,
      "grad_norm": 0.21553781628608704,
      "learning_rate": 7.975346457114034e-06,
      "loss": 0.8167,
      "step": 128
    },
    {
      "epoch": 0.7410244135950216,
      "grad_norm": 0.2822798192501068,
      "learning_rate": 7.934864714929036e-06,
      "loss": 0.859,
      "step": 129
    },
    {
      "epoch": 0.7467687888942077,
      "grad_norm": 0.242783784866333,
      "learning_rate": 7.894087310743468e-06,
      "loss": 0.8587,
      "step": 130
    },
    {
      "epoch": 0.752513164193394,
      "grad_norm": 0.23741471767425537,
      "learning_rate": 7.853018352524845e-06,
      "loss": 0.7992,
      "step": 131
    },
    {
      "epoch": 0.7582575394925802,
      "grad_norm": 0.24776799976825714,
      "learning_rate": 7.811661977612202e-06,
      "loss": 0.8193,
      "step": 132
    },
    {
      "epoch": 0.7640019147917664,
      "grad_norm": 0.25223901867866516,
      "learning_rate": 7.770022352299294e-06,
      "loss": 0.7981,
      "step": 133
    },
    {
      "epoch": 0.7697462900909526,
      "grad_norm": 0.26584818959236145,
      "learning_rate": 7.728103671414889e-06,
      "loss": 0.7825,
      "step": 134
    },
    {
      "epoch": 0.7754906653901388,
      "grad_norm": 0.2559310793876648,
      "learning_rate": 7.685910157900158e-06,
      "loss": 0.8111,
      "step": 135
    },
    {
      "epoch": 0.781235040689325,
      "grad_norm": 0.2626366913318634,
      "learning_rate": 7.643446062383273e-06,
      "loss": 0.8573,
      "step": 136
    },
    {
      "epoch": 0.7869794159885113,
      "grad_norm": 0.23376286029815674,
      "learning_rate": 7.600715662751166e-06,
      "loss": 0.7828,
      "step": 137
    },
    {
      "epoch": 0.7927237912876974,
      "grad_norm": 0.29543206095695496,
      "learning_rate": 7.557723263718596e-06,
      "loss": 0.8643,
      "step": 138
    },
    {
      "epoch": 0.7984681665868837,
      "grad_norm": 0.24273599684238434,
      "learning_rate": 7.514473196394467e-06,
      "loss": 0.8352,
      "step": 139
    },
    {
      "epoch": 0.8042125418860699,
      "grad_norm": 0.29526424407958984,
      "learning_rate": 7.470969817845518e-06,
      "loss": 0.8839,
      "step": 140
    },
    {
      "epoch": 0.8099569171852561,
      "grad_norm": 0.3149056136608124,
      "learning_rate": 7.427217510657383e-06,
      "loss": 0.8001,
      "step": 141
    },
    {
      "epoch": 0.8157012924844423,
      "grad_norm": 0.24797004461288452,
      "learning_rate": 7.383220682493081e-06,
      "loss": 0.8614,
      "step": 142
    },
    {
      "epoch": 0.8214456677836285,
      "grad_norm": 0.22716785967350006,
      "learning_rate": 7.338983765648985e-06,
      "loss": 0.7754,
      "step": 143
    },
    {
      "epoch": 0.8271900430828147,
      "grad_norm": 0.2676171064376831,
      "learning_rate": 7.294511216608308e-06,
      "loss": 0.8503,
      "step": 144
    },
    {
      "epoch": 0.832934418382001,
      "grad_norm": 0.27221742272377014,
      "learning_rate": 7.249807515592149e-06,
      "loss": 0.842,
      "step": 145
    },
    {
      "epoch": 0.8386787936811871,
      "grad_norm": 0.33359500765800476,
      "learning_rate": 7.2048771661081515e-06,
      "loss": 0.8428,
      "step": 146
    },
    {
      "epoch": 0.8444231689803734,
      "grad_norm": 0.2143690437078476,
      "learning_rate": 7.159724694496815e-06,
      "loss": 0.7765,
      "step": 147
    },
    {
      "epoch": 0.8501675442795597,
      "grad_norm": 0.22745496034622192,
      "learning_rate": 7.114354649475499e-06,
      "loss": 0.824,
      "step": 148
    },
    {
      "epoch": 0.8559119195787458,
      "grad_norm": 0.30718111991882324,
      "learning_rate": 7.068771601680191e-06,
      "loss": 0.7685,
      "step": 149
    },
    {
      "epoch": 0.861656294877932,
      "grad_norm": 0.21647170186042786,
      "learning_rate": 7.022980143205046e-06,
      "loss": 0.8267,
      "step": 150
    },
    {
      "epoch": 0.8674006701771182,
      "grad_norm": 0.268690824508667,
      "learning_rate": 6.976984887139775e-06,
      "loss": 0.8612,
      "step": 151
    },
    {
      "epoch": 0.8731450454763044,
      "grad_norm": 0.21284270286560059,
      "learning_rate": 6.930790467104916e-06,
      "loss": 0.7249,
      "step": 152
    },
    {
      "epoch": 0.8788894207754907,
      "grad_norm": 0.24718178808689117,
      "learning_rate": 6.884401536785045e-06,
      "loss": 0.8467,
      "step": 153
    },
    {
      "epoch": 0.8846337960746768,
      "grad_norm": 0.23613935708999634,
      "learning_rate": 6.837822769459942e-06,
      "loss": 0.7392,
      "step": 154
    },
    {
      "epoch": 0.8903781713738631,
      "grad_norm": 0.21455912292003632,
      "learning_rate": 6.791058857533814e-06,
      "loss": 0.7806,
      "step": 155
    },
    {
      "epoch": 0.8961225466730494,
      "grad_norm": 0.2240087240934372,
      "learning_rate": 6.744114512062571e-06,
      "loss": 0.7993,
      "step": 156
    },
    {
      "epoch": 0.9018669219722355,
      "grad_norm": 0.22374065220355988,
      "learning_rate": 6.696994462279223e-06,
      "loss": 0.8009,
      "step": 157
    },
    {
      "epoch": 0.9076112972714218,
      "grad_norm": 0.22769568860530853,
      "learning_rate": 6.6497034551174585e-06,
      "loss": 0.848,
      "step": 158
    },
    {
      "epoch": 0.9133556725706079,
      "grad_norm": 0.2437426596879959,
      "learning_rate": 6.602246254733431e-06,
      "loss": 0.7982,
      "step": 159
    },
    {
      "epoch": 0.9191000478697942,
      "grad_norm": 0.24921968579292297,
      "learning_rate": 6.554627642025807e-06,
      "loss": 0.7817,
      "step": 160
    },
    {
      "epoch": 0.9248444231689804,
      "grad_norm": 0.24444496631622314,
      "learning_rate": 6.506852414154138e-06,
      "loss": 0.8371,
      "step": 161
    },
    {
      "epoch": 0.9305887984681666,
      "grad_norm": 0.23661524057388306,
      "learning_rate": 6.4589253840555856e-06,
      "loss": 0.7063,
      "step": 162
    },
    {
      "epoch": 0.9363331737673528,
      "grad_norm": 0.37622445821762085,
      "learning_rate": 6.41085137996006e-06,
      "loss": 0.8171,
      "step": 163
    },
    {
      "epoch": 0.9420775490665391,
      "grad_norm": 0.4032360017299652,
      "learning_rate": 6.362635244903818e-06,
      "loss": 0.8375,
      "step": 164
    },
    {
      "epoch": 0.9478219243657252,
      "grad_norm": 0.2710743844509125,
      "learning_rate": 6.314281836241573e-06,
      "loss": 0.8012,
      "step": 165
    },
    {
      "epoch": 0.9535662996649115,
      "grad_norm": 0.2469240427017212,
      "learning_rate": 6.265796025157154e-06,
      "loss": 0.7537,
      "step": 166
    },
    {
      "epoch": 0.9593106749640976,
      "grad_norm": 0.2740936577320099,
      "learning_rate": 6.217182696172776e-06,
      "loss": 0.7829,
      "step": 167
    },
    {
      "epoch": 0.9650550502632839,
      "grad_norm": 0.27178484201431274,
      "learning_rate": 6.168446746656973e-06,
      "loss": 0.8111,
      "step": 168
    },
    {
      "epoch": 0.9707994255624701,
      "grad_norm": 0.25783851742744446,
      "learning_rate": 6.119593086331225e-06,
      "loss": 0.8109,
      "step": 169
    },
    {
      "epoch": 0.9765438008616563,
      "grad_norm": 0.24574922025203705,
      "learning_rate": 6.070626636775349e-06,
      "loss": 0.8151,
      "step": 170
    },
    {
      "epoch": 0.9822881761608425,
      "grad_norm": 0.25368213653564453,
      "learning_rate": 6.021552330931693e-06,
      "loss": 0.8623,
      "step": 171
    },
    {
      "epoch": 0.9880325514600288,
      "grad_norm": 0.2357417792081833,
      "learning_rate": 5.972375112608182e-06,
      "loss": 0.7879,
      "step": 172
    },
    {
      "epoch": 0.9937769267592149,
      "grad_norm": 0.2542829215526581,
      "learning_rate": 5.923099935980278e-06,
      "loss": 0.8686,
      "step": 173
    },
    {
      "epoch": 0.9995213020584012,
      "grad_norm": 0.22280263900756836,
      "learning_rate": 5.8737317650918905e-06,
      "loss": 0.8169,
      "step": 174
    },
    {
      "epoch": 1.0052656773575874,
      "grad_norm": 0.6747936010360718,
      "learning_rate": 5.824275573355278e-06,
      "loss": 1.3413,
      "step": 175
    },
    {
      "epoch": 1.0110100526567736,
      "grad_norm": 0.21218319237232208,
      "learning_rate": 5.7747363430500395e-06,
      "loss": 0.7765,
      "step": 176
    },
    {
      "epoch": 1.0167544279559597,
      "grad_norm": 0.36771947145462036,
      "learning_rate": 5.725119064821185e-06,
      "loss": 0.8505,
      "step": 177
    },
    {
      "epoch": 1.022498803255146,
      "grad_norm": 0.22214052081108093,
      "learning_rate": 5.675428737176367e-06,
      "loss": 0.708,
      "step": 178
    },
    {
      "epoch": 1.0282431785543322,
      "grad_norm": 0.2072789967060089,
      "learning_rate": 5.625670365982332e-06,
      "loss": 0.6803,
      "step": 179
    },
    {
      "epoch": 1.0339875538535184,
      "grad_norm": 0.25675857067108154,
      "learning_rate": 5.575848963960621e-06,
      "loss": 0.8374,
      "step": 180
    },
    {
      "epoch": 1.0397319291527047,
      "grad_norm": 0.24163125455379486,
      "learning_rate": 5.525969550182577e-06,
      "loss": 0.7489,
      "step": 181
    },
    {
      "epoch": 1.0454763044518909,
      "grad_norm": 0.21136772632598877,
      "learning_rate": 5.4760371495637256e-06,
      "loss": 0.7505,
      "step": 182
    },
    {
      "epoch": 1.051220679751077,
      "grad_norm": 0.21455495059490204,
      "learning_rate": 5.426056792357552e-06,
      "loss": 0.7782,
      "step": 183
    },
    {
      "epoch": 1.0569650550502634,
      "grad_norm": 0.22327716648578644,
      "learning_rate": 5.376033513648743e-06,
      "loss": 0.7899,
      "step": 184
    },
    {
      "epoch": 1.0627094303494495,
      "grad_norm": 0.2287086695432663,
      "learning_rate": 5.325972352845965e-06,
      "loss": 0.7806,
      "step": 185
    },
    {
      "epoch": 1.0684538056486357,
      "grad_norm": 0.22334890067577362,
      "learning_rate": 5.2758783531741655e-06,
      "loss": 0.8018,
      "step": 186
    },
    {
      "epoch": 1.0741981809478218,
      "grad_norm": 0.2140316665172577,
      "learning_rate": 5.225756561166521e-06,
      "loss": 0.8024,
      "step": 187
    },
    {
      "epoch": 1.0799425562470082,
      "grad_norm": 0.23731206357479095,
      "learning_rate": 5.175612026156045e-06,
      "loss": 0.8036,
      "step": 188
    },
    {
      "epoch": 1.0856869315461943,
      "grad_norm": 0.20528081059455872,
      "learning_rate": 5.125449799766916e-06,
      "loss": 0.7181,
      "step": 189
    },
    {
      "epoch": 1.0914313068453805,
      "grad_norm": 0.23563255369663239,
      "learning_rate": 5.075274935405554e-06,
      "loss": 0.7915,
      "step": 190
    },
    {
      "epoch": 1.0971756821445668,
      "grad_norm": 0.2743735611438751,
      "learning_rate": 5.025092487751552e-06,
      "loss": 0.7791,
      "step": 191
    },
    {
      "epoch": 1.102920057443753,
      "grad_norm": 0.19813962280750275,
      "learning_rate": 4.974907512248451e-06,
      "loss": 0.6926,
      "step": 192
    },
    {
      "epoch": 1.1086644327429391,
      "grad_norm": 0.21038314700126648,
      "learning_rate": 4.924725064594448e-06,
      "loss": 0.7868,
      "step": 193
    },
    {
      "epoch": 1.1144088080421255,
      "grad_norm": 0.20419326424598694,
      "learning_rate": 4.874550200233085e-06,
      "loss": 0.7707,
      "step": 194
    },
    {
      "epoch": 1.1201531833413116,
      "grad_norm": 0.22805064916610718,
      "learning_rate": 4.824387973843957e-06,
      "loss": 0.7975,
      "step": 195
    },
    {
      "epoch": 1.1258975586404978,
      "grad_norm": 0.20915883779525757,
      "learning_rate": 4.7742434388334815e-06,
      "loss": 0.6876,
      "step": 196
    },
    {
      "epoch": 1.1316419339396842,
      "grad_norm": 0.2263288050889969,
      "learning_rate": 4.724121646825838e-06,
      "loss": 0.7852,
      "step": 197
    },
    {
      "epoch": 1.1373863092388703,
      "grad_norm": 0.20982590317726135,
      "learning_rate": 4.674027647154037e-06,
      "loss": 0.7389,
      "step": 198
    },
    {
      "epoch": 1.1431306845380564,
      "grad_norm": 0.17644065618515015,
      "learning_rate": 4.623966486351257e-06,
      "loss": 0.6664,
      "step": 199
    },
    {
      "epoch": 1.1488750598372426,
      "grad_norm": 0.2019232213497162,
      "learning_rate": 4.573943207642452e-06,
      "loss": 0.7277,
      "step": 200
    },
    {
      "epoch": 1.154619435136429,
      "grad_norm": 0.21619820594787598,
      "learning_rate": 4.523962850436276e-06,
      "loss": 0.7761,
      "step": 201
    },
    {
      "epoch": 1.160363810435615,
      "grad_norm": 0.21579161286354065,
      "learning_rate": 4.474030449817423e-06,
      "loss": 0.7974,
      "step": 202
    },
    {
      "epoch": 1.1661081857348012,
      "grad_norm": 0.20451530814170837,
      "learning_rate": 4.424151036039381e-06,
      "loss": 0.7373,
      "step": 203
    },
    {
      "epoch": 1.1718525610339876,
      "grad_norm": 0.2192217856645584,
      "learning_rate": 4.3743296340176694e-06,
      "loss": 0.8093,
      "step": 204
    },
    {
      "epoch": 1.1775969363331737,
      "grad_norm": 0.22574208676815033,
      "learning_rate": 4.3245712628236356e-06,
      "loss": 0.8279,
      "step": 205
    },
    {
      "epoch": 1.18334131163236,
      "grad_norm": 0.19707462191581726,
      "learning_rate": 4.274880935178817e-06,
      "loss": 0.718,
      "step": 206
    },
    {
      "epoch": 1.1890856869315463,
      "grad_norm": 0.20628628134727478,
      "learning_rate": 4.225263656949961e-06,
      "loss": 0.7298,
      "step": 207
    },
    {
      "epoch": 1.1948300622307324,
      "grad_norm": 0.19289720058441162,
      "learning_rate": 4.175724426644724e-06,
      "loss": 0.7334,
      "step": 208
    },
    {
      "epoch": 1.2005744375299185,
      "grad_norm": 0.21177536249160767,
      "learning_rate": 4.12626823490811e-06,
      "loss": 0.7272,
      "step": 209
    },
    {
      "epoch": 1.206318812829105,
      "grad_norm": 0.21567998826503754,
      "learning_rate": 4.076900064019721e-06,
      "loss": 0.8351,
      "step": 210
    },
    {
      "epoch": 1.212063188128291,
      "grad_norm": 0.2068811058998108,
      "learning_rate": 4.02762488739182e-06,
      "loss": 0.7878,
      "step": 211
    },
    {
      "epoch": 1.2178075634274772,
      "grad_norm": 0.21340332925319672,
      "learning_rate": 3.978447669068309e-06,
      "loss": 0.7817,
      "step": 212
    },
    {
      "epoch": 1.2235519387266636,
      "grad_norm": 0.19652466475963593,
      "learning_rate": 3.929373363224654e-06,
      "loss": 0.7094,
      "step": 213
    },
    {
      "epoch": 1.2292963140258497,
      "grad_norm": 0.20111176371574402,
      "learning_rate": 3.8804069136687775e-06,
      "loss": 0.6904,
      "step": 214
    },
    {
      "epoch": 1.2350406893250359,
      "grad_norm": 0.20526321232318878,
      "learning_rate": 3.8315532533430285e-06,
      "loss": 0.7855,
      "step": 215
    },
    {
      "epoch": 1.2407850646242222,
      "grad_norm": 0.19134995341300964,
      "learning_rate": 3.7828173038272266e-06,
      "loss": 0.7904,
      "step": 216
    },
    {
      "epoch": 1.2465294399234084,
      "grad_norm": 0.18953163921833038,
      "learning_rate": 3.7342039748428473e-06,
      "loss": 0.6998,
      "step": 217
    },
    {
      "epoch": 1.2522738152225945,
      "grad_norm": 0.20726385712623596,
      "learning_rate": 3.685718163758427e-06,
      "loss": 0.7411,
      "step": 218
    },
    {
      "epoch": 1.2580181905217809,
      "grad_norm": 0.2019708901643753,
      "learning_rate": 3.6373647550961834e-06,
      "loss": 0.758,
      "step": 219
    },
    {
      "epoch": 1.263762565820967,
      "grad_norm": 0.21998775005340576,
      "learning_rate": 3.5891486200399413e-06,
      "loss": 0.849,
      "step": 220
    },
    {
      "epoch": 1.2695069411201532,
      "grad_norm": 0.2206163853406906,
      "learning_rate": 3.5410746159444165e-06,
      "loss": 0.7877,
      "step": 221
    },
    {
      "epoch": 1.2752513164193395,
      "grad_norm": 0.1965230256319046,
      "learning_rate": 3.4931475858458634e-06,
      "loss": 0.7374,
      "step": 222
    },
    {
      "epoch": 1.2809956917185257,
      "grad_norm": 0.21714457869529724,
      "learning_rate": 3.445372357974194e-06,
      "loss": 0.8186,
      "step": 223
    },
    {
      "epoch": 1.2867400670177118,
      "grad_norm": 0.1930633783340454,
      "learning_rate": 3.397753745266571e-06,
      "loss": 0.7274,
      "step": 224
    },
    {
      "epoch": 1.292484442316898,
      "grad_norm": 0.19334417581558228,
      "learning_rate": 3.350296544882543e-06,
      "loss": 0.707,
      "step": 225
    },
    {
      "epoch": 1.2982288176160843,
      "grad_norm": 0.19976544380187988,
      "learning_rate": 3.303005537720778e-06,
      "loss": 0.7231,
      "step": 226
    },
    {
      "epoch": 1.3039731929152705,
      "grad_norm": 0.20306555926799774,
      "learning_rate": 3.255885487937431e-06,
      "loss": 0.7939,
      "step": 227
    },
    {
      "epoch": 1.3097175682144566,
      "grad_norm": 0.24878649413585663,
      "learning_rate": 3.2089411424661864e-06,
      "loss": 0.8701,
      "step": 228
    },
    {
      "epoch": 1.3154619435136428,
      "grad_norm": 0.210804745554924,
      "learning_rate": 3.1621772305400603e-06,
      "loss": 0.7847,
      "step": 229
    },
    {
      "epoch": 1.3212063188128291,
      "grad_norm": 0.2029287964105606,
      "learning_rate": 3.1155984632149565e-06,
      "loss": 0.7605,
      "step": 230
    },
    {
      "epoch": 1.3269506941120153,
      "grad_norm": 0.20195455849170685,
      "learning_rate": 3.0692095328950843e-06,
      "loss": 0.6717,
      "step": 231
    },
    {
      "epoch": 1.3326950694112014,
      "grad_norm": 0.22542132437229156,
      "learning_rate": 3.023015112860228e-06,
      "loss": 0.8526,
      "step": 232
    },
    {
      "epoch": 1.3384394447103878,
      "grad_norm": 0.2041286677122116,
      "learning_rate": 2.977019856794955e-06,
      "loss": 0.7749,
      "step": 233
    },
    {
      "epoch": 1.344183820009574,
      "grad_norm": 0.20100484788417816,
      "learning_rate": 2.93122839831981e-06,
      "loss": 0.804,
      "step": 234
    },
    {
      "epoch": 1.34992819530876,
      "grad_norm": 0.18580438196659088,
      "learning_rate": 2.8856453505245018e-06,
      "loss": 0.7337,
      "step": 235
    },
    {
      "epoch": 1.3556725706079464,
      "grad_norm": 0.24488739669322968,
      "learning_rate": 2.840275305503186e-06,
      "loss": 0.8759,
      "step": 236
    },
    {
      "epoch": 1.3614169459071326,
      "grad_norm": 0.20020714402198792,
      "learning_rate": 2.7951228338918506e-06,
      "loss": 0.7147,
      "step": 237
    },
    {
      "epoch": 1.3671613212063187,
      "grad_norm": 0.19091568887233734,
      "learning_rate": 2.7501924844078538e-06,
      "loss": 0.7591,
      "step": 238
    },
    {
      "epoch": 1.372905696505505,
      "grad_norm": 0.2303103655576706,
      "learning_rate": 2.7054887833916933e-06,
      "loss": 0.8035,
      "step": 239
    },
    {
      "epoch": 1.3786500718046912,
      "grad_norm": 0.21847979724407196,
      "learning_rate": 2.6610162343510183e-06,
      "loss": 0.803,
      "step": 240
    },
    {
      "epoch": 1.3843944471038774,
      "grad_norm": 0.18968315422534943,
      "learning_rate": 2.616779317506921e-06,
      "loss": 0.7312,
      "step": 241
    },
    {
      "epoch": 1.3901388224030637,
      "grad_norm": 0.19670367240905762,
      "learning_rate": 2.572782489342617e-06,
      "loss": 0.6915,
      "step": 242
    },
    {
      "epoch": 1.39588319770225,
      "grad_norm": 0.2328338474035263,
      "learning_rate": 2.5290301821544826e-06,
      "loss": 0.7976,
      "step": 243
    },
    {
      "epoch": 1.401627573001436,
      "grad_norm": 0.20583079755306244,
      "learning_rate": 2.4855268036055346e-06,
      "loss": 0.8022,
      "step": 244
    },
    {
      "epoch": 1.4073719483006224,
      "grad_norm": 0.21840232610702515,
      "learning_rate": 2.4422767362814045e-06,
      "loss": 0.7976,
      "step": 245
    },
    {
      "epoch": 1.4131163235998085,
      "grad_norm": 0.2045024186372757,
      "learning_rate": 2.3992843372488357e-06,
      "loss": 0.736,
      "step": 246
    },
    {
      "epoch": 1.4188606988989947,
      "grad_norm": 0.2078116089105606,
      "learning_rate": 2.3565539376167295e-06,
      "loss": 0.8003,
      "step": 247
    },
    {
      "epoch": 1.424605074198181,
      "grad_norm": 0.2091427445411682,
      "learning_rate": 2.3140898420998425e-06,
      "loss": 0.7482,
      "step": 248
    },
    {
      "epoch": 1.4303494494973672,
      "grad_norm": 0.1906297653913498,
      "learning_rate": 2.271896328585114e-06,
      "loss": 0.7527,
      "step": 249
    },
    {
      "epoch": 1.4360938247965533,
      "grad_norm": 0.21409378945827484,
      "learning_rate": 2.2299776477007073e-06,
      "loss": 0.7525,
      "step": 250
    },
    {
      "epoch": 1.4418382000957397,
      "grad_norm": 0.25960367918014526,
      "learning_rate": 2.1883380223878004e-06,
      "loss": 0.8014,
      "step": 251
    },
    {
      "epoch": 1.4475825753949259,
      "grad_norm": 0.22109520435333252,
      "learning_rate": 2.1469816474751566e-06,
      "loss": 0.8214,
      "step": 252
    },
    {
      "epoch": 1.453326950694112,
      "grad_norm": 0.19385161995887756,
      "learning_rate": 2.105912689256533e-06,
      "loss": 0.728,
      "step": 253
    },
    {
      "epoch": 1.4590713259932984,
      "grad_norm": 0.21480326354503632,
      "learning_rate": 2.0651352850709656e-06,
      "loss": 0.7423,
      "step": 254
    },
    {
      "epoch": 1.4648157012924845,
      "grad_norm": 0.19990293681621552,
      "learning_rate": 2.0246535428859652e-06,
      "loss": 0.7745,
      "step": 255
    },
    {
      "epoch": 1.4705600765916707,
      "grad_norm": 0.21575427055358887,
      "learning_rate": 1.984471540883679e-06,
      "loss": 0.7902,
      "step": 256
    },
    {
      "epoch": 1.4763044518908568,
      "grad_norm": 0.2110794484615326,
      "learning_rate": 1.9445933270500444e-06,
      "loss": 0.801,
      "step": 257
    },
    {
      "epoch": 1.4820488271900432,
      "grad_norm": 0.17699772119522095,
      "learning_rate": 1.905022918766995e-06,
      "loss": 0.7274,
      "step": 258
    },
    {
      "epoch": 1.4877932024892293,
      "grad_norm": 0.2559460699558258,
      "learning_rate": 1.8657643024077431e-06,
      "loss": 0.8218,
      "step": 259
    },
    {
      "epoch": 1.4935375777884154,
      "grad_norm": 0.23164112865924835,
      "learning_rate": 1.8268214329351797e-06,
      "loss": 0.7549,
      "step": 260
    },
    {
      "epoch": 1.4992819530876016,
      "grad_norm": 0.21822740137577057,
      "learning_rate": 1.7881982335034625e-06,
      "loss": 0.6801,
      "step": 261
    },
    {
      "epoch": 1.505026328386788,
      "grad_norm": 0.21753251552581787,
      "learning_rate": 1.7498985950627794e-06,
      "loss": 0.7015,
      "step": 262
    },
    {
      "epoch": 1.510770703685974,
      "grad_norm": 0.21796390414237976,
      "learning_rate": 1.7119263759673677e-06,
      "loss": 0.7475,
      "step": 263
    },
    {
      "epoch": 1.5165150789851602,
      "grad_norm": 0.2237701267004013,
      "learning_rate": 1.6742854015868349e-06,
      "loss": 0.8307,
      "step": 264
    },
    {
      "epoch": 1.5222594542843466,
      "grad_norm": 0.21838471293449402,
      "learning_rate": 1.6369794639207626e-06,
      "loss": 0.7456,
      "step": 265
    },
    {
      "epoch": 1.5280038295835328,
      "grad_norm": 0.24893999099731445,
      "learning_rate": 1.6000123212167158e-06,
      "loss": 0.7324,
      "step": 266
    },
    {
      "epoch": 1.533748204882719,
      "grad_norm": 0.19973154366016388,
      "learning_rate": 1.5633876975916261e-06,
      "loss": 0.7561,
      "step": 267
    },
    {
      "epoch": 1.5394925801819053,
      "grad_norm": 0.2008812427520752,
      "learning_rate": 1.5271092826566108e-06,
      "loss": 0.7548,
      "step": 268
    },
    {
      "epoch": 1.5452369554810914,
      "grad_norm": 0.25023627281188965,
      "learning_rate": 1.4911807311452874e-06,
      "loss": 0.7583,
      "step": 269
    },
    {
      "epoch": 1.5509813307802776,
      "grad_norm": 0.2000676989555359,
      "learning_rate": 1.4556056625455922e-06,
      "loss": 0.7326,
      "step": 270
    },
    {
      "epoch": 1.556725706079464,
      "grad_norm": 0.22878780961036682,
      "learning_rate": 1.4203876607351347e-06,
      "loss": 0.871,
      "step": 271
    },
    {
      "epoch": 1.56247008137865,
      "grad_norm": 0.18908736109733582,
      "learning_rate": 1.3855302736201686e-06,
      "loss": 0.7398,
      "step": 272
    },
    {
      "epoch": 1.5682144566778362,
      "grad_norm": 0.22222086787223816,
      "learning_rate": 1.3510370127781635e-06,
      "loss": 0.7862,
      "step": 273
    },
    {
      "epoch": 1.5739588319770226,
      "grad_norm": 0.24614760279655457,
      "learning_rate": 1.3169113531040462e-06,
      "loss": 0.8017,
      "step": 274
    },
    {
      "epoch": 1.5797032072762087,
      "grad_norm": 0.26479142904281616,
      "learning_rate": 1.2831567324601325e-06,
      "loss": 0.7775,
      "step": 275
    },
    {
      "epoch": 1.5854475825753949,
      "grad_norm": 0.2094123810529709,
      "learning_rate": 1.2497765513297976e-06,
      "loss": 0.7929,
      "step": 276
    },
    {
      "epoch": 1.5911919578745812,
      "grad_norm": 0.19950413703918457,
      "learning_rate": 1.2167741724749026e-06,
      "loss": 0.7876,
      "step": 277
    },
    {
      "epoch": 1.5969363331737674,
      "grad_norm": 0.20557594299316406,
      "learning_rate": 1.1841529205970281e-06,
      "loss": 0.6664,
      "step": 278
    },
    {
      "epoch": 1.6026807084729535,
      "grad_norm": 0.20220345258712769,
      "learning_rate": 1.1519160820025382e-06,
      "loss": 0.7648,
      "step": 279
    },
    {
      "epoch": 1.6084250837721399,
      "grad_norm": 0.19917257130146027,
      "learning_rate": 1.1200669042715163e-06,
      "loss": 0.7629,
      "step": 280
    },
    {
      "epoch": 1.614169459071326,
      "grad_norm": 0.20529669523239136,
      "learning_rate": 1.0886085959305915e-06,
      "loss": 0.8006,
      "step": 281
    },
    {
      "epoch": 1.6199138343705122,
      "grad_norm": 0.20168288052082062,
      "learning_rate": 1.057544326129723e-06,
      "loss": 0.7828,
      "step": 282
    },
    {
      "epoch": 1.6256582096696985,
      "grad_norm": 0.19760866463184357,
      "learning_rate": 1.026877224322923e-06,
      "loss": 0.7797,
      "step": 283
    },
    {
      "epoch": 1.6314025849688847,
      "grad_norm": 0.1967737078666687,
      "learning_rate": 9.966103799529891e-07,
      "loss": 0.8298,
      "step": 284
    },
    {
      "epoch": 1.6371469602680708,
      "grad_norm": 0.20345833897590637,
      "learning_rate": 9.66746842140287e-07,
      "loss": 0.7233,
      "step": 285
    },
    {
      "epoch": 1.6428913355672572,
      "grad_norm": 0.19053655862808228,
      "learning_rate": 9.372896193755621e-07,
      "loss": 0.7482,
      "step": 286
    },
    {
      "epoch": 1.6486357108664431,
      "grad_norm": 0.2005811333656311,
      "learning_rate": 9.082416792168608e-07,
      "loss": 0.8222,
      "step": 287
    },
    {
      "epoch": 1.6543800861656295,
      "grad_norm": 0.18003535270690918,
      "learning_rate": 8.7960594799059e-07,
      "loss": 0.694,
      "step": 288
    },
    {
      "epoch": 1.6601244614648158,
      "grad_norm": 0.21619072556495667,
      "learning_rate": 8.513853104966951e-07,
      "loss": 0.8044,
      "step": 289
    },
    {
      "epoch": 1.6658688367640018,
      "grad_norm": 0.2093651443719864,
      "learning_rate": 8.235826097180566e-07,
      "loss": 0.7665,
      "step": 290
    },
    {
      "epoch": 1.6716132120631881,
      "grad_norm": 0.19078946113586426,
      "learning_rate": 7.962006465340821e-07,
      "loss": 0.6924,
      "step": 291
    },
    {
      "epoch": 1.6773575873623745,
      "grad_norm": 0.21213120222091675,
      "learning_rate": 7.692421794385313e-07,
      "loss": 0.7339,
      "step": 292
    },
    {
      "epoch": 1.6831019626615604,
      "grad_norm": 0.21204614639282227,
      "learning_rate": 7.427099242616348e-07,
      "loss": 0.7959,
      "step": 293
    },
    {
      "epoch": 1.6888463379607468,
      "grad_norm": 0.23744948208332062,
      "learning_rate": 7.166065538964955e-07,
      "loss": 0.8433,
      "step": 294
    },
    {
      "epoch": 1.694590713259933,
      "grad_norm": 0.20437046885490417,
      "learning_rate": 6.909346980298093e-07,
      "loss": 0.7365,
      "step": 295
    },
    {
      "epoch": 1.700335088559119,
      "grad_norm": 0.18826481699943542,
      "learning_rate": 6.656969428769567e-07,
      "loss": 0.7118,
      "step": 296
    },
    {
      "epoch": 1.7060794638583054,
      "grad_norm": 0.276808500289917,
      "learning_rate": 6.408958309214597e-07,
      "loss": 0.7528,
      "step": 297
    },
    {
      "epoch": 1.7118238391574916,
      "grad_norm": 0.20478636026382446,
      "learning_rate": 6.165338606588517e-07,
      "loss": 0.8099,
      "step": 298
    },
    {
      "epoch": 1.7175682144566777,
      "grad_norm": 0.197705939412117,
      "learning_rate": 5.926134863449712e-07,
      "loss": 0.764,
      "step": 299
    },
    {
      "epoch": 1.723312589755864,
      "grad_norm": 0.18358609080314636,
      "learning_rate": 5.691371177487215e-07,
      "loss": 0.6935,
      "step": 300
    },
    {
      "epoch": 1.7290569650550502,
      "grad_norm": 0.21243707835674286,
      "learning_rate": 5.461071199093048e-07,
      "loss": 0.8161,
      "step": 301
    },
    {
      "epoch": 1.7348013403542364,
      "grad_norm": 0.18822450935840607,
      "learning_rate": 5.235258128979676e-07,
      "loss": 0.7172,
      "step": 302
    },
    {
      "epoch": 1.7405457156534228,
      "grad_norm": 0.1920388638973236,
      "learning_rate": 5.0139547158427e-07,
      "loss": 0.7142,
      "step": 303
    },
    {
      "epoch": 1.746290090952609,
      "grad_norm": 0.20739546418190002,
      "learning_rate": 4.797183254069176e-07,
      "loss": 0.7913,
      "step": 304
    },
    {
      "epoch": 1.752034466251795,
      "grad_norm": 0.24430738389492035,
      "learning_rate": 4.5849655814915683e-07,
      "loss": 0.8155,
      "step": 305
    },
    {
      "epoch": 1.7577788415509814,
      "grad_norm": 0.19419215619564056,
      "learning_rate": 4.3773230771879004e-07,
      "loss": 0.7693,
      "step": 306
    },
    {
      "epoch": 1.7635232168501676,
      "grad_norm": 0.19372592866420746,
      "learning_rate": 4.1742766593278974e-07,
      "loss": 0.7078,
      "step": 307
    },
    {
      "epoch": 1.7692675921493537,
      "grad_norm": 0.20419584214687347,
      "learning_rate": 3.9758467830656623e-07,
      "loss": 0.7987,
      "step": 308
    },
    {
      "epoch": 1.77501196744854,
      "grad_norm": 0.2112087607383728,
      "learning_rate": 3.782053438479094e-07,
      "loss": 0.7357,
      "step": 309
    },
    {
      "epoch": 1.7807563427477262,
      "grad_norm": 0.1821969449520111,
      "learning_rate": 3.5929161485559694e-07,
      "loss": 0.7166,
      "step": 310
    },
    {
      "epoch": 1.7865007180469124,
      "grad_norm": 0.20085568726062775,
      "learning_rate": 3.4084539672271764e-07,
      "loss": 0.7855,
      "step": 311
    },
    {
      "epoch": 1.7922450933460987,
      "grad_norm": 0.1943003535270691,
      "learning_rate": 3.228685477447291e-07,
      "loss": 0.7345,
      "step": 312
    },
    {
      "epoch": 1.7979894686452849,
      "grad_norm": 0.20896294713020325,
      "learning_rate": 3.0536287893223603e-07,
      "loss": 0.693,
      "step": 313
    },
    {
      "epoch": 1.803733843944471,
      "grad_norm": 0.19734559953212738,
      "learning_rate": 2.883301538285582e-07,
      "loss": 0.6989,
      "step": 314
    },
    {
      "epoch": 1.8094782192436574,
      "grad_norm": 0.20879290997982025,
      "learning_rate": 2.717720883320685e-07,
      "loss": 0.7743,
      "step": 315
    },
    {
      "epoch": 1.8152225945428435,
      "grad_norm": 0.2231084406375885,
      "learning_rate": 2.556903505233216e-07,
      "loss": 0.8429,
      "step": 316
    },
    {
      "epoch": 1.8209669698420297,
      "grad_norm": 0.19414548575878143,
      "learning_rate": 2.4008656049701875e-07,
      "loss": 0.7576,
      "step": 317
    },
    {
      "epoch": 1.826711345141216,
      "grad_norm": 0.19559428095817566,
      "learning_rate": 2.2496229019879635e-07,
      "loss": 0.7209,
      "step": 318
    },
    {
      "epoch": 1.832455720440402,
      "grad_norm": 0.20687630772590637,
      "learning_rate": 2.1031906326685946e-07,
      "loss": 0.7382,
      "step": 319
    },
    {
      "epoch": 1.8382000957395883,
      "grad_norm": 0.20415708422660828,
      "learning_rate": 1.9615835487849677e-07,
      "loss": 0.7224,
      "step": 320
    },
    {
      "epoch": 1.8439444710387747,
      "grad_norm": 0.2037113904953003,
      "learning_rate": 1.824815916014644e-07,
      "loss": 0.7959,
      "step": 321
    },
    {
      "epoch": 1.8496888463379606,
      "grad_norm": 0.16969072818756104,
      "learning_rate": 1.6929015125027314e-07,
      "loss": 0.6885,
      "step": 322
    },
    {
      "epoch": 1.855433221637147,
      "grad_norm": 0.20589259266853333,
      "learning_rate": 1.5658536274738623e-07,
      "loss": 0.787,
      "step": 323
    },
    {
      "epoch": 1.8611775969363333,
      "grad_norm": 0.20015491545200348,
      "learning_rate": 1.443685059893396e-07,
      "loss": 0.7908,
      "step": 324
    },
    {
      "epoch": 1.8669219722355193,
      "grad_norm": 0.2213539034128189,
      "learning_rate": 1.3264081171780797e-07,
      "loss": 0.7573,
      "step": 325
    },
    {
      "epoch": 1.8726663475347056,
      "grad_norm": 0.20327641069889069,
      "learning_rate": 1.2140346139561277e-07,
      "loss": 0.7517,
      "step": 326
    },
    {
      "epoch": 1.8784107228338918,
      "grad_norm": 0.20684504508972168,
      "learning_rate": 1.1065758708770468e-07,
      "loss": 0.7518,
      "step": 327
    },
    {
      "epoch": 1.884155098133078,
      "grad_norm": 0.22536353766918182,
      "learning_rate": 1.004042713471165e-07,
      "loss": 0.7877,
      "step": 328
    },
    {
      "epoch": 1.8898994734322643,
      "grad_norm": 0.1884581446647644,
      "learning_rate": 9.064454710590253e-08,
      "loss": 0.719,
      "step": 329
    },
    {
      "epoch": 1.8956438487314504,
      "grad_norm": 0.1766548603773117,
      "learning_rate": 8.137939757108526e-08,
      "loss": 0.7001,
      "step": 330
    },
    {
      "epoch": 1.9013882240306366,
      "grad_norm": 0.1944391429424286,
      "learning_rate": 7.260975612560173e-08,
      "loss": 0.7557,
      "step": 331
    },
    {
      "epoch": 1.907132599329823,
      "grad_norm": 0.19803932309150696,
      "learning_rate": 6.433650623427379e-08,
      "loss": 0.66,
      "step": 332
    },
    {
      "epoch": 1.912876974629009,
      "grad_norm": 0.21188417077064514,
      "learning_rate": 5.6560481354807625e-08,
      "loss": 0.7217,
      "step": 333
    },
    {
      "epoch": 1.9186213499281952,
      "grad_norm": 0.20995528995990753,
      "learning_rate": 4.928246485383148e-08,
      "loss": 0.8005,
      "step": 334
    },
    {
      "epoch": 1.9243657252273816,
      "grad_norm": 0.2416689693927765,
      "learning_rate": 4.250318992797375e-08,
      "loss": 0.7982,
      "step": 335
    },
    {
      "epoch": 1.9301101005265677,
      "grad_norm": 0.24366450309753418,
      "learning_rate": 3.622333953000601e-08,
      "loss": 0.7007,
      "step": 336
    },
    {
      "epoch": 1.9358544758257539,
      "grad_norm": 0.21946272253990173,
      "learning_rate": 3.0443546300035764e-08,
      "loss": 0.6817,
      "step": 337
    },
    {
      "epoch": 1.9415988511249402,
      "grad_norm": 0.21845552325248718,
      "learning_rate": 2.516439250177749e-08,
      "loss": 0.7273,
      "step": 338
    },
    {
      "epoch": 1.9473432264241264,
      "grad_norm": 0.2093515843153,
      "learning_rate": 2.038640996389285e-08,
      "loss": 0.8364,
      "step": 339
    },
    {
      "epoch": 1.9530876017233125,
      "grad_norm": 0.22738634049892426,
      "learning_rate": 1.6110080026414123e-08,
      "loss": 0.7596,
      "step": 340
    },
    {
      "epoch": 1.958831977022499,
      "grad_norm": 0.20927579700946808,
      "learning_rate": 1.2335833492252425e-08,
      "loss": 0.7889,
      "step": 341
    },
    {
      "epoch": 1.964576352321685,
      "grad_norm": 0.2214992344379425,
      "learning_rate": 9.06405058380022e-09,
      "loss": 0.8137,
      "step": 342
    },
    {
      "epoch": 1.9703207276208712,
      "grad_norm": 0.19121958315372467,
      "learning_rate": 6.295060904623618e-09,
      "loss": 0.6988,
      "step": 343
    },
    {
      "epoch": 1.9760651029200575,
      "grad_norm": 0.209566131234169,
      "learning_rate": 4.02914340626226e-09,
      "loss": 0.7623,
      "step": 344
    },
    {
      "epoch": 1.9818094782192437,
      "grad_norm": 0.19311966001987457,
      "learning_rate": 2.2665263601240328e-09,
      "loss": 0.7692,
      "step": 345
    },
    {
      "epoch": 1.9875538535184298,
      "grad_norm": 0.2321179360151291,
      "learning_rate": 1.0073873344895735e-09,
      "loss": 0.8097,
      "step": 346
    },
    {
      "epoch": 1.9932982288176162,
      "grad_norm": 0.20544949173927307,
      "learning_rate": 2.5185317662490547e-10,
      "loss": 0.783,
      "step": 347
    },
    {
      "epoch": 1.9990426041168023,
      "grad_norm": 0.19750377535820007,
      "learning_rate": 0.0,
      "loss": 0.6823,
      "step": 348
    },
    {
      "epoch": 1.9990426041168023,
      "step": 348,
      "total_flos": 155937190772736.0,
      "train_loss": 0.9592287843597347,
      "train_runtime": 11467.2329,
      "train_samples_per_second": 2.914,
      "train_steps_per_second": 0.03
    }
  ],
  "logging_steps": 1,
  "max_steps": 348,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 155937190772736.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}