{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.990689013035381,
  "eval_steps": 500,
  "global_step": 335,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0148975791433892,
      "grad_norm": 5.950184932951318,
      "learning_rate": 2.3529411764705885e-06,
      "loss": 0.7975,
      "step": 1
    },
    {
      "epoch": 0.0297951582867784,
      "grad_norm": 5.889999633747208,
      "learning_rate": 4.705882352941177e-06,
      "loss": 0.7918,
      "step": 2
    },
    {
      "epoch": 0.0446927374301676,
      "grad_norm": 5.479080731430823,
      "learning_rate": 7.058823529411766e-06,
      "loss": 0.7842,
      "step": 3
    },
    {
      "epoch": 0.0595903165735568,
      "grad_norm": 4.072102472920601,
      "learning_rate": 9.411764705882354e-06,
      "loss": 0.7305,
      "step": 4
    },
    {
      "epoch": 0.074487895716946,
      "grad_norm": 2.0993469862660445,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 0.6919,
      "step": 5
    },
    {
      "epoch": 0.0893854748603352,
      "grad_norm": 5.471012079326577,
      "learning_rate": 1.4117647058823532e-05,
      "loss": 0.7321,
      "step": 6
    },
    {
      "epoch": 0.1042830540037244,
      "grad_norm": 7.497778887994976,
      "learning_rate": 1.647058823529412e-05,
      "loss": 0.7089,
      "step": 7
    },
    {
      "epoch": 0.1191806331471136,
      "grad_norm": 7.744362603097892,
      "learning_rate": 1.8823529411764708e-05,
      "loss": 0.7259,
      "step": 8
    },
    {
      "epoch": 0.1340782122905028,
      "grad_norm": 4.336088431882978,
      "learning_rate": 2.1176470588235296e-05,
      "loss": 0.6722,
      "step": 9
    },
    {
      "epoch": 0.148975791433892,
      "grad_norm": 3.1425590071036407,
      "learning_rate": 2.3529411764705884e-05,
      "loss": 0.6323,
      "step": 10
    },
    {
      "epoch": 0.16387337057728119,
      "grad_norm": 2.293080261987849,
      "learning_rate": 2.5882352941176475e-05,
      "loss": 0.6058,
      "step": 11
    },
    {
      "epoch": 0.1787709497206704,
      "grad_norm": 1.5511769350436764,
      "learning_rate": 2.8235294117647063e-05,
      "loss": 0.5911,
      "step": 12
    },
    {
      "epoch": 0.19366852886405958,
      "grad_norm": 1.43403410737388,
      "learning_rate": 3.0588235294117644e-05,
      "loss": 0.5715,
      "step": 13
    },
    {
      "epoch": 0.2085661080074488,
      "grad_norm": 1.0282624052041764,
      "learning_rate": 3.294117647058824e-05,
      "loss": 0.5482,
      "step": 14
    },
    {
      "epoch": 0.22346368715083798,
      "grad_norm": 22.401057880346002,
      "learning_rate": 3.529411764705883e-05,
      "loss": 0.6398,
      "step": 15
    },
    {
      "epoch": 0.2383612662942272,
      "grad_norm": 4.036215605056668,
      "learning_rate": 3.7647058823529415e-05,
      "loss": 0.5932,
      "step": 16
    },
    {
      "epoch": 0.2532588454376164,
      "grad_norm": 2.1089398812874904,
      "learning_rate": 4e-05,
      "loss": 0.5449,
      "step": 17
    },
    {
      "epoch": 0.2681564245810056,
      "grad_norm": 1.8917019656777936,
      "learning_rate": 4.235294117647059e-05,
      "loss": 0.545,
      "step": 18
    },
    {
      "epoch": 0.28305400372439476,
      "grad_norm": 1.9916573775756592,
      "learning_rate": 4.470588235294118e-05,
      "loss": 0.5414,
      "step": 19
    },
    {
      "epoch": 0.297951582867784,
      "grad_norm": 1.8920562076710838,
      "learning_rate": 4.705882352941177e-05,
      "loss": 0.5407,
      "step": 20
    },
    {
      "epoch": 0.3128491620111732,
      "grad_norm": 1.0243631917296254,
      "learning_rate": 4.941176470588236e-05,
      "loss": 0.5197,
      "step": 21
    },
    {
      "epoch": 0.32774674115456237,
      "grad_norm": 1.8353601120734953,
      "learning_rate": 5.176470588235295e-05,
      "loss": 0.5242,
      "step": 22
    },
    {
      "epoch": 0.3426443202979516,
      "grad_norm": 1.0556187539851671,
      "learning_rate": 5.411764705882354e-05,
      "loss": 0.5115,
      "step": 23
    },
    {
      "epoch": 0.3575418994413408,
      "grad_norm": 1.6827786939863305,
      "learning_rate": 5.6470588235294126e-05,
      "loss": 0.5057,
      "step": 24
    },
    {
      "epoch": 0.37243947858473,
      "grad_norm": 1.3016042554320595,
      "learning_rate": 5.8823529411764714e-05,
      "loss": 0.5114,
      "step": 25
    },
    {
      "epoch": 0.38733705772811916,
      "grad_norm": 1.2977216399034888,
      "learning_rate": 6.117647058823529e-05,
      "loss": 0.4991,
      "step": 26
    },
    {
      "epoch": 0.4022346368715084,
      "grad_norm": 1.223225396686375,
      "learning_rate": 6.352941176470589e-05,
      "loss": 0.4941,
      "step": 27
    },
    {
      "epoch": 0.4171322160148976,
      "grad_norm": 0.9631867501185041,
      "learning_rate": 6.588235294117648e-05,
      "loss": 0.4906,
      "step": 28
    },
    {
      "epoch": 0.43202979515828677,
      "grad_norm": 1.2189198129233703,
      "learning_rate": 6.823529411764707e-05,
      "loss": 0.4913,
      "step": 29
    },
    {
      "epoch": 0.44692737430167595,
      "grad_norm": 0.7374132716188752,
      "learning_rate": 7.058823529411765e-05,
      "loss": 0.4785,
      "step": 30
    },
    {
      "epoch": 0.4618249534450652,
      "grad_norm": 4.329279729767168,
      "learning_rate": 7.294117647058824e-05,
      "loss": 0.4855,
      "step": 31
    },
    {
      "epoch": 0.4767225325884544,
      "grad_norm": 1.9935350979555255,
      "learning_rate": 7.529411764705883e-05,
      "loss": 0.5068,
      "step": 32
    },
    {
      "epoch": 0.49162011173184356,
      "grad_norm": 1.0099023827824438,
      "learning_rate": 7.764705882352942e-05,
      "loss": 0.4878,
      "step": 33
    },
    {
      "epoch": 0.5065176908752328,
      "grad_norm": 1.453165427026921,
      "learning_rate": 8e-05,
      "loss": 0.4887,
      "step": 34
    },
    {
      "epoch": 0.521415270018622,
      "grad_norm": 1.0681365388503938,
      "learning_rate": 7.999782132320701e-05,
      "loss": 0.4792,
      "step": 35
    },
    {
      "epoch": 0.5363128491620112,
      "grad_norm": 1.2991278121605818,
      "learning_rate": 7.999128553015966e-05,
      "loss": 0.4746,
      "step": 36
    },
    {
      "epoch": 0.5512104283054003,
      "grad_norm": 0.9705929073068399,
      "learning_rate": 7.998039333282696e-05,
      "loss": 0.47,
      "step": 37
    },
    {
      "epoch": 0.5661080074487895,
      "grad_norm": 1.288262936234187,
      "learning_rate": 7.996514591773782e-05,
      "loss": 0.4758,
      "step": 38
    },
    {
      "epoch": 0.5810055865921788,
      "grad_norm": 0.6816553781708893,
      "learning_rate": 7.99455449458517e-05,
      "loss": 0.4627,
      "step": 39
    },
    {
      "epoch": 0.595903165735568,
      "grad_norm": 0.9231036421804909,
      "learning_rate": 7.992159255237773e-05,
      "loss": 0.4691,
      "step": 40
    },
    {
      "epoch": 0.6108007448789572,
      "grad_norm": 1.1402552499674083,
      "learning_rate": 7.989329134654207e-05,
      "loss": 0.4638,
      "step": 41
    },
    {
      "epoch": 0.6256983240223464,
      "grad_norm": 0.7253677973155603,
      "learning_rate": 7.986064441130378e-05,
      "loss": 0.4593,
      "step": 42
    },
    {
      "epoch": 0.6405959031657356,
      "grad_norm": 1.1918043936517428,
      "learning_rate": 7.982365530301885e-05,
      "loss": 0.4624,
      "step": 43
    },
    {
      "epoch": 0.6554934823091247,
      "grad_norm": 1.0659452922693904,
      "learning_rate": 7.978232805105288e-05,
      "loss": 0.4514,
      "step": 44
    },
    {
      "epoch": 0.6703910614525139,
      "grad_norm": 0.9974435758912656,
      "learning_rate": 7.97366671573421e-05,
      "loss": 0.4495,
      "step": 45
    },
    {
      "epoch": 0.6852886405959032,
      "grad_norm": 0.741244345164511,
      "learning_rate": 7.968667759590298e-05,
      "loss": 0.4498,
      "step": 46
    },
    {
      "epoch": 0.7001862197392924,
      "grad_norm": 0.700212224489924,
      "learning_rate": 7.96323648122904e-05,
      "loss": 0.4496,
      "step": 47
    },
    {
      "epoch": 0.7150837988826816,
      "grad_norm": 0.6861544024192645,
      "learning_rate": 7.957373472300442e-05,
      "loss": 0.4425,
      "step": 48
    },
    {
      "epoch": 0.7299813780260708,
      "grad_norm": 0.9097284558328156,
      "learning_rate": 7.951079371484578e-05,
      "loss": 0.4435,
      "step": 49
    },
    {
      "epoch": 0.74487895716946,
      "grad_norm": 0.8086828171768753,
      "learning_rate": 7.944354864422016e-05,
      "loss": 0.4427,
      "step": 50
    },
    {
      "epoch": 0.7597765363128491,
      "grad_norm": 0.7336340975598995,
      "learning_rate": 7.937200683639133e-05,
      "loss": 0.4418,
      "step": 51
    },
    {
      "epoch": 0.7746741154562383,
      "grad_norm": 0.709266378863673,
      "learning_rate": 7.929617608468308e-05,
      "loss": 0.4413,
      "step": 52
    },
    {
      "epoch": 0.7895716945996276,
      "grad_norm": 0.6997957286653358,
      "learning_rate": 7.921606464963037e-05,
      "loss": 0.4343,
      "step": 53
    },
    {
      "epoch": 0.8044692737430168,
      "grad_norm": 0.6312526710744601,
      "learning_rate": 7.913168125807943e-05,
      "loss": 0.4357,
      "step": 54
    },
    {
      "epoch": 0.819366852886406,
      "grad_norm": 0.8682993581299321,
      "learning_rate": 7.90430351022371e-05,
      "loss": 0.4256,
      "step": 55
    },
    {
      "epoch": 0.8342644320297952,
      "grad_norm": 1.0211685354189273,
      "learning_rate": 7.895013583866949e-05,
      "loss": 0.4383,
      "step": 56
    },
    {
      "epoch": 0.8491620111731844,
      "grad_norm": 0.5942703334288042,
      "learning_rate": 7.88529935872501e-05,
      "loss": 0.4266,
      "step": 57
    },
    {
      "epoch": 0.8640595903165735,
      "grad_norm": 0.4905469818523975,
      "learning_rate": 7.875161893005736e-05,
      "loss": 0.4275,
      "step": 58
    },
    {
      "epoch": 0.8789571694599627,
      "grad_norm": 0.713524523381961,
      "learning_rate": 7.864602291022193e-05,
      "loss": 0.425,
      "step": 59
    },
    {
      "epoch": 0.8938547486033519,
      "grad_norm": 0.7215883306108987,
      "learning_rate": 7.85362170307237e-05,
      "loss": 0.4296,
      "step": 60
    },
    {
      "epoch": 0.9087523277467412,
      "grad_norm": 0.5476669418552049,
      "learning_rate": 7.842221325313873e-05,
      "loss": 0.4264,
      "step": 61
    },
    {
      "epoch": 0.9236499068901304,
      "grad_norm": 0.5026608565405364,
      "learning_rate": 7.830402399633624e-05,
      "loss": 0.4279,
      "step": 62
    },
    {
      "epoch": 0.9385474860335196,
      "grad_norm": 0.43642895836746987,
      "learning_rate": 7.818166213512581e-05,
      "loss": 0.4231,
      "step": 63
    },
    {
      "epoch": 0.9534450651769087,
      "grad_norm": 0.5516886415086357,
      "learning_rate": 7.805514099885479e-05,
      "loss": 0.4286,
      "step": 64
    },
    {
      "epoch": 0.9683426443202979,
      "grad_norm": 0.6202502097645083,
      "learning_rate": 7.792447436995634e-05,
      "loss": 0.4238,
      "step": 65
    },
    {
      "epoch": 0.9832402234636871,
      "grad_norm": 0.8483095514593049,
      "learning_rate": 7.778967648244807e-05,
      "loss": 0.4251,
      "step": 66
    },
    {
      "epoch": 0.9981378026070763,
      "grad_norm": 0.9010008832838037,
      "learning_rate": 7.765076202038145e-05,
      "loss": 0.4227,
      "step": 67
    },
    {
      "epoch": 1.0130353817504656,
      "grad_norm": 1.1210899234881495,
      "learning_rate": 7.750774611624222e-05,
      "loss": 0.7678,
      "step": 68
    },
    {
      "epoch": 1.0279329608938548,
      "grad_norm": 1.4294187108260912,
      "learning_rate": 7.736064434930193e-05,
      "loss": 0.4298,
      "step": 69
    },
    {
      "epoch": 1.042830540037244,
      "grad_norm": 0.5162696432741285,
      "learning_rate": 7.720947274392087e-05,
      "loss": 0.4051,
      "step": 70
    },
    {
      "epoch": 1.0577281191806331,
      "grad_norm": 1.2710462444765749,
      "learning_rate": 7.705424776780249e-05,
      "loss": 0.4149,
      "step": 71
    },
    {
      "epoch": 1.0726256983240223,
      "grad_norm": 0.7995934416533542,
      "learning_rate": 7.689498633019941e-05,
      "loss": 0.4007,
      "step": 72
    },
    {
      "epoch": 1.0875232774674115,
      "grad_norm": 0.880390966869298,
      "learning_rate": 7.673170578007157e-05,
      "loss": 0.4116,
      "step": 73
    },
    {
      "epoch": 1.1024208566108007,
      "grad_norm": 0.7628454204466086,
      "learning_rate": 7.656442390419622e-05,
      "loss": 0.4008,
      "step": 74
    },
    {
      "epoch": 1.1173184357541899,
      "grad_norm": 0.5370866632430045,
      "learning_rate": 7.63931589252304e-05,
      "loss": 0.3935,
      "step": 75
    },
    {
      "epoch": 1.132216014897579,
      "grad_norm": 0.6503753033899171,
      "learning_rate": 7.621792949972588e-05,
      "loss": 0.3983,
      "step": 76
    },
    {
      "epoch": 1.1471135940409685,
      "grad_norm": 0.483185187236749,
      "learning_rate": 7.603875471609677e-05,
      "loss": 0.4092,
      "step": 77
    },
    {
      "epoch": 1.1620111731843576,
      "grad_norm": 0.5469246081737635,
      "learning_rate": 7.585565409254025e-05,
      "loss": 0.3988,
      "step": 78
    },
    {
      "epoch": 1.1769087523277468,
      "grad_norm": 0.3809737733256841,
      "learning_rate": 7.566864757491027e-05,
      "loss": 0.4049,
      "step": 79
    },
    {
      "epoch": 1.191806331471136,
      "grad_norm": 0.4978345458094043,
      "learning_rate": 7.547775553454485e-05,
      "loss": 0.3991,
      "step": 80
    },
    {
      "epoch": 1.2067039106145252,
      "grad_norm": 0.3419277603644534,
      "learning_rate": 7.528299876604689e-05,
      "loss": 0.4015,
      "step": 81
    },
    {
      "epoch": 1.2216014897579144,
      "grad_norm": 0.3978591421270835,
      "learning_rate": 7.508439848501899e-05,
      "loss": 0.3948,
      "step": 82
    },
    {
      "epoch": 1.2364990689013036,
      "grad_norm": 0.3215672869140669,
      "learning_rate": 7.488197632575232e-05,
      "loss": 0.3946,
      "step": 83
    },
    {
      "epoch": 1.2513966480446927,
      "grad_norm": 0.2805092237683431,
      "learning_rate": 7.467575433886989e-05,
      "loss": 0.3925,
      "step": 84
    },
    {
      "epoch": 1.266294227188082,
      "grad_norm": 0.29668081700935095,
      "learning_rate": 7.44657549889246e-05,
      "loss": 0.3944,
      "step": 85
    },
    {
      "epoch": 1.2811918063314711,
      "grad_norm": 0.32093150433950507,
      "learning_rate": 7.425200115195193e-05,
      "loss": 0.395,
      "step": 86
    },
    {
      "epoch": 1.2960893854748603,
      "grad_norm": 0.3114671727422004,
      "learning_rate": 7.403451611297808e-05,
      "loss": 0.3896,
      "step": 87
    },
    {
      "epoch": 1.3109869646182495,
      "grad_norm": 0.2621069849515514,
      "learning_rate": 7.381332356348343e-05,
      "loss": 0.3972,
      "step": 88
    },
    {
      "epoch": 1.3258845437616387,
      "grad_norm": 0.35041652635951187,
      "learning_rate": 7.358844759882168e-05,
      "loss": 0.3947,
      "step": 89
    },
    {
      "epoch": 1.3407821229050279,
      "grad_norm": 0.35165133417817274,
      "learning_rate": 7.335991271559512e-05,
      "loss": 0.3924,
      "step": 90
    },
    {
      "epoch": 1.355679702048417,
      "grad_norm": 0.3622625923423102,
      "learning_rate": 7.312774380898608e-05,
      "loss": 0.3802,
      "step": 91
    },
    {
      "epoch": 1.3705772811918062,
      "grad_norm": 0.34568371596915276,
      "learning_rate": 7.289196617004499e-05,
      "loss": 0.3921,
      "step": 92
    },
    {
      "epoch": 1.3854748603351954,
      "grad_norm": 0.36106236048997714,
      "learning_rate": 7.265260548293535e-05,
      "loss": 0.3852,
      "step": 93
    },
    {
      "epoch": 1.4003724394785848,
      "grad_norm": 0.3591694116875136,
      "learning_rate": 7.24096878221359e-05,
      "loss": 0.3864,
      "step": 94
    },
    {
      "epoch": 1.415270018621974,
      "grad_norm": 0.4611887631357433,
      "learning_rate": 7.216323964960013e-05,
      "loss": 0.3917,
      "step": 95
    },
    {
      "epoch": 1.4301675977653632,
      "grad_norm": 0.5900177988285087,
      "learning_rate": 7.191328781187374e-05,
      "loss": 0.3889,
      "step": 96
    },
    {
      "epoch": 1.4450651769087524,
      "grad_norm": 0.6476785738427469,
      "learning_rate": 7.165985953717017e-05,
      "loss": 0.3976,
      "step": 97
    },
    {
      "epoch": 1.4599627560521415,
      "grad_norm": 0.5959898548929913,
      "learning_rate": 7.140298243240444e-05,
      "loss": 0.3991,
      "step": 98
    },
    {
      "epoch": 1.4748603351955307,
      "grad_norm": 0.45154508108061203,
      "learning_rate": 7.114268448018589e-05,
      "loss": 0.395,
      "step": 99
    },
    {
      "epoch": 1.48975791433892,
      "grad_norm": 0.41405552449106015,
      "learning_rate": 7.087899403576992e-05,
      "loss": 0.3904,
      "step": 100
    },
    {
      "epoch": 1.504655493482309,
      "grad_norm": 0.550596726311639,
      "learning_rate": 7.06119398239691e-05,
      "loss": 0.3809,
      "step": 101
    },
    {
      "epoch": 1.5195530726256983,
      "grad_norm": 0.6087570876386633,
      "learning_rate": 7.034155093602413e-05,
      "loss": 0.3962,
      "step": 102
    },
    {
      "epoch": 1.5344506517690877,
      "grad_norm": 0.533129391668596,
      "learning_rate": 7.006785682643479e-05,
      "loss": 0.3976,
      "step": 103
    },
    {
      "epoch": 1.5493482309124769,
      "grad_norm": 0.43458513735835697,
      "learning_rate": 6.979088730975128e-05,
      "loss": 0.3917,
      "step": 104
    },
    {
      "epoch": 1.564245810055866,
      "grad_norm": 0.3518341619336429,
      "learning_rate": 6.951067255732655e-05,
      "loss": 0.3858,
      "step": 105
    },
    {
      "epoch": 1.5791433891992552,
      "grad_norm": 0.40015247854184544,
      "learning_rate": 6.92272430940295e-05,
      "loss": 0.3864,
      "step": 106
    },
    {
      "epoch": 1.5940409683426444,
      "grad_norm": 0.3910716089734902,
      "learning_rate": 6.894062979491987e-05,
      "loss": 0.3883,
      "step": 107
    },
    {
      "epoch": 1.6089385474860336,
      "grad_norm": 0.35066296337841696,
      "learning_rate": 6.865086388188476e-05,
      "loss": 0.386,
      "step": 108
    },
    {
      "epoch": 1.6238361266294228,
      "grad_norm": 0.3072713840073755,
      "learning_rate": 6.835797692023774e-05,
      "loss": 0.39,
      "step": 109
    },
    {
      "epoch": 1.638733705772812,
      "grad_norm": 0.3550143349129471,
      "learning_rate": 6.806200081528008e-05,
      "loss": 0.3872,
      "step": 110
    },
    {
      "epoch": 1.6536312849162011,
      "grad_norm": 0.37053547013887106,
      "learning_rate": 6.776296780882537e-05,
      "loss": 0.3813,
      "step": 111
    },
    {
      "epoch": 1.6685288640595903,
      "grad_norm": 0.24071852433891697,
      "learning_rate": 6.746091047568716e-05,
      "loss": 0.3812,
      "step": 112
    },
    {
      "epoch": 1.6834264432029795,
      "grad_norm": 0.3042807960699796,
      "learning_rate": 6.715586172013054e-05,
      "loss": 0.3835,
      "step": 113
    },
    {
      "epoch": 1.6983240223463687,
      "grad_norm": 0.3538152445413312,
      "learning_rate": 6.684785477228777e-05,
      "loss": 0.3813,
      "step": 114
    },
    {
      "epoch": 1.7132216014897579,
      "grad_norm": 0.24086260757195502,
      "learning_rate": 6.653692318453831e-05,
      "loss": 0.3852,
      "step": 115
    },
    {
      "epoch": 1.728119180633147,
      "grad_norm": 0.3025847544952239,
      "learning_rate": 6.622310082785384e-05,
      "loss": 0.3913,
      "step": 116
    },
    {
      "epoch": 1.7430167597765363,
      "grad_norm": 0.31641711838551906,
      "learning_rate": 6.590642188810869e-05,
      "loss": 0.3827,
      "step": 117
    },
    {
      "epoch": 1.7579143389199254,
      "grad_norm": 0.2688168797114581,
      "learning_rate": 6.558692086235565e-05,
      "loss": 0.3744,
      "step": 118
    },
    {
      "epoch": 1.7728119180633146,
      "grad_norm": 0.2984297462672916,
      "learning_rate": 6.526463255506828e-05,
      "loss": 0.3811,
      "step": 119
    },
    {
      "epoch": 1.7877094972067038,
      "grad_norm": 0.30746963837396896,
      "learning_rate": 6.493959207434934e-05,
      "loss": 0.3758,
      "step": 120
    },
    {
      "epoch": 1.802607076350093,
      "grad_norm": 0.3230084067714464,
      "learning_rate": 6.461183482810646e-05,
      "loss": 0.3853,
      "step": 121
    },
    {
      "epoch": 1.8175046554934822,
      "grad_norm": 0.4000243849303593,
      "learning_rate": 6.42813965201949e-05,
      "loss": 0.3882,
      "step": 122
    },
    {
      "epoch": 1.8324022346368714,
      "grad_norm": 0.4288088964712755,
      "learning_rate": 6.394831314652835e-05,
      "loss": 0.3858,
      "step": 123
    },
    {
      "epoch": 1.8472998137802608,
      "grad_norm": 0.5085831972309272,
      "learning_rate": 6.361262099115761e-05,
      "loss": 0.3856,
      "step": 124
    },
    {
      "epoch": 1.86219739292365,
      "grad_norm": 0.6105698024621092,
      "learning_rate": 6.327435662231812e-05,
      "loss": 0.3849,
      "step": 125
    },
    {
      "epoch": 1.8770949720670391,
      "grad_norm": 0.5824359200910605,
      "learning_rate": 6.293355688844637e-05,
      "loss": 0.3866,
      "step": 126
    },
    {
      "epoch": 1.8919925512104283,
      "grad_norm": 0.44326416200929236,
      "learning_rate": 6.259025891416594e-05,
      "loss": 0.3737,
      "step": 127
    },
    {
      "epoch": 1.9068901303538175,
      "grad_norm": 0.3430006009849836,
      "learning_rate": 6.224450009624332e-05,
      "loss": 0.3741,
      "step": 128
    },
    {
      "epoch": 1.9217877094972067,
      "grad_norm": 0.3048045239289814,
      "learning_rate": 6.18963180995141e-05,
      "loss": 0.3767,
      "step": 129
    },
    {
      "epoch": 1.9366852886405959,
      "grad_norm": 0.3664037873157842,
      "learning_rate": 6.154575085278012e-05,
      "loss": 0.3857,
      "step": 130
    },
    {
      "epoch": 1.9515828677839853,
      "grad_norm": 0.3595451501590318,
      "learning_rate": 6.119283654467761e-05,
      "loss": 0.3806,
      "step": 131
    },
    {
      "epoch": 1.9664804469273744,
      "grad_norm": 0.3076574382248462,
      "learning_rate": 6.083761361951722e-05,
      "loss": 0.3802,
      "step": 132
    },
    {
      "epoch": 1.9813780260707636,
      "grad_norm": 0.25896414972948856,
      "learning_rate": 6.048012077309612e-05,
      "loss": 0.3773,
      "step": 133
    },
    {
      "epoch": 1.9962756052141528,
      "grad_norm": 0.2438114619698701,
      "learning_rate": 6.01203969484827e-05,
      "loss": 0.3832,
      "step": 134
    },
    {
      "epoch": 2.011173184357542,
      "grad_norm": 0.4880986651628094,
      "learning_rate": 5.975848133177442e-05,
      "loss": 0.7025,
      "step": 135
    },
    {
      "epoch": 2.026070763500931,
      "grad_norm": 0.7683631863694373,
      "learning_rate": 5.939441334782901e-05,
      "loss": 0.3581,
      "step": 136
    },
    {
      "epoch": 2.0409683426443204,
      "grad_norm": 1.0088180311972472,
      "learning_rate": 5.9028232655969866e-05,
      "loss": 0.3662,
      "step": 137
    },
    {
      "epoch": 2.0558659217877095,
      "grad_norm": 0.8036028379230191,
      "learning_rate": 5.865997914566577e-05,
      "loss": 0.3606,
      "step": 138
    },
    {
      "epoch": 2.0707635009310987,
      "grad_norm": 0.6488117999773151,
      "learning_rate": 5.8289692932185546e-05,
      "loss": 0.3619,
      "step": 139
    },
    {
      "epoch": 2.085661080074488,
      "grad_norm": 0.6603945376014646,
      "learning_rate": 5.791741435222821e-05,
      "loss": 0.3613,
      "step": 140
    },
    {
      "epoch": 2.100558659217877,
      "grad_norm": 0.6137565370101866,
      "learning_rate": 5.7543183959528886e-05,
      "loss": 0.3523,
      "step": 141
    },
    {
      "epoch": 2.1154562383612663,
      "grad_norm": 0.6393236118969376,
      "learning_rate": 5.716704252044116e-05,
      "loss": 0.3543,
      "step": 142
    },
    {
      "epoch": 2.1303538175046555,
      "grad_norm": 0.37602818408299177,
      "learning_rate": 5.678903100949625e-05,
      "loss": 0.3564,
      "step": 143
    },
    {
      "epoch": 2.1452513966480447,
      "grad_norm": 0.4873243740997193,
      "learning_rate": 5.640919060493948e-05,
      "loss": 0.3547,
      "step": 144
    },
    {
      "epoch": 2.160148975791434,
      "grad_norm": 0.399733374633036,
      "learning_rate": 5.602756268424457e-05,
      "loss": 0.3521,
      "step": 145
    },
    {
      "epoch": 2.175046554934823,
      "grad_norm": 0.38098879738078906,
      "learning_rate": 5.564418881960624e-05,
      "loss": 0.3525,
      "step": 146
    },
    {
      "epoch": 2.189944134078212,
      "grad_norm": 0.3112584196059825,
      "learning_rate": 5.5259110773411596e-05,
      "loss": 0.3578,
      "step": 147
    },
    {
      "epoch": 2.2048417132216014,
      "grad_norm": 0.33974576612810525,
      "learning_rate": 5.487237049369075e-05,
      "loss": 0.3485,
      "step": 148
    },
    {
      "epoch": 2.2197392923649906,
      "grad_norm": 0.31599699558663985,
      "learning_rate": 5.448401010954733e-05,
      "loss": 0.3482,
      "step": 149
    },
    {
      "epoch": 2.2346368715083798,
      "grad_norm": 0.23588411232355225,
      "learning_rate": 5.4094071926569146e-05,
      "loss": 0.3465,
      "step": 150
    },
    {
      "epoch": 2.249534450651769,
      "grad_norm": 0.259141065976224,
      "learning_rate": 5.370259842221972e-05,
      "loss": 0.3574,
      "step": 151
    },
    {
      "epoch": 2.264432029795158,
      "grad_norm": 0.2948627119507419,
      "learning_rate": 5.330963224121096e-05,
      "loss": 0.356,
      "step": 152
    },
    {
      "epoch": 2.2793296089385473,
      "grad_norm": 0.2398464878022089,
      "learning_rate": 5.291521619085785e-05,
      "loss": 0.3536,
      "step": 153
    },
    {
      "epoch": 2.294227188081937,
      "grad_norm": 0.2554013267182894,
      "learning_rate": 5.251939323641516e-05,
      "loss": 0.3494,
      "step": 154
    },
    {
      "epoch": 2.3091247672253257,
      "grad_norm": 0.2781655892263444,
      "learning_rate": 5.212220649639715e-05,
      "loss": 0.3511,
      "step": 155
    },
    {
      "epoch": 2.3240223463687153,
      "grad_norm": 0.22065610655196213,
      "learning_rate": 5.172369923788046e-05,
      "loss": 0.3589,
      "step": 156
    },
    {
      "epoch": 2.338919925512104,
      "grad_norm": 0.2632172433601213,
      "learning_rate": 5.132391487179088e-05,
      "loss": 0.3537,
      "step": 157
    },
    {
      "epoch": 2.3538175046554937,
      "grad_norm": 0.18801127961637412,
      "learning_rate": 5.092289694817446e-05,
      "loss": 0.3483,
      "step": 158
    },
    {
      "epoch": 2.368715083798883,
      "grad_norm": 0.2368052254172045,
      "learning_rate": 5.052068915145336e-05,
      "loss": 0.3571,
      "step": 159
    },
    {
      "epoch": 2.383612662942272,
      "grad_norm": 0.23655539210427956,
      "learning_rate": 5.011733529566723e-05,
      "loss": 0.3446,
      "step": 160
    },
    {
      "epoch": 2.398510242085661,
      "grad_norm": 0.2138112386109768,
      "learning_rate": 4.971287931970033e-05,
      "loss": 0.3482,
      "step": 161
    },
    {
      "epoch": 2.4134078212290504,
      "grad_norm": 0.23467262592443583,
      "learning_rate": 4.9307365282495075e-05,
      "loss": 0.3473,
      "step": 162
    },
    {
      "epoch": 2.4283054003724396,
      "grad_norm": 0.1804084648863834,
      "learning_rate": 4.890083735825258e-05,
      "loss": 0.3531,
      "step": 163
    },
    {
      "epoch": 2.4432029795158288,
      "grad_norm": 0.22285709273589918,
      "learning_rate": 4.849333983162056e-05,
      "loss": 0.3516,
      "step": 164
    },
    {
      "epoch": 2.458100558659218,
      "grad_norm": 0.14828204605953096,
      "learning_rate": 4.808491709286921e-05,
      "loss": 0.356,
      "step": 165
    },
    {
      "epoch": 2.472998137802607,
      "grad_norm": 0.1797938362516586,
      "learning_rate": 4.76756136330557e-05,
      "loss": 0.3491,
      "step": 166
    },
    {
      "epoch": 2.4878957169459963,
      "grad_norm": 0.15466564418943565,
      "learning_rate": 4.726547403917746e-05,
      "loss": 0.3435,
      "step": 167
    },
    {
      "epoch": 2.5027932960893855,
      "grad_norm": 0.1866313977972178,
      "learning_rate": 4.685454298931527e-05,
      "loss": 0.3506,
      "step": 168
    },
    {
      "epoch": 2.5176908752327747,
      "grad_norm": 0.15144628774380284,
      "learning_rate": 4.6442865247766203e-05,
      "loss": 0.3547,
      "step": 169
    },
    {
      "epoch": 2.532588454376164,
      "grad_norm": 0.17455463171543423,
      "learning_rate": 4.603048566016735e-05,
      "loss": 0.3486,
      "step": 170
    },
    {
      "epoch": 2.547486033519553,
      "grad_norm": 0.17359950511666877,
      "learning_rate": 4.5617449148610584e-05,
      "loss": 0.3483,
      "step": 171
    },
    {
      "epoch": 2.5623836126629422,
      "grad_norm": 0.16169017429437474,
      "learning_rate": 4.520380070674902e-05,
      "loss": 0.3477,
      "step": 172
    },
    {
      "epoch": 2.5772811918063314,
      "grad_norm": 0.17061911155312887,
      "learning_rate": 4.478958539489569e-05,
      "loss": 0.3571,
      "step": 173
    },
    {
      "epoch": 2.5921787709497206,
      "grad_norm": 0.16695037154209072,
      "learning_rate": 4.437484833511499e-05,
      "loss": 0.3522,
      "step": 174
    },
    {
      "epoch": 2.60707635009311,
      "grad_norm": 0.15955082753766311,
      "learning_rate": 4.395963470630723e-05,
      "loss": 0.3413,
      "step": 175
    },
    {
      "epoch": 2.621973929236499,
      "grad_norm": 0.1573403347105783,
      "learning_rate": 4.3543989739287326e-05,
      "loss": 0.3477,
      "step": 176
    },
    {
      "epoch": 2.636871508379888,
      "grad_norm": 0.14729071549065167,
      "learning_rate": 4.312795871185742e-05,
      "loss": 0.3483,
      "step": 177
    },
    {
      "epoch": 2.6517690875232773,
      "grad_norm": 0.1625891228202278,
      "learning_rate": 4.2711586943874774e-05,
      "loss": 0.3452,
      "step": 178
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.15062427849577625,
      "learning_rate": 4.2294919792314794e-05,
      "loss": 0.3488,
      "step": 179
    },
    {
      "epoch": 2.6815642458100557,
      "grad_norm": 0.14387393723407899,
      "learning_rate": 4.1878002646330144e-05,
      "loss": 0.3524,
      "step": 180
    },
    {
      "epoch": 2.6964618249534453,
      "grad_norm": 0.14777699560280422,
      "learning_rate": 4.1460880922306367e-05,
      "loss": 0.3458,
      "step": 181
    },
    {
      "epoch": 2.711359404096834,
      "grad_norm": 0.1590574368277688,
      "learning_rate": 4.1043600058914436e-05,
      "loss": 0.3479,
      "step": 182
    },
    {
      "epoch": 2.7262569832402237,
      "grad_norm": 0.1285624178683975,
      "learning_rate": 4.0626205512161034e-05,
      "loss": 0.3503,
      "step": 183
    },
    {
      "epoch": 2.7411545623836124,
      "grad_norm": 0.14369454205895854,
      "learning_rate": 4.020874275043679e-05,
      "loss": 0.346,
      "step": 184
    },
    {
      "epoch": 2.756052141527002,
      "grad_norm": 0.12193167492676163,
      "learning_rate": 3.979125724956324e-05,
      "loss": 0.3506,
      "step": 185
    },
    {
      "epoch": 2.770949720670391,
      "grad_norm": 0.1326021753158276,
      "learning_rate": 3.937379448783898e-05,
      "loss": 0.3502,
      "step": 186
    },
    {
      "epoch": 2.7858472998137804,
      "grad_norm": 0.12810706794198765,
      "learning_rate": 3.895639994108558e-05,
      "loss": 0.3541,
      "step": 187
    },
    {
      "epoch": 2.8007448789571696,
      "grad_norm": 0.12656208043359207,
      "learning_rate": 3.853911907769365e-05,
      "loss": 0.3497,
      "step": 188
    },
    {
      "epoch": 2.815642458100559,
      "grad_norm": 0.11404167595412021,
      "learning_rate": 3.812199735366986e-05,
      "loss": 0.3473,
      "step": 189
    },
    {
      "epoch": 2.830540037243948,
      "grad_norm": 0.13878407632584827,
      "learning_rate": 3.770508020768522e-05,
      "loss": 0.3489,
      "step": 190
    },
    {
      "epoch": 2.845437616387337,
      "grad_norm": 0.12311309579828897,
      "learning_rate": 3.728841305612524e-05,
      "loss": 0.3464,
      "step": 191
    },
    {
      "epoch": 2.8603351955307263,
      "grad_norm": 0.13332912284616857,
      "learning_rate": 3.687204128814259e-05,
      "loss": 0.3428,
      "step": 192
    },
    {
      "epoch": 2.8752327746741155,
      "grad_norm": 0.1384361081760211,
      "learning_rate": 3.645601026071269e-05,
      "loss": 0.3505,
      "step": 193
    },
    {
      "epoch": 2.8901303538175047,
      "grad_norm": 0.13736333082695462,
      "learning_rate": 3.604036529369277e-05,
      "loss": 0.347,
      "step": 194
    },
    {
      "epoch": 2.905027932960894,
      "grad_norm": 0.11583463546593212,
      "learning_rate": 3.5625151664885036e-05,
      "loss": 0.3461,
      "step": 195
    },
    {
      "epoch": 2.919925512104283,
      "grad_norm": 0.12889980341532484,
      "learning_rate": 3.5210414605104314e-05,
      "loss": 0.3428,
      "step": 196
    },
    {
      "epoch": 2.9348230912476723,
      "grad_norm": 0.11316852370701413,
      "learning_rate": 3.4796199293250987e-05,
      "loss": 0.3479,
      "step": 197
    },
    {
      "epoch": 2.9497206703910615,
      "grad_norm": 0.11454972761265435,
      "learning_rate": 3.438255085138943e-05,
      "loss": 0.3464,
      "step": 198
    },
    {
      "epoch": 2.9646182495344506,
      "grad_norm": 0.13041453651138907,
      "learning_rate": 3.396951433983266e-05,
      "loss": 0.3458,
      "step": 199
    },
    {
      "epoch": 2.97951582867784,
      "grad_norm": 0.12148037255351198,
      "learning_rate": 3.355713475223382e-05,
      "loss": 0.3463,
      "step": 200
    },
    {
      "epoch": 2.994413407821229,
      "grad_norm": 0.1255719642211306,
      "learning_rate": 3.314545701068475e-05,
      "loss": 0.3491,
      "step": 201
    },
    {
      "epoch": 3.009310986964618,
      "grad_norm": 0.3022359318173022,
      "learning_rate": 3.2734525960822545e-05,
      "loss": 0.6352,
      "step": 202
    },
    {
      "epoch": 3.0242085661080074,
      "grad_norm": 0.24062134731666432,
      "learning_rate": 3.232438636694431e-05,
      "loss": 0.3224,
      "step": 203
    },
    {
      "epoch": 3.0391061452513966,
      "grad_norm": 0.18412193702307067,
      "learning_rate": 3.191508290713079e-05,
      "loss": 0.3206,
      "step": 204
    },
    {
      "epoch": 3.0540037243947857,
      "grad_norm": 0.20860348558828723,
      "learning_rate": 3.150666016837947e-05,
      "loss": 0.3264,
      "step": 205
    },
    {
      "epoch": 3.068901303538175,
      "grad_norm": 0.20845238316157091,
      "learning_rate": 3.109916264174743e-05,
      "loss": 0.327,
      "step": 206
    },
    {
      "epoch": 3.083798882681564,
      "grad_norm": 0.20010239080813352,
      "learning_rate": 3.069263471750493e-05,
      "loss": 0.3223,
      "step": 207
    },
    {
      "epoch": 3.0986964618249533,
      "grad_norm": 0.18760823228422985,
      "learning_rate": 3.0287120680299677e-05,
      "loss": 0.3238,
      "step": 208
    },
    {
      "epoch": 3.1135940409683425,
      "grad_norm": 0.21319162553294677,
      "learning_rate": 2.988266470433277e-05,
      "loss": 0.3249,
      "step": 209
    },
    {
      "epoch": 3.1284916201117317,
      "grad_norm": 0.17329474811121787,
      "learning_rate": 2.9479310848546644e-05,
      "loss": 0.3243,
      "step": 210
    },
    {
      "epoch": 3.143389199255121,
      "grad_norm": 0.20404750548823786,
      "learning_rate": 2.9077103051825567e-05,
      "loss": 0.3203,
      "step": 211
    },
    {
      "epoch": 3.1582867783985105,
      "grad_norm": 0.17940098271660299,
      "learning_rate": 2.8676085128209133e-05,
      "loss": 0.3237,
      "step": 212
    },
    {
      "epoch": 3.1731843575418996,
      "grad_norm": 0.16237217554172287,
      "learning_rate": 2.8276300762119553e-05,
      "loss": 0.3196,
      "step": 213
    },
    {
      "epoch": 3.188081936685289,
      "grad_norm": 0.15801986044567953,
      "learning_rate": 2.787779350360286e-05,
      "loss": 0.3196,
      "step": 214
    },
    {
      "epoch": 3.202979515828678,
      "grad_norm": 0.1485056929542617,
      "learning_rate": 2.748060676358484e-05,
      "loss": 0.3192,
      "step": 215
    },
    {
      "epoch": 3.217877094972067,
      "grad_norm": 0.15223329100287147,
      "learning_rate": 2.7084783809142164e-05,
      "loss": 0.3192,
      "step": 216
    },
    {
      "epoch": 3.2327746741154564,
      "grad_norm": 0.13836533584306443,
      "learning_rate": 2.6690367758789046e-05,
      "loss": 0.328,
      "step": 217
    },
    {
      "epoch": 3.2476722532588456,
      "grad_norm": 0.1351886559378493,
      "learning_rate": 2.6297401577780295e-05,
      "loss": 0.322,
      "step": 218
    },
    {
      "epoch": 3.2625698324022347,
      "grad_norm": 0.12857547848905795,
      "learning_rate": 2.5905928073430854e-05,
      "loss": 0.319,
      "step": 219
    },
    {
      "epoch": 3.277467411545624,
      "grad_norm": 0.1274395295470198,
      "learning_rate": 2.5515989890452674e-05,
      "loss": 0.3237,
      "step": 220
    },
    {
      "epoch": 3.292364990689013,
      "grad_norm": 0.11899376687857942,
      "learning_rate": 2.5127629506309264e-05,
      "loss": 0.3205,
      "step": 221
    },
    {
      "epoch": 3.3072625698324023,
      "grad_norm": 0.1304655308172865,
      "learning_rate": 2.474088922658842e-05,
      "loss": 0.3188,
      "step": 222
    },
    {
      "epoch": 3.3221601489757915,
      "grad_norm": 0.11611090203712811,
      "learning_rate": 2.4355811180393767e-05,
      "loss": 0.3195,
      "step": 223
    },
    {
      "epoch": 3.3370577281191807,
      "grad_norm": 0.11604416368224107,
      "learning_rate": 2.397243731575543e-05,
      "loss": 0.3201,
      "step": 224
    },
    {
      "epoch": 3.35195530726257,
      "grad_norm": 0.11134670769841407,
      "learning_rate": 2.359080939506052e-05,
      "loss": 0.32,
      "step": 225
    },
    {
      "epoch": 3.366852886405959,
      "grad_norm": 0.10870133266703798,
      "learning_rate": 2.3210968990503755e-05,
      "loss": 0.3208,
      "step": 226
    },
    {
      "epoch": 3.381750465549348,
      "grad_norm": 0.1110502941686079,
      "learning_rate": 2.2832957479558866e-05,
      "loss": 0.317,
      "step": 227
    },
    {
      "epoch": 3.3966480446927374,
      "grad_norm": 0.10438158313371027,
      "learning_rate": 2.245681604047114e-05,
      "loss": 0.3179,
      "step": 228
    },
    {
      "epoch": 3.4115456238361266,
      "grad_norm": 0.10282665897445756,
      "learning_rate": 2.2082585647771807e-05,
      "loss": 0.3228,
      "step": 229
    },
    {
      "epoch": 3.4264432029795158,
      "grad_norm": 0.10672800838711503,
      "learning_rate": 2.171030706781446e-05,
      "loss": 0.3222,
      "step": 230
    },
    {
      "epoch": 3.441340782122905,
      "grad_norm": 0.09786842543478856,
      "learning_rate": 2.1340020854334246e-05,
      "loss": 0.3181,
      "step": 231
    },
    {
      "epoch": 3.456238361266294,
      "grad_norm": 0.10404230481342372,
      "learning_rate": 2.0971767344030144e-05,
      "loss": 0.3225,
      "step": 232
    },
    {
      "epoch": 3.4711359404096833,
      "grad_norm": 0.10712648472255477,
      "learning_rate": 2.0605586652170998e-05,
      "loss": 0.321,
      "step": 233
    },
    {
      "epoch": 3.4860335195530725,
      "grad_norm": 0.1044874161797147,
      "learning_rate": 2.0241518668225595e-05,
      "loss": 0.3252,
      "step": 234
    },
    {
      "epoch": 3.5009310986964617,
      "grad_norm": 0.10451587153618398,
      "learning_rate": 1.98796030515173e-05,
      "loss": 0.3173,
      "step": 235
    },
    {
      "epoch": 3.515828677839851,
      "grad_norm": 0.09831156843059215,
      "learning_rate": 1.9519879226903903e-05,
      "loss": 0.3188,
      "step": 236
    },
    {
      "epoch": 3.5307262569832405,
      "grad_norm": 0.09438833971964583,
      "learning_rate": 1.9162386380482795e-05,
      "loss": 0.3174,
      "step": 237
    },
    {
      "epoch": 3.5456238361266292,
      "grad_norm": 0.10433270782878293,
      "learning_rate": 1.88071634553224e-05,
      "loss": 0.3219,
      "step": 238
    },
    {
      "epoch": 3.560521415270019,
      "grad_norm": 0.09292286071725575,
      "learning_rate": 1.845424914721988e-05,
      "loss": 0.3252,
      "step": 239
    },
    {
      "epoch": 3.5754189944134076,
      "grad_norm": 0.09413410282284697,
      "learning_rate": 1.81036819004859e-05,
      "loss": 0.3163,
      "step": 240
    },
    {
      "epoch": 3.5903165735567972,
      "grad_norm": 0.10158989698055416,
      "learning_rate": 1.7755499903756704e-05,
      "loss": 0.3259,
      "step": 241
    },
    {
      "epoch": 3.605214152700186,
      "grad_norm": 0.09464740249041373,
      "learning_rate": 1.7409741085834066e-05,
      "loss": 0.3191,
      "step": 242
    },
    {
      "epoch": 3.6201117318435756,
      "grad_norm": 0.10726107957817572,
      "learning_rate": 1.7066443111553627e-05,
      "loss": 0.3211,
      "step": 243
    },
    {
      "epoch": 3.635009310986965,
      "grad_norm": 0.08917870235234628,
      "learning_rate": 1.6725643377681893e-05,
      "loss": 0.3225,
      "step": 244
    },
    {
      "epoch": 3.649906890130354,
      "grad_norm": 0.10677910900640168,
      "learning_rate": 1.638737900884239e-05,
      "loss": 0.3214,
      "step": 245
    },
    {
      "epoch": 3.664804469273743,
      "grad_norm": 0.09774898119089985,
      "learning_rate": 1.6051686853471667e-05,
      "loss": 0.3252,
      "step": 246
    },
    {
      "epoch": 3.6797020484171323,
      "grad_norm": 0.08773810545171451,
      "learning_rate": 1.5718603479805113e-05,
      "loss": 0.3235,
      "step": 247
    },
    {
      "epoch": 3.6945996275605215,
      "grad_norm": 0.10111349625699705,
      "learning_rate": 1.538816517189356e-05,
      "loss": 0.3228,
      "step": 248
    },
    {
      "epoch": 3.7094972067039107,
      "grad_norm": 0.09592458935558433,
      "learning_rate": 1.5060407925650662e-05,
      "loss": 0.3234,
      "step": 249
    },
    {
      "epoch": 3.7243947858473,
      "grad_norm": 0.08715032888076826,
      "learning_rate": 1.4735367444931722e-05,
      "loss": 0.3192,
      "step": 250
    },
    {
      "epoch": 3.739292364990689,
      "grad_norm": 0.09053851330147095,
      "learning_rate": 1.4413079137644358e-05,
      "loss": 0.3162,
      "step": 251
    },
    {
      "epoch": 3.7541899441340782,
      "grad_norm": 0.10377676484499614,
      "learning_rate": 1.4093578111891333e-05,
      "loss": 0.3189,
      "step": 252
    },
    {
      "epoch": 3.7690875232774674,
      "grad_norm": 0.08949080189520761,
      "learning_rate": 1.377689917214617e-05,
      "loss": 0.3258,
      "step": 253
    },
    {
      "epoch": 3.7839851024208566,
      "grad_norm": 0.09258472675428549,
      "learning_rate": 1.3463076815461703e-05,
      "loss": 0.3195,
      "step": 254
    },
    {
      "epoch": 3.798882681564246,
      "grad_norm": 0.0951200280500826,
      "learning_rate": 1.3152145227712221e-05,
      "loss": 0.314,
      "step": 255
    },
    {
      "epoch": 3.813780260707635,
      "grad_norm": 0.09226678982549726,
      "learning_rate": 1.284413827986946e-05,
      "loss": 0.3255,
      "step": 256
    },
    {
      "epoch": 3.828677839851024,
      "grad_norm": 0.08915216421489361,
      "learning_rate": 1.2539089524312855e-05,
      "loss": 0.3261,
      "step": 257
    },
    {
      "epoch": 3.8435754189944134,
      "grad_norm": 0.0925569123285621,
      "learning_rate": 1.2237032191174642e-05,
      "loss": 0.3202,
      "step": 258
    },
    {
      "epoch": 3.8584729981378025,
      "grad_norm": 0.08684065982720417,
      "learning_rate": 1.1937999184719926e-05,
      "loss": 0.3212,
      "step": 259
    },
    {
      "epoch": 3.8733705772811917,
      "grad_norm": 0.08765050211323365,
      "learning_rate": 1.1642023079762281e-05,
      "loss": 0.319,
      "step": 260
    },
    {
      "epoch": 3.888268156424581,
      "grad_norm": 0.08724442004118642,
      "learning_rate": 1.1349136118115242e-05,
      "loss": 0.3154,
      "step": 261
    },
    {
      "epoch": 3.90316573556797,
      "grad_norm": 0.08586178309454286,
      "learning_rate": 1.1059370205080157e-05,
      "loss": 0.3226,
      "step": 262
    },
    {
      "epoch": 3.9180633147113593,
      "grad_norm": 0.08501873030376662,
      "learning_rate": 1.07727569059705e-05,
      "loss": 0.3229,
      "step": 263
    },
    {
      "epoch": 3.9329608938547485,
      "grad_norm": 0.09206134247645327,
      "learning_rate": 1.0489327442673459e-05,
      "loss": 0.3227,
      "step": 264
    },
    {
      "epoch": 3.9478584729981376,
      "grad_norm": 0.08410101453326498,
      "learning_rate": 1.0209112690248726e-05,
      "loss": 0.3177,
      "step": 265
    },
    {
      "epoch": 3.9627560521415273,
      "grad_norm": 0.08821111302237976,
      "learning_rate": 9.932143173565225e-06,
      "loss": 0.3227,
      "step": 266
    },
    {
      "epoch": 3.977653631284916,
      "grad_norm": 0.08616646529797505,
      "learning_rate": 9.658449063975875e-06,
      "loss": 0.3216,
      "step": 267
    },
    {
      "epoch": 3.9925512104283056,
      "grad_norm": 0.0885838583519875,
      "learning_rate": 9.388060176030907e-06,
      "loss": 0.3226,
      "step": 268
    },
    {
      "epoch": 4.007448789571694,
      "grad_norm": 0.22436110155064673,
      "learning_rate": 9.12100596423009e-06,
      "loss": 0.577,
      "step": 269
    },
    {
      "epoch": 4.022346368715084,
      "grad_norm": 0.11607630428391699,
      "learning_rate": 8.857315519814111e-06,
      "loss": 0.306,
      "step": 270
    },
    {
      "epoch": 4.037243947858473,
      "grad_norm": 0.11774451222490284,
      "learning_rate": 8.597017567595562e-06,
      "loss": 0.3064,
      "step": 271
    },
    {
      "epoch": 4.052141527001862,
      "grad_norm": 0.09907950082186576,
      "learning_rate": 8.34014046282984e-06,
      "loss": 0.302,
      "step": 272
    },
    {
      "epoch": 4.067039106145251,
      "grad_norm": 0.10570787971335635,
      "learning_rate": 8.086712188126263e-06,
      "loss": 0.3021,
      "step": 273
    },
    {
      "epoch": 4.081936685288641,
      "grad_norm": 0.10877019752281963,
      "learning_rate": 7.836760350399881e-06,
      "loss": 0.2992,
      "step": 274
    },
    {
      "epoch": 4.0968342644320295,
      "grad_norm": 0.1173773952444213,
      "learning_rate": 7.5903121778641096e-06,
      "loss": 0.3077,
      "step": 275
    },
    {
      "epoch": 4.111731843575419,
      "grad_norm": 0.10898666220477518,
      "learning_rate": 7.347394517064663e-06,
      "loss": 0.3068,
      "step": 276
    },
    {
      "epoch": 4.126629422718808,
      "grad_norm": 0.09803810087456757,
      "learning_rate": 7.108033829955028e-06,
      "loss": 0.3077,
      "step": 277
    },
    {
      "epoch": 4.1415270018621975,
      "grad_norm": 0.09446749801813671,
      "learning_rate": 6.87225619101394e-06,
      "loss": 0.3061,
      "step": 278
    },
    {
      "epoch": 4.156424581005586,
      "grad_norm": 0.0951410747387855,
      "learning_rate": 6.640087284404888e-06,
      "loss": 0.3054,
      "step": 279
    },
    {
      "epoch": 4.171322160148976,
      "grad_norm": 0.10491597814630026,
      "learning_rate": 6.411552401178327e-06,
      "loss": 0.3098,
      "step": 280
    },
    {
      "epoch": 4.186219739292365,
      "grad_norm": 0.09593380058395089,
      "learning_rate": 6.186676436516581e-06,
      "loss": 0.3008,
      "step": 281
    },
    {
      "epoch": 4.201117318435754,
      "grad_norm": 0.09551371288826128,
      "learning_rate": 5.965483887021934e-06,
      "loss": 0.3087,
      "step": 282
    },
    {
      "epoch": 4.216014897579143,
      "grad_norm": 0.0895153508605015,
      "learning_rate": 5.747998848048091e-06,
      "loss": 0.3091,
      "step": 283
    },
    {
      "epoch": 4.230912476722533,
      "grad_norm": 0.09197880309307534,
      "learning_rate": 5.534245011075414e-06,
      "loss": 0.298,
      "step": 284
    },
    {
      "epoch": 4.245810055865922,
      "grad_norm": 0.08575532851271551,
      "learning_rate": 5.3242456611301095e-06,
      "loss": 0.3,
      "step": 285
    },
    {
      "epoch": 4.260707635009311,
      "grad_norm": 0.08276481700085347,
      "learning_rate": 5.118023674247692e-06,
      "loss": 0.2984,
      "step": 286
    },
    {
      "epoch": 4.275605214152701,
      "grad_norm": 0.08713205903583655,
      "learning_rate": 4.915601514981024e-06,
      "loss": 0.3078,
      "step": 287
    },
    {
      "epoch": 4.290502793296089,
      "grad_norm": 0.08363731970724225,
      "learning_rate": 4.717001233953116e-06,
      "loss": 0.2987,
      "step": 288
    },
    {
      "epoch": 4.305400372439479,
      "grad_norm": 0.08529607146039025,
      "learning_rate": 4.522244465455154e-06,
      "loss": 0.3038,
      "step": 289
    },
    {
      "epoch": 4.320297951582868,
      "grad_norm": 0.08215569802598276,
      "learning_rate": 4.33135242508973e-06,
      "loss": 0.3043,
      "step": 290
    },
    {
      "epoch": 4.335195530726257,
      "grad_norm": 0.08149200923150887,
      "learning_rate": 4.144345907459753e-06,
      "loss": 0.3013,
      "step": 291
    },
    {
      "epoch": 4.350093109869646,
      "grad_norm": 0.08086624632566386,
      "learning_rate": 3.961245283903239e-06,
      "loss": 0.308,
      "step": 292
    },
    {
      "epoch": 4.364990689013036,
      "grad_norm": 0.07896402407099593,
      "learning_rate": 3.7820705002741353e-06,
      "loss": 0.3052,
      "step": 293
    },
    {
      "epoch": 4.379888268156424,
      "grad_norm": 0.08231952485165063,
      "learning_rate": 3.6068410747696112e-06,
      "loss": 0.3002,
      "step": 294
    },
    {
      "epoch": 4.394785847299814,
      "grad_norm": 0.07789166320140198,
      "learning_rate": 3.435576095803792e-06,
      "loss": 0.3021,
      "step": 295
    },
    {
      "epoch": 4.409683426443203,
      "grad_norm": 0.07892696916845467,
      "learning_rate": 3.268294219928434e-06,
      "loss": 0.3046,
      "step": 296
    },
    {
      "epoch": 4.424581005586592,
      "grad_norm": 0.07646811095500806,
      "learning_rate": 3.1050136698005963e-06,
      "loss": 0.3025,
      "step": 297
    },
    {
      "epoch": 4.439478584729981,
      "grad_norm": 0.07957607014767461,
      "learning_rate": 2.9457522321975253e-06,
      "loss": 0.3079,
      "step": 298
    },
    {
      "epoch": 4.454376163873371,
      "grad_norm": 0.08390752993938617,
      "learning_rate": 2.790527256079134e-06,
      "loss": 0.3066,
      "step": 299
    },
    {
      "epoch": 4.4692737430167595,
      "grad_norm": 0.07541814294925331,
      "learning_rate": 2.6393556506980834e-06,
      "loss": 0.3071,
      "step": 300
    },
    {
      "epoch": 4.484171322160149,
      "grad_norm": 0.0714738276840467,
      "learning_rate": 2.4922538837577916e-06,
      "loss": 0.3078,
      "step": 301
    },
    {
      "epoch": 4.499068901303538,
      "grad_norm": 0.0729695749304457,
      "learning_rate": 2.349237979618555e-06,
      "loss": 0.3112,
      "step": 302
    },
    {
      "epoch": 4.5139664804469275,
      "grad_norm": 0.10108195097380965,
      "learning_rate": 2.2103235175519355e-06,
      "loss": 0.3052,
      "step": 303
    },
    {
      "epoch": 4.528864059590316,
      "grad_norm": 0.07138906578544997,
      "learning_rate": 2.0755256300436687e-06,
      "loss": 0.3014,
      "step": 304
    },
    {
      "epoch": 4.543761638733706,
      "grad_norm": 0.07356056598380585,
      "learning_rate": 1.944859001145223e-06,
      "loss": 0.3065,
      "step": 305
    },
    {
      "epoch": 4.558659217877095,
      "grad_norm": 0.07516469607423958,
      "learning_rate": 1.8183378648741979e-06,
      "loss": 0.3049,
      "step": 306
    },
    {
      "epoch": 4.573556797020484,
      "grad_norm": 0.07371825538928153,
      "learning_rate": 1.6959760036637662e-06,
      "loss": 0.304,
      "step": 307
    },
    {
      "epoch": 4.588454376163874,
      "grad_norm": 0.07502909475594224,
      "learning_rate": 1.5777867468612874e-06,
      "loss": 0.3011,
      "step": 308
    },
    {
      "epoch": 4.603351955307263,
      "grad_norm": 0.07352157905066296,
      "learning_rate": 1.4637829692763128e-06,
      "loss": 0.3029,
      "step": 309
    },
    {
      "epoch": 4.618249534450651,
      "grad_norm": 0.07644054766848495,
      "learning_rate": 1.353977089778078e-06,
      "loss": 0.3046,
      "step": 310
    },
    {
      "epoch": 4.633147113594041,
      "grad_norm": 0.07171131988386027,
      "learning_rate": 1.2483810699426458e-06,
      "loss": 0.3068,
      "step": 311
    },
    {
      "epoch": 4.648044692737431,
      "grad_norm": 0.07057850167398541,
      "learning_rate": 1.1470064127499091e-06,
      "loss": 0.3064,
      "step": 312
    },
    {
      "epoch": 4.662942271880819,
      "grad_norm": 0.07626745016001804,
      "learning_rate": 1.0498641613305182e-06,
      "loss": 0.3007,
      "step": 313
    },
    {
      "epoch": 4.677839851024208,
      "grad_norm": 0.0693829644961785,
      "learning_rate": 9.569648977629176e-07,
      "loss": 0.3049,
      "step": 314
    },
    {
      "epoch": 4.692737430167598,
      "grad_norm": 0.0674761214613311,
      "learning_rate": 8.683187419205797e-07,
      "loss": 0.3013,
      "step": 315
    },
    {
      "epoch": 4.707635009310987,
      "grad_norm": 0.06823222699165288,
      "learning_rate": 7.839353503696379e-07,
      "loss": 0.3071,
      "step": 316
    },
    {
      "epoch": 4.722532588454376,
      "grad_norm": 0.06859479836699696,
      "learning_rate": 7.038239153169324e-07,
      "loss": 0.301,
      "step": 317
    },
    {
      "epoch": 4.737430167597766,
      "grad_norm": 0.07031736775350605,
      "learning_rate": 6.279931636086912e-07,
      "loss": 0.3086,
      "step": 318
    },
    {
      "epoch": 4.752327746741154,
      "grad_norm": 0.06915744071480374,
      "learning_rate": 5.564513557798501e-07,
      "loss": 0.3048,
      "step": 319
    },
    {
      "epoch": 4.767225325884544,
      "grad_norm": 0.06815891873681693,
      "learning_rate": 4.892062851542356e-07,
      "loss": 0.309,
      "step": 320
    },
    {
      "epoch": 4.782122905027933,
      "grad_norm": 0.06935050248754245,
      "learning_rate": 4.2626527699558996e-07,
      "loss": 0.3018,
      "step": 321
    },
    {
      "epoch": 4.797020484171322,
      "grad_norm": 0.06784753003557113,
      "learning_rate": 3.6763518770960517e-07,
      "loss": 0.3051,
      "step": 322
    },
    {
      "epoch": 4.811918063314711,
      "grad_norm": 0.06808652827602746,
      "learning_rate": 3.133224040970273e-07,
      "loss": 0.3071,
      "step": 323
    },
    {
      "epoch": 4.826815642458101,
      "grad_norm": 0.07121784851648261,
      "learning_rate": 2.6333284265790627e-07,
      "loss": 0.3047,
      "step": 324
    },
    {
      "epoch": 4.8417132216014895,
      "grad_norm": 0.06700344502725675,
      "learning_rate": 2.1767194894712462e-07,
      "loss": 0.305,
      "step": 325
    },
    {
      "epoch": 4.856610800744879,
      "grad_norm": 0.06927419764551407,
      "learning_rate": 1.763446969811522e-07,
      "loss": 0.306,
      "step": 326
    },
    {
      "epoch": 4.871508379888268,
      "grad_norm": 0.06947638763948212,
      "learning_rate": 1.3935558869622168e-07,
      "loss": 0.3042,
      "step": 327
    },
    {
      "epoch": 4.8864059590316575,
      "grad_norm": 0.06866693792434822,
      "learning_rate": 1.0670865345793425e-07,
      "loss": 0.3094,
      "step": 328
    },
    {
      "epoch": 4.901303538175046,
      "grad_norm": 0.06859589228192586,
      "learning_rate": 7.840744762229069e-08,
      "loss": 0.3054,
      "step": 329
    },
    {
      "epoch": 4.916201117318436,
      "grad_norm": 0.06927380611470749,
      "learning_rate": 5.445505414831242e-08,
      "loss": 0.3054,
      "step": 330
    },
    {
      "epoch": 4.931098696461825,
      "grad_norm": 0.06680927663742126,
      "learning_rate": 3.485408226218567e-08,
      "loss": 0.2998,
      "step": 331
    },
    {
      "epoch": 4.945996275605214,
      "grad_norm": 0.06773777244031369,
      "learning_rate": 1.960666717304438e-08,
      "loss": 0.3058,
      "step": 332
    },
    {
      "epoch": 4.960893854748603,
      "grad_norm": 0.06864556914107234,
      "learning_rate": 8.714469840351848e-09,
      "loss": 0.2984,
      "step": 333
    },
    {
      "epoch": 4.975791433891993,
      "grad_norm": 0.0678647360547641,
      "learning_rate": 2.178676792996548e-09,
      "loss": 0.3051,
      "step": 334
    },
    {
      "epoch": 4.990689013035381,
      "grad_norm": 0.06662103871858636,
      "learning_rate": 0.0,
      "loss": 0.2973,
      "step": 335
    },
    {
      "epoch": 4.990689013035381,
      "step": 335,
      "total_flos": 8.620546826447094e+18,
      "train_loss": 0.380745083004681,
      "train_runtime": 51129.1279,
      "train_samples_per_second": 3.36,
      "train_steps_per_second": 0.007
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 335,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8.620546826447094e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}