| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 2.9943851768669285, |
| "eval_steps": 500, |
| "global_step": 666, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.004491858506457047, |
| "grad_norm": 5.744765553310083, |
| "learning_rate": 4.2215328358208957e-07, |
| "loss": 0.8286, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.008983717012914094, |
| "grad_norm": 5.938427190328152, |
| "learning_rate": 8.443065671641791e-07, |
| "loss": 0.8731, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.01347557551937114, |
| "grad_norm": 5.7787941236611164, |
| "learning_rate": 1.2664598507462685e-06, |
| "loss": 0.8549, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.017967434025828188, |
| "grad_norm": 5.730303354635296, |
| "learning_rate": 1.6886131343283583e-06, |
| "loss": 0.8641, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.022459292532285232, |
| "grad_norm": 5.463956060746314, |
| "learning_rate": 2.1107664179104476e-06, |
| "loss": 0.859, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.02695115103874228, |
| "grad_norm": 4.359222218466504, |
| "learning_rate": 2.532919701492537e-06, |
| "loss": 0.8086, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.031443009545199324, |
| "grad_norm": 3.9314742540418877, |
| "learning_rate": 2.955072985074627e-06, |
| "loss": 0.7987, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.035934868051656375, |
| "grad_norm": 2.2948652467354904, |
| "learning_rate": 3.3772262686567165e-06, |
| "loss": 0.768, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.04042672655811342, |
| "grad_norm": 1.9963513578345484, |
| "learning_rate": 3.799379552238806e-06, |
| "loss": 0.7609, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.044918585064570464, |
| "grad_norm": 1.7047152492699387, |
| "learning_rate": 4.221532835820895e-06, |
| "loss": 0.7504, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.049410443571027515, |
| "grad_norm": 3.8017193994614322, |
| "learning_rate": 4.6436861194029855e-06, |
| "loss": 0.7503, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.05390230207748456, |
| "grad_norm": 4.022894465311438, |
| "learning_rate": 5.065839402985074e-06, |
| "loss": 0.7455, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.058394160583941604, |
| "grad_norm": 3.9288586911775494, |
| "learning_rate": 5.4879926865671645e-06, |
| "loss": 0.7295, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.06288601909039865, |
| "grad_norm": 3.209589214583598, |
| "learning_rate": 5.910145970149254e-06, |
| "loss": 0.7024, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.06737787759685569, |
| "grad_norm": 3.011893288172853, |
| "learning_rate": 6.3322992537313436e-06, |
| "loss": 0.6876, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.07186973610331275, |
| "grad_norm": 2.414862038960305, |
| "learning_rate": 6.754452537313433e-06, |
| "loss": 0.6812, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.0763615946097698, |
| "grad_norm": 1.6414783160484103, |
| "learning_rate": 7.176605820895523e-06, |
| "loss": 0.6814, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.08085345311622684, |
| "grad_norm": 1.369475527415624, |
| "learning_rate": 7.598759104477612e-06, |
| "loss": 0.659, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.08534531162268388, |
| "grad_norm": 1.6743202955313934, |
| "learning_rate": 8.0209123880597e-06, |
| "loss": 0.6456, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.08983717012914093, |
| "grad_norm": 1.8668298838147386, |
| "learning_rate": 8.44306567164179e-06, |
| "loss": 0.6516, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.09432902863559797, |
| "grad_norm": 1.4484409449750104, |
| "learning_rate": 8.865218955223881e-06, |
| "loss": 0.6304, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.09882088714205503, |
| "grad_norm": 1.0037439652654327, |
| "learning_rate": 9.287372238805971e-06, |
| "loss": 0.6245, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.10331274564851207, |
| "grad_norm": 1.2035026989163056, |
| "learning_rate": 9.70952552238806e-06, |
| "loss": 0.621, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.10780460415496912, |
| "grad_norm": 1.2996895063130545, |
| "learning_rate": 1.0131678805970148e-05, |
| "loss": 0.6092, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.11229646266142616, |
| "grad_norm": 0.9219635804670414, |
| "learning_rate": 1.055383208955224e-05, |
| "loss": 0.6082, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.11678832116788321, |
| "grad_norm": 0.8154512278475434, |
| "learning_rate": 1.0975985373134329e-05, |
| "loss": 0.585, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.12128017967434025, |
| "grad_norm": 0.9247980377052808, |
| "learning_rate": 1.1398138656716419e-05, |
| "loss": 0.5938, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.1257720381807973, |
| "grad_norm": 0.8351405332062142, |
| "learning_rate": 1.1820291940298508e-05, |
| "loss": 0.5915, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.13026389668725435, |
| "grad_norm": 0.7133220578891004, |
| "learning_rate": 1.2242445223880596e-05, |
| "loss": 0.588, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.13475575519371139, |
| "grad_norm": 0.7504890368369718, |
| "learning_rate": 1.2664598507462687e-05, |
| "loss": 0.5871, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.13924761370016844, |
| "grad_norm": 0.7315285717243533, |
| "learning_rate": 1.3086751791044777e-05, |
| "loss": 0.5858, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.1437394722066255, |
| "grad_norm": 0.5441735042606766, |
| "learning_rate": 1.3508905074626866e-05, |
| "loss": 0.575, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.14823133071308253, |
| "grad_norm": 0.6473935352696674, |
| "learning_rate": 1.3931058358208956e-05, |
| "loss": 0.5615, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.1527231892195396, |
| "grad_norm": 0.6223091865317807, |
| "learning_rate": 1.4353211641791045e-05, |
| "loss": 0.5686, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.15721504772599662, |
| "grad_norm": 0.5869026473489213, |
| "learning_rate": 1.4775364925373133e-05, |
| "loss": 0.5632, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.16170690623245368, |
| "grad_norm": 0.6697261661322835, |
| "learning_rate": 1.5197518208955224e-05, |
| "loss": 0.5538, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.16619876473891074, |
| "grad_norm": 0.5523340874893558, |
| "learning_rate": 1.5619671492537315e-05, |
| "loss": 0.5515, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.17069062324536777, |
| "grad_norm": 0.5402779001437888, |
| "learning_rate": 1.60418247761194e-05, |
| "loss": 0.5585, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.17518248175182483, |
| "grad_norm": 0.6393793878404187, |
| "learning_rate": 1.6463978059701494e-05, |
| "loss": 0.5545, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.17967434025828186, |
| "grad_norm": 0.4041240036369504, |
| "learning_rate": 1.688613134328358e-05, |
| "loss": 0.5578, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.18416619876473891, |
| "grad_norm": 0.6086298220416736, |
| "learning_rate": 1.7308284626865673e-05, |
| "loss": 0.554, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.18865805727119594, |
| "grad_norm": 0.3999870115396727, |
| "learning_rate": 1.7730437910447763e-05, |
| "loss": 0.5399, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.193149915777653, |
| "grad_norm": 0.5006786688823199, |
| "learning_rate": 1.815259119402985e-05, |
| "loss": 0.5388, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.19764177428411006, |
| "grad_norm": 0.5268906883702746, |
| "learning_rate": 1.8574744477611942e-05, |
| "loss": 0.5458, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.2021336327905671, |
| "grad_norm": 0.36114074862039075, |
| "learning_rate": 1.899689776119403e-05, |
| "loss": 0.5318, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.20662549129702415, |
| "grad_norm": 0.49219557077110226, |
| "learning_rate": 1.941905104477612e-05, |
| "loss": 0.538, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.21111734980348118, |
| "grad_norm": 0.3898606142294089, |
| "learning_rate": 1.984120432835821e-05, |
| "loss": 0.5303, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.21560920830993824, |
| "grad_norm": 0.4747550212627052, |
| "learning_rate": 2.0263357611940297e-05, |
| "loss": 0.5359, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.2201010668163953, |
| "grad_norm": 0.356291193847858, |
| "learning_rate": 2.068551089552239e-05, |
| "loss": 0.5225, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.22459292532285233, |
| "grad_norm": 0.5154384369701829, |
| "learning_rate": 2.110766417910448e-05, |
| "loss": 0.5432, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.22908478382930939, |
| "grad_norm": 0.4156042173356652, |
| "learning_rate": 2.152981746268657e-05, |
| "loss": 0.5269, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.23357664233576642, |
| "grad_norm": 0.34458484379208343, |
| "learning_rate": 2.1951970746268658e-05, |
| "loss": 0.5318, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.23806850084222347, |
| "grad_norm": 0.45421913764112787, |
| "learning_rate": 2.2374124029850744e-05, |
| "loss": 0.5245, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.2425603593486805, |
| "grad_norm": 0.567239004859855, |
| "learning_rate": 2.2796277313432837e-05, |
| "loss": 0.5289, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.24705221785513756, |
| "grad_norm": 0.6740910902425077, |
| "learning_rate": 2.3218430597014927e-05, |
| "loss": 0.5084, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.2515440763615946, |
| "grad_norm": 0.7752068297960009, |
| "learning_rate": 2.3640583880597016e-05, |
| "loss": 0.5226, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.25603593486805165, |
| "grad_norm": 0.9552601356444903, |
| "learning_rate": 2.4062737164179106e-05, |
| "loss": 0.5196, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.2605277933745087, |
| "grad_norm": 0.9379110300025817, |
| "learning_rate": 2.4484890447761192e-05, |
| "loss": 0.5105, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.26501965188096577, |
| "grad_norm": 0.7772460143850226, |
| "learning_rate": 2.4907043731343285e-05, |
| "loss": 0.5242, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.26951151038742277, |
| "grad_norm": 0.8788446944945193, |
| "learning_rate": 2.5329197014925374e-05, |
| "loss": 0.5139, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.27400336889387983, |
| "grad_norm": 0.7472788464484211, |
| "learning_rate": 2.5751350298507464e-05, |
| "loss": 0.5087, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.2784952274003369, |
| "grad_norm": 0.5206157571508017, |
| "learning_rate": 2.6173503582089553e-05, |
| "loss": 0.5135, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.28298708590679394, |
| "grad_norm": 0.4158324584939451, |
| "learning_rate": 2.6595656865671643e-05, |
| "loss": 0.5221, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.287478944413251, |
| "grad_norm": 0.6550327478832779, |
| "learning_rate": 2.7017810149253732e-05, |
| "loss": 0.5076, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.291970802919708, |
| "grad_norm": 0.9256302053482004, |
| "learning_rate": 2.7439963432835822e-05, |
| "loss": 0.5127, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.29646266142616506, |
| "grad_norm": 0.9775983835454853, |
| "learning_rate": 2.786211671641791e-05, |
| "loss": 0.5026, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.3009545199326221, |
| "grad_norm": 1.51113067675059, |
| "learning_rate": 2.828427e-05, |
| "loss": 0.521, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.3054463784390792, |
| "grad_norm": 0.8192228855206966, |
| "learning_rate": 2.8284075495305353e-05, |
| "loss": 0.501, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.30993823694553624, |
| "grad_norm": 1.2300181702860546, |
| "learning_rate": 2.8283491986571674e-05, |
| "loss": 0.5127, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.31443009545199324, |
| "grad_norm": 1.1329832003075657, |
| "learning_rate": 2.8282519489849603e-05, |
| "loss": 0.5071, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.3189219539584503, |
| "grad_norm": 1.2646909380634277, |
| "learning_rate": 2.8281158031889725e-05, |
| "loss": 0.5053, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.32341381246490736, |
| "grad_norm": 0.9837732589410637, |
| "learning_rate": 2.8279407650141834e-05, |
| "loss": 0.5197, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.3279056709713644, |
| "grad_norm": 1.224216460317663, |
| "learning_rate": 2.8277268392753874e-05, |
| "loss": 0.5066, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.3323975294778215, |
| "grad_norm": 0.8794559027371804, |
| "learning_rate": 2.827474031857067e-05, |
| "loss": 0.5064, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.3368893879842785, |
| "grad_norm": 1.028650146078467, |
| "learning_rate": 2.8271823497132245e-05, |
| "loss": 0.5117, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.34138124649073553, |
| "grad_norm": 1.0164722337507859, |
| "learning_rate": 2.8268518008671955e-05, |
| "loss": 0.4991, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.3458731049971926, |
| "grad_norm": 1.024636665199281, |
| "learning_rate": 2.8264823944114274e-05, |
| "loss": 0.5125, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.35036496350364965, |
| "grad_norm": 0.726482732351472, |
| "learning_rate": 2.826074140507226e-05, |
| "loss": 0.4926, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.35485682201010665, |
| "grad_norm": 0.953436081285762, |
| "learning_rate": 2.825627050384482e-05, |
| "loss": 0.5061, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.3593486805165637, |
| "grad_norm": 0.8241055859758966, |
| "learning_rate": 2.8251411363413542e-05, |
| "loss": 0.5074, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.36384053902302077, |
| "grad_norm": 0.8014142170976486, |
| "learning_rate": 2.8246164117439403e-05, |
| "loss": 0.5065, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.36833239752947783, |
| "grad_norm": 0.8877417141780828, |
| "learning_rate": 2.8240528910259e-05, |
| "loss": 0.5106, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.3728242560359349, |
| "grad_norm": 0.7288874323602634, |
| "learning_rate": 2.823450589688067e-05, |
| "loss": 0.5025, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.3773161145423919, |
| "grad_norm": 0.6009546992075813, |
| "learning_rate": 2.8228095242980137e-05, |
| "loss": 0.5006, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.38180797304884895, |
| "grad_norm": 0.7490335675774521, |
| "learning_rate": 2.822129712489603e-05, |
| "loss": 0.5044, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.386299831555306, |
| "grad_norm": 0.779289911199876, |
| "learning_rate": 2.8214111729624994e-05, |
| "loss": 0.487, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.39079169006176306, |
| "grad_norm": 0.5030837162189737, |
| "learning_rate": 2.8206539254816545e-05, |
| "loss": 0.4859, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.3952835485682201, |
| "grad_norm": 0.6689905622885237, |
| "learning_rate": 2.8198579908767657e-05, |
| "loss": 0.4912, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.3997754070746771, |
| "grad_norm": 0.591172274153396, |
| "learning_rate": 2.8190233910416995e-05, |
| "loss": 0.4996, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.4042672655811342, |
| "grad_norm": 0.49390500427611916, |
| "learning_rate": 2.8181501489338944e-05, |
| "loss": 0.4972, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.40875912408759124, |
| "grad_norm": 0.5414477218971624, |
| "learning_rate": 2.8172382885737236e-05, |
| "loss": 0.4981, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.4132509825940483, |
| "grad_norm": 0.43235732060188803, |
| "learning_rate": 2.8162878350438392e-05, |
| "loss": 0.4895, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.41774284110050536, |
| "grad_norm": 0.7027519999992445, |
| "learning_rate": 2.8152988144884793e-05, |
| "loss": 0.4916, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.42223469960696236, |
| "grad_norm": 0.7103554223289062, |
| "learning_rate": 2.8142712541127497e-05, |
| "loss": 0.4992, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.4267265581134194, |
| "grad_norm": 0.727736158392093, |
| "learning_rate": 2.813205182181876e-05, |
| "loss": 0.4985, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.4312184166198765, |
| "grad_norm": 0.9555199352525765, |
| "learning_rate": 2.812100628020426e-05, |
| "loss": 0.4798, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.43571027512633353, |
| "grad_norm": 0.868910581986562, |
| "learning_rate": 2.8109576220115017e-05, |
| "loss": 0.4981, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.4402021336327906, |
| "grad_norm": 0.6529644140315375, |
| "learning_rate": 2.8097761955959054e-05, |
| "loss": 0.4919, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.4446939921392476, |
| "grad_norm": 0.5671052329679708, |
| "learning_rate": 2.8085563812712747e-05, |
| "loss": 0.4712, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.44918585064570465, |
| "grad_norm": 0.5136497684738213, |
| "learning_rate": 2.8072982125911862e-05, |
| "loss": 0.4822, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.4536777091521617, |
| "grad_norm": 0.473175867183367, |
| "learning_rate": 2.8060017241642368e-05, |
| "loss": 0.4947, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.45816956765861877, |
| "grad_norm": 0.6552480677126711, |
| "learning_rate": 2.8046669516530868e-05, |
| "loss": 0.4906, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.4626614261650758, |
| "grad_norm": 0.659650419201284, |
| "learning_rate": 2.8032939317734826e-05, |
| "loss": 0.4909, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.46715328467153283, |
| "grad_norm": 0.7383804714557528, |
| "learning_rate": 2.8018827022932455e-05, |
| "loss": 0.4804, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.4716451431779899, |
| "grad_norm": 0.5224334871193078, |
| "learning_rate": 2.8004333020312324e-05, |
| "loss": 0.476, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.47613700168444695, |
| "grad_norm": 0.5088529143014228, |
| "learning_rate": 2.798945770856269e-05, |
| "loss": 0.4838, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.480628860190904, |
| "grad_norm": 0.5964414382468598, |
| "learning_rate": 2.7974201496860518e-05, |
| "loss": 0.4828, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.485120718697361, |
| "grad_norm": 0.6360776918845367, |
| "learning_rate": 2.7958564804860243e-05, |
| "loss": 0.4931, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.48961257720381807, |
| "grad_norm": 0.44711939823413394, |
| "learning_rate": 2.7942548062682207e-05, |
| "loss": 0.4931, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.4941044357102751, |
| "grad_norm": 0.39966821409323977, |
| "learning_rate": 2.7926151710900844e-05, |
| "loss": 0.4877, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.4985962942167322, |
| "grad_norm": 0.5350994599031005, |
| "learning_rate": 2.7909376200532555e-05, |
| "loss": 0.489, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.5030881527231892, |
| "grad_norm": 0.6783310634204751, |
| "learning_rate": 2.7892221993023292e-05, |
| "loss": 0.4787, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.5075800112296462, |
| "grad_norm": 0.6347156840740921, |
| "learning_rate": 2.7874689560235885e-05, |
| "loss": 0.4947, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.5120718697361033, |
| "grad_norm": 0.6856811377347193, |
| "learning_rate": 2.785677938443705e-05, |
| "loss": 0.4895, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.5165637282425604, |
| "grad_norm": 0.5537101587480877, |
| "learning_rate": 2.7838491958284118e-05, |
| "loss": 0.4766, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.5210555867490174, |
| "grad_norm": 0.4282052718202538, |
| "learning_rate": 2.78198277848115e-05, |
| "loss": 0.497, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.5255474452554745, |
| "grad_norm": 0.3393944725268088, |
| "learning_rate": 2.780078737741683e-05, |
| "loss": 0.4824, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.5300393037619315, |
| "grad_norm": 0.44536797037412856, |
| "learning_rate": 2.7781371259846857e-05, |
| "loss": 0.4889, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.5345311622683886, |
| "grad_norm": 0.6227643022770726, |
| "learning_rate": 2.7761579966183044e-05, |
| "loss": 0.4819, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.5390230207748455, |
| "grad_norm": 0.6149075794124363, |
| "learning_rate": 2.774141404082686e-05, |
| "loss": 0.4823, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.5435148792813026, |
| "grad_norm": 0.5251207353470196, |
| "learning_rate": 2.7720874038484803e-05, |
| "loss": 0.4896, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.5480067377877597, |
| "grad_norm": 0.540780427167242, |
| "learning_rate": 2.7699960524153162e-05, |
| "loss": 0.4817, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.5524985962942167, |
| "grad_norm": 0.5891410499193173, |
| "learning_rate": 2.767867407310247e-05, |
| "loss": 0.4662, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.5569904548006738, |
| "grad_norm": 0.6370627313155826, |
| "learning_rate": 2.765701527086166e-05, |
| "loss": 0.4934, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.5614823133071308, |
| "grad_norm": 0.6977215530471076, |
| "learning_rate": 2.7634984713201984e-05, |
| "loss": 0.4945, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.5659741718135879, |
| "grad_norm": 0.6200258417785485, |
| "learning_rate": 2.7612583006120617e-05, |
| "loss": 0.4722, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.570466030320045, |
| "grad_norm": 0.5002600730923096, |
| "learning_rate": 2.758981076582397e-05, |
| "loss": 0.4743, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.574957888826502, |
| "grad_norm": 0.5784959376735728, |
| "learning_rate": 2.7566668618710772e-05, |
| "loss": 0.4785, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.5794497473329591, |
| "grad_norm": 0.6395956953557737, |
| "learning_rate": 2.754315720135481e-05, |
| "loss": 0.4844, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.583941605839416, |
| "grad_norm": 0.5350847012934989, |
| "learning_rate": 2.751927716048744e-05, |
| "loss": 0.4692, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.5884334643458731, |
| "grad_norm": 0.43903297472349573, |
| "learning_rate": 2.749502915297979e-05, |
| "loss": 0.4842, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.5929253228523301, |
| "grad_norm": 0.45348394184058516, |
| "learning_rate": 2.747041384582468e-05, |
| "loss": 0.4673, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.5974171813587872, |
| "grad_norm": 0.5537640504259822, |
| "learning_rate": 2.7445431916118297e-05, |
| "loss": 0.4735, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.6019090398652442, |
| "grad_norm": 0.44994353104607454, |
| "learning_rate": 2.742008405104155e-05, |
| "loss": 0.4797, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.6064008983717013, |
| "grad_norm": 0.4544214314383658, |
| "learning_rate": 2.7394370947841175e-05, |
| "loss": 0.4796, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.6108927568781584, |
| "grad_norm": 0.5054093825095585, |
| "learning_rate": 2.7368293313810566e-05, |
| "loss": 0.483, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.6153846153846154, |
| "grad_norm": 0.7187663549969284, |
| "learning_rate": 2.7341851866270298e-05, |
| "loss": 0.4773, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.6198764738910725, |
| "grad_norm": 0.699515058479278, |
| "learning_rate": 2.7315047332548415e-05, |
| "loss": 0.4736, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.6243683323975294, |
| "grad_norm": 0.5165553858169959, |
| "learning_rate": 2.728788044996041e-05, |
| "loss": 0.479, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.6288601909039865, |
| "grad_norm": 0.4327740748377208, |
| "learning_rate": 2.7260351965788962e-05, |
| "loss": 0.4715, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.6333520494104435, |
| "grad_norm": 0.5038382774885327, |
| "learning_rate": 2.7232462637263354e-05, |
| "loss": 0.4821, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.6378439079169006, |
| "grad_norm": 0.5193208261847068, |
| "learning_rate": 2.720421323153866e-05, |
| "loss": 0.4721, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.6423357664233577, |
| "grad_norm": 0.5055322576434895, |
| "learning_rate": 2.7175604525674646e-05, |
| "loss": 0.4656, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.6468276249298147, |
| "grad_norm": 0.46251349642636685, |
| "learning_rate": 2.7146637306614383e-05, |
| "loss": 0.4676, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.6513194834362718, |
| "grad_norm": 0.5012429738213727, |
| "learning_rate": 2.711731237116262e-05, |
| "loss": 0.4831, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.6558113419427288, |
| "grad_norm": 0.5946163957927814, |
| "learning_rate": 2.708763052596383e-05, |
| "loss": 0.4761, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.6603032004491859, |
| "grad_norm": 0.6959852968009713, |
| "learning_rate": 2.7057592587480068e-05, |
| "loss": 0.4749, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.664795058955643, |
| "grad_norm": 0.6454980147287459, |
| "learning_rate": 2.702719938196848e-05, |
| "loss": 0.4743, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.6692869174620999, |
| "grad_norm": 0.48108943458946607, |
| "learning_rate": 2.6996451745458583e-05, |
| "loss": 0.4773, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.673778775968557, |
| "grad_norm": 0.3774779921512963, |
| "learning_rate": 2.696535052372927e-05, |
| "loss": 0.47, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.678270634475014, |
| "grad_norm": 0.4392920410610574, |
| "learning_rate": 2.6933896572285545e-05, |
| "loss": 0.4848, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.6827624929814711, |
| "grad_norm": 0.5769958845705692, |
| "learning_rate": 2.6902090756334994e-05, |
| "loss": 0.4923, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.6872543514879281, |
| "grad_norm": 0.5757560343292677, |
| "learning_rate": 2.686993395076398e-05, |
| "loss": 0.4791, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.6917462099943852, |
| "grad_norm": 0.5671105946152095, |
| "learning_rate": 2.683742704011357e-05, |
| "loss": 0.4729, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.6962380685008422, |
| "grad_norm": 0.5782184489398106, |
| "learning_rate": 2.6804570918555232e-05, |
| "loss": 0.4779, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.7007299270072993, |
| "grad_norm": 0.6301887119444084, |
| "learning_rate": 2.67713664898662e-05, |
| "loss": 0.4757, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.7052217855137564, |
| "grad_norm": 0.6399273153805983, |
| "learning_rate": 2.673781466740465e-05, |
| "loss": 0.4683, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.7097136440202133, |
| "grad_norm": 0.5751139777508563, |
| "learning_rate": 2.6703916374084546e-05, |
| "loss": 0.474, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.7142055025266704, |
| "grad_norm": 0.5237392026559451, |
| "learning_rate": 2.6669672542350276e-05, |
| "loss": 0.4868, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.7186973610331274, |
| "grad_norm": 0.546643971635381, |
| "learning_rate": 2.6635084114150996e-05, |
| "loss": 0.4804, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.7231892195395845, |
| "grad_norm": 0.5550996062832795, |
| "learning_rate": 2.6600152040914712e-05, |
| "loss": 0.4697, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.7276810780460415, |
| "grad_norm": 0.5376518959056129, |
| "learning_rate": 2.6564877283522115e-05, |
| "loss": 0.4722, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.7321729365524986, |
| "grad_norm": 0.4777569065930761, |
| "learning_rate": 2.6529260812280152e-05, |
| "loss": 0.4555, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.7366647950589557, |
| "grad_norm": 0.46167672523022346, |
| "learning_rate": 2.6493303606895333e-05, |
| "loss": 0.4606, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.7411566535654127, |
| "grad_norm": 0.41976167794536035, |
| "learning_rate": 2.6457006656446777e-05, |
| "loss": 0.4771, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.7456485120718698, |
| "grad_norm": 0.6674527791762378, |
| "learning_rate": 2.642037095935903e-05, |
| "loss": 0.4772, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.7501403705783268, |
| "grad_norm": 0.6290119968833247, |
| "learning_rate": 2.638339752337456e-05, |
| "loss": 0.482, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.7546322290847838, |
| "grad_norm": 0.40285509227429195, |
| "learning_rate": 2.634608736552607e-05, |
| "loss": 0.4653, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.7591240875912408, |
| "grad_norm": 0.4164094017051228, |
| "learning_rate": 2.630844151210851e-05, |
| "loss": 0.4709, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.7636159460976979, |
| "grad_norm": 0.4467679602252483, |
| "learning_rate": 2.6270460998650838e-05, |
| "loss": 0.4698, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.768107804604155, |
| "grad_norm": 0.5462945507817383, |
| "learning_rate": 2.6232146869887565e-05, |
| "loss": 0.4778, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.772599663110612, |
| "grad_norm": 0.3685789574173616, |
| "learning_rate": 2.6193500179729982e-05, |
| "loss": 0.4681, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.7770915216170691, |
| "grad_norm": 0.4490725514023042, |
| "learning_rate": 2.6154521991237195e-05, |
| "loss": 0.4586, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.7815833801235261, |
| "grad_norm": 0.46279499032609167, |
| "learning_rate": 2.6115213376586873e-05, |
| "loss": 0.4731, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.7860752386299832, |
| "grad_norm": 0.4997523270309222, |
| "learning_rate": 2.6075575417045745e-05, |
| "loss": 0.4769, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.7905670971364402, |
| "grad_norm": 0.5505245764097036, |
| "learning_rate": 2.603560920293988e-05, |
| "loss": 0.4758, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.7950589556428973, |
| "grad_norm": 0.363463467720154, |
| "learning_rate": 2.5995315833624693e-05, |
| "loss": 0.4561, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.7995508141493542, |
| "grad_norm": 0.3917739564310974, |
| "learning_rate": 2.595469641745467e-05, |
| "loss": 0.4641, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.8040426726558113, |
| "grad_norm": 0.45375939875892646, |
| "learning_rate": 2.5913752071752935e-05, |
| "loss": 0.4679, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.8085345311622684, |
| "grad_norm": 0.4072903389108503, |
| "learning_rate": 2.5872483922780468e-05, |
| "loss": 0.4736, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.8130263896687254, |
| "grad_norm": 0.5207413079308953, |
| "learning_rate": 2.583089310570515e-05, |
| "loss": 0.4823, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.8175182481751825, |
| "grad_norm": 0.47973720569304484, |
| "learning_rate": 2.578898076457053e-05, |
| "loss": 0.4649, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.8220101066816395, |
| "grad_norm": 0.3987214245331553, |
| "learning_rate": 2.5746748052264372e-05, |
| "loss": 0.4697, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.8265019651880966, |
| "grad_norm": 0.40108553854919987, |
| "learning_rate": 2.570419613048691e-05, |
| "loss": 0.4622, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.8309938236945537, |
| "grad_norm": 0.521581370496541, |
| "learning_rate": 2.566132616971891e-05, |
| "loss": 0.4619, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.8354856822010107, |
| "grad_norm": 0.5047605572020768, |
| "learning_rate": 2.5618139349189486e-05, |
| "loss": 0.4671, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.8399775407074677, |
| "grad_norm": 0.5364692556005867, |
| "learning_rate": 2.5574636856843648e-05, |
| "loss": 0.4689, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.8444693992139247, |
| "grad_norm": 0.4492847766801117, |
| "learning_rate": 2.5530819889309624e-05, |
| "loss": 0.4599, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.8489612577203818, |
| "grad_norm": 0.38696429925917464, |
| "learning_rate": 2.548668965186596e-05, |
| "loss": 0.4606, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.8534531162268388, |
| "grad_norm": 0.4268656959845891, |
| "learning_rate": 2.5442247358408328e-05, |
| "loss": 0.4755, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.8579449747332959, |
| "grad_norm": 0.5247596649132644, |
| "learning_rate": 2.539749423141619e-05, |
| "loss": 0.477, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.862436833239753, |
| "grad_norm": 0.4921984452162432, |
| "learning_rate": 2.5352431501919134e-05, |
| "loss": 0.4731, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.86692869174621, |
| "grad_norm": 0.48240116746472755, |
| "learning_rate": 2.5307060409463016e-05, |
| "loss": 0.4689, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.8714205502526671, |
| "grad_norm": 0.4931496661169666, |
| "learning_rate": 2.526138220207587e-05, |
| "loss": 0.4803, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.8759124087591241, |
| "grad_norm": 0.582045224133066, |
| "learning_rate": 2.5215398136233585e-05, |
| "loss": 0.4557, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.8804042672655812, |
| "grad_norm": 0.5411459162367669, |
| "learning_rate": 2.5169109476825326e-05, |
| "loss": 0.4589, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.8848961257720381, |
| "grad_norm": 0.39863691240843, |
| "learning_rate": 2.512251749711875e-05, |
| "loss": 0.4668, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.8893879842784952, |
| "grad_norm": 0.4593569962235714, |
| "learning_rate": 2.5075623478724984e-05, |
| "loss": 0.4753, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.8938798427849522, |
| "grad_norm": 0.4287957363960024, |
| "learning_rate": 2.502842871156337e-05, |
| "loss": 0.465, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.8983717012914093, |
| "grad_norm": 0.4228370536587653, |
| "learning_rate": 2.498093449382598e-05, |
| "loss": 0.4519, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.9028635597978664, |
| "grad_norm": 0.375019534593526, |
| "learning_rate": 2.4933142131941917e-05, |
| "loss": 0.4645, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.9073554183043234, |
| "grad_norm": 0.43124335474679265, |
| "learning_rate": 2.488505294054135e-05, |
| "loss": 0.4804, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.9118472768107805, |
| "grad_norm": 0.45579971562245586, |
| "learning_rate": 2.4836668242419398e-05, |
| "loss": 0.4589, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.9163391353172375, |
| "grad_norm": 0.36653038951983824, |
| "learning_rate": 2.47879893684997e-05, |
| "loss": 0.4593, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.9208309938236946, |
| "grad_norm": 0.38913683153964257, |
| "learning_rate": 2.4739017657797826e-05, |
| "loss": 0.469, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.9253228523301515, |
| "grad_norm": 0.3716699947181807, |
| "learning_rate": 2.4689754457384455e-05, |
| "loss": 0.474, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.9298147108366086, |
| "grad_norm": 0.4537303687953904, |
| "learning_rate": 2.4640201122348293e-05, |
| "loss": 0.4624, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.9343065693430657, |
| "grad_norm": 0.3637412162756154, |
| "learning_rate": 2.4590359015758823e-05, |
| "loss": 0.4602, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.9387984278495227, |
| "grad_norm": 0.45311583807808, |
| "learning_rate": 2.45402295086288e-05, |
| "loss": 0.4699, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.9432902863559798, |
| "grad_norm": 0.4006232500664817, |
| "learning_rate": 2.4489813979876532e-05, |
| "loss": 0.4734, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.9477821448624368, |
| "grad_norm": 0.3550939989105772, |
| "learning_rate": 2.443911381628797e-05, |
| "loss": 0.4635, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.9522740033688939, |
| "grad_norm": 0.3358080281360604, |
| "learning_rate": 2.4388130412478547e-05, |
| "loss": 0.4572, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.956765861875351, |
| "grad_norm": 0.4055886952871211, |
| "learning_rate": 2.4336865170854804e-05, |
| "loss": 0.4569, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.961257720381808, |
| "grad_norm": 0.4576518855442424, |
| "learning_rate": 2.4285319501575847e-05, |
| "loss": 0.4671, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.9657495788882651, |
| "grad_norm": 0.40899913469666216, |
| "learning_rate": 2.4233494822514523e-05, |
| "loss": 0.4458, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.970241437394722, |
| "grad_norm": 0.28945530496891764, |
| "learning_rate": 2.4181392559218453e-05, |
| "loss": 0.4618, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.9747332959011791, |
| "grad_norm": 0.35107378537689937, |
| "learning_rate": 2.4129014144870783e-05, |
| "loss": 0.4574, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.9792251544076361, |
| "grad_norm": 0.4020814076094774, |
| "learning_rate": 2.4076361020250788e-05, |
| "loss": 0.4646, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.9837170129140932, |
| "grad_norm": 0.4291582741080788, |
| "learning_rate": 2.4023434633694226e-05, |
| "loss": 0.4637, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.9882088714205502, |
| "grad_norm": 0.40682515531336805, |
| "learning_rate": 2.3970236441053506e-05, |
| "loss": 0.4772, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.9927007299270073, |
| "grad_norm": 0.3749932255250272, |
| "learning_rate": 2.3916767905657643e-05, |
| "loss": 0.465, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.9971925884334644, |
| "grad_norm": 0.4077210425304697, |
| "learning_rate": 2.3863030498271994e-05, |
| "loss": 0.4628, |
| "step": 222 |
| }, |
| { |
| "epoch": 1.0030881527231892, |
| "grad_norm": 0.6159591297183691, |
| "learning_rate": 2.3809025697057822e-05, |
| "loss": 0.7494, |
| "step": 223 |
| }, |
| { |
| "epoch": 1.0075800112296462, |
| "grad_norm": 0.743081485050473, |
| "learning_rate": 2.3754754987531616e-05, |
| "loss": 0.4526, |
| "step": 224 |
| }, |
| { |
| "epoch": 1.0120718697361033, |
| "grad_norm": 0.9936672942774801, |
| "learning_rate": 2.3700219862524227e-05, |
| "loss": 0.4246, |
| "step": 225 |
| }, |
| { |
| "epoch": 1.0165637282425604, |
| "grad_norm": 0.798262818791095, |
| "learning_rate": 2.364542182213983e-05, |
| "loss": 0.4375, |
| "step": 226 |
| }, |
| { |
| "epoch": 1.0210555867490174, |
| "grad_norm": 0.6667703200360748, |
| "learning_rate": 2.3590362373714646e-05, |
| "loss": 0.4336, |
| "step": 227 |
| }, |
| { |
| "epoch": 1.0255474452554745, |
| "grad_norm": 0.8477945932709707, |
| "learning_rate": 2.3535043031775457e-05, |
| "loss": 0.4555, |
| "step": 228 |
| }, |
| { |
| "epoch": 1.0300393037619315, |
| "grad_norm": 0.7922389974719279, |
| "learning_rate": 2.3479465317997993e-05, |
| "loss": 0.4365, |
| "step": 229 |
| }, |
| { |
| "epoch": 1.0345311622683886, |
| "grad_norm": 0.7126148195703437, |
| "learning_rate": 2.342363076116503e-05, |
| "loss": 0.4523, |
| "step": 230 |
| }, |
| { |
| "epoch": 1.0390230207748457, |
| "grad_norm": 0.8802731555951069, |
| "learning_rate": 2.336754089712436e-05, |
| "loss": 0.4491, |
| "step": 231 |
| }, |
| { |
| "epoch": 1.0435148792813027, |
| "grad_norm": 0.9077015045585495, |
| "learning_rate": 2.331119726874655e-05, |
| "loss": 0.4564, |
| "step": 232 |
| }, |
| { |
| "epoch": 1.0480067377877598, |
| "grad_norm": 0.8853843133720011, |
| "learning_rate": 2.3254601425882486e-05, |
| "loss": 0.442, |
| "step": 233 |
| }, |
| { |
| "epoch": 1.0524985962942168, |
| "grad_norm": 0.7675340514208551, |
| "learning_rate": 2.3197754925320744e-05, |
| "loss": 0.4386, |
| "step": 234 |
| }, |
| { |
| "epoch": 1.0569904548006739, |
| "grad_norm": 0.795855384998558, |
| "learning_rate": 2.314065933074478e-05, |
| "loss": 0.4483, |
| "step": 235 |
| }, |
| { |
| "epoch": 1.0614823133071307, |
| "grad_norm": 0.8688332898542778, |
| "learning_rate": 2.30833162126899e-05, |
| "loss": 0.4401, |
| "step": 236 |
| }, |
| { |
| "epoch": 1.0659741718135878, |
| "grad_norm": 0.6912025881093444, |
| "learning_rate": 2.3025727148500078e-05, |
| "loss": 0.4258, |
| "step": 237 |
| }, |
| { |
| "epoch": 1.0704660303200448, |
| "grad_norm": 0.6210071932563501, |
| "learning_rate": 2.296789372228454e-05, |
| "loss": 0.4446, |
| "step": 238 |
| }, |
| { |
| "epoch": 1.074957888826502, |
| "grad_norm": 0.7612886989218541, |
| "learning_rate": 2.290981752487423e-05, |
| "loss": 0.4371, |
| "step": 239 |
| }, |
| { |
| "epoch": 1.079449747332959, |
| "grad_norm": 0.49304669277389745, |
| "learning_rate": 2.285150015377801e-05, |
| "loss": 0.4313, |
| "step": 240 |
| }, |
| { |
| "epoch": 1.083941605839416, |
| "grad_norm": 0.6740461248297163, |
| "learning_rate": 2.279294321313875e-05, |
| "loss": 0.4347, |
| "step": 241 |
| }, |
| { |
| "epoch": 1.088433464345873, |
| "grad_norm": 0.47110764302293234, |
| "learning_rate": 2.2734148313689173e-05, |
| "loss": 0.4472, |
| "step": 242 |
| }, |
| { |
| "epoch": 1.0929253228523301, |
| "grad_norm": 0.6319912881250679, |
| "learning_rate": 2.2675117072707572e-05, |
| "loss": 0.438, |
| "step": 243 |
| }, |
| { |
| "epoch": 1.0974171813587872, |
| "grad_norm": 0.48989174750287556, |
| "learning_rate": 2.2615851113973312e-05, |
| "loss": 0.4266, |
| "step": 244 |
| }, |
| { |
| "epoch": 1.1019090398652442, |
| "grad_norm": 0.4886585461236165, |
| "learning_rate": 2.255635206772217e-05, |
| "loss": 0.4359, |
| "step": 245 |
| }, |
| { |
| "epoch": 1.1064008983717013, |
| "grad_norm": 0.5509821980510767, |
| "learning_rate": 2.2496621570601487e-05, |
| "loss": 0.4598, |
| "step": 246 |
| }, |
| { |
| "epoch": 1.1108927568781584, |
| "grad_norm": 0.366525891107272, |
| "learning_rate": 2.243666126562515e-05, |
| "loss": 0.4199, |
| "step": 247 |
| }, |
| { |
| "epoch": 1.1153846153846154, |
| "grad_norm": 0.5181695608354417, |
| "learning_rate": 2.2376472802128402e-05, |
| "loss": 0.4375, |
| "step": 248 |
| }, |
| { |
| "epoch": 1.1198764738910725, |
| "grad_norm": 0.38286389285013006, |
| "learning_rate": 2.231605783572246e-05, |
| "loss": 0.4417, |
| "step": 249 |
| }, |
| { |
| "epoch": 1.1243683323975295, |
| "grad_norm": 0.5620717664471987, |
| "learning_rate": 2.2255418028248992e-05, |
| "loss": 0.4593, |
| "step": 250 |
| }, |
| { |
| "epoch": 1.1288601909039866, |
| "grad_norm": 0.4462443722479522, |
| "learning_rate": 2.21945550477344e-05, |
| "loss": 0.4374, |
| "step": 251 |
| }, |
| { |
| "epoch": 1.1333520494104437, |
| "grad_norm": 0.4272534160396403, |
| "learning_rate": 2.213347056834392e-05, |
| "loss": 0.4131, |
| "step": 252 |
| }, |
| { |
| "epoch": 1.1378439079169007, |
| "grad_norm": 0.5008773829340849, |
| "learning_rate": 2.2072166270335592e-05, |
| "loss": 0.4586, |
| "step": 253 |
| }, |
| { |
| "epoch": 1.1423357664233578, |
| "grad_norm": 0.3792872396771057, |
| "learning_rate": 2.2010643840014032e-05, |
| "loss": 0.4219, |
| "step": 254 |
| }, |
| { |
| "epoch": 1.1468276249298146, |
| "grad_norm": 0.44576445227452827, |
| "learning_rate": 2.1948904969684047e-05, |
| "loss": 0.4512, |
| "step": 255 |
| }, |
| { |
| "epoch": 1.1513194834362717, |
| "grad_norm": 0.3963785617647081, |
| "learning_rate": 2.188695135760409e-05, |
| "loss": 0.4187, |
| "step": 256 |
| }, |
| { |
| "epoch": 1.1558113419427287, |
| "grad_norm": 0.3745162442173085, |
| "learning_rate": 2.1824784707939534e-05, |
| "loss": 0.4527, |
| "step": 257 |
| }, |
| { |
| "epoch": 1.1603032004491858, |
| "grad_norm": 0.4302552912637187, |
| "learning_rate": 2.17624067307158e-05, |
| "loss": 0.4338, |
| "step": 258 |
| }, |
| { |
| "epoch": 1.1647950589556428, |
| "grad_norm": 0.35964883159886635, |
| "learning_rate": 2.1699819141771334e-05, |
| "loss": 0.4503, |
| "step": 259 |
| }, |
| { |
| "epoch": 1.1692869174621, |
| "grad_norm": 0.46269656933781855, |
| "learning_rate": 2.1637023662710378e-05, |
| "loss": 0.4226, |
| "step": 260 |
| }, |
| { |
| "epoch": 1.173778775968557, |
| "grad_norm": 0.3412078867716096, |
| "learning_rate": 2.1574022020855653e-05, |
| "loss": 0.4147, |
| "step": 261 |
| }, |
| { |
| "epoch": 1.178270634475014, |
| "grad_norm": 0.4572735310680308, |
| "learning_rate": 2.1510815949200806e-05, |
| "loss": 0.4611, |
| "step": 262 |
| }, |
| { |
| "epoch": 1.182762492981471, |
| "grad_norm": 0.37564713470069194, |
| "learning_rate": 2.1447407186362773e-05, |
| "loss": 0.4317, |
| "step": 263 |
| }, |
| { |
| "epoch": 1.1872543514879281, |
| "grad_norm": 0.3853481594309711, |
| "learning_rate": 2.138379747653393e-05, |
| "loss": 0.4413, |
| "step": 264 |
| }, |
| { |
| "epoch": 1.1917462099943852, |
| "grad_norm": 0.3203618610490023, |
| "learning_rate": 2.131998856943413e-05, |
| "loss": 0.4273, |
| "step": 265 |
| }, |
| { |
| "epoch": 1.1962380685008422, |
| "grad_norm": 0.4418711444396078, |
| "learning_rate": 2.125598222026257e-05, |
| "loss": 0.4434, |
| "step": 266 |
| }, |
| { |
| "epoch": 1.2007299270072993, |
| "grad_norm": 0.4573587827702061, |
| "learning_rate": 2.1191780189649513e-05, |
| "loss": 0.4367, |
| "step": 267 |
| }, |
| { |
| "epoch": 1.2052217855137564, |
| "grad_norm": 0.38776707741248617, |
| "learning_rate": 2.1127384243607844e-05, |
| "loss": 0.4357, |
| "step": 268 |
| }, |
| { |
| "epoch": 1.2097136440202134, |
| "grad_norm": 0.3259763037218784, |
| "learning_rate": 2.106279615348451e-05, |
| "loss": 0.4446, |
| "step": 269 |
| }, |
| { |
| "epoch": 1.2142055025266705, |
| "grad_norm": 0.34557251617943774, |
| "learning_rate": 2.099801769591179e-05, |
| "loss": 0.4204, |
| "step": 270 |
| }, |
| { |
| "epoch": 1.2186973610331275, |
| "grad_norm": 0.4181566384819826, |
| "learning_rate": 2.0933050652758417e-05, |
| "loss": 0.4551, |
| "step": 271 |
| }, |
| { |
| "epoch": 1.2231892195395846, |
| "grad_norm": 0.2875529069270976, |
| "learning_rate": 2.0867896811080576e-05, |
| "loss": 0.4155, |
| "step": 272 |
| }, |
| { |
| "epoch": 1.2276810780460417, |
| "grad_norm": 0.32586319219897486, |
| "learning_rate": 2.0802557963072736e-05, |
| "loss": 0.4315, |
| "step": 273 |
| }, |
| { |
| "epoch": 1.2321729365524985, |
| "grad_norm": 0.3014143337486855, |
| "learning_rate": 2.073703590601836e-05, |
| "loss": 0.445, |
| "step": 274 |
| }, |
| { |
| "epoch": 1.2366647950589555, |
| "grad_norm": 0.3366952207644692, |
| "learning_rate": 2.067133244224047e-05, |
| "loss": 0.4522, |
| "step": 275 |
| }, |
| { |
| "epoch": 1.2411566535654126, |
| "grad_norm": 0.2981657265784339, |
| "learning_rate": 2.0605449379052047e-05, |
| "loss": 0.4128, |
| "step": 276 |
| }, |
| { |
| "epoch": 1.2456485120718697, |
| "grad_norm": 0.2741164199668628, |
| "learning_rate": 2.0539388528706354e-05, |
| "loss": 0.4465, |
| "step": 277 |
| }, |
| { |
| "epoch": 1.2501403705783267, |
| "grad_norm": 0.28116445398834183, |
| "learning_rate": 2.0473151708347052e-05, |
| "loss": 0.4214, |
| "step": 278 |
| }, |
| { |
| "epoch": 1.2546322290847838, |
| "grad_norm": 0.26743411373744025, |
| "learning_rate": 2.040674073995824e-05, |
| "loss": 0.4378, |
| "step": 279 |
| }, |
| { |
| "epoch": 1.2591240875912408, |
| "grad_norm": 0.349163241377986, |
| "learning_rate": 2.0340157450314317e-05, |
| "loss": 0.4328, |
| "step": 280 |
| }, |
| { |
| "epoch": 1.263615946097698, |
| "grad_norm": 0.3478461930112901, |
| "learning_rate": 2.0273403670929756e-05, |
| "loss": 0.4373, |
| "step": 281 |
| }, |
| { |
| "epoch": 1.268107804604155, |
| "grad_norm": 0.35369927623819397, |
| "learning_rate": 2.020648123800871e-05, |
| "loss": 0.4356, |
| "step": 282 |
| }, |
| { |
| "epoch": 1.272599663110612, |
| "grad_norm": 0.4318259227052038, |
| "learning_rate": 2.0139391992394488e-05, |
| "loss": 0.4337, |
| "step": 283 |
| }, |
| { |
| "epoch": 1.277091521617069, |
| "grad_norm": 0.3341822520627836, |
| "learning_rate": 2.0072137779518955e-05, |
| "loss": 0.4364, |
| "step": 284 |
| }, |
| { |
| "epoch": 1.2815833801235261, |
| "grad_norm": 0.32765945183019074, |
| "learning_rate": 2.000472044935174e-05, |
| "loss": 0.4376, |
| "step": 285 |
| }, |
| { |
| "epoch": 1.2860752386299832, |
| "grad_norm": 0.29486663216008824, |
| "learning_rate": 1.9937141856349362e-05, |
| "loss": 0.4362, |
| "step": 286 |
| }, |
| { |
| "epoch": 1.2905670971364402, |
| "grad_norm": 0.3708118576046198, |
| "learning_rate": 1.9869403859404217e-05, |
| "loss": 0.4505, |
| "step": 287 |
| }, |
| { |
| "epoch": 1.2950589556428973, |
| "grad_norm": 0.2647734977958948, |
| "learning_rate": 1.9801508321793443e-05, |
| "loss": 0.4351, |
| "step": 288 |
| }, |
| { |
| "epoch": 1.2995508141493544, |
| "grad_norm": 0.29786540958009505, |
| "learning_rate": 1.973345711112766e-05, |
| "loss": 0.4148, |
| "step": 289 |
| }, |
| { |
| "epoch": 1.3040426726558114, |
| "grad_norm": 0.34111071679248295, |
| "learning_rate": 1.9665252099299626e-05, |
| "loss": 0.4369, |
| "step": 290 |
| }, |
| { |
| "epoch": 1.3085345311622683, |
| "grad_norm": 0.2825874052298496, |
| "learning_rate": 1.95968951624327e-05, |
| "loss": 0.4375, |
| "step": 291 |
| }, |
| { |
| "epoch": 1.3130263896687255, |
| "grad_norm": 0.35110084669077885, |
| "learning_rate": 1.9528388180829284e-05, |
| "loss": 0.4296, |
| "step": 292 |
| }, |
| { |
| "epoch": 1.3175182481751824, |
| "grad_norm": 0.26124827557042796, |
| "learning_rate": 1.9459733038919063e-05, |
| "loss": 0.4404, |
| "step": 293 |
| }, |
| { |
| "epoch": 1.3220101066816397, |
| "grad_norm": 0.3319179737449958, |
| "learning_rate": 1.939093162520719e-05, |
| "loss": 0.4342, |
| "step": 294 |
| }, |
| { |
| "epoch": 1.3265019651880965, |
| "grad_norm": 0.3292095376226441, |
| "learning_rate": 1.9321985832222337e-05, |
| "loss": 0.4433, |
| "step": 295 |
| }, |
| { |
| "epoch": 1.3309938236945535, |
| "grad_norm": 0.24475291813862662, |
| "learning_rate": 1.9252897556464622e-05, |
| "loss": 0.4368, |
| "step": 296 |
| }, |
| { |
| "epoch": 1.3354856822010106, |
| "grad_norm": 0.3056744142638689, |
| "learning_rate": 1.9183668698353474e-05, |
| "loss": 0.4284, |
| "step": 297 |
| }, |
| { |
| "epoch": 1.3399775407074677, |
| "grad_norm": 0.28821906996690305, |
| "learning_rate": 1.9114301162175324e-05, |
| "loss": 0.4297, |
| "step": 298 |
| }, |
| { |
| "epoch": 1.3444693992139247, |
| "grad_norm": 0.26046990859779695, |
| "learning_rate": 1.9044796856031236e-05, |
| "loss": 0.4289, |
| "step": 299 |
| }, |
| { |
| "epoch": 1.3489612577203818, |
| "grad_norm": 0.3281548899006919, |
| "learning_rate": 1.8975157691784422e-05, |
| "loss": 0.446, |
| "step": 300 |
| }, |
| { |
| "epoch": 1.3534531162268388, |
| "grad_norm": 0.3101251116335806, |
| "learning_rate": 1.890538558500766e-05, |
| "loss": 0.4381, |
| "step": 301 |
| }, |
| { |
| "epoch": 1.357944974733296, |
| "grad_norm": 0.27697116298210783, |
| "learning_rate": 1.8835482454930595e-05, |
| "loss": 0.4303, |
| "step": 302 |
| }, |
| { |
| "epoch": 1.362436833239753, |
| "grad_norm": 0.26183856441501985, |
| "learning_rate": 1.8765450224386924e-05, |
| "loss": 0.4271, |
| "step": 303 |
| }, |
| { |
| "epoch": 1.36692869174621, |
| "grad_norm": 0.24441781947836166, |
| "learning_rate": 1.8695290819761552e-05, |
| "loss": 0.426, |
| "step": 304 |
| }, |
| { |
| "epoch": 1.371420550252667, |
| "grad_norm": 0.2637655350752813, |
| "learning_rate": 1.8625006170937556e-05, |
| "loss": 0.4255, |
| "step": 305 |
| }, |
| { |
| "epoch": 1.3759124087591241, |
| "grad_norm": 0.3417693190160526, |
| "learning_rate": 1.855459821124314e-05, |
| "loss": 0.4304, |
| "step": 306 |
| }, |
| { |
| "epoch": 1.3804042672655812, |
| "grad_norm": 0.24824305610064504, |
| "learning_rate": 1.8484068877398422e-05, |
| "loss": 0.4149, |
| "step": 307 |
| }, |
| { |
| "epoch": 1.3848961257720382, |
| "grad_norm": 0.2983957956329138, |
| "learning_rate": 1.8413420109462173e-05, |
| "loss": 0.4459, |
| "step": 308 |
| }, |
| { |
| "epoch": 1.3893879842784953, |
| "grad_norm": 0.29141281712707245, |
| "learning_rate": 1.8342653850778457e-05, |
| "loss": 0.4261, |
| "step": 309 |
| }, |
| { |
| "epoch": 1.3938798427849521, |
| "grad_norm": 0.31643097989145, |
| "learning_rate": 1.8271772047923166e-05, |
| "loss": 0.4276, |
| "step": 310 |
| }, |
| { |
| "epoch": 1.3983717012914094, |
| "grad_norm": 0.26661817104489643, |
| "learning_rate": 1.820077665065048e-05, |
| "loss": 0.413, |
| "step": 311 |
| }, |
| { |
| "epoch": 1.4028635597978663, |
| "grad_norm": 0.3061227775995877, |
| "learning_rate": 1.812966961183924e-05, |
| "loss": 0.4329, |
| "step": 312 |
| }, |
| { |
| "epoch": 1.4073554183043235, |
| "grad_norm": 0.29634237723914175, |
| "learning_rate": 1.805845288743921e-05, |
| "loss": 0.4249, |
| "step": 313 |
| }, |
| { |
| "epoch": 1.4118472768107804, |
| "grad_norm": 0.3301297214638871, |
| "learning_rate": 1.7987128436417303e-05, |
| "loss": 0.4588, |
| "step": 314 |
| }, |
| { |
| "epoch": 1.4163391353172374, |
| "grad_norm": 0.3698658191349894, |
| "learning_rate": 1.791569822070368e-05, |
| "loss": 0.4245, |
| "step": 315 |
| }, |
| { |
| "epoch": 1.4208309938236945, |
| "grad_norm": 0.22262993712499923, |
| "learning_rate": 1.7844164205137772e-05, |
| "loss": 0.424, |
| "step": 316 |
| }, |
| { |
| "epoch": 1.4253228523301515, |
| "grad_norm": 0.37654108374839934, |
| "learning_rate": 1.7772528357414252e-05, |
| "loss": 0.453, |
| "step": 317 |
| }, |
| { |
| "epoch": 1.4298147108366086, |
| "grad_norm": 0.2594395877577525, |
| "learning_rate": 1.770079264802891e-05, |
| "loss": 0.4296, |
| "step": 318 |
| }, |
| { |
| "epoch": 1.4343065693430657, |
| "grad_norm": 0.3514445535834096, |
| "learning_rate": 1.762895905022443e-05, |
| "loss": 0.4407, |
| "step": 319 |
| }, |
| { |
| "epoch": 1.4387984278495227, |
| "grad_norm": 0.2476040970236891, |
| "learning_rate": 1.7557029539936132e-05, |
| "loss": 0.4324, |
| "step": 320 |
| }, |
| { |
| "epoch": 1.4432902863559798, |
| "grad_norm": 0.31874415328292965, |
| "learning_rate": 1.7485006095737602e-05, |
| "loss": 0.445, |
| "step": 321 |
| }, |
| { |
| "epoch": 1.4477821448624368, |
| "grad_norm": 0.2583979409843575, |
| "learning_rate": 1.7412890698786298e-05, |
| "loss": 0.4244, |
| "step": 322 |
| }, |
| { |
| "epoch": 1.452274003368894, |
| "grad_norm": 0.3144942018984089, |
| "learning_rate": 1.734068533276901e-05, |
| "loss": 0.4443, |
| "step": 323 |
| }, |
| { |
| "epoch": 1.456765861875351, |
| "grad_norm": 0.27690020636097235, |
| "learning_rate": 1.726839198384733e-05, |
| "loss": 0.4364, |
| "step": 324 |
| }, |
| { |
| "epoch": 1.461257720381808, |
| "grad_norm": 0.26426874913937026, |
| "learning_rate": 1.7196012640603007e-05, |
| "loss": 0.4383, |
| "step": 325 |
| }, |
| { |
| "epoch": 1.465749578888265, |
| "grad_norm": 0.24541716474302672, |
| "learning_rate": 1.7123549293983246e-05, |
| "loss": 0.4374, |
| "step": 326 |
| }, |
| { |
| "epoch": 1.4702414373947221, |
| "grad_norm": 0.24834945996912894, |
| "learning_rate": 1.7051003937245932e-05, |
| "loss": 0.4126, |
| "step": 327 |
| }, |
| { |
| "epoch": 1.4747332959011792, |
| "grad_norm": 0.2624637386600607, |
| "learning_rate": 1.6978378565904823e-05, |
| "loss": 0.453, |
| "step": 328 |
| }, |
| { |
| "epoch": 1.479225154407636, |
| "grad_norm": 0.27142087832902706, |
| "learning_rate": 1.6905675177674652e-05, |
| "loss": 0.4194, |
| "step": 329 |
| }, |
| { |
| "epoch": 1.4837170129140933, |
| "grad_norm": 0.27566892739796234, |
| "learning_rate": 1.683289577241615e-05, |
| "loss": 0.4456, |
| "step": 330 |
| }, |
| { |
| "epoch": 1.4882088714205501, |
| "grad_norm": 0.2700242393817183, |
| "learning_rate": 1.676004235208109e-05, |
| "loss": 0.4367, |
| "step": 331 |
| }, |
| { |
| "epoch": 1.4927007299270074, |
| "grad_norm": 0.3142461399016386, |
| "learning_rate": 1.668711692065716e-05, |
| "loss": 0.4247, |
| "step": 332 |
| }, |
| { |
| "epoch": 1.4971925884334643, |
| "grad_norm": 0.25043376604704976, |
| "learning_rate": 1.661412148411288e-05, |
| "loss": 0.4326, |
| "step": 333 |
| }, |
| { |
| "epoch": 1.5016844469399215, |
| "grad_norm": 0.2826302795105161, |
| "learning_rate": 1.6541058050342402e-05, |
| "loss": 0.438, |
| "step": 334 |
| }, |
| { |
| "epoch": 1.5061763054463784, |
| "grad_norm": 0.27506460881743333, |
| "learning_rate": 1.646792862911028e-05, |
| "loss": 0.4358, |
| "step": 335 |
| }, |
| { |
| "epoch": 1.5106681639528357, |
| "grad_norm": 0.30976807555293234, |
| "learning_rate": 1.6394735231996212e-05, |
| "loss": 0.443, |
| "step": 336 |
| }, |
| { |
| "epoch": 1.5151600224592925, |
| "grad_norm": 0.3182926560081222, |
| "learning_rate": 1.632147987233967e-05, |
| "loss": 0.4284, |
| "step": 337 |
| }, |
| { |
| "epoch": 1.5196518809657495, |
| "grad_norm": 0.3073893367627004, |
| "learning_rate": 1.624816456518455e-05, |
| "loss": 0.421, |
| "step": 338 |
| }, |
| { |
| "epoch": 1.5241437394722066, |
| "grad_norm": 0.3026151787871035, |
| "learning_rate": 1.617479132722372e-05, |
| "loss": 0.4345, |
| "step": 339 |
| }, |
| { |
| "epoch": 1.5286355979786637, |
| "grad_norm": 0.3488262304344424, |
| "learning_rate": 1.6101362176743562e-05, |
| "loss": 0.4304, |
| "step": 340 |
| }, |
| { |
| "epoch": 1.5331274564851207, |
| "grad_norm": 0.31909400236183205, |
| "learning_rate": 1.6027879133568455e-05, |
| "loss": 0.4367, |
| "step": 341 |
| }, |
| { |
| "epoch": 1.5376193149915778, |
| "grad_norm": 0.3507006208659429, |
| "learning_rate": 1.59543442190052e-05, |
| "loss": 0.4286, |
| "step": 342 |
| }, |
| { |
| "epoch": 1.5421111734980348, |
| "grad_norm": 0.3247486352951706, |
| "learning_rate": 1.5880759455787446e-05, |
| "loss": 0.444, |
| "step": 343 |
| }, |
| { |
| "epoch": 1.546603032004492, |
| "grad_norm": 0.3157749827732778, |
| "learning_rate": 1.5807126868020016e-05, |
| "loss": 0.4354, |
| "step": 344 |
| }, |
| { |
| "epoch": 1.551094890510949, |
| "grad_norm": 0.3169873921812676, |
| "learning_rate": 1.5733448481123264e-05, |
| "loss": 0.4236, |
| "step": 345 |
| }, |
| { |
| "epoch": 1.5555867490174058, |
| "grad_norm": 0.33198535545378055, |
| "learning_rate": 1.565972632177734e-05, |
| "loss": 0.4386, |
| "step": 346 |
| }, |
| { |
| "epoch": 1.560078607523863, |
| "grad_norm": 0.3083260440048532, |
| "learning_rate": 1.558596241786645e-05, |
| "loss": 0.4282, |
| "step": 347 |
| }, |
| { |
| "epoch": 1.56457046603032, |
| "grad_norm": 0.3357526579603315, |
| "learning_rate": 1.551215879842307e-05, |
| "loss": 0.4178, |
| "step": 348 |
| }, |
| { |
| "epoch": 1.5690623245367772, |
| "grad_norm": 0.3287950186329193, |
| "learning_rate": 1.543831749357214e-05, |
| "loss": 0.431, |
| "step": 349 |
| }, |
| { |
| "epoch": 1.573554183043234, |
| "grad_norm": 0.3260432629951177, |
| "learning_rate": 1.5364440534475205e-05, |
| "loss": 0.4486, |
| "step": 350 |
| }, |
| { |
| "epoch": 1.5780460415496913, |
| "grad_norm": 0.29274164579590756, |
| "learning_rate": 1.5290529953274573e-05, |
| "loss": 0.4348, |
| "step": 351 |
| }, |
| { |
| "epoch": 1.5825379000561481, |
| "grad_norm": 0.2793617167300319, |
| "learning_rate": 1.521658778303739e-05, |
| "loss": 0.4339, |
| "step": 352 |
| }, |
| { |
| "epoch": 1.5870297585626054, |
| "grad_norm": 0.22729692344689448, |
| "learning_rate": 1.5142616057699725e-05, |
| "loss": 0.4088, |
| "step": 353 |
| }, |
| { |
| "epoch": 1.5915216170690623, |
| "grad_norm": 0.2706527199288221, |
| "learning_rate": 1.506861681201062e-05, |
| "loss": 0.4487, |
| "step": 354 |
| }, |
| { |
| "epoch": 1.5960134755755195, |
| "grad_norm": 0.23763623498472133, |
| "learning_rate": 1.4994592081476132e-05, |
| "loss": 0.4399, |
| "step": 355 |
| }, |
| { |
| "epoch": 1.6005053340819764, |
| "grad_norm": 0.2289251582859145, |
| "learning_rate": 1.4920543902303326e-05, |
| "loss": 0.4114, |
| "step": 356 |
| }, |
| { |
| "epoch": 1.6049971925884334, |
| "grad_norm": 0.22999268807574383, |
| "learning_rate": 1.4846474311344276e-05, |
| "loss": 0.4249, |
| "step": 357 |
| }, |
| { |
| "epoch": 1.6094890510948905, |
| "grad_norm": 0.2550402742123251, |
| "learning_rate": 1.4772385346040023e-05, |
| "loss": 0.4414, |
| "step": 358 |
| }, |
| { |
| "epoch": 1.6139809096013475, |
| "grad_norm": 0.28794877883820574, |
| "learning_rate": 1.4698279044364549e-05, |
| "loss": 0.4418, |
| "step": 359 |
| }, |
| { |
| "epoch": 1.6184727681078046, |
| "grad_norm": 0.2587286922700844, |
| "learning_rate": 1.4624157444768707e-05, |
| "loss": 0.4279, |
| "step": 360 |
| }, |
| { |
| "epoch": 1.6229646266142617, |
| "grad_norm": 0.26948710334654885, |
| "learning_rate": 1.4550022586124148e-05, |
| "loss": 0.4315, |
| "step": 361 |
| }, |
| { |
| "epoch": 1.6274564851207187, |
| "grad_norm": 0.26259124764610464, |
| "learning_rate": 1.4475876507667255e-05, |
| "loss": 0.4349, |
| "step": 362 |
| }, |
| { |
| "epoch": 1.6319483436271758, |
| "grad_norm": 0.23935177475002697, |
| "learning_rate": 1.4401721248943016e-05, |
| "loss": 0.4092, |
| "step": 363 |
| }, |
| { |
| "epoch": 1.6364402021336328, |
| "grad_norm": 0.23992737233823272, |
| "learning_rate": 1.432755884974896e-05, |
| "loss": 0.437, |
| "step": 364 |
| }, |
| { |
| "epoch": 1.6409320606400897, |
| "grad_norm": 0.22177963951273988, |
| "learning_rate": 1.4253391350079015e-05, |
| "loss": 0.4175, |
| "step": 365 |
| }, |
| { |
| "epoch": 1.645423919146547, |
| "grad_norm": 0.2670621296838188, |
| "learning_rate": 1.417922079006742e-05, |
| "loss": 0.4287, |
| "step": 366 |
| }, |
| { |
| "epoch": 1.6499157776530038, |
| "grad_norm": 0.23007364031192343, |
| "learning_rate": 1.4105049209932585e-05, |
| "loss": 0.4315, |
| "step": 367 |
| }, |
| { |
| "epoch": 1.654407636159461, |
| "grad_norm": 0.2862891319622476, |
| "learning_rate": 1.4030878649920989e-05, |
| "loss": 0.4105, |
| "step": 368 |
| }, |
| { |
| "epoch": 1.658899494665918, |
| "grad_norm": 0.2542826948774142, |
| "learning_rate": 1.3956711150251044e-05, |
| "loss": 0.4418, |
| "step": 369 |
| }, |
| { |
| "epoch": 1.6633913531723752, |
| "grad_norm": 0.25016704641271026, |
| "learning_rate": 1.3882548751056987e-05, |
| "loss": 0.4234, |
| "step": 370 |
| }, |
| { |
| "epoch": 1.667883211678832, |
| "grad_norm": 0.31248484686645767, |
| "learning_rate": 1.380839349233275e-05, |
| "loss": 0.4246, |
| "step": 371 |
| }, |
| { |
| "epoch": 1.6723750701852893, |
| "grad_norm": 0.2493758279331931, |
| "learning_rate": 1.3734247413875855e-05, |
| "loss": 0.4417, |
| "step": 372 |
| }, |
| { |
| "epoch": 1.6768669286917461, |
| "grad_norm": 0.33089735282060806, |
| "learning_rate": 1.36601125552313e-05, |
| "loss": 0.4453, |
| "step": 373 |
| }, |
| { |
| "epoch": 1.6813587871982034, |
| "grad_norm": 0.2358526660603175, |
| "learning_rate": 1.3585990955635459e-05, |
| "loss": 0.4138, |
| "step": 374 |
| }, |
| { |
| "epoch": 1.6858506457046603, |
| "grad_norm": 0.2836876613895396, |
| "learning_rate": 1.3511884653959981e-05, |
| "loss": 0.4235, |
| "step": 375 |
| }, |
| { |
| "epoch": 1.6903425042111173, |
| "grad_norm": 0.2796457407843088, |
| "learning_rate": 1.3437795688655726e-05, |
| "loss": 0.4355, |
| "step": 376 |
| }, |
| { |
| "epoch": 1.6948343627175744, |
| "grad_norm": 0.252086707823967, |
| "learning_rate": 1.3363726097696673e-05, |
| "loss": 0.433, |
| "step": 377 |
| }, |
| { |
| "epoch": 1.6993262212240314, |
| "grad_norm": 0.3504937797572051, |
| "learning_rate": 1.3289677918523868e-05, |
| "loss": 0.4314, |
| "step": 378 |
| }, |
| { |
| "epoch": 1.7038180797304885, |
| "grad_norm": 0.23392274241262292, |
| "learning_rate": 1.321565318798938e-05, |
| "loss": 0.4234, |
| "step": 379 |
| }, |
| { |
| "epoch": 1.7083099382369455, |
| "grad_norm": 0.2527199944902928, |
| "learning_rate": 1.3141653942300279e-05, |
| "loss": 0.4265, |
| "step": 380 |
| }, |
| { |
| "epoch": 1.7128017967434026, |
| "grad_norm": 0.27218994035462163, |
| "learning_rate": 1.3067682216962613e-05, |
| "loss": 0.4428, |
| "step": 381 |
| }, |
| { |
| "epoch": 1.7172936552498597, |
| "grad_norm": 0.24181872866822765, |
| "learning_rate": 1.2993740046725429e-05, |
| "loss": 0.4251, |
| "step": 382 |
| }, |
| { |
| "epoch": 1.7217855137563167, |
| "grad_norm": 0.23454523482773984, |
| "learning_rate": 1.2919829465524797e-05, |
| "loss": 0.4508, |
| "step": 383 |
| }, |
| { |
| "epoch": 1.7262773722627736, |
| "grad_norm": 0.2141716112253175, |
| "learning_rate": 1.2845952506427866e-05, |
| "loss": 0.4209, |
| "step": 384 |
| }, |
| { |
| "epoch": 1.7307692307692308, |
| "grad_norm": 0.21483571836516002, |
| "learning_rate": 1.2772111201576932e-05, |
| "loss": 0.4297, |
| "step": 385 |
| }, |
| { |
| "epoch": 1.7352610892756877, |
| "grad_norm": 0.22676478933835803, |
| "learning_rate": 1.2698307582133551e-05, |
| "loss": 0.4436, |
| "step": 386 |
| }, |
| { |
| "epoch": 1.739752947782145, |
| "grad_norm": 0.21804047909429286, |
| "learning_rate": 1.2624543678222662e-05, |
| "loss": 0.4284, |
| "step": 387 |
| }, |
| { |
| "epoch": 1.7442448062886018, |
| "grad_norm": 0.2161320011302351, |
| "learning_rate": 1.255082151887674e-05, |
| "loss": 0.4199, |
| "step": 388 |
| }, |
| { |
| "epoch": 1.748736664795059, |
| "grad_norm": 0.23536261615237247, |
| "learning_rate": 1.2477143131979989e-05, |
| "loss": 0.4416, |
| "step": 389 |
| }, |
| { |
| "epoch": 1.753228523301516, |
| "grad_norm": 0.19516762515369254, |
| "learning_rate": 1.2403510544212563e-05, |
| "loss": 0.422, |
| "step": 390 |
| }, |
| { |
| "epoch": 1.7577203818079732, |
| "grad_norm": 0.2597761337222779, |
| "learning_rate": 1.2329925780994805e-05, |
| "loss": 0.4555, |
| "step": 391 |
| }, |
| { |
| "epoch": 1.76221224031443, |
| "grad_norm": 0.2365793711701312, |
| "learning_rate": 1.225639086643155e-05, |
| "loss": 0.4198, |
| "step": 392 |
| }, |
| { |
| "epoch": 1.7667040988208873, |
| "grad_norm": 0.2697456739133074, |
| "learning_rate": 1.218290782325644e-05, |
| "loss": 0.4356, |
| "step": 393 |
| }, |
| { |
| "epoch": 1.7711959573273441, |
| "grad_norm": 0.27620361441234015, |
| "learning_rate": 1.2109478672776284e-05, |
| "loss": 0.4203, |
| "step": 394 |
| }, |
| { |
| "epoch": 1.7756878158338012, |
| "grad_norm": 0.24512868512343508, |
| "learning_rate": 1.2036105434815453e-05, |
| "loss": 0.4203, |
| "step": 395 |
| }, |
| { |
| "epoch": 1.7801796743402583, |
| "grad_norm": 0.26248183198436537, |
| "learning_rate": 1.196279012766033e-05, |
| "loss": 0.4171, |
| "step": 396 |
| }, |
| { |
| "epoch": 1.7846715328467153, |
| "grad_norm": 0.22075536899568315, |
| "learning_rate": 1.1889534768003789e-05, |
| "loss": 0.417, |
| "step": 397 |
| }, |
| { |
| "epoch": 1.7891633913531724, |
| "grad_norm": 0.22534931235643543, |
| "learning_rate": 1.1816341370889721e-05, |
| "loss": 0.4307, |
| "step": 398 |
| }, |
| { |
| "epoch": 1.7936552498596294, |
| "grad_norm": 0.226631593581966, |
| "learning_rate": 1.1743211949657602e-05, |
| "loss": 0.4188, |
| "step": 399 |
| }, |
| { |
| "epoch": 1.7981471083660865, |
| "grad_norm": 0.23848047414834528, |
| "learning_rate": 1.1670148515887122e-05, |
| "loss": 0.4356, |
| "step": 400 |
| }, |
| { |
| "epoch": 1.8026389668725435, |
| "grad_norm": 0.19151331669734864, |
| "learning_rate": 1.1597153079342841e-05, |
| "loss": 0.436, |
| "step": 401 |
| }, |
| { |
| "epoch": 1.8071308253790006, |
| "grad_norm": 0.22870877041290116, |
| "learning_rate": 1.1524227647918911e-05, |
| "loss": 0.419, |
| "step": 402 |
| }, |
| { |
| "epoch": 1.8116226838854577, |
| "grad_norm": 0.21466616093632906, |
| "learning_rate": 1.1451374227583852e-05, |
| "loss": 0.4424, |
| "step": 403 |
| }, |
| { |
| "epoch": 1.8161145423919147, |
| "grad_norm": 0.25318885652943574, |
| "learning_rate": 1.1378594822325357e-05, |
| "loss": 0.4406, |
| "step": 404 |
| }, |
| { |
| "epoch": 1.8206064008983716, |
| "grad_norm": 0.20250073619587594, |
| "learning_rate": 1.1305891434095182e-05, |
| "loss": 0.4238, |
| "step": 405 |
| }, |
| { |
| "epoch": 1.8250982594048288, |
| "grad_norm": 0.2259040741342042, |
| "learning_rate": 1.1233266062754074e-05, |
| "loss": 0.4287, |
| "step": 406 |
| }, |
| { |
| "epoch": 1.8295901179112857, |
| "grad_norm": 0.21613984026340174, |
| "learning_rate": 1.116072070601676e-05, |
| "loss": 0.429, |
| "step": 407 |
| }, |
| { |
| "epoch": 1.834081976417743, |
| "grad_norm": 0.1986928299960945, |
| "learning_rate": 1.1088257359396997e-05, |
| "loss": 0.412, |
| "step": 408 |
| }, |
| { |
| "epoch": 1.8385738349241998, |
| "grad_norm": 0.23767036451906237, |
| "learning_rate": 1.1015878016152666e-05, |
| "loss": 0.4235, |
| "step": 409 |
| }, |
| { |
| "epoch": 1.843065693430657, |
| "grad_norm": 0.22229388428754168, |
| "learning_rate": 1.094358466723099e-05, |
| "loss": 0.4434, |
| "step": 410 |
| }, |
| { |
| "epoch": 1.847557551937114, |
| "grad_norm": 0.2652264081143693, |
| "learning_rate": 1.0871379301213705e-05, |
| "loss": 0.4181, |
| "step": 411 |
| }, |
| { |
| "epoch": 1.8520494104435712, |
| "grad_norm": 0.26380236575167754, |
| "learning_rate": 1.0799263904262397e-05, |
| "loss": 0.4242, |
| "step": 412 |
| }, |
| { |
| "epoch": 1.856541268950028, |
| "grad_norm": 0.23160769920164645, |
| "learning_rate": 1.0727240460063872e-05, |
| "loss": 0.4461, |
| "step": 413 |
| }, |
| { |
| "epoch": 1.861033127456485, |
| "grad_norm": 0.2277570708946221, |
| "learning_rate": 1.0655310949775572e-05, |
| "loss": 0.405, |
| "step": 414 |
| }, |
| { |
| "epoch": 1.8655249859629421, |
| "grad_norm": 0.21522197135984833, |
| "learning_rate": 1.0583477351971092e-05, |
| "loss": 0.4244, |
| "step": 415 |
| }, |
| { |
| "epoch": 1.8700168444693992, |
| "grad_norm": 0.24137253643395323, |
| "learning_rate": 1.0511741642585747e-05, |
| "loss": 0.4569, |
| "step": 416 |
| }, |
| { |
| "epoch": 1.8745087029758563, |
| "grad_norm": 0.22284276780245038, |
| "learning_rate": 1.044010579486223e-05, |
| "loss": 0.4209, |
| "step": 417 |
| }, |
| { |
| "epoch": 1.8790005614823133, |
| "grad_norm": 0.20682215213756386, |
| "learning_rate": 1.0368571779296324e-05, |
| "loss": 0.427, |
| "step": 418 |
| }, |
| { |
| "epoch": 1.8834924199887704, |
| "grad_norm": 0.1903967688448084, |
| "learning_rate": 1.0297141563582698e-05, |
| "loss": 0.439, |
| "step": 419 |
| }, |
| { |
| "epoch": 1.8879842784952274, |
| "grad_norm": 0.22460948249947274, |
| "learning_rate": 1.0225817112560795e-05, |
| "loss": 0.4154, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.8924761370016845, |
| "grad_norm": 0.20928856005143603, |
| "learning_rate": 1.0154600388160767e-05, |
| "loss": 0.4267, |
| "step": 421 |
| }, |
| { |
| "epoch": 1.8969679955081415, |
| "grad_norm": 0.1970852478249293, |
| "learning_rate": 1.0083493349349523e-05, |
| "loss": 0.424, |
| "step": 422 |
| }, |
| { |
| "epoch": 1.9014598540145986, |
| "grad_norm": 0.2189181076915757, |
| "learning_rate": 1.0012497952076838e-05, |
| "loss": 0.427, |
| "step": 423 |
| }, |
| { |
| "epoch": 1.9059517125210554, |
| "grad_norm": 0.2009004896673229, |
| "learning_rate": 9.941616149221549e-06, |
| "loss": 0.4413, |
| "step": 424 |
| }, |
| { |
| "epoch": 1.9104435710275127, |
| "grad_norm": 0.20621935633909672, |
| "learning_rate": 9.870849890537826e-06, |
| "loss": 0.4133, |
| "step": 425 |
| }, |
| { |
| "epoch": 1.9149354295339696, |
| "grad_norm": 0.20677347142692576, |
| "learning_rate": 9.800201122601579e-06, |
| "loss": 0.4177, |
| "step": 426 |
| }, |
| { |
| "epoch": 1.9194272880404268, |
| "grad_norm": 0.216610474906858, |
| "learning_rate": 9.72967178875686e-06, |
| "loss": 0.4493, |
| "step": 427 |
| }, |
| { |
| "epoch": 1.9239191465468837, |
| "grad_norm": 0.19615798611811022, |
| "learning_rate": 9.659263829062443e-06, |
| "loss": 0.4235, |
| "step": 428 |
| }, |
| { |
| "epoch": 1.928411005053341, |
| "grad_norm": 0.21389343121593873, |
| "learning_rate": 9.588979180238454e-06, |
| "loss": 0.4275, |
| "step": 429 |
| }, |
| { |
| "epoch": 1.9329028635597978, |
| "grad_norm": 0.1783568108760251, |
| "learning_rate": 9.518819775613077e-06, |
| "loss": 0.417, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.937394722066255, |
| "grad_norm": 0.19382064589217926, |
| "learning_rate": 9.44878754506941e-06, |
| "loss": 0.427, |
| "step": 431 |
| }, |
| { |
| "epoch": 1.941886580572712, |
| "grad_norm": 0.1868776486518385, |
| "learning_rate": 9.37888441499234e-06, |
| "loss": 0.4344, |
| "step": 432 |
| }, |
| { |
| "epoch": 1.946378439079169, |
| "grad_norm": 0.19196173137999686, |
| "learning_rate": 9.309112308215579e-06, |
| "loss": 0.4218, |
| "step": 433 |
| }, |
| { |
| "epoch": 1.950870297585626, |
| "grad_norm": 0.18168839883358026, |
| "learning_rate": 9.239473143968768e-06, |
| "loss": 0.4164, |
| "step": 434 |
| }, |
| { |
| "epoch": 1.955362156092083, |
| "grad_norm": 0.19586388090243548, |
| "learning_rate": 9.169968837824679e-06, |
| "loss": 0.4346, |
| "step": 435 |
| }, |
| { |
| "epoch": 1.9598540145985401, |
| "grad_norm": 0.1968196503268063, |
| "learning_rate": 9.100601301646527e-06, |
| "loss": 0.435, |
| "step": 436 |
| }, |
| { |
| "epoch": 1.9643458731049972, |
| "grad_norm": 0.1892076438600969, |
| "learning_rate": 9.031372443535382e-06, |
| "loss": 0.4277, |
| "step": 437 |
| }, |
| { |
| "epoch": 1.9688377316114543, |
| "grad_norm": 0.17997215604618058, |
| "learning_rate": 8.96228416777767e-06, |
| "loss": 0.4246, |
| "step": 438 |
| }, |
| { |
| "epoch": 1.9733295901179113, |
| "grad_norm": 0.21280790454241325, |
| "learning_rate": 8.893338374792817e-06, |
| "loss": 0.4191, |
| "step": 439 |
| }, |
| { |
| "epoch": 1.9778214486243684, |
| "grad_norm": 0.2122734568816957, |
| "learning_rate": 8.824536961080943e-06, |
| "loss": 0.4326, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.9823133071308254, |
| "grad_norm": 0.18374860642807206, |
| "learning_rate": 8.75588181917072e-06, |
| "loss": 0.4201, |
| "step": 441 |
| }, |
| { |
| "epoch": 1.9868051656372825, |
| "grad_norm": 0.19354659406561456, |
| "learning_rate": 8.687374837567298e-06, |
| "loss": 0.4138, |
| "step": 442 |
| }, |
| { |
| "epoch": 1.9912970241437393, |
| "grad_norm": 0.21445379315505658, |
| "learning_rate": 8.619017900700374e-06, |
| "loss": 0.4393, |
| "step": 443 |
| }, |
| { |
| "epoch": 1.9957888826501966, |
| "grad_norm": 0.20362548247073203, |
| "learning_rate": 8.550812888872337e-06, |
| "loss": 0.4239, |
| "step": 444 |
| }, |
| { |
| "epoch": 2.0016844469399215, |
| "grad_norm": 0.38025114081538913, |
| "learning_rate": 8.48276167820656e-06, |
| "loss": 0.6928, |
| "step": 445 |
| }, |
| { |
| "epoch": 2.0061763054463784, |
| "grad_norm": 0.24372193822660385, |
| "learning_rate": 8.414866140595784e-06, |
| "loss": 0.4081, |
| "step": 446 |
| }, |
| { |
| "epoch": 2.0106681639528357, |
| "grad_norm": 0.2776148147113284, |
| "learning_rate": 8.347128143650638e-06, |
| "loss": 0.4048, |
| "step": 447 |
| }, |
| { |
| "epoch": 2.0151600224592925, |
| "grad_norm": 0.23373713524039486, |
| "learning_rate": 8.279549550648262e-06, |
| "loss": 0.3931, |
| "step": 448 |
| }, |
| { |
| "epoch": 2.0196518809657498, |
| "grad_norm": 0.22958395982377883, |
| "learning_rate": 8.212132220481046e-06, |
| "loss": 0.4008, |
| "step": 449 |
| }, |
| { |
| "epoch": 2.0241437394722066, |
| "grad_norm": 0.29063552467815107, |
| "learning_rate": 8.144878007605514e-06, |
| "loss": 0.4154, |
| "step": 450 |
| }, |
| { |
| "epoch": 2.028635597978664, |
| "grad_norm": 0.21727860645522265, |
| "learning_rate": 8.077788761991291e-06, |
| "loss": 0.4162, |
| "step": 451 |
| }, |
| { |
| "epoch": 2.0331274564851207, |
| "grad_norm": 0.24646687235941006, |
| "learning_rate": 8.01086632907024e-06, |
| "loss": 0.4003, |
| "step": 452 |
| }, |
| { |
| "epoch": 2.0376193149915776, |
| "grad_norm": 0.26443217492234267, |
| "learning_rate": 7.944112549685684e-06, |
| "loss": 0.3955, |
| "step": 453 |
| }, |
| { |
| "epoch": 2.042111173498035, |
| "grad_norm": 0.2351417166729191, |
| "learning_rate": 7.877529260041764e-06, |
| "loss": 0.4102, |
| "step": 454 |
| }, |
| { |
| "epoch": 2.0466030320044917, |
| "grad_norm": 0.23415549490836263, |
| "learning_rate": 7.811118291652952e-06, |
| "loss": 0.4138, |
| "step": 455 |
| }, |
| { |
| "epoch": 2.051094890510949, |
| "grad_norm": 0.23596205188013428, |
| "learning_rate": 7.744881471293649e-06, |
| "loss": 0.3935, |
| "step": 456 |
| }, |
| { |
| "epoch": 2.055586749017406, |
| "grad_norm": 0.2535757872007115, |
| "learning_rate": 7.678820620947954e-06, |
| "loss": 0.4042, |
| "step": 457 |
| }, |
| { |
| "epoch": 2.060078607523863, |
| "grad_norm": 0.20639323573803794, |
| "learning_rate": 7.612937557759534e-06, |
| "loss": 0.4161, |
| "step": 458 |
| }, |
| { |
| "epoch": 2.06457046603032, |
| "grad_norm": 0.22362539726003386, |
| "learning_rate": 7.54723409398164e-06, |
| "loss": 0.421, |
| "step": 459 |
| }, |
| { |
| "epoch": 2.069062324536777, |
| "grad_norm": 0.22127045680743251, |
| "learning_rate": 7.481712036927267e-06, |
| "loss": 0.3861, |
| "step": 460 |
| }, |
| { |
| "epoch": 2.073554183043234, |
| "grad_norm": 0.19232753420525026, |
| "learning_rate": 7.416373188919427e-06, |
| "loss": 0.3922, |
| "step": 461 |
| }, |
| { |
| "epoch": 2.0780460415496913, |
| "grad_norm": 0.20966827389795048, |
| "learning_rate": 7.351219347241587e-06, |
| "loss": 0.4049, |
| "step": 462 |
| }, |
| { |
| "epoch": 2.082537900056148, |
| "grad_norm": 0.19401074372527943, |
| "learning_rate": 7.286252304088215e-06, |
| "loss": 0.4038, |
| "step": 463 |
| }, |
| { |
| "epoch": 2.0870297585626054, |
| "grad_norm": 0.2033592701308335, |
| "learning_rate": 7.221473846515494e-06, |
| "loss": 0.402, |
| "step": 464 |
| }, |
| { |
| "epoch": 2.0915216170690623, |
| "grad_norm": 0.19991172839172033, |
| "learning_rate": 7.156885756392161e-06, |
| "loss": 0.3956, |
| "step": 465 |
| }, |
| { |
| "epoch": 2.0960134755755195, |
| "grad_norm": 0.19027112767750493, |
| "learning_rate": 7.092489810350491e-06, |
| "loss": 0.3912, |
| "step": 466 |
| }, |
| { |
| "epoch": 2.1005053340819764, |
| "grad_norm": 0.18837141345128516, |
| "learning_rate": 7.028287779737433e-06, |
| "loss": 0.4066, |
| "step": 467 |
| }, |
| { |
| "epoch": 2.1049971925884337, |
| "grad_norm": 0.18954944621965775, |
| "learning_rate": 6.964281430565874e-06, |
| "loss": 0.4048, |
| "step": 468 |
| }, |
| { |
| "epoch": 2.1094890510948905, |
| "grad_norm": 0.19737625715167087, |
| "learning_rate": 6.9004725234660754e-06, |
| "loss": 0.3929, |
| "step": 469 |
| }, |
| { |
| "epoch": 2.1139809096013478, |
| "grad_norm": 0.20640221374882523, |
| "learning_rate": 6.836862813637232e-06, |
| "loss": 0.4214, |
| "step": 470 |
| }, |
| { |
| "epoch": 2.1184727681078046, |
| "grad_norm": 0.19729058573447977, |
| "learning_rate": 6.773454050799198e-06, |
| "loss": 0.3956, |
| "step": 471 |
| }, |
| { |
| "epoch": 2.1229646266142614, |
| "grad_norm": 0.2210511604280809, |
| "learning_rate": 6.710247979144353e-06, |
| "loss": 0.4067, |
| "step": 472 |
| }, |
| { |
| "epoch": 2.1274564851207187, |
| "grad_norm": 0.21144716690556348, |
| "learning_rate": 6.6472463372896255e-06, |
| "loss": 0.4087, |
| "step": 473 |
| }, |
| { |
| "epoch": 2.1319483436271756, |
| "grad_norm": 0.21401394951272074, |
| "learning_rate": 6.584450858228674e-06, |
| "loss": 0.4155, |
| "step": 474 |
| }, |
| { |
| "epoch": 2.136440202133633, |
| "grad_norm": 0.2110307270757492, |
| "learning_rate": 6.5218632692842e-06, |
| "loss": 0.3982, |
| "step": 475 |
| }, |
| { |
| "epoch": 2.1409320606400897, |
| "grad_norm": 0.2188562401970234, |
| "learning_rate": 6.459485292060468e-06, |
| "loss": 0.4012, |
| "step": 476 |
| }, |
| { |
| "epoch": 2.145423919146547, |
| "grad_norm": 0.20122965285822791, |
| "learning_rate": 6.397318642395911e-06, |
| "loss": 0.4009, |
| "step": 477 |
| }, |
| { |
| "epoch": 2.149915777653004, |
| "grad_norm": 0.18963676557334538, |
| "learning_rate": 6.335365030315953e-06, |
| "loss": 0.3972, |
| "step": 478 |
| }, |
| { |
| "epoch": 2.154407636159461, |
| "grad_norm": 0.1936342170939391, |
| "learning_rate": 6.273626159985971e-06, |
| "loss": 0.4017, |
| "step": 479 |
| }, |
| { |
| "epoch": 2.158899494665918, |
| "grad_norm": 0.1617872571717289, |
| "learning_rate": 6.2121037296644114e-06, |
| "loss": 0.4028, |
| "step": 480 |
| }, |
| { |
| "epoch": 2.163391353172375, |
| "grad_norm": 0.1927846186688275, |
| "learning_rate": 6.1507994316560836e-06, |
| "loss": 0.4188, |
| "step": 481 |
| }, |
| { |
| "epoch": 2.167883211678832, |
| "grad_norm": 0.18495924225832375, |
| "learning_rate": 6.089714952265603e-06, |
| "loss": 0.37, |
| "step": 482 |
| }, |
| { |
| "epoch": 2.1723750701852893, |
| "grad_norm": 0.188904946062925, |
| "learning_rate": 6.028851971751007e-06, |
| "loss": 0.4305, |
| "step": 483 |
| }, |
| { |
| "epoch": 2.176866928691746, |
| "grad_norm": 0.18764565684075873, |
| "learning_rate": 5.968212164277541e-06, |
| "loss": 0.3958, |
| "step": 484 |
| }, |
| { |
| "epoch": 2.1813587871982034, |
| "grad_norm": 0.17630565500194345, |
| "learning_rate": 5.9077971978716e-06, |
| "loss": 0.4198, |
| "step": 485 |
| }, |
| { |
| "epoch": 2.1858506457046603, |
| "grad_norm": 0.1773620762541041, |
| "learning_rate": 5.8476087343748505e-06, |
| "loss": 0.4195, |
| "step": 486 |
| }, |
| { |
| "epoch": 2.1903425042111175, |
| "grad_norm": 0.1747002407714538, |
| "learning_rate": 5.787648429398515e-06, |
| "loss": 0.3914, |
| "step": 487 |
| }, |
| { |
| "epoch": 2.1948343627175744, |
| "grad_norm": 0.16985548830481717, |
| "learning_rate": 5.727917932277831e-06, |
| "loss": 0.414, |
| "step": 488 |
| }, |
| { |
| "epoch": 2.199326221224031, |
| "grad_norm": 0.18212297018180695, |
| "learning_rate": 5.668418886026689e-06, |
| "loss": 0.3993, |
| "step": 489 |
| }, |
| { |
| "epoch": 2.2038180797304885, |
| "grad_norm": 0.16983942237475746, |
| "learning_rate": 5.609152927292432e-06, |
| "loss": 0.3934, |
| "step": 490 |
| }, |
| { |
| "epoch": 2.2083099382369458, |
| "grad_norm": 0.17190288036434193, |
| "learning_rate": 5.55012168631083e-06, |
| "loss": 0.3982, |
| "step": 491 |
| }, |
| { |
| "epoch": 2.2128017967434026, |
| "grad_norm": 0.18111711688919532, |
| "learning_rate": 5.491326786861248e-06, |
| "loss": 0.3956, |
| "step": 492 |
| }, |
| { |
| "epoch": 2.2172936552498594, |
| "grad_norm": 0.18152369105301266, |
| "learning_rate": 5.432769846221986e-06, |
| "loss": 0.4166, |
| "step": 493 |
| }, |
| { |
| "epoch": 2.2217855137563167, |
| "grad_norm": 0.1633243062989284, |
| "learning_rate": 5.374452475125769e-06, |
| "loss": 0.3881, |
| "step": 494 |
| }, |
| { |
| "epoch": 2.2262773722627736, |
| "grad_norm": 0.17295838406368827, |
| "learning_rate": 5.316376277715458e-06, |
| "loss": 0.3912, |
| "step": 495 |
| }, |
| { |
| "epoch": 2.230769230769231, |
| "grad_norm": 0.17468906348146815, |
| "learning_rate": 5.258542851499929e-06, |
| "loss": 0.3956, |
| "step": 496 |
| }, |
| { |
| "epoch": 2.2352610892756877, |
| "grad_norm": 0.17244392435386519, |
| "learning_rate": 5.200953787310104e-06, |
| "loss": 0.4026, |
| "step": 497 |
| }, |
| { |
| "epoch": 2.239752947782145, |
| "grad_norm": 0.1596913829094359, |
| "learning_rate": 5.143610669255224e-06, |
| "loss": 0.3952, |
| "step": 498 |
| }, |
| { |
| "epoch": 2.244244806288602, |
| "grad_norm": 0.1725969837782697, |
| "learning_rate": 5.08651507467926e-06, |
| "loss": 0.3885, |
| "step": 499 |
| }, |
| { |
| "epoch": 2.248736664795059, |
| "grad_norm": 0.17719497977550294, |
| "learning_rate": 5.02966857411752e-06, |
| "loss": 0.4158, |
| "step": 500 |
| }, |
| { |
| "epoch": 2.253228523301516, |
| "grad_norm": 0.1564882926046542, |
| "learning_rate": 4.9730727312534544e-06, |
| "loss": 0.3998, |
| "step": 501 |
| }, |
| { |
| "epoch": 2.257720381807973, |
| "grad_norm": 0.1562726027662319, |
| "learning_rate": 4.916729102875645e-06, |
| "loss": 0.418, |
| "step": 502 |
| }, |
| { |
| "epoch": 2.26221224031443, |
| "grad_norm": 0.16350678522915907, |
| "learning_rate": 4.860639238834977e-06, |
| "loss": 0.3897, |
| "step": 503 |
| }, |
| { |
| "epoch": 2.2667040988208873, |
| "grad_norm": 0.1590466726049414, |
| "learning_rate": 4.8048046820020125e-06, |
| "loss": 0.4283, |
| "step": 504 |
| }, |
| { |
| "epoch": 2.271195957327344, |
| "grad_norm": 0.1598494755149621, |
| "learning_rate": 4.7492269682245465e-06, |
| "loss": 0.3976, |
| "step": 505 |
| }, |
| { |
| "epoch": 2.2756878158338014, |
| "grad_norm": 0.1564777609984641, |
| "learning_rate": 4.693907626285361e-06, |
| "loss": 0.3925, |
| "step": 506 |
| }, |
| { |
| "epoch": 2.2801796743402583, |
| "grad_norm": 0.15431022450548962, |
| "learning_rate": 4.638848177860173e-06, |
| "loss": 0.4004, |
| "step": 507 |
| }, |
| { |
| "epoch": 2.2846715328467155, |
| "grad_norm": 0.16548691474486177, |
| "learning_rate": 4.584050137475774e-06, |
| "loss": 0.4313, |
| "step": 508 |
| }, |
| { |
| "epoch": 2.2891633913531724, |
| "grad_norm": 0.1559165936066942, |
| "learning_rate": 4.529515012468387e-06, |
| "loss": 0.3981, |
| "step": 509 |
| }, |
| { |
| "epoch": 2.293655249859629, |
| "grad_norm": 0.1502614692328062, |
| "learning_rate": 4.475244302942176e-06, |
| "loss": 0.409, |
| "step": 510 |
| }, |
| { |
| "epoch": 2.2981471083660865, |
| "grad_norm": 0.17002770838666154, |
| "learning_rate": 4.421239501728004e-06, |
| "loss": 0.4111, |
| "step": 511 |
| }, |
| { |
| "epoch": 2.3026389668725433, |
| "grad_norm": 0.15405846526605765, |
| "learning_rate": 4.367502094342358e-06, |
| "loss": 0.4148, |
| "step": 512 |
| }, |
| { |
| "epoch": 2.3071308253790006, |
| "grad_norm": 0.16443857327327438, |
| "learning_rate": 4.3140335589464955e-06, |
| "loss": 0.4175, |
| "step": 513 |
| }, |
| { |
| "epoch": 2.3116226838854574, |
| "grad_norm": 0.16108894575419452, |
| "learning_rate": 4.260835366305778e-06, |
| "loss": 0.3954, |
| "step": 514 |
| }, |
| { |
| "epoch": 2.3161145423919147, |
| "grad_norm": 0.16581098420880921, |
| "learning_rate": 4.2079089797492155e-06, |
| "loss": 0.4029, |
| "step": 515 |
| }, |
| { |
| "epoch": 2.3206064008983716, |
| "grad_norm": 0.1462496764056089, |
| "learning_rate": 4.1552558551292194e-06, |
| "loss": 0.3914, |
| "step": 516 |
| }, |
| { |
| "epoch": 2.325098259404829, |
| "grad_norm": 0.16257688465964204, |
| "learning_rate": 4.1028774407815484e-06, |
| "loss": 0.4087, |
| "step": 517 |
| }, |
| { |
| "epoch": 2.3295901179112857, |
| "grad_norm": 0.15447353531940203, |
| "learning_rate": 4.050775177485476e-06, |
| "loss": 0.4116, |
| "step": 518 |
| }, |
| { |
| "epoch": 2.334081976417743, |
| "grad_norm": 0.15272812111654316, |
| "learning_rate": 3.998950498424157e-06, |
| "loss": 0.4011, |
| "step": 519 |
| }, |
| { |
| "epoch": 2.3385738349242, |
| "grad_norm": 0.14233004337736202, |
| "learning_rate": 3.947404829145199e-06, |
| "loss": 0.3923, |
| "step": 520 |
| }, |
| { |
| "epoch": 2.343065693430657, |
| "grad_norm": 0.1703317244482347, |
| "learning_rate": 3.896139587521457e-06, |
| "loss": 0.4405, |
| "step": 521 |
| }, |
| { |
| "epoch": 2.347557551937114, |
| "grad_norm": 0.14563012327083313, |
| "learning_rate": 3.84515618371203e-06, |
| "loss": 0.385, |
| "step": 522 |
| }, |
| { |
| "epoch": 2.352049410443571, |
| "grad_norm": 0.17742686794643436, |
| "learning_rate": 3.794456020123469e-06, |
| "loss": 0.4128, |
| "step": 523 |
| }, |
| { |
| "epoch": 2.356541268950028, |
| "grad_norm": 0.14926460855891024, |
| "learning_rate": 3.744040491371204e-06, |
| "loss": 0.3956, |
| "step": 524 |
| }, |
| { |
| "epoch": 2.3610331274564853, |
| "grad_norm": 0.14734023096974366, |
| "learning_rate": 3.693910984241176e-06, |
| "loss": 0.4134, |
| "step": 525 |
| }, |
| { |
| "epoch": 2.365524985962942, |
| "grad_norm": 0.17461277226346902, |
| "learning_rate": 3.6440688776517076e-06, |
| "loss": 0.3955, |
| "step": 526 |
| }, |
| { |
| "epoch": 2.370016844469399, |
| "grad_norm": 0.1829426578142121, |
| "learning_rate": 3.5945155426155476e-06, |
| "loss": 0.3952, |
| "step": 527 |
| }, |
| { |
| "epoch": 2.3745087029758563, |
| "grad_norm": 0.1473193865232448, |
| "learning_rate": 3.5452523422021744e-06, |
| "loss": 0.4198, |
| "step": 528 |
| }, |
| { |
| "epoch": 2.3790005614823135, |
| "grad_norm": 0.15970567054534546, |
| "learning_rate": 3.496280631500304e-06, |
| "loss": 0.4071, |
| "step": 529 |
| }, |
| { |
| "epoch": 2.3834924199887704, |
| "grad_norm": 0.16964921917368456, |
| "learning_rate": 3.447601757580604e-06, |
| "loss": 0.4254, |
| "step": 530 |
| }, |
| { |
| "epoch": 2.387984278495227, |
| "grad_norm": 0.16200193328776716, |
| "learning_rate": 3.399217059458649e-06, |
| "loss": 0.3874, |
| "step": 531 |
| }, |
| { |
| "epoch": 2.3924761370016845, |
| "grad_norm": 0.147997015119327, |
| "learning_rate": 3.3511278680580843e-06, |
| "loss": 0.4191, |
| "step": 532 |
| }, |
| { |
| "epoch": 2.3969679955081413, |
| "grad_norm": 0.15472131311543866, |
| "learning_rate": 3.303335506174018e-06, |
| "loss": 0.3967, |
| "step": 533 |
| }, |
| { |
| "epoch": 2.4014598540145986, |
| "grad_norm": 0.15792003965676266, |
| "learning_rate": 3.2558412884366303e-06, |
| "loss": 0.404, |
| "step": 534 |
| }, |
| { |
| "epoch": 2.4059517125210554, |
| "grad_norm": 0.14885399084267711, |
| "learning_rate": 3.2086465212750174e-06, |
| "loss": 0.419, |
| "step": 535 |
| }, |
| { |
| "epoch": 2.4104435710275127, |
| "grad_norm": 0.14435379373559862, |
| "learning_rate": 3.1617525028812533e-06, |
| "loss": 0.4005, |
| "step": 536 |
| }, |
| { |
| "epoch": 2.4149354295339696, |
| "grad_norm": 0.1419888428470599, |
| "learning_rate": 3.115160523174681e-06, |
| "loss": 0.4078, |
| "step": 537 |
| }, |
| { |
| "epoch": 2.419427288040427, |
| "grad_norm": 0.15641078822340915, |
| "learning_rate": 3.0688718637664205e-06, |
| "loss": 0.4163, |
| "step": 538 |
| }, |
| { |
| "epoch": 2.4239191465468837, |
| "grad_norm": 0.14302287152301948, |
| "learning_rate": 3.0228877979241345e-06, |
| "loss": 0.3743, |
| "step": 539 |
| }, |
| { |
| "epoch": 2.428411005053341, |
| "grad_norm": 0.14838140644695952, |
| "learning_rate": 2.9772095905369913e-06, |
| "loss": 0.4152, |
| "step": 540 |
| }, |
| { |
| "epoch": 2.432902863559798, |
| "grad_norm": 0.14296491653836946, |
| "learning_rate": 2.9318384980808727e-06, |
| "loss": 0.4059, |
| "step": 541 |
| }, |
| { |
| "epoch": 2.437394722066255, |
| "grad_norm": 0.15008286020132178, |
| "learning_rate": 2.8867757685838118e-06, |
| "loss": 0.3939, |
| "step": 542 |
| }, |
| { |
| "epoch": 2.441886580572712, |
| "grad_norm": 0.1403709429341766, |
| "learning_rate": 2.8420226415916757e-06, |
| "loss": 0.3997, |
| "step": 543 |
| }, |
| { |
| "epoch": 2.446378439079169, |
| "grad_norm": 0.15082563258882675, |
| "learning_rate": 2.7975803481340463e-06, |
| "loss": 0.4023, |
| "step": 544 |
| }, |
| { |
| "epoch": 2.450870297585626, |
| "grad_norm": 0.15784705073081262, |
| "learning_rate": 2.753450110690375e-06, |
| "loss": 0.4052, |
| "step": 545 |
| }, |
| { |
| "epoch": 2.4553621560920833, |
| "grad_norm": 0.13636735237201847, |
| "learning_rate": 2.709633143156353e-06, |
| "loss": 0.3799, |
| "step": 546 |
| }, |
| { |
| "epoch": 2.45985401459854, |
| "grad_norm": 0.1593373076088822, |
| "learning_rate": 2.666130650810515e-06, |
| "loss": 0.4042, |
| "step": 547 |
| }, |
| { |
| "epoch": 2.464345873104997, |
| "grad_norm": 0.1580770346070772, |
| "learning_rate": 2.6229438302810933e-06, |
| "loss": 0.4253, |
| "step": 548 |
| }, |
| { |
| "epoch": 2.4688377316114543, |
| "grad_norm": 0.13423927554603976, |
| "learning_rate": 2.580073869513094e-06, |
| "loss": 0.3916, |
| "step": 549 |
| }, |
| { |
| "epoch": 2.473329590117911, |
| "grad_norm": 0.16720772837034956, |
| "learning_rate": 2.5375219477356294e-06, |
| "loss": 0.4193, |
| "step": 550 |
| }, |
| { |
| "epoch": 2.4778214486243684, |
| "grad_norm": 0.1493859768063124, |
| "learning_rate": 2.4952892354294682e-06, |
| "loss": 0.4168, |
| "step": 551 |
| }, |
| { |
| "epoch": 2.482313307130825, |
| "grad_norm": 0.13772252373465832, |
| "learning_rate": 2.4533768942948534e-06, |
| "loss": 0.3986, |
| "step": 552 |
| }, |
| { |
| "epoch": 2.4868051656372825, |
| "grad_norm": 0.15653288394332063, |
| "learning_rate": 2.4117860772195375e-06, |
| "loss": 0.4066, |
| "step": 553 |
| }, |
| { |
| "epoch": 2.4912970241437393, |
| "grad_norm": 0.15949884029658656, |
| "learning_rate": 2.3705179282470687e-06, |
| "loss": 0.3951, |
| "step": 554 |
| }, |
| { |
| "epoch": 2.4957888826501966, |
| "grad_norm": 0.1429803919129004, |
| "learning_rate": 2.329573582545332e-06, |
| "loss": 0.4096, |
| "step": 555 |
| }, |
| { |
| "epoch": 2.5002807411566534, |
| "grad_norm": 0.1367359661730616, |
| "learning_rate": 2.288954166375312e-06, |
| "loss": 0.3814, |
| "step": 556 |
| }, |
| { |
| "epoch": 2.5047725996631107, |
| "grad_norm": 0.1513111966124078, |
| "learning_rate": 2.2486607970601196e-06, |
| "loss": 0.4286, |
| "step": 557 |
| }, |
| { |
| "epoch": 2.5092644581695676, |
| "grad_norm": 0.14045368969817978, |
| "learning_rate": 2.2086945829542548e-06, |
| "loss": 0.375, |
| "step": 558 |
| }, |
| { |
| "epoch": 2.513756316676025, |
| "grad_norm": 0.15528683121885756, |
| "learning_rate": 2.1690566234131268e-06, |
| "loss": 0.3948, |
| "step": 559 |
| }, |
| { |
| "epoch": 2.5182481751824817, |
| "grad_norm": 0.14551379178457585, |
| "learning_rate": 2.1297480087628017e-06, |
| "loss": 0.4195, |
| "step": 560 |
| }, |
| { |
| "epoch": 2.522740033688939, |
| "grad_norm": 0.14434811868307373, |
| "learning_rate": 2.090769820270016e-06, |
| "loss": 0.394, |
| "step": 561 |
| }, |
| { |
| "epoch": 2.527231892195396, |
| "grad_norm": 0.14183706063818433, |
| "learning_rate": 2.0521231301124356e-06, |
| "loss": 0.4015, |
| "step": 562 |
| }, |
| { |
| "epoch": 2.531723750701853, |
| "grad_norm": 0.13841495180500263, |
| "learning_rate": 2.013809001349163e-06, |
| "loss": 0.39, |
| "step": 563 |
| }, |
| { |
| "epoch": 2.53621560920831, |
| "grad_norm": 0.1405110514240103, |
| "learning_rate": 1.9758284878914944e-06, |
| "loss": 0.4132, |
| "step": 564 |
| }, |
| { |
| "epoch": 2.5407074677147667, |
| "grad_norm": 0.1582397797574471, |
| "learning_rate": 1.9381826344739317e-06, |
| "loss": 0.4211, |
| "step": 565 |
| }, |
| { |
| "epoch": 2.545199326221224, |
| "grad_norm": 0.14844762464251526, |
| "learning_rate": 1.9008724766254398e-06, |
| "loss": 0.4131, |
| "step": 566 |
| }, |
| { |
| "epoch": 2.5496911847276813, |
| "grad_norm": 0.13912350401808557, |
| "learning_rate": 1.8638990406409697e-06, |
| "loss": 0.3899, |
| "step": 567 |
| }, |
| { |
| "epoch": 2.554183043234138, |
| "grad_norm": 0.1348047186536698, |
| "learning_rate": 1.8272633435532205e-06, |
| "loss": 0.3861, |
| "step": 568 |
| }, |
| { |
| "epoch": 2.558674901740595, |
| "grad_norm": 0.1509435397791266, |
| "learning_rate": 1.7909663931046709e-06, |
| "loss": 0.4472, |
| "step": 569 |
| }, |
| { |
| "epoch": 2.5631667602470523, |
| "grad_norm": 0.14173089587097376, |
| "learning_rate": 1.7550091877198515e-06, |
| "loss": 0.3958, |
| "step": 570 |
| }, |
| { |
| "epoch": 2.5676586187535095, |
| "grad_norm": 0.14302822127986503, |
| "learning_rate": 1.719392716477887e-06, |
| "loss": 0.3979, |
| "step": 571 |
| }, |
| { |
| "epoch": 2.5721504772599664, |
| "grad_norm": 0.15922994001113902, |
| "learning_rate": 1.6841179590852887e-06, |
| "loss": 0.4047, |
| "step": 572 |
| }, |
| { |
| "epoch": 2.576642335766423, |
| "grad_norm": 0.12883007341555425, |
| "learning_rate": 1.6491858858490027e-06, |
| "loss": 0.3919, |
| "step": 573 |
| }, |
| { |
| "epoch": 2.5811341942728805, |
| "grad_norm": 0.1431322016396387, |
| "learning_rate": 1.614597457649723e-06, |
| "loss": 0.4361, |
| "step": 574 |
| }, |
| { |
| "epoch": 2.5856260527793373, |
| "grad_norm": 0.16005846399946025, |
| "learning_rate": 1.5803536259154552e-06, |
| "loss": 0.3871, |
| "step": 575 |
| }, |
| { |
| "epoch": 2.5901179112857946, |
| "grad_norm": 0.1438806473172459, |
| "learning_rate": 1.546455332595352e-06, |
| "loss": 0.4031, |
| "step": 576 |
| }, |
| { |
| "epoch": 2.5946097697922514, |
| "grad_norm": 0.14492270187306566, |
| "learning_rate": 1.5129035101338013e-06, |
| "loss": 0.41, |
| "step": 577 |
| }, |
| { |
| "epoch": 2.5991016282987087, |
| "grad_norm": 0.13992088460829535, |
| "learning_rate": 1.479699081444771e-06, |
| "loss": 0.3911, |
| "step": 578 |
| }, |
| { |
| "epoch": 2.6035934868051656, |
| "grad_norm": 0.15034483503496646, |
| "learning_rate": 1.44684295988643e-06, |
| "loss": 0.4091, |
| "step": 579 |
| }, |
| { |
| "epoch": 2.608085345311623, |
| "grad_norm": 0.14079803397340293, |
| "learning_rate": 1.4143360492360238e-06, |
| "loss": 0.4048, |
| "step": 580 |
| }, |
| { |
| "epoch": 2.6125772038180797, |
| "grad_norm": 0.1433534420740642, |
| "learning_rate": 1.3821792436650067e-06, |
| "loss": 0.4065, |
| "step": 581 |
| }, |
| { |
| "epoch": 2.6170690623245365, |
| "grad_norm": 0.14477932815556296, |
| "learning_rate": 1.350373427714457e-06, |
| "loss": 0.4163, |
| "step": 582 |
| }, |
| { |
| "epoch": 2.621560920830994, |
| "grad_norm": 0.12446547733516874, |
| "learning_rate": 1.3189194762707326e-06, |
| "loss": 0.3724, |
| "step": 583 |
| }, |
| { |
| "epoch": 2.626052779337451, |
| "grad_norm": 0.13398055360321232, |
| "learning_rate": 1.2878182545414191e-06, |
| "loss": 0.4149, |
| "step": 584 |
| }, |
| { |
| "epoch": 2.630544637843908, |
| "grad_norm": 0.13177864587555388, |
| "learning_rate": 1.2570706180315225e-06, |
| "loss": 0.3933, |
| "step": 585 |
| }, |
| { |
| "epoch": 2.6350364963503647, |
| "grad_norm": 0.1436166683524555, |
| "learning_rate": 1.2266774125199337e-06, |
| "loss": 0.4214, |
| "step": 586 |
| }, |
| { |
| "epoch": 2.639528354856822, |
| "grad_norm": 0.13383337224950775, |
| "learning_rate": 1.1966394740361719e-06, |
| "loss": 0.4042, |
| "step": 587 |
| }, |
| { |
| "epoch": 2.6440202133632793, |
| "grad_norm": 0.13908228590146968, |
| "learning_rate": 1.1669576288373846e-06, |
| "loss": 0.4081, |
| "step": 588 |
| }, |
| { |
| "epoch": 2.648512071869736, |
| "grad_norm": 0.13706113871707354, |
| "learning_rate": 1.1376326933856181e-06, |
| "loss": 0.3906, |
| "step": 589 |
| }, |
| { |
| "epoch": 2.653003930376193, |
| "grad_norm": 0.14637537593074662, |
| "learning_rate": 1.1086654743253583e-06, |
| "loss": 0.4137, |
| "step": 590 |
| }, |
| { |
| "epoch": 2.6574957888826503, |
| "grad_norm": 0.13803972231031073, |
| "learning_rate": 1.0800567684613445e-06, |
| "loss": 0.4087, |
| "step": 591 |
| }, |
| { |
| "epoch": 2.661987647389107, |
| "grad_norm": 0.13332822270628664, |
| "learning_rate": 1.0518073627366485e-06, |
| "loss": 0.4019, |
| "step": 592 |
| }, |
| { |
| "epoch": 2.6664795058955644, |
| "grad_norm": 0.13077083513715798, |
| "learning_rate": 1.0239180342110379e-06, |
| "loss": 0.3952, |
| "step": 593 |
| }, |
| { |
| "epoch": 2.670971364402021, |
| "grad_norm": 0.1336310148960128, |
| "learning_rate": 9.963895500395894e-07, |
| "loss": 0.4065, |
| "step": 594 |
| }, |
| { |
| "epoch": 2.6754632229084785, |
| "grad_norm": 0.1432123548055435, |
| "learning_rate": 9.692226674515869e-07, |
| "loss": 0.4102, |
| "step": 595 |
| }, |
| { |
| "epoch": 2.6799550814149353, |
| "grad_norm": 0.13782458019797403, |
| "learning_rate": 9.424181337297035e-07, |
| "loss": 0.402, |
| "step": 596 |
| }, |
| { |
| "epoch": 2.6844469399213926, |
| "grad_norm": 0.1407930193717778, |
| "learning_rate": 9.159766861894357e-07, |
| "loss": 0.4067, |
| "step": 597 |
| }, |
| { |
| "epoch": 2.6889387984278494, |
| "grad_norm": 0.1309505801207627, |
| "learning_rate": 8.89899052158824e-07, |
| "loss": 0.395, |
| "step": 598 |
| }, |
| { |
| "epoch": 2.6934306569343067, |
| "grad_norm": 0.13372502807779402, |
| "learning_rate": 8.641859489584498e-07, |
| "loss": 0.4073, |
| "step": 599 |
| }, |
| { |
| "epoch": 2.6979225154407636, |
| "grad_norm": 0.1341983283881159, |
| "learning_rate": 8.388380838817022e-07, |
| "loss": 0.387, |
| "step": 600 |
| }, |
| { |
| "epoch": 2.702414373947221, |
| "grad_norm": 0.13533167423280718, |
| "learning_rate": 8.138561541753177e-07, |
| "loss": 0.411, |
| "step": 601 |
| }, |
| { |
| "epoch": 2.7069062324536777, |
| "grad_norm": 0.12945926261780205, |
| "learning_rate": 7.892408470202099e-07, |
| "loss": 0.3968, |
| "step": 602 |
| }, |
| { |
| "epoch": 2.7113980909601345, |
| "grad_norm": 0.13778972221216923, |
| "learning_rate": 7.649928395125592e-07, |
| "loss": 0.4057, |
| "step": 603 |
| }, |
| { |
| "epoch": 2.715889949466592, |
| "grad_norm": 0.13968457328358516, |
| "learning_rate": 7.411127986451912e-07, |
| "loss": 0.4037, |
| "step": 604 |
| }, |
| { |
| "epoch": 2.720381807973049, |
| "grad_norm": 0.1397235533395217, |
| "learning_rate": 7.176013812892307e-07, |
| "loss": 0.4194, |
| "step": 605 |
| }, |
| { |
| "epoch": 2.724873666479506, |
| "grad_norm": 0.12677793388517908, |
| "learning_rate": 6.944592341760311e-07, |
| "loss": 0.3818, |
| "step": 606 |
| }, |
| { |
| "epoch": 2.7293655249859627, |
| "grad_norm": 0.13425231454213735, |
| "learning_rate": 6.716869938793857e-07, |
| "loss": 0.3984, |
| "step": 607 |
| }, |
| { |
| "epoch": 2.73385738349242, |
| "grad_norm": 0.12619942214267516, |
| "learning_rate": 6.492852867980155e-07, |
| "loss": 0.398, |
| "step": 608 |
| }, |
| { |
| "epoch": 2.7383492419988773, |
| "grad_norm": 0.13488281757393125, |
| "learning_rate": 6.272547291383403e-07, |
| "loss": 0.4086, |
| "step": 609 |
| }, |
| { |
| "epoch": 2.742841100505334, |
| "grad_norm": 0.1291152392505185, |
| "learning_rate": 6.055959268975332e-07, |
| "loss": 0.4031, |
| "step": 610 |
| }, |
| { |
| "epoch": 2.747332959011791, |
| "grad_norm": 0.13610320289234296, |
| "learning_rate": 5.843094758468389e-07, |
| "loss": 0.3861, |
| "step": 611 |
| }, |
| { |
| "epoch": 2.7518248175182483, |
| "grad_norm": 0.13227498539439772, |
| "learning_rate": 5.633959615152007e-07, |
| "loss": 0.4037, |
| "step": 612 |
| }, |
| { |
| "epoch": 2.756316676024705, |
| "grad_norm": 0.13186795971842452, |
| "learning_rate": 5.428559591731436e-07, |
| "loss": 0.3994, |
| "step": 613 |
| }, |
| { |
| "epoch": 2.7608085345311624, |
| "grad_norm": 0.1333721489625571, |
| "learning_rate": 5.226900338169552e-07, |
| "loss": 0.3973, |
| "step": 614 |
| }, |
| { |
| "epoch": 2.765300393037619, |
| "grad_norm": 0.1338646249689998, |
| "learning_rate": 5.028987401531437e-07, |
| "loss": 0.4056, |
| "step": 615 |
| }, |
| { |
| "epoch": 2.7697922515440765, |
| "grad_norm": 0.1382552063190408, |
| "learning_rate": 4.834826225831736e-07, |
| "loss": 0.3863, |
| "step": 616 |
| }, |
| { |
| "epoch": 2.7742841100505333, |
| "grad_norm": 0.1323165241089875, |
| "learning_rate": 4.644422151885038e-07, |
| "loss": 0.4194, |
| "step": 617 |
| }, |
| { |
| "epoch": 2.7787759685569906, |
| "grad_norm": 0.12553692047901485, |
| "learning_rate": 4.457780417158842e-07, |
| "loss": 0.4066, |
| "step": 618 |
| }, |
| { |
| "epoch": 2.7832678270634474, |
| "grad_norm": 0.12956844890475527, |
| "learning_rate": 4.274906155629513e-07, |
| "loss": 0.4004, |
| "step": 619 |
| }, |
| { |
| "epoch": 2.7877596855699043, |
| "grad_norm": 0.14538864839626559, |
| "learning_rate": 4.095804397641149e-07, |
| "loss": 0.3835, |
| "step": 620 |
| }, |
| { |
| "epoch": 2.7922515440763616, |
| "grad_norm": 0.13248803294494046, |
| "learning_rate": 3.9204800697670965e-07, |
| "loss": 0.4147, |
| "step": 621 |
| }, |
| { |
| "epoch": 2.796743402582819, |
| "grad_norm": 0.12885381135549048, |
| "learning_rate": 3.7489379946744694e-07, |
| "loss": 0.4056, |
| "step": 622 |
| }, |
| { |
| "epoch": 2.8012352610892757, |
| "grad_norm": 0.14078461225962682, |
| "learning_rate": 3.5811828909915534e-07, |
| "loss": 0.4149, |
| "step": 623 |
| }, |
| { |
| "epoch": 2.8057271195957325, |
| "grad_norm": 0.13097000904248593, |
| "learning_rate": 3.417219373177928e-07, |
| "loss": 0.3966, |
| "step": 624 |
| }, |
| { |
| "epoch": 2.81021897810219, |
| "grad_norm": 0.1376316131786541, |
| "learning_rate": 3.257051951397572e-07, |
| "loss": 0.3997, |
| "step": 625 |
| }, |
| { |
| "epoch": 2.814710836608647, |
| "grad_norm": 0.13657095261268032, |
| "learning_rate": 3.1006850313948107e-07, |
| "loss": 0.4033, |
| "step": 626 |
| }, |
| { |
| "epoch": 2.819202695115104, |
| "grad_norm": 0.13207939504527372, |
| "learning_rate": 2.9481229143731197e-07, |
| "loss": 0.3886, |
| "step": 627 |
| }, |
| { |
| "epoch": 2.8236945536215607, |
| "grad_norm": 0.13018989583008223, |
| "learning_rate": 2.7993697968767725e-07, |
| "loss": 0.4158, |
| "step": 628 |
| }, |
| { |
| "epoch": 2.828186412128018, |
| "grad_norm": 0.13040240548669169, |
| "learning_rate": 2.6544297706754683e-07, |
| "loss": 0.4068, |
| "step": 629 |
| }, |
| { |
| "epoch": 2.832678270634475, |
| "grad_norm": 0.13392224545793752, |
| "learning_rate": 2.5133068226517584e-07, |
| "loss": 0.398, |
| "step": 630 |
| }, |
| { |
| "epoch": 2.837170129140932, |
| "grad_norm": 0.1278432929244879, |
| "learning_rate": 2.3760048346913432e-07, |
| "loss": 0.4131, |
| "step": 631 |
| }, |
| { |
| "epoch": 2.841661987647389, |
| "grad_norm": 0.125465501573195, |
| "learning_rate": 2.2425275835763363e-07, |
| "loss": 0.3974, |
| "step": 632 |
| }, |
| { |
| "epoch": 2.8461538461538463, |
| "grad_norm": 0.13434683909908915, |
| "learning_rate": 2.1128787408813722e-07, |
| "loss": 0.4061, |
| "step": 633 |
| }, |
| { |
| "epoch": 2.850645704660303, |
| "grad_norm": 0.12917014219042902, |
| "learning_rate": 1.9870618728725558e-07, |
| "loss": 0.4119, |
| "step": 634 |
| }, |
| { |
| "epoch": 2.8551375631667604, |
| "grad_norm": 0.12927799197589832, |
| "learning_rate": 1.8650804404094718e-07, |
| "loss": 0.4165, |
| "step": 635 |
| }, |
| { |
| "epoch": 2.859629421673217, |
| "grad_norm": 0.12828667749491696, |
| "learning_rate": 1.7469377988498657e-07, |
| "loss": 0.3936, |
| "step": 636 |
| }, |
| { |
| "epoch": 2.8641212801796745, |
| "grad_norm": 0.13007544913143448, |
| "learning_rate": 1.632637197957446e-07, |
| "loss": 0.3946, |
| "step": 637 |
| }, |
| { |
| "epoch": 2.8686131386861313, |
| "grad_norm": 0.1354126849121975, |
| "learning_rate": 1.522181781812407e-07, |
| "loss": 0.4082, |
| "step": 638 |
| }, |
| { |
| "epoch": 2.8731049971925886, |
| "grad_norm": 0.13152253429926178, |
| "learning_rate": 1.415574588725056e-07, |
| "loss": 0.4165, |
| "step": 639 |
| }, |
| { |
| "epoch": 2.8775968556990454, |
| "grad_norm": 0.12461687039628878, |
| "learning_rate": 1.3128185511520974e-07, |
| "loss": 0.3951, |
| "step": 640 |
| }, |
| { |
| "epoch": 2.8820887142055023, |
| "grad_norm": 0.12537645712273682, |
| "learning_rate": 1.2139164956160858e-07, |
| "loss": 0.3838, |
| "step": 641 |
| }, |
| { |
| "epoch": 2.8865805727119596, |
| "grad_norm": 0.1304530973255801, |
| "learning_rate": 1.1188711426276294e-07, |
| "loss": 0.4242, |
| "step": 642 |
| }, |
| { |
| "epoch": 2.891072431218417, |
| "grad_norm": 0.1270688998552854, |
| "learning_rate": 1.0276851066105732e-07, |
| "loss": 0.3999, |
| "step": 643 |
| }, |
| { |
| "epoch": 2.8955642897248737, |
| "grad_norm": 0.12479392286448229, |
| "learning_rate": 9.403608958300276e-08, |
| "loss": 0.3986, |
| "step": 644 |
| }, |
| { |
| "epoch": 2.9000561482313305, |
| "grad_norm": 0.12433792246453539, |
| "learning_rate": 8.569009123234562e-08, |
| "loss": 0.4052, |
| "step": 645 |
| }, |
| { |
| "epoch": 2.904548006737788, |
| "grad_norm": 0.13028761337968897, |
| "learning_rate": 7.773074518345438e-08, |
| "loss": 0.4053, |
| "step": 646 |
| }, |
| { |
| "epoch": 2.909039865244245, |
| "grad_norm": 0.1245947432502766, |
| "learning_rate": 7.015827037500785e-08, |
| "loss": 0.393, |
| "step": 647 |
| }, |
| { |
| "epoch": 2.913531723750702, |
| "grad_norm": 0.13031905992918363, |
| "learning_rate": 6.297287510397075e-08, |
| "loss": 0.4043, |
| "step": 648 |
| }, |
| { |
| "epoch": 2.9180235822571587, |
| "grad_norm": 0.12590059661457523, |
| "learning_rate": 5.6174757019866e-08, |
| "loss": 0.4062, |
| "step": 649 |
| }, |
| { |
| "epoch": 2.922515440763616, |
| "grad_norm": 0.13389326831674256, |
| "learning_rate": 4.976410311933432e-08, |
| "loss": 0.4024, |
| "step": 650 |
| }, |
| { |
| "epoch": 2.927007299270073, |
| "grad_norm": 0.1250548459939687, |
| "learning_rate": 4.3741089740998524e-08, |
| "loss": 0.4023, |
| "step": 651 |
| }, |
| { |
| "epoch": 2.93149915777653, |
| "grad_norm": 0.12270077200805969, |
| "learning_rate": 3.810588256060089e-08, |
| "loss": 0.3967, |
| "step": 652 |
| }, |
| { |
| "epoch": 2.935991016282987, |
| "grad_norm": 0.12624611049254067, |
| "learning_rate": 3.2858636586456185e-08, |
| "loss": 0.4006, |
| "step": 653 |
| }, |
| { |
| "epoch": 2.9404828747894443, |
| "grad_norm": 0.1306767931158808, |
| "learning_rate": 2.7999496155184166e-08, |
| "loss": 0.39, |
| "step": 654 |
| }, |
| { |
| "epoch": 2.944974733295901, |
| "grad_norm": 0.12512189399104776, |
| "learning_rate": 2.3528594927737242e-08, |
| "loss": 0.3861, |
| "step": 655 |
| }, |
| { |
| "epoch": 2.9494665918023584, |
| "grad_norm": 0.1331701408947349, |
| "learning_rate": 1.9446055885728008e-08, |
| "loss": 0.4219, |
| "step": 656 |
| }, |
| { |
| "epoch": 2.953958450308815, |
| "grad_norm": 0.1269883743014457, |
| "learning_rate": 1.5751991328044162e-08, |
| "loss": 0.3958, |
| "step": 657 |
| }, |
| { |
| "epoch": 2.958450308815272, |
| "grad_norm": 0.12769990540091625, |
| "learning_rate": 1.2446502867756959e-08, |
| "loss": 0.4025, |
| "step": 658 |
| }, |
| { |
| "epoch": 2.9629421673217293, |
| "grad_norm": 0.13209320420166154, |
| "learning_rate": 9.529681429332745e-09, |
| "loss": 0.4144, |
| "step": 659 |
| }, |
| { |
| "epoch": 2.9674340258281866, |
| "grad_norm": 0.13117251039825162, |
| "learning_rate": 7.0016072461239484e-09, |
| "loss": 0.3971, |
| "step": 660 |
| }, |
| { |
| "epoch": 2.9719258843346434, |
| "grad_norm": 0.12741148550926698, |
| "learning_rate": 4.862349858167804e-09, |
| "loss": 0.4035, |
| "step": 661 |
| }, |
| { |
| "epoch": 2.9764177428411003, |
| "grad_norm": 0.13192324038184064, |
| "learning_rate": 3.111968110273985e-09, |
| "loss": 0.4031, |
| "step": 662 |
| }, |
| { |
| "epoch": 2.9809096013475576, |
| "grad_norm": 0.13150035056861856, |
| "learning_rate": 1.7505101503979872e-09, |
| "loss": 0.4044, |
| "step": 663 |
| }, |
| { |
| "epoch": 2.985401459854015, |
| "grad_norm": 0.12410294041882683, |
| "learning_rate": 7.780134283285303e-10, |
| "loss": 0.3951, |
| "step": 664 |
| }, |
| { |
| "epoch": 2.9898933183604717, |
| "grad_norm": 0.12559324324833065, |
| "learning_rate": 1.945046946481569e-10, |
| "loss": 0.3995, |
| "step": 665 |
| }, |
| { |
| "epoch": 2.9943851768669285, |
| "grad_norm": 0.13271897553792253, |
| "learning_rate": 0.0, |
| "loss": 0.4058, |
| "step": 666 |
| }, |
| { |
| "epoch": 2.9943851768669285, |
| "step": 666, |
| "total_flos": 7.06867120956244e+18, |
| "train_loss": 0.4525994020360368, |
| "train_runtime": 109632.7326, |
| "train_samples_per_second": 3.118, |
| "train_steps_per_second": 0.006 |
| } |
| ], |
| "logging_steps": 1.0, |
| "max_steps": 666, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 7.06867120956244e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |