{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.307135470527404,
  "eval_steps": 500,
  "global_step": 9000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.010341261633919338,
      "grad_norm": 8.149396896362305,
      "learning_rate": 1.8e-06,
      "loss": 0.7867,
      "step": 10
    },
    {
      "epoch": 0.020682523267838676,
      "grad_norm": 6.155493259429932,
      "learning_rate": 3.8e-06,
      "loss": 0.6806,
      "step": 20
    },
    {
      "epoch": 0.031023784901758014,
      "grad_norm": 2.3532819747924805,
      "learning_rate": 5.8e-06,
      "loss": 0.3933,
      "step": 30
    },
    {
      "epoch": 0.04136504653567735,
      "grad_norm": 2.036257028579712,
      "learning_rate": 7.8e-06,
      "loss": 0.2849,
      "step": 40
    },
    {
      "epoch": 0.05170630816959669,
      "grad_norm": 2.012796401977539,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.2381,
      "step": 50
    },
    {
      "epoch": 0.06204756980351603,
      "grad_norm": 1.7664618492126465,
      "learning_rate": 1.18e-05,
      "loss": 0.234,
      "step": 60
    },
    {
      "epoch": 0.07238883143743537,
      "grad_norm": 2.0668587684631348,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 0.192,
      "step": 70
    },
    {
      "epoch": 0.0827300930713547,
      "grad_norm": 2.1389617919921875,
      "learning_rate": 1.58e-05,
      "loss": 0.1676,
      "step": 80
    },
    {
      "epoch": 0.09307135470527404,
      "grad_norm": 2.014838457107544,
      "learning_rate": 1.78e-05,
      "loss": 0.1564,
      "step": 90
    },
    {
      "epoch": 0.10341261633919338,
      "grad_norm": 1.2980386018753052,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 0.1491,
      "step": 100
    },
    {
      "epoch": 0.11375387797311272,
      "grad_norm": 1.6397504806518555,
      "learning_rate": 2.18e-05,
      "loss": 0.1488,
      "step": 110
    },
    {
      "epoch": 0.12409513960703206,
      "grad_norm": 1.1800998449325562,
      "learning_rate": 2.38e-05,
      "loss": 0.1509,
      "step": 120
    },
    {
      "epoch": 0.1344364012409514,
      "grad_norm": 1.8919923305511475,
      "learning_rate": 2.58e-05,
      "loss": 0.1348,
      "step": 130
    },
    {
      "epoch": 0.14477766287487073,
      "grad_norm": 1.2429007291793823,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 0.1268,
      "step": 140
    },
    {
      "epoch": 0.15511892450879008,
      "grad_norm": 1.5326730012893677,
      "learning_rate": 2.98e-05,
      "loss": 0.1416,
      "step": 150
    },
    {
      "epoch": 0.1654601861427094,
      "grad_norm": 0.8138331770896912,
      "learning_rate": 3.18e-05,
      "loss": 0.1416,
      "step": 160
    },
    {
      "epoch": 0.17580144777662876,
      "grad_norm": 1.4102532863616943,
      "learning_rate": 3.38e-05,
      "loss": 0.1172,
      "step": 170
    },
    {
      "epoch": 0.18614270941054809,
      "grad_norm": 1.4643559455871582,
      "learning_rate": 3.58e-05,
      "loss": 0.1267,
      "step": 180
    },
    {
      "epoch": 0.19648397104446744,
      "grad_norm": 0.9335357546806335,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 0.1233,
      "step": 190
    },
    {
      "epoch": 0.20682523267838676,
      "grad_norm": 1.4486008882522583,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 0.115,
      "step": 200
    },
    {
      "epoch": 0.2171664943123061,
      "grad_norm": 1.1640030145645142,
      "learning_rate": 4.18e-05,
      "loss": 0.1091,
      "step": 210
    },
    {
      "epoch": 0.22750775594622544,
      "grad_norm": 1.262294054031372,
      "learning_rate": 4.38e-05,
      "loss": 0.1055,
      "step": 220
    },
    {
      "epoch": 0.2378490175801448,
      "grad_norm": 1.672928810119629,
      "learning_rate": 4.58e-05,
      "loss": 0.1144,
      "step": 230
    },
    {
      "epoch": 0.2481902792140641,
      "grad_norm": 1.1388617753982544,
      "learning_rate": 4.78e-05,
      "loss": 0.1107,
      "step": 240
    },
    {
      "epoch": 0.25853154084798347,
      "grad_norm": 1.4822652339935303,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 0.1052,
      "step": 250
    },
    {
      "epoch": 0.2688728024819028,
      "grad_norm": 1.422031044960022,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 0.1079,
      "step": 260
    },
    {
      "epoch": 0.2792140641158221,
      "grad_norm": 1.258497953414917,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.0981,
      "step": 270
    },
    {
      "epoch": 0.28955532574974147,
      "grad_norm": 1.2558832168579102,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.0935,
      "step": 280
    },
    {
      "epoch": 0.2998965873836608,
      "grad_norm": 0.9570427536964417,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.0905,
      "step": 290
    },
    {
      "epoch": 0.31023784901758017,
      "grad_norm": 1.1470006704330444,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.0918,
      "step": 300
    },
    {
      "epoch": 0.32057911065149947,
      "grad_norm": 1.6402456760406494,
      "learning_rate": 6.18e-05,
      "loss": 0.0849,
      "step": 310
    },
    {
      "epoch": 0.3309203722854188,
      "grad_norm": 1.5231980085372925,
      "learning_rate": 6.38e-05,
      "loss": 0.0927,
      "step": 320
    },
    {
      "epoch": 0.34126163391933817,
      "grad_norm": 1.3603947162628174,
      "learning_rate": 6.58e-05,
      "loss": 0.0903,
      "step": 330
    },
    {
      "epoch": 0.3516028955532575,
      "grad_norm": 1.6248503923416138,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.0932,
      "step": 340
    },
    {
      "epoch": 0.3619441571871768,
      "grad_norm": 1.067590355873108,
      "learning_rate": 6.98e-05,
      "loss": 0.1047,
      "step": 350
    },
    {
      "epoch": 0.37228541882109617,
      "grad_norm": 1.610234260559082,
      "learning_rate": 7.18e-05,
      "loss": 0.1044,
      "step": 360
    },
    {
      "epoch": 0.3826266804550155,
      "grad_norm": 1.0361660718917847,
      "learning_rate": 7.38e-05,
      "loss": 0.0908,
      "step": 370
    },
    {
      "epoch": 0.3929679420889349,
      "grad_norm": 1.2278398275375366,
      "learning_rate": 7.58e-05,
      "loss": 0.0989,
      "step": 380
    },
    {
      "epoch": 0.40330920372285417,
      "grad_norm": 1.2411803007125854,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.0917,
      "step": 390
    },
    {
      "epoch": 0.4136504653567735,
      "grad_norm": 1.6133360862731934,
      "learning_rate": 7.98e-05,
      "loss": 0.082,
      "step": 400
    },
    {
      "epoch": 0.4239917269906929,
      "grad_norm": 1.059349775314331,
      "learning_rate": 8.18e-05,
      "loss": 0.0826,
      "step": 410
    },
    {
      "epoch": 0.4343329886246122,
      "grad_norm": 0.9120876789093018,
      "learning_rate": 8.38e-05,
      "loss": 0.1,
      "step": 420
    },
    {
      "epoch": 0.4446742502585315,
      "grad_norm": 1.1069954633712769,
      "learning_rate": 8.58e-05,
      "loss": 0.0792,
      "step": 430
    },
    {
      "epoch": 0.4550155118924509,
      "grad_norm": 1.2280263900756836,
      "learning_rate": 8.78e-05,
      "loss": 0.0905,
      "step": 440
    },
    {
      "epoch": 0.4653567735263702,
      "grad_norm": 1.6888885498046875,
      "learning_rate": 8.98e-05,
      "loss": 0.0947,
      "step": 450
    },
    {
      "epoch": 0.4756980351602896,
      "grad_norm": 1.3998686075210571,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.0773,
      "step": 460
    },
    {
      "epoch": 0.4860392967942089,
      "grad_norm": 1.184749722480774,
      "learning_rate": 9.38e-05,
      "loss": 0.0789,
      "step": 470
    },
    {
      "epoch": 0.4963805584281282,
      "grad_norm": 1.2737512588500977,
      "learning_rate": 9.58e-05,
      "loss": 0.079,
      "step": 480
    },
    {
      "epoch": 0.5067218200620476,
      "grad_norm": 1.1363584995269775,
      "learning_rate": 9.78e-05,
      "loss": 0.0915,
      "step": 490
    },
    {
      "epoch": 0.5170630816959669,
      "grad_norm": 1.4589959383010864,
      "learning_rate": 9.98e-05,
      "loss": 0.07,
      "step": 500
    },
    {
      "epoch": 0.5274043433298863,
      "grad_norm": 1.0789856910705566,
      "learning_rate": 9.9999778549206e-05,
      "loss": 0.0751,
      "step": 510
    },
    {
      "epoch": 0.5377456049638056,
      "grad_norm": 0.9680395126342773,
      "learning_rate": 9.999901304280685e-05,
      "loss": 0.0763,
      "step": 520
    },
    {
      "epoch": 0.5480868665977249,
      "grad_norm": 1.0887702703475952,
      "learning_rate": 9.999770075521164e-05,
      "loss": 0.0788,
      "step": 530
    },
    {
      "epoch": 0.5584281282316442,
      "grad_norm": 0.9130731225013733,
      "learning_rate": 9.99958417007713e-05,
      "loss": 0.0833,
      "step": 540
    },
    {
      "epoch": 0.5687693898655636,
      "grad_norm": 2.3674962520599365,
      "learning_rate": 9.999343589981615e-05,
      "loss": 0.0785,
      "step": 550
    },
    {
      "epoch": 0.5791106514994829,
      "grad_norm": 0.7402455806732178,
      "learning_rate": 9.999048337865568e-05,
      "loss": 0.0856,
      "step": 560
    },
    {
      "epoch": 0.5894519131334023,
      "grad_norm": 0.9020370244979858,
      "learning_rate": 9.998698416957815e-05,
      "loss": 0.0779,
      "step": 570
    },
    {
      "epoch": 0.5997931747673216,
      "grad_norm": 1.2453511953353882,
      "learning_rate": 9.998293831085037e-05,
      "loss": 0.0873,
      "step": 580
    },
    {
      "epoch": 0.610134436401241,
      "grad_norm": 0.8675932884216309,
      "learning_rate": 9.997834584671719e-05,
      "loss": 0.0687,
      "step": 590
    },
    {
      "epoch": 0.6204756980351603,
      "grad_norm": 0.9966331124305725,
      "learning_rate": 9.997320682740107e-05,
      "loss": 0.0722,
      "step": 600
    },
    {
      "epoch": 0.6308169596690796,
      "grad_norm": 0.9224534630775452,
      "learning_rate": 9.996752130910149e-05,
      "loss": 0.0722,
      "step": 610
    },
    {
      "epoch": 0.6411582213029989,
      "grad_norm": 1.4033963680267334,
      "learning_rate": 9.99612893539944e-05,
      "loss": 0.0834,
      "step": 620
    },
    {
      "epoch": 0.6514994829369183,
      "grad_norm": 0.9545592069625854,
      "learning_rate": 9.995451103023144e-05,
      "loss": 0.0791,
      "step": 630
    },
    {
      "epoch": 0.6618407445708376,
      "grad_norm": 1.295036792755127,
      "learning_rate": 9.994718641193928e-05,
      "loss": 0.0737,
      "step": 640
    },
    {
      "epoch": 0.672182006204757,
      "grad_norm": 1.0808061361312866,
      "learning_rate": 9.993931557921874e-05,
      "loss": 0.0898,
      "step": 650
    },
    {
      "epoch": 0.6825232678386763,
      "grad_norm": 0.8616644740104675,
      "learning_rate": 9.993089861814402e-05,
      "loss": 0.0839,
      "step": 660
    },
    {
      "epoch": 0.6928645294725957,
      "grad_norm": 0.9336108565330505,
      "learning_rate": 9.992193562076166e-05,
      "loss": 0.0725,
      "step": 670
    },
    {
      "epoch": 0.703205791106515,
      "grad_norm": 1.1453830003738403,
      "learning_rate": 9.991242668508954e-05,
      "loss": 0.083,
      "step": 680
    },
    {
      "epoch": 0.7135470527404343,
      "grad_norm": 1.0346120595932007,
      "learning_rate": 9.990237191511587e-05,
      "loss": 0.0678,
      "step": 690
    },
    {
      "epoch": 0.7238883143743536,
      "grad_norm": 0.9411576390266418,
      "learning_rate": 9.989177142079802e-05,
      "loss": 0.0621,
      "step": 700
    },
    {
      "epoch": 0.734229576008273,
      "grad_norm": 0.8834561109542847,
      "learning_rate": 9.988062531806126e-05,
      "loss": 0.0699,
      "step": 710
    },
    {
      "epoch": 0.7445708376421923,
      "grad_norm": 0.604751706123352,
      "learning_rate": 9.986893372879762e-05,
      "loss": 0.0554,
      "step": 720
    },
    {
      "epoch": 0.7549120992761117,
      "grad_norm": 0.44892871379852295,
      "learning_rate": 9.985669678086443e-05,
      "loss": 0.0813,
      "step": 730
    },
    {
      "epoch": 0.765253360910031,
      "grad_norm": 0.6999841928482056,
      "learning_rate": 9.984391460808298e-05,
      "loss": 0.0703,
      "step": 740
    },
    {
      "epoch": 0.7755946225439504,
      "grad_norm": 0.8311813473701477,
      "learning_rate": 9.983058735023709e-05,
      "loss": 0.0725,
      "step": 750
    },
    {
      "epoch": 0.7859358841778697,
      "grad_norm": 0.9027028679847717,
      "learning_rate": 9.98167151530715e-05,
      "loss": 0.0755,
      "step": 760
    },
    {
      "epoch": 0.796277145811789,
      "grad_norm": 1.5489064455032349,
      "learning_rate": 9.980229816829034e-05,
      "loss": 0.0776,
      "step": 770
    },
    {
      "epoch": 0.8066184074457083,
      "grad_norm": 0.591497540473938,
      "learning_rate": 9.978733655355544e-05,
      "loss": 0.0837,
      "step": 780
    },
    {
      "epoch": 0.8169596690796277,
      "grad_norm": 0.9439393877983093,
      "learning_rate": 9.977183047248464e-05,
      "loss": 0.0661,
      "step": 790
    },
    {
      "epoch": 0.827300930713547,
      "grad_norm": 0.9268099665641785,
      "learning_rate": 9.975578009464992e-05,
      "loss": 0.0796,
      "step": 800
    },
    {
      "epoch": 0.8376421923474664,
      "grad_norm": 0.7318974137306213,
      "learning_rate": 9.97391855955757e-05,
      "loss": 0.0619,
      "step": 810
    },
    {
      "epoch": 0.8479834539813857,
      "grad_norm": 0.7151139378547668,
      "learning_rate": 9.972204715673669e-05,
      "loss": 0.0816,
      "step": 820
    },
    {
      "epoch": 0.8583247156153051,
      "grad_norm": 1.0155748128890991,
      "learning_rate": 9.970436496555617e-05,
      "loss": 0.0736,
      "step": 830
    },
    {
      "epoch": 0.8686659772492245,
      "grad_norm": 0.9889300465583801,
      "learning_rate": 9.968613921540373e-05,
      "loss": 0.0687,
      "step": 840
    },
    {
      "epoch": 0.8790072388831437,
      "grad_norm": 0.9797405004501343,
      "learning_rate": 9.966737010559326e-05,
      "loss": 0.0704,
      "step": 850
    },
    {
      "epoch": 0.889348500517063,
      "grad_norm": 0.7920046448707581,
      "learning_rate": 9.964805784138072e-05,
      "loss": 0.0848,
      "step": 860
    },
    {
      "epoch": 0.8996897621509824,
      "grad_norm": 0.6183204650878906,
      "learning_rate": 9.962820263396195e-05,
      "loss": 0.0692,
      "step": 870
    },
    {
      "epoch": 0.9100310237849017,
      "grad_norm": 0.9597355723381042,
      "learning_rate": 9.960780470047033e-05,
      "loss": 0.0691,
      "step": 880
    },
    {
      "epoch": 0.9203722854188211,
      "grad_norm": 0.8262777924537659,
      "learning_rate": 9.958686426397437e-05,
      "loss": 0.0631,
      "step": 890
    },
    {
      "epoch": 0.9307135470527405,
      "grad_norm": 0.4940412640571594,
      "learning_rate": 9.956538155347534e-05,
      "loss": 0.0601,
      "step": 900
    },
    {
      "epoch": 0.9410548086866598,
      "grad_norm": 0.7991613149642944,
      "learning_rate": 9.95433568039047e-05,
      "loss": 0.0662,
      "step": 910
    },
    {
      "epoch": 0.9513960703205792,
      "grad_norm": 1.5338730812072754,
      "learning_rate": 9.952079025612162e-05,
      "loss": 0.073,
      "step": 920
    },
    {
      "epoch": 0.9617373319544984,
      "grad_norm": 0.7227517366409302,
      "learning_rate": 9.949768215691022e-05,
      "loss": 0.0676,
      "step": 930
    },
    {
      "epoch": 0.9720785935884177,
      "grad_norm": 0.854946494102478,
      "learning_rate": 9.9474032758977e-05,
      "loss": 0.0644,
      "step": 940
    },
    {
      "epoch": 0.9824198552223371,
      "grad_norm": 0.8006120324134827,
      "learning_rate": 9.944984232094794e-05,
      "loss": 0.0621,
      "step": 950
    },
    {
      "epoch": 0.9927611168562565,
      "grad_norm": 1.1748908758163452,
      "learning_rate": 9.942511110736584e-05,
      "loss": 0.0709,
      "step": 960
    },
    {
      "epoch": 1.0031023784901758,
      "grad_norm": 1.727028489112854,
      "learning_rate": 9.939983938868726e-05,
      "loss": 0.0654,
      "step": 970
    },
    {
      "epoch": 1.0134436401240952,
      "grad_norm": 1.283582329750061,
      "learning_rate": 9.93740274412797e-05,
      "loss": 0.0704,
      "step": 980
    },
    {
      "epoch": 1.0237849017580145,
      "grad_norm": 0.6932828426361084,
      "learning_rate": 9.934767554741846e-05,
      "loss": 0.0599,
      "step": 990
    },
    {
      "epoch": 1.0341261633919339,
      "grad_norm": 1.0551997423171997,
      "learning_rate": 9.932078399528361e-05,
      "loss": 0.066,
      "step": 1000
    },
    {
      "epoch": 1.0444674250258532,
      "grad_norm": 1.2890373468399048,
      "learning_rate": 9.929335307895689e-05,
      "loss": 0.0748,
      "step": 1010
    },
    {
      "epoch": 1.0548086866597726,
      "grad_norm": 0.9331973791122437,
      "learning_rate": 9.926538309841839e-05,
      "loss": 0.0682,
      "step": 1020
    },
    {
      "epoch": 1.065149948293692,
      "grad_norm": 1.0202667713165283,
      "learning_rate": 9.923687435954334e-05,
      "loss": 0.0658,
      "step": 1030
    },
    {
      "epoch": 1.0754912099276113,
      "grad_norm": 0.7298310995101929,
      "learning_rate": 9.920782717409873e-05,
      "loss": 0.0696,
      "step": 1040
    },
    {
      "epoch": 1.0858324715615306,
      "grad_norm": 1.018120527267456,
      "learning_rate": 9.917824185973994e-05,
      "loss": 0.0578,
      "step": 1050
    },
    {
      "epoch": 1.0961737331954498,
      "grad_norm": 1.1090223789215088,
      "learning_rate": 9.914811874000723e-05,
      "loss": 0.0627,
      "step": 1060
    },
    {
      "epoch": 1.106514994829369,
      "grad_norm": 0.982001543045044,
      "learning_rate": 9.911745814432218e-05,
      "loss": 0.0528,
      "step": 1070
    },
    {
      "epoch": 1.1168562564632885,
      "grad_norm": 0.5015981197357178,
      "learning_rate": 9.90862604079842e-05,
      "loss": 0.0565,
      "step": 1080
    },
    {
      "epoch": 1.1271975180972078,
      "grad_norm": 0.8871455788612366,
      "learning_rate": 9.90545258721667e-05,
      "loss": 0.0619,
      "step": 1090
    },
    {
      "epoch": 1.1375387797311272,
      "grad_norm": 0.5811088681221008,
      "learning_rate": 9.90222548839135e-05,
      "loss": 0.0637,
      "step": 1100
    },
    {
      "epoch": 1.1478800413650465,
      "grad_norm": 1.1258891820907593,
      "learning_rate": 9.898944779613495e-05,
      "loss": 0.0571,
      "step": 1110
    },
    {
      "epoch": 1.1582213029989659,
      "grad_norm": 0.6611842513084412,
      "learning_rate": 9.89561049676041e-05,
      "loss": 0.0526,
      "step": 1120
    },
    {
      "epoch": 1.1685625646328852,
      "grad_norm": 0.551165759563446,
      "learning_rate": 9.89222267629528e-05,
      "loss": 0.0648,
      "step": 1130
    },
    {
      "epoch": 1.1789038262668046,
      "grad_norm": 0.6980261206626892,
      "learning_rate": 9.888781355266763e-05,
      "loss": 0.0553,
      "step": 1140
    },
    {
      "epoch": 1.189245087900724,
      "grad_norm": 1.1229857206344604,
      "learning_rate": 9.885286571308598e-05,
      "loss": 0.067,
      "step": 1150
    },
    {
      "epoch": 1.1995863495346433,
      "grad_norm": 0.6213947534561157,
      "learning_rate": 9.881738362639182e-05,
      "loss": 0.0542,
      "step": 1160
    },
    {
      "epoch": 1.2099276111685626,
      "grad_norm": 0.7247045040130615,
      "learning_rate": 9.878136768061154e-05,
      "loss": 0.0597,
      "step": 1170
    },
    {
      "epoch": 1.220268872802482,
      "grad_norm": 0.7021093368530273,
      "learning_rate": 9.874481826960979e-05,
      "loss": 0.0669,
      "step": 1180
    },
    {
      "epoch": 1.2306101344364013,
      "grad_norm": 0.685075044631958,
      "learning_rate": 9.870773579308503e-05,
      "loss": 0.0597,
      "step": 1190
    },
    {
      "epoch": 1.2409513960703205,
      "grad_norm": 0.5643942356109619,
      "learning_rate": 9.867012065656533e-05,
      "loss": 0.0698,
      "step": 1200
    },
    {
      "epoch": 1.2512926577042398,
      "grad_norm": 1.0317023992538452,
      "learning_rate": 9.863197327140376e-05,
      "loss": 0.0765,
      "step": 1210
    },
    {
      "epoch": 1.2616339193381592,
      "grad_norm": 0.8478870391845703,
      "learning_rate": 9.859329405477403e-05,
      "loss": 0.0646,
      "step": 1220
    },
    {
      "epoch": 1.2719751809720785,
      "grad_norm": 0.7335549592971802,
      "learning_rate": 9.855408342966585e-05,
      "loss": 0.0606,
      "step": 1230
    },
    {
      "epoch": 1.2823164426059979,
      "grad_norm": 0.9537607431411743,
      "learning_rate": 9.851434182488033e-05,
      "loss": 0.0651,
      "step": 1240
    },
    {
      "epoch": 1.2926577042399172,
      "grad_norm": 0.6781355142593384,
      "learning_rate": 9.84740696750253e-05,
      "loss": 0.0559,
      "step": 1250
    },
    {
      "epoch": 1.3029989658738366,
      "grad_norm": 0.762302815914154,
      "learning_rate": 9.843326742051055e-05,
      "loss": 0.0509,
      "step": 1260
    },
    {
      "epoch": 1.313340227507756,
      "grad_norm": 0.943731963634491,
      "learning_rate": 9.839193550754297e-05,
      "loss": 0.0556,
      "step": 1270
    },
    {
      "epoch": 1.3236814891416753,
      "grad_norm": 1.0327494144439697,
      "learning_rate": 9.835007438812177e-05,
      "loss": 0.0639,
      "step": 1280
    },
    {
      "epoch": 1.3340227507755946,
      "grad_norm": 0.7240540981292725,
      "learning_rate": 9.830768452003341e-05,
      "loss": 0.057,
      "step": 1290
    },
    {
      "epoch": 1.344364012409514,
      "grad_norm": 0.598537802696228,
      "learning_rate": 9.826476636684671e-05,
      "loss": 0.0553,
      "step": 1300
    },
    {
      "epoch": 1.3547052740434333,
      "grad_norm": 0.6769247651100159,
      "learning_rate": 9.822132039790773e-05,
      "loss": 0.058,
      "step": 1310
    },
    {
      "epoch": 1.3650465356773527,
      "grad_norm": 0.9476394057273865,
      "learning_rate": 9.817734708833461e-05,
      "loss": 0.0644,
      "step": 1320
    },
    {
      "epoch": 1.375387797311272,
      "grad_norm": 0.912298858165741,
      "learning_rate": 9.813284691901243e-05,
      "loss": 0.0693,
      "step": 1330
    },
    {
      "epoch": 1.3857290589451914,
      "grad_norm": 0.5882769227027893,
      "learning_rate": 9.808782037658792e-05,
      "loss": 0.0752,
      "step": 1340
    },
    {
      "epoch": 1.3960703205791107,
      "grad_norm": 0.40450236201286316,
      "learning_rate": 9.804226795346411e-05,
      "loss": 0.0521,
      "step": 1350
    },
    {
      "epoch": 1.40641158221303,
      "grad_norm": 0.8500616550445557,
      "learning_rate": 9.799619014779503e-05,
      "loss": 0.0629,
      "step": 1360
    },
    {
      "epoch": 1.4167528438469494,
      "grad_norm": 0.6569103598594666,
      "learning_rate": 9.794958746348013e-05,
      "loss": 0.0579,
      "step": 1370
    },
    {
      "epoch": 1.4270941054808688,
      "grad_norm": 1.1073946952819824,
      "learning_rate": 9.790246041015896e-05,
      "loss": 0.0599,
      "step": 1380
    },
    {
      "epoch": 1.4374353671147881,
      "grad_norm": 0.9515782594680786,
      "learning_rate": 9.785480950320538e-05,
      "loss": 0.0523,
      "step": 1390
    },
    {
      "epoch": 1.4477766287487073,
      "grad_norm": 0.4773123562335968,
      "learning_rate": 9.78066352637221e-05,
      "loss": 0.0534,
      "step": 1400
    },
    {
      "epoch": 1.4581178903826266,
      "grad_norm": 0.7259894013404846,
      "learning_rate": 9.775793821853488e-05,
      "loss": 0.067,
      "step": 1410
    },
    {
      "epoch": 1.468459152016546,
      "grad_norm": 0.5047846436500549,
      "learning_rate": 9.77087189001868e-05,
      "loss": 0.0708,
      "step": 1420
    },
    {
      "epoch": 1.4788004136504653,
      "grad_norm": 1.3085306882858276,
      "learning_rate": 9.765897784693243e-05,
      "loss": 0.066,
      "step": 1430
    },
    {
      "epoch": 1.4891416752843847,
      "grad_norm": 0.5943396687507629,
      "learning_rate": 9.760871560273197e-05,
      "loss": 0.0603,
      "step": 1440
    },
    {
      "epoch": 1.499482936918304,
      "grad_norm": 0.8696584701538086,
      "learning_rate": 9.755793271724526e-05,
      "loss": 0.0697,
      "step": 1450
    },
    {
      "epoch": 1.5098241985522234,
      "grad_norm": 0.836199939250946,
      "learning_rate": 9.750662974582584e-05,
      "loss": 0.0518,
      "step": 1460
    },
    {
      "epoch": 1.5201654601861427,
      "grad_norm": 0.8995105028152466,
      "learning_rate": 9.745480724951473e-05,
      "loss": 0.0527,
      "step": 1470
    },
    {
      "epoch": 1.530506721820062,
      "grad_norm": 0.5041167736053467,
      "learning_rate": 9.740246579503447e-05,
      "loss": 0.0577,
      "step": 1480
    },
    {
      "epoch": 1.5408479834539814,
      "grad_norm": 1.044538140296936,
      "learning_rate": 9.734960595478284e-05,
      "loss": 0.0536,
      "step": 1490
    },
    {
      "epoch": 1.5511892450879006,
      "grad_norm": 0.8556201457977295,
      "learning_rate": 9.729622830682657e-05,
      "loss": 0.0552,
      "step": 1500
    },
    {
      "epoch": 1.56153050672182,
      "grad_norm": 1.2117815017700195,
      "learning_rate": 9.724233343489504e-05,
      "loss": 0.048,
      "step": 1510
    },
    {
      "epoch": 1.5718717683557393,
      "grad_norm": 0.5549541711807251,
      "learning_rate": 9.718792192837396e-05,
      "loss": 0.057,
      "step": 1520
    },
    {
      "epoch": 1.5822130299896586,
      "grad_norm": 0.6075549721717834,
      "learning_rate": 9.713299438229886e-05,
      "loss": 0.0666,
      "step": 1530
    },
    {
      "epoch": 1.592554291623578,
      "grad_norm": 0.6179999113082886,
      "learning_rate": 9.707755139734855e-05,
      "loss": 0.0475,
      "step": 1540
    },
    {
      "epoch": 1.6028955532574973,
      "grad_norm": 0.6538460850715637,
      "learning_rate": 9.702159357983866e-05,
      "loss": 0.0591,
      "step": 1550
    },
    {
      "epoch": 1.6132368148914167,
      "grad_norm": 0.5411471128463745,
      "learning_rate": 9.696512154171492e-05,
      "loss": 0.0508,
      "step": 1560
    },
    {
      "epoch": 1.623578076525336,
      "grad_norm": 0.7011205554008484,
      "learning_rate": 9.690813590054645e-05,
      "loss": 0.0599,
      "step": 1570
    },
    {
      "epoch": 1.6339193381592554,
      "grad_norm": 0.4414671063423157,
      "learning_rate": 9.685063727951914e-05,
      "loss": 0.0569,
      "step": 1580
    },
    {
      "epoch": 1.6442605997931747,
      "grad_norm": 0.6055905222892761,
      "learning_rate": 9.679262630742865e-05,
      "loss": 0.0612,
      "step": 1590
    },
    {
      "epoch": 1.654601861427094,
      "grad_norm": 0.46874237060546875,
      "learning_rate": 9.673410361867373e-05,
      "loss": 0.0587,
      "step": 1600
    },
    {
      "epoch": 1.6649431230610134,
      "grad_norm": 0.7625353932380676,
      "learning_rate": 9.667506985324909e-05,
      "loss": 0.0633,
      "step": 1610
    },
    {
      "epoch": 1.6752843846949328,
      "grad_norm": 0.5977282524108887,
      "learning_rate": 9.661552565673855e-05,
      "loss": 0.0603,
      "step": 1620
    },
    {
      "epoch": 1.6856256463288521,
      "grad_norm": 1.0120965242385864,
      "learning_rate": 9.655547168030789e-05,
      "loss": 0.058,
      "step": 1630
    },
    {
      "epoch": 1.6959669079627715,
      "grad_norm": 0.6692385673522949,
      "learning_rate": 9.649490858069777e-05,
      "loss": 0.0547,
      "step": 1640
    },
    {
      "epoch": 1.7063081695966908,
      "grad_norm": 0.5050467848777771,
      "learning_rate": 9.643383702021658e-05,
      "loss": 0.0497,
      "step": 1650
    },
    {
      "epoch": 1.7166494312306102,
      "grad_norm": 0.6187373995780945,
      "learning_rate": 9.637225766673307e-05,
      "loss": 0.0528,
      "step": 1660
    },
    {
      "epoch": 1.7269906928645296,
      "grad_norm": 0.7905316948890686,
      "learning_rate": 9.631017119366922e-05,
      "loss": 0.0493,
      "step": 1670
    },
    {
      "epoch": 1.737331954498449,
      "grad_norm": 0.6983612775802612,
      "learning_rate": 9.624757827999273e-05,
      "loss": 0.0515,
      "step": 1680
    },
    {
      "epoch": 1.7476732161323683,
      "grad_norm": 0.458248108625412,
      "learning_rate": 9.618447961020971e-05,
      "loss": 0.0616,
      "step": 1690
    },
    {
      "epoch": 1.7580144777662876,
      "grad_norm": 0.791432797908783,
      "learning_rate": 9.612087587435707e-05,
      "loss": 0.0542,
      "step": 1700
    },
    {
      "epoch": 1.768355739400207,
      "grad_norm": 0.39505401253700256,
      "learning_rate": 9.605676776799508e-05,
      "loss": 0.0591,
      "step": 1710
    },
    {
      "epoch": 1.7786970010341263,
      "grad_norm": 0.6002795696258545,
      "learning_rate": 9.599215599219973e-05,
      "loss": 0.0664,
      "step": 1720
    },
    {
      "epoch": 1.7890382626680457,
      "grad_norm": 0.7972106337547302,
      "learning_rate": 9.592704125355505e-05,
      "loss": 0.0478,
      "step": 1730
    },
    {
      "epoch": 1.7993795243019648,
      "grad_norm": 0.7573429346084595,
      "learning_rate": 9.586142426414538e-05,
      "loss": 0.0581,
      "step": 1740
    },
    {
      "epoch": 1.8097207859358841,
      "grad_norm": 0.4389801323413849,
      "learning_rate": 9.57953057415476e-05,
      "loss": 0.0539,
      "step": 1750
    },
    {
      "epoch": 1.8200620475698035,
      "grad_norm": 0.5790207386016846,
      "learning_rate": 9.572868640882328e-05,
      "loss": 0.0504,
      "step": 1760
    },
    {
      "epoch": 1.8304033092037229,
      "grad_norm": 0.647650957107544,
      "learning_rate": 9.56615669945108e-05,
      "loss": 0.0531,
      "step": 1770
    },
    {
      "epoch": 1.8407445708376422,
      "grad_norm": 0.7187141180038452,
      "learning_rate": 9.55939482326173e-05,
      "loss": 0.0464,
      "step": 1780
    },
    {
      "epoch": 1.8510858324715616,
      "grad_norm": 1.000731110572815,
      "learning_rate": 9.552583086261069e-05,
      "loss": 0.0533,
      "step": 1790
    },
    {
      "epoch": 1.861427094105481,
      "grad_norm": 0.7128459811210632,
      "learning_rate": 9.545721562941168e-05,
      "loss": 0.0513,
      "step": 1800
    },
    {
      "epoch": 1.8717683557394003,
      "grad_norm": 0.5781081914901733,
      "learning_rate": 9.538810328338543e-05,
      "loss": 0.0486,
      "step": 1810
    },
    {
      "epoch": 1.8821096173733194,
      "grad_norm": 0.6659092903137207,
      "learning_rate": 9.531849458033349e-05,
      "loss": 0.0431,
      "step": 1820
    },
    {
      "epoch": 1.8924508790072387,
      "grad_norm": 0.6032243967056274,
      "learning_rate": 9.524839028148547e-05,
      "loss": 0.0516,
      "step": 1830
    },
    {
      "epoch": 1.902792140641158,
      "grad_norm": 0.8847993016242981,
      "learning_rate": 9.517779115349077e-05,
      "loss": 0.0555,
      "step": 1840
    },
    {
      "epoch": 1.9131334022750774,
      "grad_norm": 0.4947476387023926,
      "learning_rate": 9.510669796841014e-05,
      "loss": 0.0489,
      "step": 1850
    },
    {
      "epoch": 1.9234746639089968,
      "grad_norm": 0.7030625343322754,
      "learning_rate": 9.503511150370727e-05,
      "loss": 0.0501,
      "step": 1860
    },
    {
      "epoch": 1.9338159255429161,
      "grad_norm": 0.7029416561126709,
      "learning_rate": 9.496303254224024e-05,
      "loss": 0.046,
      "step": 1870
    },
    {
      "epoch": 1.9441571871768355,
      "grad_norm": 0.7042602300643921,
      "learning_rate": 9.489046187225306e-05,
      "loss": 0.0536,
      "step": 1880
    },
    {
      "epoch": 1.9544984488107549,
      "grad_norm": 0.7400018572807312,
      "learning_rate": 9.481740028736692e-05,
      "loss": 0.056,
      "step": 1890
    },
    {
      "epoch": 1.9648397104446742,
      "grad_norm": 0.9477378726005554,
      "learning_rate": 9.474384858657164e-05,
      "loss": 0.0545,
      "step": 1900
    },
    {
      "epoch": 1.9751809720785936,
      "grad_norm": 0.658433198928833,
      "learning_rate": 9.466980757421679e-05,
      "loss": 0.0445,
      "step": 1910
    },
    {
      "epoch": 1.985522233712513,
      "grad_norm": 0.942138671875,
      "learning_rate": 9.459527806000305e-05,
      "loss": 0.0516,
      "step": 1920
    },
    {
      "epoch": 1.9958634953464323,
      "grad_norm": 0.7159606218338013,
      "learning_rate": 9.452026085897325e-05,
      "loss": 0.0498,
      "step": 1930
    },
    {
      "epoch": 2.0062047569803516,
      "grad_norm": 0.41084176301956177,
      "learning_rate": 9.444475679150348e-05,
      "loss": 0.0515,
      "step": 1940
    },
    {
      "epoch": 2.016546018614271,
      "grad_norm": 0.6837812066078186,
      "learning_rate": 9.436876668329411e-05,
      "loss": 0.0522,
      "step": 1950
    },
    {
      "epoch": 2.0268872802481903,
      "grad_norm": 0.827167272567749,
      "learning_rate": 9.429229136536079e-05,
      "loss": 0.0471,
      "step": 1960
    },
    {
      "epoch": 2.0372285418821097,
      "grad_norm": 0.5262520909309387,
      "learning_rate": 9.421533167402534e-05,
      "loss": 0.0473,
      "step": 1970
    },
    {
      "epoch": 2.047569803516029,
      "grad_norm": 0.5549857020378113,
      "learning_rate": 9.413788845090666e-05,
      "loss": 0.0617,
      "step": 1980
    },
    {
      "epoch": 2.0579110651499484,
      "grad_norm": 0.7001256942749023,
      "learning_rate": 9.405996254291136e-05,
      "loss": 0.0454,
      "step": 1990
    },
    {
      "epoch": 2.0682523267838677,
      "grad_norm": 0.45477667450904846,
      "learning_rate": 9.398155480222474e-05,
      "loss": 0.0484,
      "step": 2000
    },
    {
      "epoch": 2.078593588417787,
      "grad_norm": 0.8515445590019226,
      "learning_rate": 9.390266608630128e-05,
      "loss": 0.05,
      "step": 2010
    },
    {
      "epoch": 2.0889348500517064,
      "grad_norm": 0.7437769174575806,
      "learning_rate": 9.38232972578553e-05,
      "loss": 0.0453,
      "step": 2020
    },
    {
      "epoch": 2.099276111685626,
      "grad_norm": 0.8379760980606079,
      "learning_rate": 9.374344918485164e-05,
      "loss": 0.0491,
      "step": 2030
    },
    {
      "epoch": 2.109617373319545,
      "grad_norm": 0.6811707019805908,
      "learning_rate": 9.366312274049602e-05,
      "loss": 0.0487,
      "step": 2040
    },
    {
      "epoch": 2.1199586349534645,
      "grad_norm": 0.6526315212249756,
      "learning_rate": 9.358231880322554e-05,
      "loss": 0.0476,
      "step": 2050
    },
    {
      "epoch": 2.130299896587384,
      "grad_norm": 0.6808832287788391,
      "learning_rate": 9.350103825669916e-05,
      "loss": 0.0484,
      "step": 2060
    },
    {
      "epoch": 2.140641158221303,
      "grad_norm": 0.9608593583106995,
      "learning_rate": 9.341928198978787e-05,
      "loss": 0.0504,
      "step": 2070
    },
    {
      "epoch": 2.1509824198552225,
      "grad_norm": 0.9973729252815247,
      "learning_rate": 9.333705089656512e-05,
      "loss": 0.0566,
      "step": 2080
    },
    {
      "epoch": 2.161323681489142,
      "grad_norm": 1.163051962852478,
      "learning_rate": 9.325434587629698e-05,
      "loss": 0.0509,
      "step": 2090
    },
    {
      "epoch": 2.1716649431230612,
      "grad_norm": 0.7200208902359009,
      "learning_rate": 9.31711678334323e-05,
      "loss": 0.0444,
      "step": 2100
    },
    {
      "epoch": 2.18200620475698,
      "grad_norm": 0.8001949191093445,
      "learning_rate": 9.308751767759282e-05,
      "loss": 0.0473,
      "step": 2110
    },
    {
      "epoch": 2.1923474663908995,
      "grad_norm": 1.0816729068756104,
      "learning_rate": 9.300339632356325e-05,
      "loss": 0.0554,
      "step": 2120
    },
    {
      "epoch": 2.202688728024819,
      "grad_norm": 1.2773263454437256,
      "learning_rate": 9.291880469128124e-05,
      "loss": 0.0559,
      "step": 2130
    },
    {
      "epoch": 2.213029989658738,
      "grad_norm": 0.5496627688407898,
      "learning_rate": 9.283374370582732e-05,
      "loss": 0.0516,
      "step": 2140
    },
    {
      "epoch": 2.2233712512926576,
      "grad_norm": 0.6876987814903259,
      "learning_rate": 9.274821429741482e-05,
      "loss": 0.053,
      "step": 2150
    },
    {
      "epoch": 2.233712512926577,
      "grad_norm": 0.7567824721336365,
      "learning_rate": 9.266221740137961e-05,
      "loss": 0.0534,
      "step": 2160
    },
    {
      "epoch": 2.2440537745604963,
      "grad_norm": 0.6181732416152954,
      "learning_rate": 9.257575395817001e-05,
      "loss": 0.0425,
      "step": 2170
    },
    {
      "epoch": 2.2543950361944156,
      "grad_norm": 0.8472265005111694,
      "learning_rate": 9.248882491333637e-05,
      "loss": 0.0415,
      "step": 2180
    },
    {
      "epoch": 2.264736297828335,
      "grad_norm": 0.690981924533844,
      "learning_rate": 9.240143121752076e-05,
      "loss": 0.0421,
      "step": 2190
    },
    {
      "epoch": 2.2750775594622543,
      "grad_norm": 0.7380693554878235,
      "learning_rate": 9.23135738264467e-05,
      "loss": 0.0486,
      "step": 2200
    },
    {
      "epoch": 2.2854188210961737,
      "grad_norm": 0.69078528881073,
      "learning_rate": 9.222525370090849e-05,
      "loss": 0.0519,
      "step": 2210
    },
    {
      "epoch": 2.295760082730093,
      "grad_norm": 0.7110298275947571,
      "learning_rate": 9.213647180676088e-05,
      "loss": 0.0424,
      "step": 2220
    },
    {
      "epoch": 2.3061013443640124,
      "grad_norm": 0.5352296233177185,
      "learning_rate": 9.204722911490846e-05,
      "loss": 0.0489,
      "step": 2230
    },
    {
      "epoch": 2.3164426059979317,
      "grad_norm": 0.8834404945373535,
      "learning_rate": 9.1957526601295e-05,
      "loss": 0.0415,
      "step": 2240
    },
    {
      "epoch": 2.326783867631851,
      "grad_norm": 0.4739783704280853,
      "learning_rate": 9.186736524689281e-05,
      "loss": 0.0479,
      "step": 2250
    },
    {
      "epoch": 2.3371251292657704,
      "grad_norm": 0.5279911160469055,
      "learning_rate": 9.177674603769204e-05,
      "loss": 0.049,
      "step": 2260
    },
    {
      "epoch": 2.34746639089969,
      "grad_norm": 0.7019535303115845,
      "learning_rate": 9.168566996468983e-05,
      "loss": 0.0441,
      "step": 2270
    },
    {
      "epoch": 2.357807652533609,
      "grad_norm": 0.5389631390571594,
      "learning_rate": 9.159413802387951e-05,
      "loss": 0.0533,
      "step": 2280
    },
    {
      "epoch": 2.3681489141675285,
      "grad_norm": 0.6318327188491821,
      "learning_rate": 9.150215121623974e-05,
      "loss": 0.0506,
      "step": 2290
    },
    {
      "epoch": 2.378490175801448,
      "grad_norm": 0.7012895345687866,
      "learning_rate": 9.140971054772349e-05,
      "loss": 0.0547,
      "step": 2300
    },
    {
      "epoch": 2.388831437435367,
      "grad_norm": 0.568821907043457,
      "learning_rate": 9.131681702924713e-05,
      "loss": 0.0418,
      "step": 2310
    },
    {
      "epoch": 2.3991726990692865,
      "grad_norm": 0.5281450152397156,
      "learning_rate": 9.122347167667926e-05,
      "loss": 0.0504,
      "step": 2320
    },
    {
      "epoch": 2.409513960703206,
      "grad_norm": 0.5024285912513733,
      "learning_rate": 9.112967551082973e-05,
      "loss": 0.046,
      "step": 2330
    },
    {
      "epoch": 2.4198552223371252,
      "grad_norm": 0.3743893504142761,
      "learning_rate": 9.103542955743835e-05,
      "loss": 0.0477,
      "step": 2340
    },
    {
      "epoch": 2.4301964839710446,
      "grad_norm": 0.501488208770752,
      "learning_rate": 9.094073484716381e-05,
      "loss": 0.0413,
      "step": 2350
    },
    {
      "epoch": 2.440537745604964,
      "grad_norm": 0.8769798278808594,
      "learning_rate": 9.084559241557226e-05,
      "loss": 0.0462,
      "step": 2360
    },
    {
      "epoch": 2.4508790072388833,
      "grad_norm": 0.6660071611404419,
      "learning_rate": 9.075000330312608e-05,
      "loss": 0.0493,
      "step": 2370
    },
    {
      "epoch": 2.4612202688728027,
      "grad_norm": 0.6684579849243164,
      "learning_rate": 9.065396855517253e-05,
      "loss": 0.0399,
      "step": 2380
    },
    {
      "epoch": 2.471561530506722,
      "grad_norm": 0.7604276537895203,
      "learning_rate": 9.055748922193219e-05,
      "loss": 0.05,
      "step": 2390
    },
    {
      "epoch": 2.481902792140641,
      "grad_norm": 0.45810768008232117,
      "learning_rate": 9.046056635848761e-05,
      "loss": 0.0426,
      "step": 2400
    },
    {
      "epoch": 2.4922440537745603,
      "grad_norm": 0.5932450294494629,
      "learning_rate": 9.036320102477169e-05,
      "loss": 0.0453,
      "step": 2410
    },
    {
      "epoch": 2.5025853154084796,
      "grad_norm": 0.6226415038108826,
      "learning_rate": 9.02653942855561e-05,
      "loss": 0.0543,
      "step": 2420
    },
    {
      "epoch": 2.512926577042399,
      "grad_norm": 0.4741579294204712,
      "learning_rate": 9.016714721043971e-05,
      "loss": 0.043,
      "step": 2430
    },
    {
      "epoch": 2.5232678386763183,
      "grad_norm": 0.6971632838249207,
      "learning_rate": 9.006846087383675e-05,
      "loss": 0.0388,
      "step": 2440
    },
    {
      "epoch": 2.5336091003102377,
      "grad_norm": 0.8449572920799255,
      "learning_rate": 8.996933635496523e-05,
      "loss": 0.0476,
      "step": 2450
    },
    {
      "epoch": 2.543950361944157,
      "grad_norm": 1.035272479057312,
      "learning_rate": 8.986977473783498e-05,
      "loss": 0.0464,
      "step": 2460
    },
    {
      "epoch": 2.5542916235780764,
      "grad_norm": 0.6585243940353394,
      "learning_rate": 8.97697771112359e-05,
      "loss": 0.0467,
      "step": 2470
    },
    {
      "epoch": 2.5646328852119957,
      "grad_norm": 0.6686993837356567,
      "learning_rate": 8.966934456872602e-05,
      "loss": 0.0497,
      "step": 2480
    },
    {
      "epoch": 2.574974146845915,
      "grad_norm": 0.43413418531417847,
      "learning_rate": 8.95684782086195e-05,
      "loss": 0.0522,
      "step": 2490
    },
    {
      "epoch": 2.5853154084798344,
      "grad_norm": 0.5102486610412598,
      "learning_rate": 8.946717913397476e-05,
      "loss": 0.0501,
      "step": 2500
    },
    {
      "epoch": 2.595656670113754,
      "grad_norm": 0.5289815664291382,
      "learning_rate": 8.93654484525822e-05,
      "loss": 0.0501,
      "step": 2510
    },
    {
      "epoch": 2.605997931747673,
      "grad_norm": 0.6268642544746399,
      "learning_rate": 8.926328727695226e-05,
      "loss": 0.0475,
      "step": 2520
    },
    {
      "epoch": 2.6163391933815925,
      "grad_norm": 0.6896649599075317,
      "learning_rate": 8.916069672430319e-05,
      "loss": 0.0393,
      "step": 2530
    },
    {
      "epoch": 2.626680455015512,
      "grad_norm": 0.5943664312362671,
      "learning_rate": 8.905767791654884e-05,
      "loss": 0.0538,
      "step": 2540
    },
    {
      "epoch": 2.637021716649431,
      "grad_norm": 0.48935872316360474,
      "learning_rate": 8.895423198028638e-05,
      "loss": 0.0437,
      "step": 2550
    },
    {
      "epoch": 2.6473629782833505,
      "grad_norm": 0.520727276802063,
      "learning_rate": 8.885036004678402e-05,
      "loss": 0.0486,
      "step": 2560
    },
    {
      "epoch": 2.65770423991727,
      "grad_norm": 0.8430449962615967,
      "learning_rate": 8.874606325196857e-05,
      "loss": 0.0548,
      "step": 2570
    },
    {
      "epoch": 2.6680455015511892,
      "grad_norm": 0.6561726331710815,
      "learning_rate": 8.864134273641304e-05,
      "loss": 0.0469,
      "step": 2580
    },
    {
      "epoch": 2.6783867631851086,
      "grad_norm": 0.5026679039001465,
      "learning_rate": 8.853619964532427e-05,
      "loss": 0.0445,
      "step": 2590
    },
    {
      "epoch": 2.688728024819028,
      "grad_norm": 0.5810408592224121,
      "learning_rate": 8.843063512853019e-05,
      "loss": 0.0556,
      "step": 2600
    },
    {
      "epoch": 2.6990692864529473,
      "grad_norm": 0.5039211511611938,
      "learning_rate": 8.832465034046749e-05,
      "loss": 0.041,
      "step": 2610
    },
    {
      "epoch": 2.7094105480868667,
      "grad_norm": 0.49479565024375916,
      "learning_rate": 8.821824644016882e-05,
      "loss": 0.049,
      "step": 2620
    },
    {
      "epoch": 2.719751809720786,
      "grad_norm": 0.9741470217704773,
      "learning_rate": 8.811142459125019e-05,
      "loss": 0.0518,
      "step": 2630
    },
    {
      "epoch": 2.7300930713547054,
      "grad_norm": 0.5409213900566101,
      "learning_rate": 8.800418596189822e-05,
      "loss": 0.0477,
      "step": 2640
    },
    {
      "epoch": 2.7404343329886247,
      "grad_norm": 0.6023234724998474,
      "learning_rate": 8.789653172485737e-05,
      "loss": 0.0448,
      "step": 2650
    },
    {
      "epoch": 2.750775594622544,
      "grad_norm": 0.6539735794067383,
      "learning_rate": 8.778846305741715e-05,
      "loss": 0.0427,
      "step": 2660
    },
    {
      "epoch": 2.7611168562564634,
      "grad_norm": 0.5833004713058472,
      "learning_rate": 8.767998114139918e-05,
      "loss": 0.0465,
      "step": 2670
    },
    {
      "epoch": 2.7714581178903828,
      "grad_norm": 0.6909075975418091,
      "learning_rate": 8.757108716314429e-05,
      "loss": 0.0447,
      "step": 2680
    },
    {
      "epoch": 2.781799379524302,
      "grad_norm": 0.7476276159286499,
      "learning_rate": 8.746178231349962e-05,
      "loss": 0.0456,
      "step": 2690
    },
    {
      "epoch": 2.7921406411582215,
      "grad_norm": 0.6345753073692322,
      "learning_rate": 8.735206778780549e-05,
      "loss": 0.0434,
      "step": 2700
    },
    {
      "epoch": 2.802481902792141,
      "grad_norm": 0.4737369418144226,
      "learning_rate": 8.724194478588234e-05,
      "loss": 0.043,
      "step": 2710
    },
    {
      "epoch": 2.81282316442606,
      "grad_norm": 0.8068066835403442,
      "learning_rate": 8.713141451201772e-05,
      "loss": 0.0513,
      "step": 2720
    },
    {
      "epoch": 2.8231644260599795,
      "grad_norm": 0.7784083485603333,
      "learning_rate": 8.702047817495295e-05,
      "loss": 0.0413,
      "step": 2730
    },
    {
      "epoch": 2.833505687693899,
      "grad_norm": 0.6053678393363953,
      "learning_rate": 8.69091369878701e-05,
      "loss": 0.0341,
      "step": 2740
    },
    {
      "epoch": 2.8438469493278182,
      "grad_norm": 0.5391380786895752,
      "learning_rate": 8.679739216837849e-05,
      "loss": 0.0419,
      "step": 2750
    },
    {
      "epoch": 2.8541882109617376,
      "grad_norm": 0.5964809656143188,
      "learning_rate": 8.66852449385016e-05,
      "loss": 0.0458,
      "step": 2760
    },
    {
      "epoch": 2.864529472595657,
      "grad_norm": 0.5052362084388733,
      "learning_rate": 8.657269652466356e-05,
      "loss": 0.0329,
      "step": 2770
    },
    {
      "epoch": 2.8748707342295763,
      "grad_norm": 0.5219334363937378,
      "learning_rate": 8.645974815767577e-05,
      "loss": 0.0392,
      "step": 2780
    },
    {
      "epoch": 2.885211995863495,
      "grad_norm": 0.5843889117240906,
      "learning_rate": 8.634640107272351e-05,
      "loss": 0.0445,
      "step": 2790
    },
    {
      "epoch": 2.8955532574974145,
      "grad_norm": 0.6976073980331421,
      "learning_rate": 8.623265650935234e-05,
      "loss": 0.043,
      "step": 2800
    },
    {
      "epoch": 2.905894519131334,
      "grad_norm": 0.6056023240089417,
      "learning_rate": 8.611851571145456e-05,
      "loss": 0.0384,
      "step": 2810
    },
    {
      "epoch": 2.9162357807652532,
      "grad_norm": 0.36529815196990967,
      "learning_rate": 8.600397992725566e-05,
      "loss": 0.038,
      "step": 2820
    },
    {
      "epoch": 2.9265770423991726,
      "grad_norm": 0.4777768552303314,
      "learning_rate": 8.588905040930061e-05,
      "loss": 0.0475,
      "step": 2830
    },
    {
      "epoch": 2.936918304033092,
      "grad_norm": 0.5702307820320129,
      "learning_rate": 8.577372841444022e-05,
      "loss": 0.0456,
      "step": 2840
    },
    {
      "epoch": 2.9472595656670113,
      "grad_norm": 0.6453633904457092,
      "learning_rate": 8.565801520381736e-05,
      "loss": 0.0422,
      "step": 2850
    },
    {
      "epoch": 2.9576008273009307,
      "grad_norm": 0.559160053730011,
      "learning_rate": 8.554191204285313e-05,
      "loss": 0.0442,
      "step": 2860
    },
    {
      "epoch": 2.96794208893485,
      "grad_norm": 0.9055858850479126,
      "learning_rate": 8.542542020123315e-05,
      "loss": 0.0516,
      "step": 2870
    },
    {
      "epoch": 2.9782833505687694,
      "grad_norm": 0.5340009331703186,
      "learning_rate": 8.530854095289347e-05,
      "loss": 0.0409,
      "step": 2880
    },
    {
      "epoch": 2.9886246122026887,
      "grad_norm": 0.694705605506897,
      "learning_rate": 8.519127557600688e-05,
      "loss": 0.0456,
      "step": 2890
    },
    {
      "epoch": 2.998965873836608,
      "grad_norm": 0.6423262357711792,
      "learning_rate": 8.507362535296871e-05,
      "loss": 0.0478,
      "step": 2900
    },
    {
      "epoch": 3.0093071354705274,
      "grad_norm": 0.7967825531959534,
      "learning_rate": 8.495559157038299e-05,
      "loss": 0.0515,
      "step": 2910
    },
    {
      "epoch": 3.0196483971044468,
      "grad_norm": 0.86309814453125,
      "learning_rate": 8.483717551904823e-05,
      "loss": 0.043,
      "step": 2920
    },
    {
      "epoch": 3.029989658738366,
      "grad_norm": 0.5841116905212402,
      "learning_rate": 8.47183784939434e-05,
      "loss": 0.0371,
      "step": 2930
    },
    {
      "epoch": 3.0403309203722855,
      "grad_norm": 0.5810801386833191,
      "learning_rate": 8.459920179421374e-05,
      "loss": 0.0393,
      "step": 2940
    },
    {
      "epoch": 3.050672182006205,
      "grad_norm": 0.6818716526031494,
      "learning_rate": 8.447964672315656e-05,
      "loss": 0.0441,
      "step": 2950
    },
    {
      "epoch": 3.061013443640124,
      "grad_norm": 0.6192893981933594,
      "learning_rate": 8.435971458820692e-05,
      "loss": 0.051,
      "step": 2960
    },
    {
      "epoch": 3.0713547052740435,
      "grad_norm": 0.4708903431892395,
      "learning_rate": 8.423940670092345e-05,
      "loss": 0.0391,
      "step": 2970
    },
    {
      "epoch": 3.081695966907963,
      "grad_norm": 0.633942723274231,
      "learning_rate": 8.411872437697394e-05,
      "loss": 0.0382,
      "step": 2980
    },
    {
      "epoch": 3.0920372285418822,
      "grad_norm": 0.6821548342704773,
      "learning_rate": 8.399766893612096e-05,
      "loss": 0.0395,
      "step": 2990
    },
    {
      "epoch": 3.1023784901758016,
      "grad_norm": 0.7218717932701111,
      "learning_rate": 8.38762417022074e-05,
      "loss": 0.0486,
      "step": 3000
    },
    {
      "epoch": 3.112719751809721,
      "grad_norm": 0.5830649733543396,
      "learning_rate": 8.375444400314204e-05,
      "loss": 0.0483,
      "step": 3010
    },
    {
      "epoch": 3.1230610134436403,
      "grad_norm": 0.5195898413658142,
      "learning_rate": 8.3632277170885e-05,
      "loss": 0.0368,
      "step": 3020
    },
    {
      "epoch": 3.1334022750775596,
      "grad_norm": 0.6910445094108582,
      "learning_rate": 8.350974254143318e-05,
      "loss": 0.0379,
      "step": 3030
    },
    {
      "epoch": 3.143743536711479,
      "grad_norm": 0.6298835277557373,
      "learning_rate": 8.338684145480566e-05,
      "loss": 0.0424,
      "step": 3040
    },
    {
      "epoch": 3.1540847983453983,
      "grad_norm": 0.6096106767654419,
      "learning_rate": 8.326357525502904e-05,
      "loss": 0.0444,
      "step": 3050
    },
    {
      "epoch": 3.1644260599793173,
      "grad_norm": 0.5233014225959778,
      "learning_rate": 8.313994529012273e-05,
      "loss": 0.0371,
      "step": 3060
    },
    {
      "epoch": 3.1747673216132366,
      "grad_norm": 0.6389190554618835,
      "learning_rate": 8.301595291208422e-05,
      "loss": 0.0371,
      "step": 3070
    },
    {
      "epoch": 3.185108583247156,
      "grad_norm": 0.5496965050697327,
      "learning_rate": 8.289159947687427e-05,
      "loss": 0.0454,
      "step": 3080
    },
    {
      "epoch": 3.1954498448810753,
      "grad_norm": 0.4676893949508667,
      "learning_rate": 8.276688634440216e-05,
      "loss": 0.0431,
      "step": 3090
    },
    {
      "epoch": 3.2057911065149947,
      "grad_norm": 0.40901947021484375,
      "learning_rate": 8.26418148785107e-05,
      "loss": 0.046,
      "step": 3100
    },
    {
      "epoch": 3.216132368148914,
      "grad_norm": 0.4361165463924408,
      "learning_rate": 8.251638644696141e-05,
      "loss": 0.0386,
      "step": 3110
    },
    {
      "epoch": 3.2264736297828334,
      "grad_norm": 0.6600609421730042,
      "learning_rate": 8.23906024214195e-05,
      "loss": 0.0387,
      "step": 3120
    },
    {
      "epoch": 3.2368148914167527,
      "grad_norm": 0.40081214904785156,
      "learning_rate": 8.226446417743897e-05,
      "loss": 0.0346,
      "step": 3130
    },
    {
      "epoch": 3.247156153050672,
      "grad_norm": 0.4680558741092682,
      "learning_rate": 8.213797309444742e-05,
      "loss": 0.0343,
      "step": 3140
    },
    {
      "epoch": 3.2574974146845914,
      "grad_norm": 0.46012917160987854,
      "learning_rate": 8.201113055573105e-05,
      "loss": 0.0388,
      "step": 3150
    },
    {
      "epoch": 3.2678386763185108,
      "grad_norm": 0.5295402407646179,
      "learning_rate": 8.188393794841958e-05,
      "loss": 0.0362,
      "step": 3160
    },
    {
      "epoch": 3.27817993795243,
      "grad_norm": 0.7489907145500183,
      "learning_rate": 8.175639666347094e-05,
      "loss": 0.0387,
      "step": 3170
    },
    {
      "epoch": 3.2885211995863495,
      "grad_norm": 0.846535861492157,
      "learning_rate": 8.162850809565623e-05,
      "loss": 0.0334,
      "step": 3180
    },
    {
      "epoch": 3.298862461220269,
      "grad_norm": 0.44992080330848694,
      "learning_rate": 8.150027364354431e-05,
      "loss": 0.0367,
      "step": 3190
    },
    {
      "epoch": 3.309203722854188,
      "grad_norm": 0.4321269690990448,
      "learning_rate": 8.137169470948662e-05,
      "loss": 0.0419,
      "step": 3200
    },
    {
      "epoch": 3.3195449844881075,
      "grad_norm": 0.8248120546340942,
      "learning_rate": 8.124277269960179e-05,
      "loss": 0.0417,
      "step": 3210
    },
    {
      "epoch": 3.329886246122027,
      "grad_norm": 0.7593957781791687,
      "learning_rate": 8.111350902376023e-05,
      "loss": 0.0417,
      "step": 3220
    },
    {
      "epoch": 3.3402275077559462,
      "grad_norm": 0.8113712668418884,
      "learning_rate": 8.098390509556883e-05,
      "loss": 0.0378,
      "step": 3230
    },
    {
      "epoch": 3.3505687693898656,
      "grad_norm": 0.6793055534362793,
      "learning_rate": 8.085396233235536e-05,
      "loss": 0.0346,
      "step": 3240
    },
    {
      "epoch": 3.360910031023785,
      "grad_norm": 0.5956434607505798,
      "learning_rate": 8.072368215515306e-05,
      "loss": 0.0391,
      "step": 3250
    },
    {
      "epoch": 3.3712512926577043,
      "grad_norm": 0.6982569694519043,
      "learning_rate": 8.059306598868506e-05,
      "loss": 0.0332,
      "step": 3260
    },
    {
      "epoch": 3.3815925542916236,
      "grad_norm": 0.3158213198184967,
      "learning_rate": 8.046211526134888e-05,
      "loss": 0.0441,
      "step": 3270
    },
    {
      "epoch": 3.391933815925543,
      "grad_norm": 0.46166232228279114,
      "learning_rate": 8.033083140520065e-05,
      "loss": 0.0402,
      "step": 3280
    },
    {
      "epoch": 3.4022750775594623,
      "grad_norm": 0.3405998945236206,
      "learning_rate": 8.019921585593962e-05,
      "loss": 0.0415,
      "step": 3290
    },
    {
      "epoch": 3.4126163391933817,
      "grad_norm": 0.5354151129722595,
      "learning_rate": 8.006727005289232e-05,
      "loss": 0.0406,
      "step": 3300
    },
    {
      "epoch": 3.422957600827301,
      "grad_norm": 0.6202423572540283,
      "learning_rate": 7.993499543899692e-05,
      "loss": 0.0425,
      "step": 3310
    },
    {
      "epoch": 3.4332988624612204,
      "grad_norm": 0.8136566877365112,
      "learning_rate": 7.980239346078742e-05,
      "loss": 0.0328,
      "step": 3320
    },
    {
      "epoch": 3.4436401240951398,
      "grad_norm": 0.3956882953643799,
      "learning_rate": 7.966946556837778e-05,
      "loss": 0.0397,
      "step": 3330
    },
    {
      "epoch": 3.453981385729059,
      "grad_norm": 0.6842160224914551,
      "learning_rate": 7.953621321544616e-05,
      "loss": 0.0346,
      "step": 3340
    },
    {
      "epoch": 3.4643226473629785,
      "grad_norm": 0.532228410243988,
      "learning_rate": 7.940263785921896e-05,
      "loss": 0.036,
      "step": 3350
    },
    {
      "epoch": 3.474663908996898,
      "grad_norm": 0.4018094837665558,
      "learning_rate": 7.926874096045482e-05,
      "loss": 0.0493,
      "step": 3360
    },
    {
      "epoch": 3.485005170630817,
      "grad_norm": 0.41058021783828735,
      "learning_rate": 7.913452398342881e-05,
      "loss": 0.0368,
      "step": 3370
    },
    {
      "epoch": 3.4953464322647365,
      "grad_norm": 0.4128235876560211,
      "learning_rate": 7.89999883959163e-05,
      "loss": 0.0367,
      "step": 3380
    },
    {
      "epoch": 3.505687693898656,
      "grad_norm": 0.4618026316165924,
      "learning_rate": 7.886513566917687e-05,
      "loss": 0.0341,
      "step": 3390
    },
    {
      "epoch": 3.516028955532575,
      "grad_norm": 0.42472904920578003,
      "learning_rate": 7.872996727793838e-05,
      "loss": 0.0387,
      "step": 3400
    },
    {
      "epoch": 3.5263702171664946,
      "grad_norm": 0.4191513657569885,
      "learning_rate": 7.859448470038069e-05,
      "loss": 0.0363,
      "step": 3410
    },
    {
      "epoch": 3.536711478800414,
      "grad_norm": 0.8841342926025391,
      "learning_rate": 7.845868941811956e-05,
      "loss": 0.0425,
      "step": 3420
    },
| { | |
| "epoch": 3.547052740434333, | |
| "grad_norm": 0.4915429353713989, | |
| "learning_rate": 7.832258291619043e-05, | |
| "loss": 0.0374, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 3.557394002068252, | |
| "grad_norm": 0.37256190180778503, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0411, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 3.5677352637021715, | |
| "grad_norm": 0.3888627290725708, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0399, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 3.578076525336091, | |
| "grad_norm": 0.4838491678237915, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0387, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 3.5884177869700102, | |
| "grad_norm": 0.4837625324726105, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.0456, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 3.5987590486039296, | |
| "grad_norm": 0.6947500109672546, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.03, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 3.609100310237849, | |
| "grad_norm": 0.4781893491744995, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.0352, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 3.6194415718717683, | |
| "grad_norm": 0.45729202032089233, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.0403, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 3.6297828335056876, | |
| "grad_norm": 0.7594940066337585, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0417, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 3.640124095139607, | |
| "grad_norm": 0.6064152717590332, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.044, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 3.6504653567735263, | |
| "grad_norm": 0.5854924321174622, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.0358, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 3.6608066184074457, | |
| "grad_norm": 0.7333920001983643, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.0396, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 3.671147880041365, | |
| "grad_norm": 0.25505274534225464, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0395, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 3.6814891416752844, | |
| "grad_norm": 0.5614821314811707, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.0447, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 3.6918304033092038, | |
| "grad_norm": 0.6589389443397522, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.0347, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 3.702171664943123, | |
| "grad_norm": 0.42713990807533264, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.0418, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 3.7125129265770425, | |
| "grad_norm": 0.5433197021484375, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.0401, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 3.722854188210962, | |
| "grad_norm": 0.34031739830970764, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0387, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 3.733195449844881, | |
| "grad_norm": 0.7647942900657654, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.0299, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 3.7435367114788005, | |
| "grad_norm": 0.35516348481178284, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.0352, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 3.75387797311272, | |
| "grad_norm": 0.3834609389305115, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.0378, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 3.764219234746639, | |
| "grad_norm": 0.546096920967102, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0306, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 3.7745604963805586, | |
| "grad_norm": 0.5136013627052307, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0408, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 3.784901758014478, | |
| "grad_norm": 0.55198734998703, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.034, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 3.795243019648397, | |
| "grad_norm": 0.7280723452568054, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.0386, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 3.805584281282316, | |
| "grad_norm": 0.5837876200675964, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0356, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 3.8159255429162355, | |
| "grad_norm": 0.5259419679641724, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.0413, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 3.826266804550155, | |
| "grad_norm": 0.4155033528804779, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0361, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 3.8366080661840742, | |
| "grad_norm": 0.4253619909286499, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0361, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 3.8469493278179936, | |
| "grad_norm": 0.5708061456680298, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0444, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 3.857290589451913, | |
| "grad_norm": 0.38971978425979614, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0403, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 3.8676318510858323, | |
| "grad_norm": 0.6087445616722107, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0426, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 3.8779731127197516, | |
| "grad_norm": 0.38466373085975647, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0393, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 3.888314374353671, | |
| "grad_norm": 0.5579493045806885, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0379, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 3.8986556359875904, | |
| "grad_norm": 0.7091334462165833, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.029, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 3.9089968976215097, | |
| "grad_norm": 0.48447132110595703, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0386, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 3.919338159255429, | |
| "grad_norm": 0.5803413987159729, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.0341, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 3.9296794208893484, | |
| "grad_norm": 0.4800499379634857, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0421, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 3.9400206825232678, | |
| "grad_norm": 0.5513915419578552, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0352, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 3.950361944157187, | |
| "grad_norm": 0.46302342414855957, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0346, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 3.9607032057911065, | |
| "grad_norm": 0.4482763409614563, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0396, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 3.971044467425026, | |
| "grad_norm": 0.3062077760696411, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0375, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 3.981385729058945, | |
| "grad_norm": 0.41926732659339905, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0377, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 3.9917269906928645, | |
| "grad_norm": 0.45469629764556885, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.0327, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 4.002068252326784, | |
| "grad_norm": 0.3543528616428375, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.033, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 4.012409513960703, | |
| "grad_norm": 0.5153759121894836, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0298, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 4.022750775594623, | |
| "grad_norm": 0.5380675196647644, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.0363, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 4.033092037228542, | |
| "grad_norm": 0.3933749794960022, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.0381, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 4.043433298862461, | |
| "grad_norm": 0.6372244954109192, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.028, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 4.053774560496381, | |
| "grad_norm": 0.3350529670715332, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.028, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 4.0641158221303, | |
| "grad_norm": 0.4343299865722656, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0343, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 4.074457083764219, | |
| "grad_norm": 0.568745493888855, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0472, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 4.084798345398139, | |
| "grad_norm": 0.6434438228607178, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0345, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 4.095139607032058, | |
| "grad_norm": 0.4950769543647766, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0381, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 4.105480868665977, | |
| "grad_norm": 0.6425999402999878, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.0381, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 4.115822130299897, | |
| "grad_norm": 0.5493191480636597, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.0326, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 4.126163391933816, | |
| "grad_norm": 0.7481514811515808, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0345, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 4.1365046535677354, | |
| "grad_norm": 0.6899704933166504, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0341, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 4.146845915201655, | |
| "grad_norm": 0.5919772386550903, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0269, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 4.157187176835574, | |
| "grad_norm": 0.599338710308075, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0321, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 4.1675284384694935, | |
| "grad_norm": 0.4761026203632355, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.028, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 4.177869700103413, | |
| "grad_norm": 0.3328249156475067, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0278, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 4.188210961737332, | |
| "grad_norm": 0.7431259155273438, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0391, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 4.198552223371252, | |
| "grad_norm": 0.5777941346168518, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0372, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 4.208893485005171, | |
| "grad_norm": 0.6628890037536621, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0422, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 4.21923474663909, | |
| "grad_norm": 0.5190800428390503, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.0405, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 4.22957600827301, | |
| "grad_norm": 0.547655463218689, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0376, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 4.239917269906929, | |
| "grad_norm": 0.5152060985565186, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0312, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 4.250258531540848, | |
| "grad_norm": 0.5621763467788696, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0273, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 4.260599793174768, | |
| "grad_norm": 0.5856252312660217, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.0291, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 4.270941054808687, | |
| "grad_norm": 0.42214545607566833, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0341, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 4.281282316442606, | |
| "grad_norm": 0.4187379479408264, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0259, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 4.291623578076526, | |
| "grad_norm": 0.44406747817993164, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.032, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 4.301964839710445, | |
| "grad_norm": 0.492849200963974, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.0303, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 4.312306101344364, | |
| "grad_norm": 0.4632762670516968, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0275, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 4.322647362978284, | |
| "grad_norm": 0.6003650426864624, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.0329, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 4.332988624612202, | |
| "grad_norm": 0.5510930418968201, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0262, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 4.3433298862461225, | |
| "grad_norm": 0.44001391530036926, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.026, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 4.353671147880041, | |
| "grad_norm": 0.38990435004234314, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.0284, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 4.36401240951396, | |
| "grad_norm": 0.4497740864753723, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.0258, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 4.37435367114788, | |
| "grad_norm": 0.36997362971305847, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.0261, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 4.384694932781799, | |
| "grad_norm": 0.24545027315616608, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0245, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 4.395036194415718, | |
| "grad_norm": 0.8343655467033386, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0301, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 4.405377456049638, | |
| "grad_norm": 0.4763126075267792, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0271, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 4.415718717683557, | |
| "grad_norm": 0.34656819701194763, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0267, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 4.426059979317476, | |
| "grad_norm": 0.38647395372390747, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.0309, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 4.436401240951396, | |
| "grad_norm": 0.3593956530094147, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.0399, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 4.446742502585315, | |
| "grad_norm": 0.49549368023872375, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0303, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 4.4570837642192345, | |
| "grad_norm": 0.359908789396286, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.0246, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 4.467425025853154, | |
| "grad_norm": 0.5926442742347717, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.0332, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 4.477766287487073, | |
| "grad_norm": 0.30265843868255615, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0327, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 4.4881075491209925, | |
| "grad_norm": 0.22608672082424164, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0239, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 4.498448810754912, | |
| "grad_norm": 0.45136043429374695, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0266, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 4.508790072388831, | |
| "grad_norm": 0.459216833114624, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0279, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 4.519131334022751, | |
| "grad_norm": 0.34493857622146606, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0357, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 4.52947259565667, | |
| "grad_norm": 0.39789867401123047, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0337, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 4.539813857290589, | |
| "grad_norm": 0.32809141278266907, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0276, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 4.550155118924509, | |
| "grad_norm": 0.5596356391906738, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0298, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 4.560496380558428, | |
| "grad_norm": 0.4974158704280853, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0281, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 4.570837642192347, | |
| "grad_norm": 0.2964128255844116, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0282, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 4.581178903826267, | |
| "grad_norm": 0.40163174271583557, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.025, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 4.591520165460186, | |
| "grad_norm": 0.7338986992835999, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.0351, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 4.601861427094105, | |
| "grad_norm": 0.517850935459137, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.0305, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 4.612202688728025, | |
| "grad_norm": 0.5357605814933777, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.0339, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 4.622543950361944, | |
| "grad_norm": 0.41859233379364014, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.0348, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 4.6328852119958635, | |
| "grad_norm": 0.5915603041648865, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0354, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 4.643226473629783, | |
| "grad_norm": 0.7310323119163513, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.026, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 4.653567735263702, | |
| "grad_norm": 0.48020657896995544, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0394, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 4.6639089968976215, | |
| "grad_norm": 0.5009847283363342, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.03, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 4.674250258531541, | |
| "grad_norm": 0.5299360156059265, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0374, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 4.68459152016546, | |
| "grad_norm": 0.39377573132514954, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.0318, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 4.69493278179938, | |
| "grad_norm": 0.3663831949234009, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.0253, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 4.705274043433299, | |
| "grad_norm": 0.47811511158943176, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0328, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 4.715615305067218, | |
| "grad_norm": 0.4114089608192444, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0312, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 4.725956566701138, | |
| "grad_norm": 0.4716252088546753, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0296, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 4.736297828335057, | |
| "grad_norm": 0.5247455835342407, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.032, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 4.746639089968976, | |
| "grad_norm": 0.5990042686462402, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0286, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 4.756980351602896, | |
| "grad_norm": 0.6098853945732117, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0202, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 4.767321613236815, | |
| "grad_norm": 0.44338905811309814, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0427, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 4.777662874870734, | |
| "grad_norm": 0.36942416429519653, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0279, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 4.788004136504654, | |
| "grad_norm": 0.31302952766418457, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0286, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 4.798345398138573, | |
| "grad_norm": 0.6090943813323975, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0266, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 4.808686659772492, | |
| "grad_norm": 0.4752368927001953, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.0318, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 4.819027921406412, | |
| "grad_norm": 0.6850677132606506, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0343, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 4.829369183040331, | |
| "grad_norm": 0.5129321813583374, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0317, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 4.8397104446742505, | |
| "grad_norm": 0.4740126430988312, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0289, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 4.85005170630817, | |
| "grad_norm": 0.40469151735305786, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0298, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 4.860392967942089, | |
| "grad_norm": 0.3935494124889374, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0243, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 4.8707342295760085, | |
| "grad_norm": 0.6111148595809937, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0302, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 4.881075491209928, | |
| "grad_norm": 0.39912882447242737, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.0302, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 4.891416752843847, | |
| "grad_norm": 0.40417158603668213, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.0288, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 4.901758014477767, | |
| "grad_norm": 0.6526668071746826, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.0313, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 4.912099276111686, | |
| "grad_norm": 0.46881532669067383, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0273, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 4.922440537745605, | |
| "grad_norm": 0.4235084652900696, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0213, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 4.932781799379525, | |
| "grad_norm": 0.5397869348526001, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0305, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 4.943123061013444, | |
| "grad_norm": 0.8347296714782715, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0346, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 4.953464322647363, | |
| "grad_norm": 0.7255750894546509, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.0315, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 4.963805584281282, | |
| "grad_norm": 0.37905624508857727, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0309, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 4.974146845915202, | |
| "grad_norm": 0.595689594745636, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0256, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 4.9844881075491205, | |
| "grad_norm": 0.28635790944099426, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.03, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 4.994829369183041, | |
| "grad_norm": 0.34727078676223755, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.0276, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 5.005170630816959, | |
| "grad_norm": 0.32059282064437866, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0292, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 5.015511892450879, | |
| "grad_norm": 0.7704080939292908, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0231, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 5.025853154084798, | |
| "grad_norm": 0.7081512808799744, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0254, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 5.036194415718717, | |
| "grad_norm": 0.5465281009674072, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.03, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 5.046535677352637, | |
| "grad_norm": 0.4960368275642395, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0271, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 5.056876938986556, | |
| "grad_norm": 0.7808933854103088, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.0277, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 5.067218200620475, | |
| "grad_norm": 0.476457417011261, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.0316, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 5.077559462254395, | |
| "grad_norm": 0.5583111643791199, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.0243, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 5.087900723888314, | |
| "grad_norm": 0.37905189394950867, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0253, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 5.098241985522233, | |
| "grad_norm": 0.3141867518424988, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0293, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 5.108583247156153, | |
| "grad_norm": 0.6776558756828308, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0224, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 5.118924508790072, | |
| "grad_norm": 0.26149335503578186, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.0242, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 5.1292657704239915, | |
| "grad_norm": 0.6455309391021729, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0346, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 5.139607032057911, | |
| "grad_norm": 0.3990233838558197, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.027, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 5.14994829369183, | |
| "grad_norm": 0.6490347385406494, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.0262, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 5.1602895553257495, | |
| "grad_norm": 0.3322477340698242, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.0253, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 5.170630816959669, | |
| "grad_norm": 0.36100462079048157, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.0265, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 5.180972078593588, | |
| "grad_norm": 0.568352222442627, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0308, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 5.191313340227508, | |
| "grad_norm": 0.49155017733573914, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0264, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 5.201654601861427, | |
| "grad_norm": 0.46040263772010803, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0331, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 5.211995863495346, | |
| "grad_norm": 0.4238363206386566, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.0247, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 5.222337125129266, | |
| "grad_norm": 0.4879581332206726, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.0261, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 5.232678386763185, | |
| "grad_norm": 0.32399460673332214, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.0216, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 5.243019648397104, | |
| "grad_norm": 0.5650364756584167, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0291, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 5.253360910031024, | |
| "grad_norm": 0.45291629433631897, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.0288, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 5.263702171664943, | |
| "grad_norm": 0.392665296792984, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0278, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 5.274043433298862, | |
| "grad_norm": 0.43833160400390625, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.038, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 5.284384694932782, | |
| "grad_norm": 0.3306518793106079, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.0273, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 5.294725956566701, | |
| "grad_norm": 0.4742429852485657, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.0267, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 5.30506721820062, | |
| "grad_norm": 0.3381189703941345, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0241, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 5.31540847983454, | |
| "grad_norm": 0.33004701137542725, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0246, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 5.325749741468459, | |
| "grad_norm": 0.2710516154766083, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0288, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 5.3360910031023785, | |
| "grad_norm": 0.4145199656486511, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.021, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 5.346432264736298, | |
| "grad_norm": 0.49003541469573975, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0262, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 5.356773526370217, | |
| "grad_norm": 0.36205345392227173, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0308, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 5.3671147880041365, | |
| "grad_norm": 0.5112054944038391, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.0219, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 5.377456049638056, | |
| "grad_norm": 0.4349391758441925, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0269, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 5.387797311271975, | |
| "grad_norm": 0.3681429326534271, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.0236, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 5.398138572905895, | |
| "grad_norm": 0.41119542717933655, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0227, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 5.408479834539814, | |
| "grad_norm": 0.35644015669822693, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0277, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 5.418821096173733, | |
| "grad_norm": 0.26292476058006287, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.0233, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 5.429162357807653, | |
| "grad_norm": 0.3663982152938843, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.0196, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 5.439503619441572, | |
| "grad_norm": 0.3362174332141876, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.0208, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 5.449844881075491, | |
| "grad_norm": 0.307075172662735, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.0221, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 5.460186142709411, | |
| "grad_norm": 0.5596702098846436, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0239, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 5.47052740434333, | |
| "grad_norm": 0.5934080481529236, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0227, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 5.480868665977249, | |
| "grad_norm": 0.36830008029937744, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.0241, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 5.491209927611169, | |
| "grad_norm": 0.33537349104881287, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.0303, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 5.501551189245088, | |
| "grad_norm": 0.7705275416374207, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0305, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 5.5118924508790075, | |
| "grad_norm": 0.42248255014419556, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.0269, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 5.522233712512927, | |
| "grad_norm": 0.43332451581954956, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0315, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 5.532574974146846, | |
| "grad_norm": 0.2902929186820984, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0206, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 5.5429162357807655, | |
| "grad_norm": 0.39924174547195435, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0328, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 5.553257497414685, | |
| "grad_norm": 0.29035380482673645, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.021, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 5.563598759048604, | |
| "grad_norm": 0.3116118311882019, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0236, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 5.573940020682524, | |
| "grad_norm": 0.41957587003707886, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0287, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 5.584281282316443, | |
| "grad_norm": 0.5572388768196106, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0279, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 5.594622543950362, | |
| "grad_norm": 0.41090792417526245, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0282, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 5.604963805584282, | |
| "grad_norm": 0.27527108788490295, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0269, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 5.615305067218201, | |
| "grad_norm": 0.41373440623283386, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0278, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 5.62564632885212, | |
| "grad_norm": 0.2767966389656067, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0209, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 5.635987590486039, | |
| "grad_norm": 0.6138311624526978, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0285, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 5.646328852119959, | |
| "grad_norm": 0.48996567726135254, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0267, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 5.6566701137538775, | |
| "grad_norm": 0.41447046399116516, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.0243, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 5.667011375387798, | |
| "grad_norm": 0.7177706360816956, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.0265, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 5.677352637021716, | |
| "grad_norm": 0.4367203116416931, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0216, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 5.6876938986556365, | |
| "grad_norm": 0.4282858669757843, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.0187, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 5.698035160289555, | |
| "grad_norm": 0.4515775740146637, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.0261, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 5.708376421923474, | |
| "grad_norm": 0.5574241876602173, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0272, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 5.718717683557394, | |
| "grad_norm": 0.4142419397830963, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.0218, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 5.729058945191313, | |
| "grad_norm": 0.4656107425689697, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.0242, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 5.739400206825232, | |
| "grad_norm": 0.3958209156990051, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.0222, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 5.749741468459152, | |
| "grad_norm": 0.4042109251022339, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0275, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 5.760082730093071, | |
| "grad_norm": 0.2427556812763214, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.0222, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 5.77042399172699, | |
| "grad_norm": 0.5609144568443298, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0196, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 5.78076525336091, | |
| "grad_norm": 0.5411838293075562, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.0227, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 5.791106514994829, | |
| "grad_norm": 0.25017276406288147, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0238, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 5.801447776628748, | |
| "grad_norm": 0.3903113901615143, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0259, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 5.811789038262668, | |
| "grad_norm": 0.43156832456588745, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.026, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 5.822130299896587, | |
| "grad_norm": 0.37518760561943054, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0212, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 5.8324715615305065, | |
| "grad_norm": 0.27050355076789856, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0178, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 5.842812823164426, | |
| "grad_norm": 0.392545223236084, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.0272, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 5.853154084798345, | |
| "grad_norm": 0.6526646614074707, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.0254, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 5.8634953464322646, | |
| "grad_norm": 0.5840209126472473, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.021, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 5.873836608066184, | |
| "grad_norm": 0.462772399187088, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0314, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 5.884177869700103, | |
| "grad_norm": 0.28084126114845276, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.0217, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 5.894519131334023, | |
| "grad_norm": 0.41295117139816284, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.0254, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 5.904860392967942, | |
| "grad_norm": 0.46676045656204224, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.0211, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 5.915201654601861, | |
| "grad_norm": 0.4448712170124054, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0273, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 5.925542916235781, | |
| "grad_norm": 0.46349889039993286, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0225, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 5.9358841778697, | |
| "grad_norm": 0.3470129072666168, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0212, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 5.946225439503619, | |
| "grad_norm": 0.6921860575675964, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0245, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 5.956566701137539, | |
| "grad_norm": 0.388237863779068, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0198, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 5.966907962771458, | |
| "grad_norm": 0.3637796938419342, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0179, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 5.977249224405377, | |
| "grad_norm": 0.3763841986656189, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.0234, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 5.987590486039297, | |
| "grad_norm": 0.49042052030563354, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0209, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 5.997931747673216, | |
| "grad_norm": 0.4530317187309265, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.0229, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 6.0082730093071355, | |
| "grad_norm": 0.3438562750816345, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0266, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 6.018614270941055, | |
| "grad_norm": 0.5681377649307251, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0343, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 6.028955532574974, | |
| "grad_norm": 0.3786006271839142, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.0228, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 6.0392967942088935, | |
| "grad_norm": 0.3635367751121521, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.0242, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 6.049638055842813, | |
| "grad_norm": 0.47645261883735657, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0202, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 6.059979317476732, | |
| "grad_norm": 0.28816187381744385, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.0185, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 6.070320579110652, | |
| "grad_norm": 0.49700361490249634, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.018, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 6.080661840744571, | |
| "grad_norm": 0.3844532072544098, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.0214, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 6.09100310237849, | |
| "grad_norm": 0.2704000771045685, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.0206, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 6.10134436401241, | |
| "grad_norm": 0.45350486040115356, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.0165, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 6.111685625646329, | |
| "grad_norm": 0.44593918323516846, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.0262, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 6.122026887280248, | |
| "grad_norm": 0.40854206681251526, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.029, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 6.132368148914168, | |
| "grad_norm": 0.3487979769706726, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0203, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 6.142709410548087, | |
| "grad_norm": 0.4407898187637329, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0212, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 6.153050672182006, | |
| "grad_norm": 0.3672132194042206, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.0249, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 6.163391933815926, | |
| "grad_norm": 0.30679500102996826, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.0237, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 6.173733195449845, | |
| "grad_norm": 0.2706143260002136, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0178, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 6.1840744570837645, | |
| "grad_norm": 0.4161182641983032, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0187, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 6.194415718717684, | |
| "grad_norm": 0.3767077326774597, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.0202, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 6.204756980351603, | |
| "grad_norm": 0.5312715768814087, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0214, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 6.2150982419855225, | |
| "grad_norm": 0.4219953119754791, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0232, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 6.225439503619442, | |
| "grad_norm": 0.3066677451133728, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.0211, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 6.235780765253361, | |
| "grad_norm": 0.48293983936309814, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0265, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 6.246122026887281, | |
| "grad_norm": 0.2928076386451721, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.0265, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 6.2564632885212, | |
| "grad_norm": 0.3460075557231903, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.0194, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 6.266804550155119, | |
| "grad_norm": 0.39125117659568787, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0232, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 6.277145811789039, | |
| "grad_norm": 0.4593959450721741, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.02, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 6.287487073422958, | |
| "grad_norm": 0.19893543422222137, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.0173, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 6.297828335056877, | |
| "grad_norm": 0.25699928402900696, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0168, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 6.308169596690797, | |
| "grad_norm": 0.233521968126297, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0203, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 6.318510858324716, | |
| "grad_norm": 0.2538311183452606, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.0278, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 6.3288521199586345, | |
| "grad_norm": 0.38477128744125366, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.0206, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 6.339193381592555, | |
| "grad_norm": 0.4474034905433655, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0255, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 6.349534643226473, | |
| "grad_norm": 0.29829108715057373, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.025, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 6.359875904860393, | |
| "grad_norm": 0.35248029232025146, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0245, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 6.370217166494312, | |
| "grad_norm": 0.3922407329082489, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.0178, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 6.380558428128231, | |
| "grad_norm": 0.2970897853374481, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.024, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 6.390899689762151, | |
| "grad_norm": 0.5166146159172058, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0277, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 6.40124095139607, | |
| "grad_norm": 0.3991735875606537, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.0204, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 6.411582213029989, | |
| "grad_norm": 0.39830419421195984, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0248, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 6.421923474663909, | |
| "grad_norm": 0.23356054723262787, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0242, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 6.432264736297828, | |
| "grad_norm": 0.2731786370277405, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0271, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 6.442605997931747, | |
| "grad_norm": 0.32770147919654846, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0258, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 6.452947259565667, | |
| "grad_norm": 0.27517178654670715, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0202, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 6.463288521199586, | |
| "grad_norm": 0.3228018581867218, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0178, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 6.473629782833505, | |
| "grad_norm": 0.6733930706977844, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.027, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 6.483971044467425, | |
| "grad_norm": 0.4040469527244568, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0213, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 6.494312306101344, | |
| "grad_norm": 0.20306985080242157, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0242, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 6.5046535677352635, | |
| "grad_norm": 0.4461956024169922, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.0224, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 6.514994829369183, | |
| "grad_norm": 0.4035451114177704, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.0239, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 6.525336091003102, | |
| "grad_norm": 0.37136587500572205, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0184, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 6.5356773526370215, | |
| "grad_norm": 0.23377655446529388, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0191, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 6.546018614270941, | |
| "grad_norm": 0.4048984944820404, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.0269, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 6.55635987590486, | |
| "grad_norm": 0.2862883508205414, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0236, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 6.56670113753878, | |
| "grad_norm": 0.5136815905570984, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.0208, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 6.577042399172699, | |
| "grad_norm": 0.31841617822647095, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0181, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 6.587383660806618, | |
| "grad_norm": 0.27017977833747864, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.0171, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 6.597724922440538, | |
| "grad_norm": 0.5394287705421448, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0213, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 6.608066184074457, | |
| "grad_norm": 0.3285492956638336, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.03, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 6.618407445708376, | |
| "grad_norm": 0.30345842242240906, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0211, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 6.628748707342296, | |
| "grad_norm": 0.41494321823120117, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0243, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 6.639089968976215, | |
| "grad_norm": 0.1504533439874649, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.021, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 6.649431230610134, | |
| "grad_norm": 0.3772699534893036, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0219, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 6.659772492244054, | |
| "grad_norm": 0.338788241147995, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.0182, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 6.670113753877973, | |
| "grad_norm": 0.33966636657714844, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0195, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 6.6804550155118925, | |
| "grad_norm": 0.5159633159637451, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0248, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 6.690796277145812, | |
| "grad_norm": 0.2215542048215866, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.0142, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 6.701137538779731, | |
| "grad_norm": 0.3615117371082306, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.0191, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 6.7114788004136505, | |
| "grad_norm": 0.26616737246513367, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0163, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 6.72182006204757, | |
| "grad_norm": 0.5228382349014282, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.0187, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 6.732161323681489, | |
| "grad_norm": 0.3557437062263489, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0241, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 6.742502585315409, | |
| "grad_norm": 0.2981914281845093, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0271, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 6.752843846949328, | |
| "grad_norm": 0.31652310490608215, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.0146, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 6.763185108583247, | |
| "grad_norm": 0.5531163215637207, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.0235, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 6.773526370217167, | |
| "grad_norm": 0.532093346118927, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.0236, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 6.783867631851086, | |
| "grad_norm": 0.3278164863586426, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0206, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 6.794208893485005, | |
| "grad_norm": 0.4535083770751953, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.021, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 6.804550155118925, | |
| "grad_norm": 0.2904408574104309, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0202, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 6.814891416752844, | |
| "grad_norm": 0.32751718163490295, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0153, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 6.825232678386763, | |
| "grad_norm": 0.38492077589035034, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0159, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 6.835573940020683, | |
| "grad_norm": 0.20383960008621216, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.016, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 6.845915201654602, | |
| "grad_norm": 0.3615643084049225, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0189, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 6.8562564632885215, | |
| "grad_norm": 0.29443955421447754, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0191, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 6.866597724922441, | |
| "grad_norm": 0.34050431847572327, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.0209, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 6.87693898655636, | |
| "grad_norm": 0.46691590547561646, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0174, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 6.8872802481902795, | |
| "grad_norm": 0.3617284595966339, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.0174, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 6.897621509824199, | |
| "grad_norm": 0.4693650007247925, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0245, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 6.907962771458118, | |
| "grad_norm": 0.3834041953086853, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.0193, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 6.918304033092038, | |
| "grad_norm": 0.3388548493385315, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.0221, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 6.928645294725957, | |
| "grad_norm": 0.4713430106639862, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0189, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 6.938986556359876, | |
| "grad_norm": 0.33172714710235596, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0236, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 6.949327817993796, | |
| "grad_norm": 0.26537957787513733, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.0177, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 6.959669079627714, | |
| "grad_norm": 0.49504348635673523, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0237, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 6.970010341261634, | |
| "grad_norm": 0.3808205723762512, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0302, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 6.980351602895553, | |
| "grad_norm": 0.4882674813270569, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0208, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 6.990692864529473, | |
| "grad_norm": 0.34831565618515015, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.0187, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 7.0010341261633915, | |
| "grad_norm": 0.2984711229801178, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.0272, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 7.011375387797311, | |
| "grad_norm": 0.33718201518058777, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0165, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 7.02171664943123, | |
| "grad_norm": 0.2751483619213104, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0169, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 7.0320579110651495, | |
| "grad_norm": 0.29956188797950745, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0221, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 7.042399172699069, | |
| "grad_norm": 0.23008765280246735, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0237, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 7.052740434332988, | |
| "grad_norm": 0.6314752697944641, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0236, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 7.063081695966908, | |
| "grad_norm": 0.3773835301399231, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0166, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 7.073422957600827, | |
| "grad_norm": 0.3228418529033661, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.0225, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 7.083764219234746, | |
| "grad_norm": 0.3780131936073303, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.0166, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 7.094105480868666, | |
| "grad_norm": 0.4371061623096466, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.0209, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 7.104446742502585, | |
| "grad_norm": 0.33374977111816406, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.0136, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 7.114788004136504, | |
| "grad_norm": 0.4408278465270996, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0156, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 7.125129265770424, | |
| "grad_norm": 0.3536933958530426, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0262, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 7.135470527404343, | |
| "grad_norm": 0.2848654091358185, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0166, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 7.145811789038262, | |
| "grad_norm": 0.40110746026039124, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.0211, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 7.156153050672182, | |
| "grad_norm": 0.6171153783798218, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0241, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 7.166494312306101, | |
| "grad_norm": 0.350641131401062, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0182, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 7.1768355739400205, | |
| "grad_norm": 0.3263093829154968, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.0204, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 7.18717683557394, | |
| "grad_norm": 0.2667697072029114, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0157, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 7.197518097207859, | |
| "grad_norm": 0.20453166961669922, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.02, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 7.2078593588417785, | |
| "grad_norm": 0.3733457922935486, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0182, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 7.218200620475698, | |
| "grad_norm": 0.4627114236354828, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.0161, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 7.228541882109617, | |
| "grad_norm": 0.43927302956581116, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0261, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 7.238883143743537, | |
| "grad_norm": 0.530809223651886, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.0244, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 7.249224405377456, | |
| "grad_norm": 0.2127174437046051, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.0153, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 7.259565667011375, | |
| "grad_norm": 0.3921542763710022, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0214, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 7.269906928645295, | |
| "grad_norm": 0.2447759062051773, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0138, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 7.280248190279214, | |
| "grad_norm": 0.2889174222946167, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0196, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 7.290589451913133, | |
| "grad_norm": 0.25713327527046204, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0194, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 7.300930713547053, | |
| "grad_norm": 0.3660365045070648, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.0168, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 7.311271975180972, | |
| "grad_norm": 0.3480668365955353, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0203, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 7.321613236814891, | |
| "grad_norm": 0.38890811800956726, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.0196, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 7.331954498448811, | |
| "grad_norm": 0.19116535782814026, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0219, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 7.34229576008273, | |
| "grad_norm": 0.46444493532180786, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0199, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 7.3526370217166495, | |
| "grad_norm": 0.3371870815753937, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0193, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 7.362978283350569, | |
| "grad_norm": 0.2734653651714325, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0219, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 7.373319544984488, | |
| "grad_norm": 0.3000454604625702, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0198, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 7.3836608066184075, | |
| "grad_norm": 0.5115442276000977, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0202, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 7.394002068252327, | |
| "grad_norm": 0.3468710482120514, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.0192, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 7.404343329886246, | |
| "grad_norm": 0.3191637098789215, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0203, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 7.414684591520166, | |
| "grad_norm": 0.1676134616136551, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.024, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 7.425025853154085, | |
| "grad_norm": 0.23598401248455048, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0157, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 7.435367114788004, | |
| "grad_norm": 0.3338153660297394, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.0151, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 7.445708376421924, | |
| "grad_norm": 0.2540847063064575, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0173, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 7.456049638055843, | |
| "grad_norm": 0.2920980453491211, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0233, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 7.466390899689762, | |
| "grad_norm": 0.31009557843208313, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0147, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 7.476732161323682, | |
| "grad_norm": 0.25142157077789307, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0162, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 7.487073422957601, | |
| "grad_norm": 0.39813631772994995, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.0176, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 7.49741468459152, | |
| "grad_norm": 0.2764523923397064, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.012, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 7.50775594622544, | |
| "grad_norm": 0.37694424390792847, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0217, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 7.518097207859359, | |
| "grad_norm": 0.37361258268356323, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.0193, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 7.528438469493278, | |
| "grad_norm": 0.2000025063753128, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.0116, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 7.538779731127198, | |
| "grad_norm": 0.38702917098999023, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0179, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 7.549120992761117, | |
| "grad_norm": 0.21176272630691528, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0181, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 7.5594622543950365, | |
| "grad_norm": 0.20558993518352509, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0185, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 7.569803516028956, | |
| "grad_norm": 0.276991605758667, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.0155, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 7.580144777662875, | |
| "grad_norm": 0.5275682806968689, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.0174, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 7.5904860392967946, | |
| "grad_norm": 0.2282254695892334, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0184, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 7.600827300930714, | |
| "grad_norm": 0.28452035784721375, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.014, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 7.611168562564633, | |
| "grad_norm": 0.44985654950141907, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.0173, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 7.621509824198553, | |
| "grad_norm": 0.26188555359840393, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0233, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 7.631851085832471, | |
| "grad_norm": 0.2840867340564728, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.0182, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 7.642192347466391, | |
| "grad_norm": 0.23657751083374023, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0188, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 7.65253360910031, | |
| "grad_norm": 0.2065773755311966, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0157, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 7.66287487073423, | |
| "grad_norm": 0.4474356472492218, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0171, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 7.6732161323681485, | |
| "grad_norm": 0.16499949991703033, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.0241, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 7.683557394002069, | |
| "grad_norm": 0.2749648690223694, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.0197, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 7.693898655635987, | |
| "grad_norm": 0.4223771095275879, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0217, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 7.704239917269907, | |
| "grad_norm": 0.25864261388778687, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0131, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 7.714581178903826, | |
| "grad_norm": 0.2720250189304352, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0131, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 7.724922440537745, | |
| "grad_norm": 0.1645669937133789, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.0154, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 7.735263702171665, | |
| "grad_norm": 0.3111282289028168, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0194, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 7.745604963805584, | |
| "grad_norm": 0.4516369700431824, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0185, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 7.755946225439503, | |
| "grad_norm": 0.31883952021598816, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0141, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 7.766287487073423, | |
| "grad_norm": 0.27546900510787964, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0168, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 7.776628748707342, | |
| "grad_norm": 0.20381714403629303, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0138, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 7.786970010341261, | |
| "grad_norm": 0.3134346902370453, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0206, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 7.797311271975181, | |
| "grad_norm": 0.45453575253486633, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0266, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 7.8076525336091, | |
| "grad_norm": 0.3042468726634979, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.016, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 7.817993795243019, | |
| "grad_norm": 0.2079951912164688, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0165, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 7.828335056876939, | |
| "grad_norm": 0.23328404128551483, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0144, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 7.838676318510858, | |
| "grad_norm": 0.2313033491373062, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0134, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 7.8490175801447775, | |
| "grad_norm": 0.17795367538928986, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.014, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 7.859358841778697, | |
| "grad_norm": 0.29680335521698, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0238, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 7.869700103412616, | |
| "grad_norm": 0.31815585494041443, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0173, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 7.8800413650465355, | |
| "grad_norm": 0.16826115548610687, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.0132, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 7.890382626680455, | |
| "grad_norm": 0.38009387254714966, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0123, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 7.900723888314374, | |
| "grad_norm": 0.2718311548233032, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0236, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 7.911065149948294, | |
| "grad_norm": 0.32470130920410156, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0155, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 7.921406411582213, | |
| "grad_norm": 0.37667518854141235, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.0214, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 7.931747673216132, | |
| "grad_norm": 0.17286966741085052, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0085, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 7.942088934850052, | |
| "grad_norm": 0.571099579334259, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.0194, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 7.952430196483971, | |
| "grad_norm": 0.3398350477218628, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0181, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 7.96277145811789, | |
| "grad_norm": 0.16115669906139374, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.012, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 7.97311271975181, | |
| "grad_norm": 0.43018287420272827, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.015, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 7.983453981385729, | |
| "grad_norm": 0.3045998513698578, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.022, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 7.993795243019648, | |
| "grad_norm": 0.40354740619659424, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.0208, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 8.004136504653568, | |
| "grad_norm": 0.3087363839149475, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0123, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 8.014477766287486, | |
| "grad_norm": 0.2670136094093323, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.0159, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 8.024819027921406, | |
| "grad_norm": 0.27276137471199036, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.0166, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 8.035160289555325, | |
| "grad_norm": 0.29411280155181885, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0094, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 8.045501551189245, | |
| "grad_norm": 0.278071790933609, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0155, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 8.055842812823164, | |
| "grad_norm": 0.2220621258020401, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.0159, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 8.066184074457084, | |
| "grad_norm": 0.18182110786437988, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0204, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 8.076525336091002, | |
| "grad_norm": 0.1914730966091156, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0132, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 8.086866597724923, | |
| "grad_norm": 0.16777215898036957, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.0141, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 8.097207859358841, | |
| "grad_norm": 0.42128607630729675, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0174, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 8.107549120992761, | |
| "grad_norm": 0.3622506260871887, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.0175, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 8.11789038262668, | |
| "grad_norm": 0.39431047439575195, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.0135, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 8.1282316442606, | |
| "grad_norm": 0.230220764875412, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0147, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 8.138572905894518, | |
| "grad_norm": 0.41766777634620667, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0166, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 8.148914167528439, | |
| "grad_norm": 0.25175362825393677, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0159, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 8.159255429162357, | |
| "grad_norm": 0.2253105789422989, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0133, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 8.169596690796277, | |
| "grad_norm": 0.3982727527618408, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0206, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 8.179937952430196, | |
| "grad_norm": 0.20401015877723694, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.0157, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 8.190279214064116, | |
| "grad_norm": 0.30480557680130005, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.0124, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 8.200620475698035, | |
| "grad_norm": 0.282543808221817, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.0121, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 8.210961737331955, | |
| "grad_norm": 0.3908750116825104, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.0207, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 8.221302998965873, | |
| "grad_norm": 0.30012720823287964, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0192, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 8.231644260599793, | |
| "grad_norm": 0.29306560754776, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.0134, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 8.241985522233712, | |
| "grad_norm": 0.3094498813152313, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0144, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 8.252326783867632, | |
| "grad_norm": 0.4647205173969269, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0135, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 8.26266804550155, | |
| "grad_norm": 0.235570028424263, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0115, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 8.273009307135471, | |
| "grad_norm": 0.2871288061141968, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0167, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 8.28335056876939, | |
| "grad_norm": 0.1976521760225296, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0162, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 8.29369183040331, | |
| "grad_norm": 0.2932208478450775, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0132, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 8.304033092037228, | |
| "grad_norm": 0.3506277799606323, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0167, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 8.314374353671148, | |
| "grad_norm": 0.3956264555454254, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0148, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 8.324715615305067, | |
| "grad_norm": 0.21484267711639404, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.0135, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 8.335056876938987, | |
| "grad_norm": 0.29518088698387146, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0152, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 8.345398138572905, | |
| "grad_norm": 0.36268720030784607, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0201, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 8.355739400206826, | |
| "grad_norm": 0.3740086555480957, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0191, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 8.366080661840744, | |
| "grad_norm": 0.229885071516037, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.0154, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 8.376421923474664, | |
| "grad_norm": 0.25833815336227417, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0112, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 8.386763185108583, | |
| "grad_norm": 0.28120049834251404, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0189, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 8.397104446742503, | |
| "grad_norm": 0.2793098986148834, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.0136, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 8.407445708376422, | |
| "grad_norm": 0.22316910326480865, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.02, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 8.417786970010342, | |
| "grad_norm": 0.1618022322654724, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.0138, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 8.42812823164426, | |
| "grad_norm": 0.19442273676395416, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.0154, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 8.43846949327818, | |
| "grad_norm": 0.3358261287212372, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.0124, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 8.448810754912099, | |
| "grad_norm": 0.362191379070282, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.021, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 8.45915201654602, | |
| "grad_norm": 0.3233567178249359, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0119, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 8.469493278179938, | |
| "grad_norm": 0.27966511249542236, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0123, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 8.479834539813858, | |
| "grad_norm": 0.5476665496826172, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0185, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 8.490175801447776, | |
| "grad_norm": 0.34484347701072693, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0171, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 8.500517063081697, | |
| "grad_norm": 0.3474807143211365, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.019, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 8.510858324715615, | |
| "grad_norm": 0.21322867274284363, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.0148, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 8.521199586349535, | |
| "grad_norm": 0.340084433555603, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0165, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 8.531540847983454, | |
| "grad_norm": 0.18872112035751343, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.0156, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 8.541882109617374, | |
| "grad_norm": 0.3330586552619934, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.0184, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 8.552223371251293, | |
| "grad_norm": 0.15777526795864105, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0165, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 8.562564632885213, | |
| "grad_norm": 0.3571397662162781, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0157, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 8.572905894519131, | |
| "grad_norm": 0.38949018716812134, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0197, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 8.583247156153051, | |
| "grad_norm": 0.2196318358182907, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0167, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 8.59358841778697, | |
| "grad_norm": 0.15664514899253845, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.0083, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 8.60392967942089, | |
| "grad_norm": 0.38924410939216614, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.0118, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 8.614270941054809, | |
| "grad_norm": 0.2224714308977127, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.0178, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 8.624612202688729, | |
| "grad_norm": 0.21766221523284912, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0148, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 8.634953464322647, | |
| "grad_norm": 0.3073166608810425, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0187, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 8.645294725956568, | |
| "grad_norm": 0.34649544954299927, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0181, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 8.655635987590486, | |
| "grad_norm": 0.40810760855674744, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0202, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 8.665977249224404, | |
| "grad_norm": 0.4159800410270691, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0189, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 8.676318510858325, | |
| "grad_norm": 0.24623003602027893, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0168, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 8.686659772492245, | |
| "grad_norm": 0.4504314064979553, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.0143, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 8.697001034126163, | |
| "grad_norm": 0.16085493564605713, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0114, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 8.707342295760082, | |
| "grad_norm": 0.3509012758731842, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0141, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 8.717683557394002, | |
| "grad_norm": 0.2967391610145569, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.0127, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 8.72802481902792, | |
| "grad_norm": 0.1618923544883728, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0137, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 8.73836608066184, | |
| "grad_norm": 0.2387555092573166, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0118, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 8.74870734229576, | |
| "grad_norm": 0.1779862344264984, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.0112, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 8.75904860392968, | |
| "grad_norm": 0.3305777907371521, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.0118, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 8.769389865563598, | |
| "grad_norm": 0.32230043411254883, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.0151, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 8.779731127197518, | |
| "grad_norm": 0.4056425392627716, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.018, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 8.790072388831437, | |
| "grad_norm": 0.33739617466926575, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0157, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 8.800413650465357, | |
| "grad_norm": 0.34399762749671936, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.014, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 8.810754912099275, | |
| "grad_norm": 0.2914503514766693, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0211, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 8.821096173733196, | |
| "grad_norm": 0.18939660489559174, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.0115, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 8.831437435367114, | |
| "grad_norm": 0.32885876297950745, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0142, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 8.841778697001034, | |
| "grad_norm": 0.155549094080925, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0103, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 8.852119958634953, | |
| "grad_norm": 0.3471592664718628, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0141, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 8.862461220268873, | |
| "grad_norm": 0.16061702370643616, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0154, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 8.872802481902792, | |
| "grad_norm": 0.40517985820770264, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.013, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 8.883143743536712, | |
| "grad_norm": 0.5627370476722717, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0175, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 8.89348500517063, | |
| "grad_norm": 0.39706698060035706, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.0133, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 8.90382626680455, | |
| "grad_norm": 0.17194890975952148, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0124, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 8.914167528438469, | |
| "grad_norm": 0.30311140418052673, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.0121, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 8.92450879007239, | |
| "grad_norm": 0.25823259353637695, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0163, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 8.934850051706308, | |
| "grad_norm": 0.27010640501976013, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.0131, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 8.945191313340228, | |
| "grad_norm": 0.20116625726222992, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0086, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 8.955532574974146, | |
| "grad_norm": 0.3503723740577698, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.0189, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 8.965873836608067, | |
| "grad_norm": 0.17119552195072174, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.0079, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 8.976215098241985, | |
| "grad_norm": 0.18822835385799408, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.0141, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 8.986556359875905, | |
| "grad_norm": 0.2981252074241638, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.013, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 8.996897621509824, | |
| "grad_norm": 0.4735909402370453, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.0144, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 9.007238883143744, | |
| "grad_norm": 0.2632445991039276, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0147, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 9.017580144777662, | |
| "grad_norm": 0.2802174389362335, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.01, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 9.027921406411583, | |
| "grad_norm": 0.3493552505970001, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0153, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 9.038262668045501, | |
| "grad_norm": 0.15704280138015747, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0112, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 9.048603929679421, | |
| "grad_norm": 0.11671019345521927, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.0116, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 9.05894519131334, | |
| "grad_norm": 0.1330782175064087, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0118, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 9.06928645294726, | |
| "grad_norm": 0.21825750172138214, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.0163, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 9.079627714581179, | |
| "grad_norm": 0.23146793246269226, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.0136, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 9.089968976215099, | |
| "grad_norm": 0.4681452214717865, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.0205, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 9.100310237849017, | |
| "grad_norm": 0.28803685307502747, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0204, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 9.110651499482938, | |
| "grad_norm": 0.31491759419441223, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0224, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 9.120992761116856, | |
| "grad_norm": 0.16889670491218567, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.013, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 9.131334022750776, | |
| "grad_norm": 0.21065466105937958, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0113, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 9.141675284384695, | |
| "grad_norm": 0.12983869016170502, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.014, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 9.152016546018615, | |
| "grad_norm": 0.2970364987850189, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0177, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 9.162357807652533, | |
| "grad_norm": 0.23263870179653168, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0118, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 9.172699069286454, | |
| "grad_norm": 0.2912689447402954, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.014, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 9.183040330920372, | |
| "grad_norm": 0.2819783687591553, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.0146, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 9.193381592554292, | |
| "grad_norm": 0.18291838467121124, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0217, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 9.20372285418821, | |
| "grad_norm": 0.21960976719856262, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.0165, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 9.214064115822131, | |
| "grad_norm": 0.29368966817855835, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.01, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 9.22440537745605, | |
| "grad_norm": 0.271101713180542, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0172, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 9.23474663908997, | |
| "grad_norm": 0.21469582617282867, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0121, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 9.245087900723888, | |
| "grad_norm": 0.16965404152870178, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0125, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 9.255429162357808, | |
| "grad_norm": 0.3307068943977356, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0159, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 9.265770423991727, | |
| "grad_norm": 0.23451893031597137, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0129, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 9.276111685625647, | |
| "grad_norm": 0.379814088344574, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.0173, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 9.286452947259566, | |
| "grad_norm": 0.42687952518463135, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.0136, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 9.296794208893486, | |
| "grad_norm": 0.1909567266702652, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.0119, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 9.307135470527404, | |
| "grad_norm": 0.15361298620700836, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.014, | |
| "step": 9000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 11, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |