{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 11.097410604192355,
  "eval_steps": 500,
  "global_step": 9000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012330456226880395,
      "grad_norm": 10.717575073242188,
      "learning_rate": 1.8e-06,
      "loss": 0.9145,
      "step": 10
    },
    {
      "epoch": 0.02466091245376079,
      "grad_norm": 3.8855795860290527,
      "learning_rate": 3.8e-06,
      "loss": 0.725,
      "step": 20
    },
    {
      "epoch": 0.036991368680641186,
      "grad_norm": 3.5154707431793213,
      "learning_rate": 5.8e-06,
      "loss": 0.3639,
      "step": 30
    },
    {
      "epoch": 0.04932182490752158,
      "grad_norm": 1.4310674667358398,
      "learning_rate": 7.8e-06,
      "loss": 0.2567,
      "step": 40
    },
    {
      "epoch": 0.06165228113440197,
      "grad_norm": 1.068009853363037,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.1924,
      "step": 50
    },
    {
      "epoch": 0.07398273736128237,
      "grad_norm": 0.7420321106910706,
      "learning_rate": 1.18e-05,
      "loss": 0.1706,
      "step": 60
    },
    {
      "epoch": 0.08631319358816276,
      "grad_norm": 0.8617730140686035,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 0.1482,
      "step": 70
    },
    {
      "epoch": 0.09864364981504316,
      "grad_norm": 0.9111265540122986,
      "learning_rate": 1.58e-05,
      "loss": 0.1355,
      "step": 80
    },
    {
      "epoch": 0.11097410604192355,
      "grad_norm": 0.7781851291656494,
      "learning_rate": 1.78e-05,
      "loss": 0.1219,
      "step": 90
    },
    {
      "epoch": 0.12330456226880394,
      "grad_norm": 1.2688745260238647,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 0.1218,
      "step": 100
    },
    {
      "epoch": 0.13563501849568435,
      "grad_norm": 1.1680793762207031,
      "learning_rate": 2.18e-05,
      "loss": 0.1143,
      "step": 110
    },
    {
      "epoch": 0.14796547472256474,
      "grad_norm": 1.322534203529358,
      "learning_rate": 2.38e-05,
      "loss": 0.1009,
      "step": 120
    },
    {
      "epoch": 0.16029593094944514,
      "grad_norm": 1.7856727838516235,
      "learning_rate": 2.58e-05,
      "loss": 0.1082,
      "step": 130
    },
    {
      "epoch": 0.17262638717632553,
      "grad_norm": 0.9354536533355713,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 0.1007,
      "step": 140
    },
    {
      "epoch": 0.18495684340320592,
      "grad_norm": 1.1846843957901,
      "learning_rate": 2.98e-05,
      "loss": 0.0926,
      "step": 150
    },
    {
      "epoch": 0.19728729963008632,
      "grad_norm": 1.18549644947052,
      "learning_rate": 3.18e-05,
      "loss": 0.0978,
      "step": 160
    },
    {
      "epoch": 0.2096177558569667,
      "grad_norm": 1.2301164865493774,
      "learning_rate": 3.38e-05,
      "loss": 0.099,
      "step": 170
    },
    {
      "epoch": 0.2219482120838471,
      "grad_norm": 0.7240736484527588,
      "learning_rate": 3.58e-05,
      "loss": 0.0924,
      "step": 180
    },
    {
      "epoch": 0.2342786683107275,
      "grad_norm": 0.8248300552368164,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 0.0816,
      "step": 190
    },
    {
      "epoch": 0.2466091245376079,
      "grad_norm": 1.2205982208251953,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 0.0873,
      "step": 200
    },
    {
      "epoch": 0.2589395807644883,
      "grad_norm": 0.7609217762947083,
      "learning_rate": 4.18e-05,
      "loss": 0.0986,
      "step": 210
    },
    {
      "epoch": 0.2712700369913687,
      "grad_norm": 1.1541670560836792,
      "learning_rate": 4.38e-05,
      "loss": 0.0921,
      "step": 220
    },
    {
      "epoch": 0.2836004932182491,
      "grad_norm": 0.8996898531913757,
      "learning_rate": 4.58e-05,
      "loss": 0.0879,
      "step": 230
    },
    {
      "epoch": 0.2959309494451295,
      "grad_norm": 0.9619576930999756,
      "learning_rate": 4.78e-05,
      "loss": 0.083,
      "step": 240
    },
    {
      "epoch": 0.3082614056720099,
      "grad_norm": 0.8574262857437134,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 0.0888,
      "step": 250
    },
    {
      "epoch": 0.3205918618988903,
      "grad_norm": 1.0983144044876099,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 0.0802,
      "step": 260
    },
    {
      "epoch": 0.33292231812577067,
      "grad_norm": 0.8157097697257996,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.0874,
      "step": 270
    },
    {
      "epoch": 0.34525277435265106,
      "grad_norm": 0.6633105278015137,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.0775,
      "step": 280
    },
    {
      "epoch": 0.35758323057953145,
      "grad_norm": 1.0699334144592285,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.0845,
      "step": 290
    },
    {
      "epoch": 0.36991368680641185,
      "grad_norm": 0.612727701663971,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.0752,
      "step": 300
    },
    {
      "epoch": 0.38224414303329224,
      "grad_norm": 0.8121215105056763,
      "learning_rate": 6.18e-05,
      "loss": 0.0735,
      "step": 310
    },
    {
      "epoch": 0.39457459926017263,
      "grad_norm": 0.843544065952301,
      "learning_rate": 6.38e-05,
      "loss": 0.0699,
      "step": 320
    },
    {
      "epoch": 0.406905055487053,
      "grad_norm": 1.069931983947754,
      "learning_rate": 6.58e-05,
      "loss": 0.0754,
      "step": 330
    },
    {
      "epoch": 0.4192355117139334,
      "grad_norm": 1.0049164295196533,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.0766,
      "step": 340
    },
    {
      "epoch": 0.4315659679408138,
      "grad_norm": 1.38992440700531,
      "learning_rate": 6.98e-05,
      "loss": 0.0693,
      "step": 350
    },
    {
      "epoch": 0.4438964241676942,
      "grad_norm": 0.9545595049858093,
      "learning_rate": 7.18e-05,
      "loss": 0.0694,
      "step": 360
    },
    {
      "epoch": 0.4562268803945746,
      "grad_norm": 0.6656662821769714,
      "learning_rate": 7.38e-05,
      "loss": 0.0624,
      "step": 370
    },
    {
      "epoch": 0.468557336621455,
      "grad_norm": 0.5403919219970703,
      "learning_rate": 7.58e-05,
      "loss": 0.0628,
      "step": 380
    },
    {
      "epoch": 0.4808877928483354,
      "grad_norm": 0.5143225193023682,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.0582,
      "step": 390
    },
    {
      "epoch": 0.4932182490752158,
      "grad_norm": 0.959304928779602,
      "learning_rate": 7.98e-05,
      "loss": 0.0658,
      "step": 400
    },
    {
      "epoch": 0.5055487053020962,
      "grad_norm": 1.1352159976959229,
      "learning_rate": 8.18e-05,
      "loss": 0.0762,
      "step": 410
    },
    {
      "epoch": 0.5178791615289766,
      "grad_norm": 0.4174583852291107,
      "learning_rate": 8.38e-05,
      "loss": 0.0676,
      "step": 420
    },
    {
      "epoch": 0.530209617755857,
      "grad_norm": 0.6497669219970703,
      "learning_rate": 8.58e-05,
      "loss": 0.0666,
      "step": 430
    },
    {
      "epoch": 0.5425400739827374,
      "grad_norm": 0.8416606187820435,
      "learning_rate": 8.78e-05,
      "loss": 0.0606,
      "step": 440
    },
    {
      "epoch": 0.5548705302096177,
      "grad_norm": 0.9862777590751648,
      "learning_rate": 8.98e-05,
      "loss": 0.0585,
      "step": 450
    },
    {
      "epoch": 0.5672009864364982,
      "grad_norm": 0.9437181353569031,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.0672,
      "step": 460
    },
    {
      "epoch": 0.5795314426633785,
      "grad_norm": 0.7231281995773315,
      "learning_rate": 9.38e-05,
      "loss": 0.0672,
      "step": 470
    },
    {
      "epoch": 0.591861898890259,
      "grad_norm": 0.7037879228591919,
      "learning_rate": 9.58e-05,
      "loss": 0.0681,
      "step": 480
    },
    {
      "epoch": 0.6041923551171393,
      "grad_norm": 1.173652172088623,
      "learning_rate": 9.78e-05,
      "loss": 0.0617,
      "step": 490
    },
    {
      "epoch": 0.6165228113440198,
      "grad_norm": 0.6118177175521851,
      "learning_rate": 9.98e-05,
      "loss": 0.0582,
      "step": 500
    },
    {
      "epoch": 0.6288532675709001,
      "grad_norm": 0.6720106601715088,
      "learning_rate": 9.9999778549206e-05,
      "loss": 0.0639,
      "step": 510
    },
    {
      "epoch": 0.6411837237977805,
      "grad_norm": 0.5622101426124573,
      "learning_rate": 9.999901304280685e-05,
      "loss": 0.0602,
      "step": 520
    },
    {
      "epoch": 0.6535141800246609,
      "grad_norm": 0.4737834930419922,
      "learning_rate": 9.999770075521164e-05,
      "loss": 0.0617,
      "step": 530
    },
    {
      "epoch": 0.6658446362515413,
      "grad_norm": 0.5074479579925537,
      "learning_rate": 9.99958417007713e-05,
      "loss": 0.0526,
      "step": 540
    },
    {
      "epoch": 0.6781750924784217,
      "grad_norm": 0.4941859245300293,
      "learning_rate": 9.999343589981615e-05,
      "loss": 0.0566,
      "step": 550
    },
    {
      "epoch": 0.6905055487053021,
      "grad_norm": 0.5586684346199036,
      "learning_rate": 9.999048337865568e-05,
      "loss": 0.0587,
      "step": 560
    },
    {
      "epoch": 0.7028360049321825,
      "grad_norm": 0.4972110390663147,
      "learning_rate": 9.998698416957815e-05,
      "loss": 0.0518,
      "step": 570
    },
    {
      "epoch": 0.7151664611590629,
      "grad_norm": 0.47018104791641235,
      "learning_rate": 9.998293831085037e-05,
      "loss": 0.0533,
      "step": 580
    },
    {
      "epoch": 0.7274969173859432,
      "grad_norm": 0.7728797197341919,
      "learning_rate": 9.997834584671719e-05,
      "loss": 0.0617,
      "step": 590
    },
    {
      "epoch": 0.7398273736128237,
      "grad_norm": 0.636345624923706,
      "learning_rate": 9.997320682740107e-05,
      "loss": 0.056,
      "step": 600
    },
    {
      "epoch": 0.752157829839704,
      "grad_norm": 0.6593806147575378,
      "learning_rate": 9.996752130910149e-05,
      "loss": 0.0617,
      "step": 610
    },
    {
      "epoch": 0.7644882860665845,
      "grad_norm": 0.6342796087265015,
      "learning_rate": 9.99612893539944e-05,
      "loss": 0.0536,
      "step": 620
    },
    {
      "epoch": 0.7768187422934648,
      "grad_norm": 0.21732759475708008,
      "learning_rate": 9.995451103023144e-05,
      "loss": 0.0471,
      "step": 630
    },
    {
      "epoch": 0.7891491985203453,
      "grad_norm": 0.653475284576416,
      "learning_rate": 9.994718641193928e-05,
      "loss": 0.0532,
      "step": 640
    },
    {
      "epoch": 0.8014796547472256,
      "grad_norm": 0.6683667898178101,
      "learning_rate": 9.993931557921874e-05,
      "loss": 0.0553,
      "step": 650
    },
    {
      "epoch": 0.813810110974106,
      "grad_norm": 0.6435586810112,
      "learning_rate": 9.993089861814402e-05,
      "loss": 0.0558,
      "step": 660
    },
    {
      "epoch": 0.8261405672009864,
      "grad_norm": 0.7211766242980957,
      "learning_rate": 9.992193562076166e-05,
      "loss": 0.0636,
      "step": 670
    },
    {
      "epoch": 0.8384710234278668,
      "grad_norm": 0.5196639895439148,
      "learning_rate": 9.991242668508954e-05,
      "loss": 0.0605,
      "step": 680
    },
    {
      "epoch": 0.8508014796547472,
      "grad_norm": 0.9773313999176025,
      "learning_rate": 9.990237191511587e-05,
      "loss": 0.0543,
      "step": 690
    },
    {
      "epoch": 0.8631319358816276,
      "grad_norm": 0.8289105296134949,
      "learning_rate": 9.989177142079802e-05,
      "loss": 0.0612,
      "step": 700
    },
    {
      "epoch": 0.8754623921085081,
      "grad_norm": 0.7855521440505981,
      "learning_rate": 9.988062531806126e-05,
      "loss": 0.0609,
      "step": 710
    },
    {
      "epoch": 0.8877928483353884,
      "grad_norm": 0.7073290944099426,
      "learning_rate": 9.986893372879762e-05,
      "loss": 0.0559,
      "step": 720
    },
    {
      "epoch": 0.9001233045622689,
      "grad_norm": 0.3936100900173187,
      "learning_rate": 9.985669678086443e-05,
      "loss": 0.0482,
      "step": 730
    },
    {
      "epoch": 0.9124537607891492,
      "grad_norm": 0.5569445490837097,
      "learning_rate": 9.984391460808298e-05,
      "loss": 0.0439,
      "step": 740
    },
    {
      "epoch": 0.9247842170160296,
      "grad_norm": 0.3866192698478699,
      "learning_rate": 9.983058735023709e-05,
      "loss": 0.0466,
      "step": 750
    },
    {
      "epoch": 0.93711467324291,
      "grad_norm": 0.5356541275978088,
      "learning_rate": 9.98167151530715e-05,
      "loss": 0.0457,
      "step": 760
    },
    {
      "epoch": 0.9494451294697904,
      "grad_norm": 0.4026905596256256,
      "learning_rate": 9.980229816829034e-05,
      "loss": 0.0499,
      "step": 770
    },
    {
      "epoch": 0.9617755856966708,
      "grad_norm": 0.5838866829872131,
      "learning_rate": 9.978733655355544e-05,
      "loss": 0.0438,
      "step": 780
    },
    {
      "epoch": 0.9741060419235512,
      "grad_norm": 0.7922773957252502,
      "learning_rate": 9.977183047248464e-05,
      "loss": 0.0451,
      "step": 790
    },
    {
      "epoch": 0.9864364981504316,
      "grad_norm": 0.3669157326221466,
      "learning_rate": 9.975578009464992e-05,
      "loss": 0.0468,
      "step": 800
    },
    {
      "epoch": 0.998766954377312,
      "grad_norm": 0.7431704998016357,
      "learning_rate": 9.97391855955757e-05,
      "loss": 0.0498,
      "step": 810
    },
    {
      "epoch": 1.0110974106041923,
      "grad_norm": 0.5643433928489685,
      "learning_rate": 9.972204715673669e-05,
      "loss": 0.0458,
      "step": 820
    },
    {
      "epoch": 1.0234278668310728,
      "grad_norm": 0.5226078033447266,
      "learning_rate": 9.970436496555617e-05,
      "loss": 0.0481,
      "step": 830
    },
    {
      "epoch": 1.0357583230579532,
      "grad_norm": 0.5678564310073853,
      "learning_rate": 9.968613921540373e-05,
      "loss": 0.0443,
      "step": 840
    },
    {
      "epoch": 1.0480887792848335,
      "grad_norm": 0.43138501048088074,
      "learning_rate": 9.966737010559326e-05,
      "loss": 0.0543,
      "step": 850
    },
    {
      "epoch": 1.060419235511714,
      "grad_norm": 0.40397635102272034,
      "learning_rate": 9.964805784138072e-05,
      "loss": 0.0472,
      "step": 860
    },
    {
      "epoch": 1.0727496917385944,
      "grad_norm": 0.6934443116188049,
      "learning_rate": 9.962820263396195e-05,
      "loss": 0.0525,
      "step": 870
    },
    {
      "epoch": 1.0850801479654748,
      "grad_norm": 0.4818338453769684,
      "learning_rate": 9.960780470047033e-05,
      "loss": 0.044,
      "step": 880
    },
    {
      "epoch": 1.097410604192355,
      "grad_norm": 0.5821869373321533,
      "learning_rate": 9.958686426397437e-05,
      "loss": 0.0449,
      "step": 890
    },
    {
      "epoch": 1.1097410604192355,
      "grad_norm": 0.46832275390625,
      "learning_rate": 9.956538155347534e-05,
      "loss": 0.0504,
      "step": 900
    },
    {
      "epoch": 1.122071516646116,
      "grad_norm": 0.44488951563835144,
      "learning_rate": 9.95433568039047e-05,
      "loss": 0.0407,
      "step": 910
    },
    {
      "epoch": 1.1344019728729964,
      "grad_norm": 0.445198655128479,
      "learning_rate": 9.952079025612162e-05,
      "loss": 0.0424,
      "step": 920
    },
    {
      "epoch": 1.1467324290998766,
      "grad_norm": 0.39317309856414795,
      "learning_rate": 9.949768215691022e-05,
      "loss": 0.0416,
      "step": 930
    },
    {
      "epoch": 1.159062885326757,
      "grad_norm": 0.43172624707221985,
      "learning_rate": 9.9474032758977e-05,
      "loss": 0.0428,
      "step": 940
    },
    {
      "epoch": 1.1713933415536375,
      "grad_norm": 0.3563348352909088,
      "learning_rate": 9.944984232094794e-05,
      "loss": 0.0464,
      "step": 950
    },
    {
      "epoch": 1.183723797780518,
      "grad_norm": 0.47464248538017273,
      "learning_rate": 9.942511110736584e-05,
      "loss": 0.0388,
      "step": 960
    },
    {
      "epoch": 1.1960542540073984,
      "grad_norm": 0.33187857270240784,
      "learning_rate": 9.939983938868726e-05,
      "loss": 0.0404,
      "step": 970
    },
    {
      "epoch": 1.2083847102342786,
      "grad_norm": 0.30818915367126465,
      "learning_rate": 9.93740274412797e-05,
      "loss": 0.0418,
      "step": 980
    },
    {
      "epoch": 1.220715166461159,
      "grad_norm": 0.43571925163269043,
      "learning_rate": 9.934767554741846e-05,
      "loss": 0.0534,
      "step": 990
    },
    {
      "epoch": 1.2330456226880395,
      "grad_norm": 0.5636769533157349,
      "learning_rate": 9.932078399528361e-05,
      "loss": 0.0477,
      "step": 1000
    },
    {
      "epoch": 1.2453760789149197,
      "grad_norm": 0.5454676151275635,
      "learning_rate": 9.929335307895689e-05,
      "loss": 0.0451,
      "step": 1010
    },
    {
      "epoch": 1.2577065351418002,
      "grad_norm": 0.512248694896698,
      "learning_rate": 9.926538309841839e-05,
      "loss": 0.0479,
      "step": 1020
    },
    {
      "epoch": 1.2700369913686806,
      "grad_norm": 0.6990445256233215,
      "learning_rate": 9.923687435954334e-05,
      "loss": 0.0408,
      "step": 1030
    },
    {
      "epoch": 1.282367447595561,
      "grad_norm": 0.38518843054771423,
      "learning_rate": 9.920782717409873e-05,
      "loss": 0.0422,
      "step": 1040
    },
    {
      "epoch": 1.2946979038224415,
      "grad_norm": 0.4438116252422333,
      "learning_rate": 9.917824185973994e-05,
      "loss": 0.0409,
      "step": 1050
    },
    {
      "epoch": 1.3070283600493218,
      "grad_norm": 0.6028305888175964,
      "learning_rate": 9.914811874000723e-05,
      "loss": 0.0475,
      "step": 1060
    },
    {
      "epoch": 1.3193588162762022,
      "grad_norm": 0.4458123743534088,
      "learning_rate": 9.911745814432218e-05,
      "loss": 0.0445,
      "step": 1070
    },
    {
      "epoch": 1.3316892725030827,
      "grad_norm": 0.36406946182250977,
      "learning_rate": 9.90862604079842e-05,
      "loss": 0.0471,
      "step": 1080
    },
    {
      "epoch": 1.344019728729963,
      "grad_norm": 0.3557729423046112,
      "learning_rate": 9.90545258721667e-05,
      "loss": 0.0351,
      "step": 1090
    },
    {
      "epoch": 1.3563501849568433,
      "grad_norm": 0.5295067429542542,
      "learning_rate": 9.90222548839135e-05,
      "loss": 0.04,
      "step": 1100
    },
    {
      "epoch": 1.3686806411837238,
      "grad_norm": 0.5780554413795471,
      "learning_rate": 9.898944779613495e-05,
      "loss": 0.0468,
      "step": 1110
    },
    {
      "epoch": 1.3810110974106042,
      "grad_norm": 0.8454430103302002,
      "learning_rate": 9.89561049676041e-05,
      "loss": 0.0427,
      "step": 1120
    },
    {
      "epoch": 1.3933415536374847,
      "grad_norm": 0.42524266242980957,
      "learning_rate": 9.89222267629528e-05,
      "loss": 0.0503,
      "step": 1130
    },
    {
      "epoch": 1.405672009864365,
      "grad_norm": 0.45193225145339966,
      "learning_rate": 9.888781355266763e-05,
      "loss": 0.0483,
      "step": 1140
    },
    {
      "epoch": 1.4180024660912454,
      "grad_norm": 0.6405168175697327,
      "learning_rate": 9.885286571308598e-05,
      "loss": 0.0485,
      "step": 1150
    },
    {
      "epoch": 1.4303329223181258,
      "grad_norm": 0.5809157490730286,
      "learning_rate": 9.881738362639182e-05,
      "loss": 0.0432,
      "step": 1160
    },
    {
      "epoch": 1.442663378545006,
      "grad_norm": 0.6145975589752197,
      "learning_rate": 9.878136768061154e-05,
      "loss": 0.0425,
      "step": 1170
    },
    {
      "epoch": 1.4549938347718865,
      "grad_norm": 0.4401192367076874,
      "learning_rate": 9.874481826960979e-05,
      "loss": 0.042,
      "step": 1180
    },
    {
      "epoch": 1.467324290998767,
      "grad_norm": 0.42817187309265137,
      "learning_rate": 9.870773579308503e-05,
      "loss": 0.044,
      "step": 1190
    },
    {
      "epoch": 1.4796547472256474,
      "grad_norm": 0.42052963376045227,
      "learning_rate": 9.867012065656533e-05,
      "loss": 0.0475,
      "step": 1200
    },
    {
      "epoch": 1.4919852034525278,
      "grad_norm": 0.5015923976898193,
      "learning_rate": 9.863197327140376e-05,
      "loss": 0.0412,
      "step": 1210
    },
    {
      "epoch": 1.504315659679408,
      "grad_norm": 0.6461988091468811,
      "learning_rate": 9.859329405477403e-05,
      "loss": 0.0393,
      "step": 1220
    },
    {
      "epoch": 1.5166461159062885,
      "grad_norm": 0.48356345295906067,
      "learning_rate": 9.855408342966585e-05,
      "loss": 0.047,
      "step": 1230
    },
    {
      "epoch": 1.528976572133169,
      "grad_norm": 0.4189961552619934,
      "learning_rate": 9.851434182488033e-05,
      "loss": 0.0399,
      "step": 1240
    },
    {
      "epoch": 1.5413070283600492,
      "grad_norm": 0.36472582817077637,
      "learning_rate": 9.84740696750253e-05,
      "loss": 0.0462,
      "step": 1250
    },
    {
      "epoch": 1.5536374845869299,
      "grad_norm": 0.3705209493637085,
      "learning_rate": 9.843326742051055e-05,
      "loss": 0.0386,
      "step": 1260
    },
    {
      "epoch": 1.56596794081381,
      "grad_norm": 0.5778259038925171,
      "learning_rate": 9.839193550754297e-05,
      "loss": 0.0436,
      "step": 1270
    },
    {
      "epoch": 1.5782983970406905,
      "grad_norm": 0.4346117377281189,
      "learning_rate": 9.835007438812177e-05,
      "loss": 0.0442,
      "step": 1280
    },
    {
      "epoch": 1.590628853267571,
      "grad_norm": 0.6357501149177551,
      "learning_rate": 9.830768452003341e-05,
      "loss": 0.0471,
      "step": 1290
    },
    {
      "epoch": 1.6029593094944512,
      "grad_norm": 0.6236826777458191,
      "learning_rate": 9.826476636684671e-05,
      "loss": 0.043,
      "step": 1300
    },
    {
      "epoch": 1.6152897657213316,
      "grad_norm": 0.5809472799301147,
      "learning_rate": 9.822132039790773e-05,
      "loss": 0.0428,
      "step": 1310
    },
    {
      "epoch": 1.627620221948212,
      "grad_norm": 0.34088054299354553,
      "learning_rate": 9.817734708833461e-05,
      "loss": 0.0437,
      "step": 1320
    },
    {
      "epoch": 1.6399506781750923,
      "grad_norm": 0.6971454620361328,
      "learning_rate": 9.813284691901243e-05,
      "loss": 0.0415,
      "step": 1330
    },
    {
      "epoch": 1.652281134401973,
      "grad_norm": 0.41413041949272156,
      "learning_rate": 9.808782037658792e-05,
      "loss": 0.0397,
      "step": 1340
    },
    {
      "epoch": 1.6646115906288532,
      "grad_norm": 0.4379029870033264,
      "learning_rate": 9.804226795346411e-05,
      "loss": 0.0378,
      "step": 1350
    },
    {
      "epoch": 1.6769420468557337,
      "grad_norm": 0.40448686480522156,
      "learning_rate": 9.799619014779503e-05,
      "loss": 0.0349,
      "step": 1360
    },
    {
      "epoch": 1.6892725030826141,
      "grad_norm": 0.3020240068435669,
      "learning_rate": 9.794958746348013e-05,
      "loss": 0.0319,
      "step": 1370
    },
    {
      "epoch": 1.7016029593094943,
      "grad_norm": 0.30792954564094543,
      "learning_rate": 9.790246041015896e-05,
      "loss": 0.0389,
      "step": 1380
    },
    {
      "epoch": 1.7139334155363748,
      "grad_norm": 0.49618154764175415,
      "learning_rate": 9.785480950320538e-05,
      "loss": 0.0365,
      "step": 1390
    },
    {
      "epoch": 1.7262638717632552,
      "grad_norm": 0.5307965278625488,
      "learning_rate": 9.78066352637221e-05,
      "loss": 0.0406,
      "step": 1400
    },
    {
      "epoch": 1.7385943279901355,
      "grad_norm": 0.3284684121608734,
      "learning_rate": 9.775793821853488e-05,
      "loss": 0.0369,
      "step": 1410
    },
    {
      "epoch": 1.7509247842170161,
      "grad_norm": 0.47114741802215576,
      "learning_rate": 9.77087189001868e-05,
      "loss": 0.0423,
      "step": 1420
    },
    {
      "epoch": 1.7632552404438964,
      "grad_norm": 0.3520047068595886,
      "learning_rate": 9.765897784693243e-05,
      "loss": 0.0367,
      "step": 1430
    },
    {
      "epoch": 1.7755856966707768,
      "grad_norm": 0.21034875512123108,
      "learning_rate": 9.760871560273197e-05,
      "loss": 0.0402,
      "step": 1440
    },
    {
      "epoch": 1.7879161528976573,
      "grad_norm": 0.41852134466171265,
      "learning_rate": 9.755793271724526e-05,
      "loss": 0.0393,
      "step": 1450
    },
    {
      "epoch": 1.8002466091245375,
      "grad_norm": 0.44161736965179443,
      "learning_rate": 9.750662974582584e-05,
      "loss": 0.0388,
      "step": 1460
    },
    {
      "epoch": 1.8125770653514182,
      "grad_norm": 0.40450319647789,
      "learning_rate": 9.745480724951473e-05,
      "loss": 0.0325,
      "step": 1470
    },
    {
      "epoch": 1.8249075215782984,
      "grad_norm": 0.41689491271972656,
      "learning_rate": 9.740246579503447e-05,
      "loss": 0.0403,
      "step": 1480
    },
    {
      "epoch": 1.8372379778051788,
      "grad_norm": 0.4946388006210327,
      "learning_rate": 9.734960595478284e-05,
      "loss": 0.0356,
      "step": 1490
    },
    {
      "epoch": 1.8495684340320593,
      "grad_norm": 0.4605123996734619,
      "learning_rate": 9.729622830682657e-05,
      "loss": 0.041,
      "step": 1500
    },
    {
      "epoch": 1.8618988902589395,
      "grad_norm": 0.2958815097808838,
      "learning_rate": 9.724233343489504e-05,
      "loss": 0.0441,
      "step": 1510
    },
    {
      "epoch": 1.87422934648582,
      "grad_norm": 0.35005122423171997,
      "learning_rate": 9.718792192837396e-05,
      "loss": 0.0392,
      "step": 1520
    },
    {
      "epoch": 1.8865598027127004,
      "grad_norm": 0.3829946219921112,
      "learning_rate": 9.713299438229886e-05,
      "loss": 0.0412,
      "step": 1530
    },
    {
      "epoch": 1.8988902589395806,
      "grad_norm": 0.41099175810813904,
      "learning_rate": 9.707755139734855e-05,
      "loss": 0.04,
      "step": 1540
    },
    {
      "epoch": 1.9112207151664613,
      "grad_norm": 0.5977879762649536,
      "learning_rate": 9.702159357983866e-05,
      "loss": 0.0409,
      "step": 1550
    },
    {
      "epoch": 1.9235511713933415,
      "grad_norm": 0.4658485949039459,
      "learning_rate": 9.696512154171492e-05,
      "loss": 0.0404,
      "step": 1560
    },
    {
      "epoch": 1.935881627620222,
      "grad_norm": 0.44003263115882874,
      "learning_rate": 9.690813590054645e-05,
      "loss": 0.0382,
      "step": 1570
    },
    {
      "epoch": 1.9482120838471024,
      "grad_norm": 0.47026577591896057,
      "learning_rate": 9.685063727951914e-05,
      "loss": 0.0408,
      "step": 1580
    },
    {
      "epoch": 1.9605425400739827,
      "grad_norm": 0.5023799538612366,
      "learning_rate": 9.679262630742865e-05,
      "loss": 0.0394,
      "step": 1590
    },
    {
      "epoch": 1.972872996300863,
      "grad_norm": 0.4780862331390381,
      "learning_rate": 9.673410361867373e-05,
      "loss": 0.0358,
      "step": 1600
    },
    {
      "epoch": 1.9852034525277436,
      "grad_norm": 0.5214049816131592,
      "learning_rate": 9.667506985324909e-05,
      "loss": 0.0373,
      "step": 1610
    },
    {
      "epoch": 1.9975339087546238,
      "grad_norm": 0.2676173448562622,
      "learning_rate": 9.661552565673855e-05,
      "loss": 0.0329,
      "step": 1620
    },
    {
      "epoch": 2.0098643649815044,
      "grad_norm": 0.5386433005332947,
      "learning_rate": 9.655547168030789e-05,
      "loss": 0.0317,
      "step": 1630
    },
    {
      "epoch": 2.0221948212083847,
      "grad_norm": 0.3479117751121521,
      "learning_rate": 9.649490858069777e-05,
      "loss": 0.0337,
      "step": 1640
    },
    {
      "epoch": 2.034525277435265,
      "grad_norm": 0.3379543423652649,
      "learning_rate": 9.643383702021658e-05,
      "loss": 0.0389,
      "step": 1650
    },
    {
      "epoch": 2.0468557336621456,
      "grad_norm": 0.5163589715957642,
      "learning_rate": 9.637225766673307e-05,
      "loss": 0.0356,
      "step": 1660
    },
    {
      "epoch": 2.059186189889026,
      "grad_norm": 0.2318829447031021,
      "learning_rate": 9.631017119366922e-05,
      "loss": 0.034,
      "step": 1670
    },
    {
      "epoch": 2.0715166461159065,
      "grad_norm": 0.3423265814781189,
      "learning_rate": 9.624757827999273e-05,
      "loss": 0.0366,
      "step": 1680
    },
    {
      "epoch": 2.0838471023427867,
      "grad_norm": 0.3213823437690735,
      "learning_rate": 9.618447961020971e-05,
      "loss": 0.0383,
      "step": 1690
    },
    {
      "epoch": 2.096177558569667,
      "grad_norm": 0.24304090440273285,
      "learning_rate": 9.612087587435707e-05,
      "loss": 0.0341,
      "step": 1700
    },
    {
      "epoch": 2.1085080147965476,
      "grad_norm": 0.4713955819606781,
      "learning_rate": 9.605676776799508e-05,
      "loss": 0.0362,
      "step": 1710
    },
    {
      "epoch": 2.120838471023428,
      "grad_norm": 0.4015974998474121,
      "learning_rate": 9.599215599219973e-05,
      "loss": 0.033,
      "step": 1720
    },
    {
      "epoch": 2.133168927250308,
      "grad_norm": 0.46206021308898926,
      "learning_rate": 9.592704125355505e-05,
      "loss": 0.0379,
      "step": 1730
    },
    {
      "epoch": 2.1454993834771887,
      "grad_norm": 0.7064021825790405,
      "learning_rate": 9.586142426414538e-05,
      "loss": 0.0367,
      "step": 1740
    },
    {
      "epoch": 2.157829839704069,
      "grad_norm": 0.2710956335067749,
      "learning_rate": 9.57953057415476e-05,
      "loss": 0.0382,
      "step": 1750
    },
    {
      "epoch": 2.1701602959309496,
      "grad_norm": 0.3196651339530945,
      "learning_rate": 9.572868640882328e-05,
      "loss": 0.0357,
      "step": 1760
    },
    {
      "epoch": 2.18249075215783,
      "grad_norm": 0.29166680574417114,
      "learning_rate": 9.56615669945108e-05,
      "loss": 0.0336,
      "step": 1770
    },
    {
      "epoch": 2.19482120838471,
      "grad_norm": 0.41343066096305847,
      "learning_rate": 9.55939482326173e-05,
      "loss": 0.0354,
      "step": 1780
    },
    {
      "epoch": 2.2071516646115907,
      "grad_norm": 0.4520774185657501,
      "learning_rate": 9.552583086261069e-05,
      "loss": 0.0405,
      "step": 1790
    },
    {
      "epoch": 2.219482120838471,
      "grad_norm": 0.49877840280532837,
      "learning_rate": 9.545721562941168e-05,
      "loss": 0.0406,
      "step": 1800
    },
    {
      "epoch": 2.2318125770653516,
      "grad_norm": 0.3894934356212616,
      "learning_rate": 9.538810328338543e-05,
      "loss": 0.037,
      "step": 1810
    },
    {
      "epoch": 2.244143033292232,
      "grad_norm": 0.24778224527835846,
      "learning_rate": 9.531849458033349e-05,
      "loss": 0.0344,
      "step": 1820
    },
    {
      "epoch": 2.256473489519112,
      "grad_norm": 0.38821402192115784,
      "learning_rate": 9.524839028148547e-05,
      "loss": 0.0351,
      "step": 1830
    },
    {
      "epoch": 2.2688039457459928,
      "grad_norm": 0.45476725697517395,
      "learning_rate": 9.517779115349077e-05,
      "loss": 0.0345,
      "step": 1840
    },
    {
      "epoch": 2.281134401972873,
      "grad_norm": 0.4913976192474365,
      "learning_rate": 9.510669796841014e-05,
      "loss": 0.0337,
      "step": 1850
    },
    {
      "epoch": 2.293464858199753,
      "grad_norm": 0.43286487460136414,
      "learning_rate": 9.503511150370727e-05,
      "loss": 0.0435,
      "step": 1860
    },
    {
      "epoch": 2.305795314426634,
      "grad_norm": 0.3469620645046234,
      "learning_rate": 9.496303254224024e-05,
      "loss": 0.0377,
      "step": 1870
    },
    {
      "epoch": 2.318125770653514,
      "grad_norm": 0.35717296600341797,
      "learning_rate": 9.489046187225306e-05,
      "loss": 0.0358,
      "step": 1880
    },
    {
      "epoch": 2.3304562268803943,
      "grad_norm": 0.6867918968200684,
      "learning_rate": 9.481740028736692e-05,
      "loss": 0.0351,
      "step": 1890
    },
    {
      "epoch": 2.342786683107275,
      "grad_norm": 0.36012470722198486,
      "learning_rate": 9.474384858657164e-05,
      "loss": 0.0373,
      "step": 1900
    },
    {
      "epoch": 2.3551171393341552,
      "grad_norm": 0.4134921431541443,
      "learning_rate": 9.466980757421679e-05,
      "loss": 0.0432,
      "step": 1910
    },
    {
      "epoch": 2.367447595561036,
      "grad_norm": 0.36093100905418396,
      "learning_rate": 9.459527806000305e-05,
      "loss": 0.032,
      "step": 1920
    },
    {
      "epoch": 2.379778051787916,
      "grad_norm": 0.4086175560951233,
      "learning_rate": 9.452026085897325e-05,
      "loss": 0.0369,
      "step": 1930
    },
    {
      "epoch": 2.392108508014797,
      "grad_norm": 0.2936928868293762,
      "learning_rate": 9.444475679150348e-05,
      "loss": 0.0367,
      "step": 1940
    },
    {
      "epoch": 2.404438964241677,
      "grad_norm": 0.42611727118492126,
      "learning_rate": 9.436876668329411e-05,
      "loss": 0.0399,
      "step": 1950
    },
    {
      "epoch": 2.4167694204685573,
      "grad_norm": 0.3418695628643036,
      "learning_rate": 9.429229136536079e-05,
      "loss": 0.0308,
      "step": 1960
    },
    {
      "epoch": 2.429099876695438,
      "grad_norm": 0.42617467045783997,
      "learning_rate": 9.421533167402534e-05,
      "loss": 0.0365,
      "step": 1970
    },
    {
      "epoch": 2.441430332922318,
      "grad_norm": 0.3463040292263031,
      "learning_rate": 9.413788845090666e-05,
      "loss": 0.0399,
      "step": 1980
    },
    {
      "epoch": 2.4537607891491984,
      "grad_norm": 0.32411280274391174,
      "learning_rate": 9.405996254291136e-05,
      "loss": 0.0356,
      "step": 1990
    },
    {
      "epoch": 2.466091245376079,
      "grad_norm": 0.3793613016605377,
      "learning_rate": 9.398155480222474e-05,
      "loss": 0.0353,
      "step": 2000
    },
    {
      "epoch": 2.4784217016029593,
      "grad_norm": 0.4391370117664337,
      "learning_rate": 9.390266608630128e-05,
      "loss": 0.0345,
      "step": 2010
    },
    {
      "epoch": 2.4907521578298395,
      "grad_norm": 0.37539857625961304,
      "learning_rate": 9.38232972578553e-05,
      "loss": 0.0353,
      "step": 2020
    },
    {
      "epoch": 2.50308261405672,
      "grad_norm": 0.3593038022518158,
      "learning_rate": 9.374344918485164e-05,
      "loss": 0.0433,
      "step": 2030
    },
    {
      "epoch": 2.5154130702836004,
      "grad_norm": 0.30487290024757385,
      "learning_rate": 9.366312274049602e-05,
      "loss": 0.0412,
      "step": 2040
    },
    {
      "epoch": 2.5277435265104806,
      "grad_norm": 0.2666114866733551,
      "learning_rate": 9.358231880322554e-05,
      "loss": 0.0366,
      "step": 2050
    },
    {
      "epoch": 2.5400739827373613,
      "grad_norm": 0.47192150354385376,
      "learning_rate": 9.350103825669916e-05,
      "loss": 0.0307,
      "step": 2060
    },
    {
      "epoch": 2.5524044389642415,
      "grad_norm": 0.3995994031429291,
      "learning_rate": 9.341928198978787e-05,
      "loss": 0.0329,
      "step": 2070
    },
    {
      "epoch": 2.564734895191122,
      "grad_norm": 0.5093575119972229,
      "learning_rate": 9.333705089656512e-05,
      "loss": 0.0384,
      "step": 2080
    },
    {
      "epoch": 2.5770653514180024,
      "grad_norm": 0.4511171579360962,
      "learning_rate": 9.325434587629698e-05,
      "loss": 0.0335,
      "step": 2090
    },
    {
      "epoch": 2.589395807644883,
      "grad_norm": 0.41962429881095886,
      "learning_rate": 9.31711678334323e-05,
      "loss": 0.0318,
      "step": 2100
    },
    {
      "epoch": 2.6017262638717633,
      "grad_norm": 0.5530136227607727,
      "learning_rate": 9.308751767759282e-05,
      "loss": 0.0316,
      "step": 2110
    },
    {
      "epoch": 2.6140567200986435,
      "grad_norm": 0.3715955913066864,
      "learning_rate": 9.300339632356325e-05,
      "loss": 0.0358,
      "step": 2120
    },
    {
      "epoch": 2.626387176325524,
      "grad_norm": 0.31101542711257935,
      "learning_rate": 9.291880469128124e-05,
      "loss": 0.0349,
      "step": 2130
    },
    {
      "epoch": 2.6387176325524044,
      "grad_norm": 0.5140761137008667,
      "learning_rate": 9.283374370582732e-05,
      "loss": 0.0311,
      "step": 2140
    },
    {
      "epoch": 2.6510480887792847,
      "grad_norm": 0.49348512291908264,
      "learning_rate": 9.274821429741482e-05,
      "loss": 0.0384,
      "step": 2150
    },
    {
      "epoch": 2.6633785450061653,
      "grad_norm": 0.24525637924671173,
      "learning_rate": 9.266221740137961e-05,
      "loss": 0.0295,
      "step": 2160
    },
    {
      "epoch": 2.6757090012330456,
      "grad_norm": 0.19566422700881958,
      "learning_rate": 9.257575395817001e-05,
      "loss": 0.0299,
      "step": 2170
    },
    {
      "epoch": 2.688039457459926,
      "grad_norm": 0.4520023465156555,
      "learning_rate": 9.248882491333637e-05,
      "loss": 0.0293,
      "step": 2180
    },
    {
      "epoch": 2.7003699136868065,
      "grad_norm": 0.41364359855651855,
      "learning_rate": 9.240143121752076e-05,
      "loss": 0.0342,
      "step": 2190
    },
    {
      "epoch": 2.7127003699136867,
      "grad_norm": 0.4098246693611145,
      "learning_rate": 9.23135738264467e-05,
      "loss": 0.0339,
      "step": 2200
    },
    {
      "epoch": 2.7250308261405674,
      "grad_norm": 0.30342575907707214,
      "learning_rate": 9.222525370090849e-05,
      "loss": 0.0326,
      "step": 2210
    },
    {
      "epoch": 2.7373612823674476,
      "grad_norm": 0.32657480239868164,
      "learning_rate": 9.213647180676088e-05,
      "loss": 0.0325,
      "step": 2220
    },
    {
      "epoch": 2.7496917385943282,
      "grad_norm": 0.32405316829681396,
      "learning_rate": 9.204722911490846e-05,
      "loss": 0.0316,
      "step": 2230
    },
    {
      "epoch": 2.7620221948212085,
      "grad_norm": 0.19613325595855713,
      "learning_rate": 9.1957526601295e-05,
      "loss": 0.0326,
      "step": 2240
    },
    {
      "epoch": 2.7743526510480887,
      "grad_norm": 0.47685542702674866,
      "learning_rate": 9.186736524689281e-05,
      "loss": 0.0326,
      "step": 2250
    },
    {
      "epoch": 2.7866831072749694,
      "grad_norm": 0.3843764364719391,
      "learning_rate": 9.177674603769204e-05,
      "loss": 0.034,
      "step": 2260
    },
    {
      "epoch": 2.7990135635018496,
      "grad_norm": 0.38438719511032104,
      "learning_rate": 9.168566996468983e-05,
      "loss": 0.0331,
      "step": 2270
    },
    {
      "epoch": 2.81134401972873,
      "grad_norm": 0.5032266974449158,
      "learning_rate": 9.159413802387951e-05,
      "loss": 0.0335,
      "step": 2280
    },
    {
      "epoch": 2.8236744759556105,
      "grad_norm": 0.33314791321754456,
      "learning_rate": 9.150215121623974e-05,
      "loss": 0.0348,
      "step": 2290
    },
    {
      "epoch": 2.8360049321824907,
      "grad_norm": 0.41419926285743713,
      "learning_rate": 9.140971054772349e-05,
      "loss": 0.0319,
      "step": 2300
    },
    {
      "epoch": 2.848335388409371,
      "grad_norm": 0.25893399119377136,
      "learning_rate": 9.131681702924713e-05,
      "loss": 0.0394,
      "step": 2310
    },
    {
      "epoch": 2.8606658446362516,
      "grad_norm": 0.35780569911003113,
      "learning_rate": 9.122347167667926e-05,
      "loss": 0.0342,
      "step": 2320
    },
    {
      "epoch": 2.872996300863132,
      "grad_norm": 0.40284544229507446,
      "learning_rate": 9.112967551082973e-05,
      "loss": 0.0407,
      "step": 2330
    },
    {
      "epoch": 2.885326757090012,
      "grad_norm": 0.2593776285648346,
      "learning_rate": 9.103542955743835e-05,
      "loss": 0.0356,
      "step": 2340
    },
    {
      "epoch": 2.8976572133168927,
      "grad_norm": 0.3005968928337097,
      "learning_rate": 9.094073484716381e-05,
      "loss": 0.0364,
      "step": 2350
    },
    {
      "epoch": 2.909987669543773,
      "grad_norm": 0.46636074781417847,
      "learning_rate": 9.084559241557226e-05,
      "loss": 0.0311,
      "step": 2360
    },
    {
      "epoch": 2.9223181257706536,
      "grad_norm": 0.24965031445026398,
      "learning_rate": 9.075000330312608e-05,
      "loss": 0.0312,
      "step": 2370
    },
    {
      "epoch": 2.934648581997534,
      "grad_norm": 0.34543120861053467,
      "learning_rate": 9.065396855517253e-05,
      "loss": 0.0287,
      "step": 2380
    },
    {
      "epoch": 2.9469790382244145,
      "grad_norm": 0.37850749492645264,
      "learning_rate": 9.055748922193219e-05,
      "loss": 0.0399,
      "step": 2390
    },
    {
      "epoch": 2.9593094944512948,
      "grad_norm": 0.369529128074646,
      "learning_rate": 9.046056635848761e-05,
      "loss": 0.0348,
      "step": 2400
    },
    {
      "epoch": 2.971639950678175,
      "grad_norm": 0.25628718733787537,
      "learning_rate": 9.036320102477169e-05,
      "loss": 0.0313,
      "step": 2410
    },
    {
      "epoch": 2.9839704069050557,
      "grad_norm": 0.29385656118392944,
      "learning_rate": 9.02653942855561e-05,
      "loss": 0.0322,
      "step": 2420
    },
    {
      "epoch": 2.996300863131936,
      "grad_norm": 0.27464205026626587,
      "learning_rate": 9.016714721043971e-05,
      "loss": 0.0321,
      "step": 2430
    },
    {
      "epoch": 3.008631319358816,
      "grad_norm": 0.23686601221561432,
      "learning_rate": 9.006846087383675e-05,
      "loss": 0.0479,
      "step": 2440
    },
    {
      "epoch": 3.020961775585697,
      "grad_norm": 0.43010351061820984,
      "learning_rate": 8.996933635496523e-05,
      "loss": 0.046,
      "step": 2450
    },
    {
      "epoch": 3.033292231812577,
      "grad_norm": 0.5947278738021851,
      "learning_rate": 8.986977473783498e-05,
      "loss": 0.0496,
      "step": 2460
    },
    {
      "epoch": 3.0456226880394572,
      "grad_norm": 0.3201181888580322,
      "learning_rate": 8.97697771112359e-05,
      "loss": 0.043,
      "step": 2470
    },
    {
      "epoch": 3.057953144266338,
      "grad_norm": 0.42945221066474915,
      "learning_rate": 8.966934456872602e-05,
      "loss": 0.0385,
      "step": 2480
    },
    {
      "epoch": 3.070283600493218,
      "grad_norm": 0.7006970643997192,
      "learning_rate": 8.95684782086195e-05,
      "loss": 0.0411,
      "step": 2490
    },
    {
      "epoch": 3.082614056720099,
      "grad_norm": 0.6188066005706787,
      "learning_rate": 8.946717913397476e-05,
      "loss": 0.0394,
      "step": 2500
    },
    {
      "epoch": 3.094944512946979,
      "grad_norm": 0.34453701972961426,
      "learning_rate": 8.93654484525822e-05,
      "loss": 0.0391,
      "step": 2510
    },
    {
      "epoch": 3.1072749691738593,
      "grad_norm": 0.3709743618965149,
      "learning_rate": 8.926328727695226e-05,
      "loss": 0.0384,
      "step": 2520
    },
    {
      "epoch": 3.11960542540074,
      "grad_norm": 0.37968412041664124,
      "learning_rate": 8.916069672430319e-05,
      "loss": 0.0333,
      "step": 2530
    },
    {
      "epoch": 3.13193588162762,
      "grad_norm": 0.2592119872570038,
      "learning_rate": 8.905767791654884e-05,
      "loss": 0.0367,
      "step": 2540
    },
    {
      "epoch": 3.144266337854501,
      "grad_norm": 0.3800894618034363,
      "learning_rate": 8.895423198028638e-05,
      "loss": 0.0377,
      "step": 2550
    },
    {
      "epoch": 3.156596794081381,
      "grad_norm": 0.3679257035255432,
      "learning_rate": 8.885036004678402e-05,
      "loss": 0.0349,
      "step": 2560
    },
    {
      "epoch": 3.1689272503082613,
      "grad_norm": 0.4382827579975128,
      "learning_rate": 8.874606325196857e-05,
      "loss": 0.0326,
      "step": 2570
    },
    {
      "epoch": 3.181257706535142,
      "grad_norm": 0.28470033407211304,
      "learning_rate": 8.864134273641304e-05,
      "loss": 0.0338,
      "step": 2580
    },
    {
      "epoch": 3.193588162762022,
      "grad_norm": 0.35729333758354187,
      "learning_rate": 8.853619964532427e-05,
      "loss": 0.0383,
      "step": 2590
    },
    {
      "epoch": 3.2059186189889024,
      "grad_norm": 0.34574776887893677,
      "learning_rate": 8.843063512853019e-05,
      "loss": 0.03,
      "step": 2600
    },
    {
      "epoch": 3.218249075215783,
      "grad_norm": 0.319701224565506,
      "learning_rate": 8.832465034046749e-05,
      "loss": 0.0326,
      "step": 2610
    },
    {
      "epoch": 3.2305795314426633,
      "grad_norm": 0.4792560636997223,
      "learning_rate": 8.821824644016882e-05,
      "loss": 0.0308,
      "step": 2620
    },
    {
      "epoch": 3.242909987669544,
      "grad_norm": 0.33934926986694336,
      "learning_rate": 8.811142459125019e-05,
      "loss": 0.0298,
      "step": 2630
    },
    {
      "epoch": 3.255240443896424,
      "grad_norm": 0.40221115946769714,
      "learning_rate": 8.800418596189822e-05,
      "loss": 0.0279,
      "step": 2640
    },
    {
      "epoch": 3.2675709001233044,
      "grad_norm": 0.3183158338069916,
      "learning_rate": 8.789653172485737e-05,
      "loss": 0.0311,
      "step": 2650
    },
    {
      "epoch": 3.279901356350185,
      "grad_norm": 0.31430214643478394,
      "learning_rate": 8.778846305741715e-05,
      "loss": 0.0379,
      "step": 2660
    },
    {
      "epoch": 3.2922318125770653,
      "grad_norm": 0.2962730824947357,
      "learning_rate": 8.767998114139918e-05,
      "loss": 0.036,
      "step": 2670
    },
    {
      "epoch": 3.304562268803946,
      "grad_norm": 0.35871070623397827,
      "learning_rate": 8.757108716314429e-05,
      "loss": 0.0353,
      "step": 2680
    },
    {
      "epoch": 3.316892725030826,
      "grad_norm": 0.3619069457054138,
      "learning_rate": 8.746178231349962e-05,
      "loss": 0.03,
      "step": 2690
    },
    {
      "epoch": 3.3292231812577064,
      "grad_norm": 0.2398015260696411,
      "learning_rate": 8.735206778780549e-05,
      "loss": 0.038,
      "step": 2700
    },
    {
      "epoch": 3.341553637484587,
      "grad_norm": 0.3300010859966278,
      "learning_rate": 8.724194478588234e-05,
      "loss": 0.0323,
      "step": 2710
    },
    {
      "epoch": 3.3538840937114673,
      "grad_norm": 0.4098781943321228,
      "learning_rate": 8.713141451201772e-05,
      "loss": 0.0301,
      "step": 2720
    },
    {
      "epoch": 3.3662145499383476,
      "grad_norm": 0.4757755398750305,
      "learning_rate": 8.702047817495295e-05,
      "loss": 0.0344,
      "step": 2730
    },
    {
      "epoch": 3.3785450061652282,
      "grad_norm": 0.2588600814342499,
      "learning_rate": 8.69091369878701e-05,
      "loss": 0.0306,
      "step": 2740
    },
    {
      "epoch": 3.3908754623921085,
      "grad_norm": 0.2977026402950287,
      "learning_rate": 8.679739216837849e-05,
      "loss": 0.0273,
      "step": 2750
    },
    {
      "epoch": 3.4032059186189887,
      "grad_norm": 0.3322371542453766,
      "learning_rate": 8.66852449385016e-05,
      "loss": 0.028,
      "step": 2760
    },
    {
      "epoch": 3.4155363748458694,
      "grad_norm": 0.22466640174388885,
      "learning_rate": 8.657269652466356e-05,
      "loss": 0.027,
      "step": 2770
    },
    {
      "epoch": 3.4278668310727496,
      "grad_norm": 0.43801864981651306,
      "learning_rate": 8.645974815767577e-05,
      "loss": 0.0343,
      "step": 2780
    },
    {
      "epoch": 3.4401972872996303,
      "grad_norm": 0.4810059666633606,
      "learning_rate": 8.634640107272351e-05,
      "loss": 0.0286,
      "step": 2790
    },
    {
      "epoch": 3.4525277435265105,
      "grad_norm": 0.30760639905929565,
      "learning_rate": 8.623265650935234e-05,
      "loss": 0.0294,
      "step": 2800
    },
    {
      "epoch": 3.4648581997533907,
      "grad_norm": 0.261068731546402,
      "learning_rate": 8.611851571145456e-05,
      "loss": 0.0319,
      "step": 2810
    },
    {
      "epoch": 3.4771886559802714,
      "grad_norm": 0.2873305678367615,
      "learning_rate": 8.600397992725566e-05,
      "loss": 0.0258,
      "step": 2820
    },
    {
      "epoch": 3.4895191122071516,
      "grad_norm": 0.2515256404876709,
      "learning_rate": 8.588905040930061e-05,
      "loss": 0.0291,
      "step": 2830
    },
    {
      "epoch": 3.5018495684340323,
      "grad_norm": 0.39009010791778564,
      "learning_rate": 8.577372841444022e-05,
      "loss": 0.0308,
      "step": 2840
    },
    {
      "epoch": 3.5141800246609125,
      "grad_norm": 0.2483336329460144,
      "learning_rate": 8.565801520381736e-05,
      "loss": 0.025,
      "step": 2850
    },
    {
      "epoch": 3.5265104808877927,
      "grad_norm": 0.31179869174957275,
      "learning_rate": 8.554191204285313e-05,
      "loss": 0.0266,
      "step": 2860
    },
    {
      "epoch": 3.5388409371146734,
      "grad_norm": 0.2990468144416809,
      "learning_rate": 8.542542020123315e-05,
      "loss": 0.0304,
      "step": 2870
    },
    {
      "epoch": 3.5511713933415536,
      "grad_norm": 0.3057761788368225,
      "learning_rate": 8.530854095289347e-05,
      "loss": 0.0308,
      "step": 2880
    },
    {
      "epoch": 3.563501849568434,
      "grad_norm": 0.3865143954753876,
      "learning_rate": 8.519127557600688e-05,
      "loss": 0.0265,
      "step": 2890
    },
    {
      "epoch": 3.5758323057953145,
      "grad_norm": 0.30393242835998535,
      "learning_rate": 8.507362535296871e-05,
      "loss": 0.0326,
      "step": 2900
    },
    {
      "epoch": 3.5881627620221948,
      "grad_norm": 0.4474574625492096,
      "learning_rate": 8.495559157038299e-05,
      "loss": 0.0405,
      "step": 2910
    },
    {
      "epoch": 3.600493218249075,
      "grad_norm": 0.40014782547950745,
      "learning_rate": 8.483717551904823e-05,
      "loss": 0.0272,
      "step": 2920
    },
    {
      "epoch": 3.6128236744759556,
      "grad_norm": 0.3668227791786194,
      "learning_rate": 8.47183784939434e-05,
      "loss": 0.0277,
      "step": 2930
    },
    {
      "epoch": 3.625154130702836,
      "grad_norm": 0.33630821108818054,
      "learning_rate": 8.459920179421374e-05,
      "loss": 0.0278,
      "step": 2940
    },
    {
      "epoch": 3.6374845869297165,
      "grad_norm": 0.38713976740837097,
      "learning_rate": 8.447964672315656e-05,
      "loss": 0.029,
      "step": 2950
    },
    {
      "epoch": 3.6498150431565968,
      "grad_norm": 0.40157631039619446,
      "learning_rate": 8.435971458820692e-05,
      "loss": 0.0359,
      "step": 2960
    },
    {
      "epoch": 3.6621454993834774,
      "grad_norm": 0.47970589995384216,
      "learning_rate": 8.423940670092345e-05,
      "loss": 0.0289,
      "step": 2970
    },
    {
      "epoch": 3.6744759556103577,
      "grad_norm": 0.29617589712142944,
      "learning_rate": 8.411872437697394e-05,
      "loss": 0.0328,
      "step": 2980
    },
    {
      "epoch": 3.686806411837238,
      "grad_norm": 0.34878507256507874,
      "learning_rate": 8.399766893612096e-05,
      "loss": 0.0263,
      "step": 2990
    },
    {
      "epoch": 3.6991368680641186,
      "grad_norm": 0.2661670446395874,
      "learning_rate": 8.38762417022074e-05,
      "loss": 0.0312,
      "step": 3000
    },
    {
      "epoch": 3.711467324290999,
      "grad_norm": 0.1930977702140808,
      "learning_rate": 8.375444400314204e-05,
      "loss": 0.0307,
      "step": 3010
    },
    {
      "epoch": 3.723797780517879,
      "grad_norm": 0.45096123218536377,
      "learning_rate": 8.3632277170885e-05,
      "loss": 0.0386,
      "step": 3020
    },
    {
      "epoch": 3.7361282367447597,
      "grad_norm": 0.17403753101825714,
      "learning_rate": 8.350974254143318e-05,
      "loss": 0.0274,
      "step": 3030
    },
    {
      "epoch": 3.74845869297164,
      "grad_norm": 0.3632602393627167,
      "learning_rate": 8.338684145480566e-05,
      "loss": 0.0261,
      "step": 3040
    },
    {
      "epoch": 3.76078914919852,
      "grad_norm": 0.4452861249446869,
      "learning_rate": 8.326357525502904e-05,
      "loss": 0.0297,
      "step": 3050
    },
    {
      "epoch": 3.773119605425401,
      "grad_norm": 0.29421886801719666,
      "learning_rate": 8.313994529012273e-05,
      "loss": 0.0319,
      "step": 3060
    },
    {
      "epoch": 3.785450061652281,
      "grad_norm": 0.30862492322921753,
      "learning_rate": 8.301595291208422e-05,
      "loss": 0.0308,
      "step": 3070
    },
    {
      "epoch": 3.7977805178791613,
      "grad_norm": 0.36564901471138,
      "learning_rate": 8.289159947687427e-05,
      "loss": 0.0263,
      "step": 3080
    },
    {
      "epoch": 3.810110974106042,
      "grad_norm": 0.31529054045677185,
      "learning_rate": 8.276688634440216e-05,
      "loss": 0.0298,
      "step": 3090
    },
    {
      "epoch": 3.822441430332922,
      "grad_norm": 0.4234089255332947,
      "learning_rate": 8.26418148785107e-05,
      "loss": 0.0333,
      "step": 3100
    },
    {
      "epoch": 3.834771886559803,
      "grad_norm": 0.39119094610214233,
      "learning_rate": 8.251638644696141e-05,
      "loss": 0.0354,
      "step": 3110
    },
    {
      "epoch": 3.847102342786683,
      "grad_norm": 0.3775312006473541,
      "learning_rate": 8.23906024214195e-05,
      "loss": 0.0344,
      "step": 3120
    },
    {
      "epoch": 3.8594327990135637,
      "grad_norm": 0.4479222297668457,
      "learning_rate": 8.226446417743897e-05,
      "loss": 0.0275,
      "step": 3130
    },
    {
      "epoch": 3.871763255240444,
      "grad_norm": 0.429328978061676,
      "learning_rate": 8.213797309444742e-05,
      "loss": 0.0327,
      "step": 3140
    },
    {
      "epoch": 3.884093711467324,
      "grad_norm": 0.3288831412792206,
      "learning_rate": 8.201113055573105e-05,
      "loss": 0.0306,
      "step": 3150
    },
    {
      "epoch": 3.896424167694205,
      "grad_norm": 0.23620422184467316,
      "learning_rate": 8.188393794841958e-05,
      "loss": 0.0263,
      "step": 3160
    },
    {
      "epoch": 3.908754623921085,
      "grad_norm": 0.2985759675502777,
      "learning_rate": 8.175639666347094e-05,
      "loss": 0.0333,
      "step": 3170
    },
    {
      "epoch": 3.9210850801479653,
      "grad_norm": 0.36511659622192383,
      "learning_rate": 8.162850809565623e-05,
      "loss": 0.0252,
      "step": 3180
    },
    {
      "epoch": 3.933415536374846,
      "grad_norm": 0.2632274627685547,
      "learning_rate": 8.150027364354431e-05,
      "loss": 0.0255,
      "step": 3190
    },
    {
      "epoch": 3.945745992601726,
      "grad_norm": 0.3238585591316223,
      "learning_rate": 8.137169470948662e-05,
      "loss": 0.0351,
      "step": 3200
    },
    {
      "epoch": 3.9580764488286064,
      "grad_norm": 0.27797338366508484,
      "learning_rate": 8.124277269960179e-05,
      "loss": 0.0314,
      "step": 3210
    },
    {
      "epoch": 3.970406905055487,
      "grad_norm": 0.38050952553749084,
      "learning_rate": 8.111350902376023e-05,
      "loss": 0.0306,
      "step": 3220
    },
    {
      "epoch": 3.9827373612823673,
      "grad_norm": 0.27192583680152893,
      "learning_rate": 8.098390509556883e-05,
      "loss": 0.0302,
      "step": 3230
    },
    {
      "epoch": 3.995067817509248,
      "grad_norm": 0.3690575659275055,
      "learning_rate": 8.085396233235536e-05,
      "loss": 0.0254,
      "step": 3240
    },
    {
      "epoch": 4.007398273736128,
      "grad_norm": 0.3868391215801239,
      "learning_rate": 8.072368215515306e-05,
      "loss": 0.0279,
      "step": 3250
    },
    {
      "epoch": 4.019728729963009,
      "grad_norm": 0.6307650804519653,
      "learning_rate": 8.059306598868506e-05,
      "loss": 0.036,
      "step": 3260
    },
    {
      "epoch": 4.032059186189889,
      "grad_norm": 0.3873135447502136,
      "learning_rate": 8.046211526134888e-05,
      "loss": 0.0316,
      "step": 3270
    },
    {
      "epoch": 4.044389642416769,
      "grad_norm": 0.20493006706237793,
      "learning_rate": 8.033083140520065e-05,
      "loss": 0.0302,
      "step": 3280
    },
    {
      "epoch": 4.05672009864365,
      "grad_norm": 0.30493855476379395,
      "learning_rate": 8.019921585593962e-05,
      "loss": 0.0284,
      "step": 3290
    },
    {
      "epoch": 4.06905055487053,
      "grad_norm": 0.3948972225189209,
      "learning_rate": 8.006727005289232e-05,
      "loss": 0.0264,
      "step": 3300
    },
    {
      "epoch": 4.0813810110974105,
      "grad_norm": 0.3125462830066681,
      "learning_rate": 7.993499543899692e-05,
      "loss": 0.024,
      "step": 3310
    },
    {
      "epoch": 4.093711467324291,
      "grad_norm": 0.48805707693099976,
      "learning_rate": 7.980239346078742e-05,
      "loss": 0.0276,
      "step": 3320
    },
    {
      "epoch": 4.106041923551172,
      "grad_norm": 0.4154878854751587,
      "learning_rate": 7.966946556837778e-05,
      "loss": 0.0248,
      "step": 3330
    },
    {
      "epoch": 4.118372379778052,
      "grad_norm": 0.24094894528388977,
      "learning_rate": 7.953621321544616e-05,
      "loss": 0.0293,
      "step": 3340
    },
    {
      "epoch": 4.130702836004932,
      "grad_norm": 0.3568149507045746,
      "learning_rate": 7.940263785921896e-05,
      "loss": 0.0274,
      "step": 3350
    },
    {
      "epoch": 4.143033292231813,
      "grad_norm": 0.36104142665863037,
      "learning_rate": 7.926874096045482e-05,
      "loss": 0.03,
      "step": 3360
    },
    {
      "epoch": 4.155363748458693,
      "grad_norm": 0.29286569356918335,
      "learning_rate": 7.913452398342881e-05,
      "loss": 0.0358,
      "step": 3370
    },
    {
      "epoch": 4.167694204685573,
      "grad_norm": 0.5099199414253235,
      "learning_rate": 7.89999883959163e-05,
      "loss": 0.0303,
      "step": 3380
    },
    {
      "epoch": 4.180024660912454,
      "grad_norm": 0.20983999967575073,
      "learning_rate": 7.886513566917687e-05,
      "loss": 0.0321,
      "step": 3390
    },
    {
      "epoch": 4.192355117139334,
      "grad_norm": 0.4159587323665619,
      "learning_rate": 7.872996727793838e-05,
      "loss": 0.0302,
      "step": 3400
    },
    {
      "epoch": 4.2046855733662145,
      "grad_norm": 0.5001829266548157,
      "learning_rate": 7.859448470038069e-05,
      "loss": 0.0316,
      "step": 3410
    },
    {
      "epoch": 4.217016029593095,
      "grad_norm": 0.28088536858558655,
      "learning_rate": 7.845868941811956e-05,
| "loss": 0.0299, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 4.229346485819975, | |
| "grad_norm": 0.2824207842350006, | |
| "learning_rate": 7.832258291619043e-05, | |
| "loss": 0.0238, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 4.241676942046856, | |
| "grad_norm": 0.24666544795036316, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0245, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 4.254007398273736, | |
| "grad_norm": 0.33369141817092896, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0311, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 4.266337854500616, | |
| "grad_norm": 0.3011949956417084, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0288, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 4.278668310727497, | |
| "grad_norm": 0.35821273922920227, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.0308, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 4.290998766954377, | |
| "grad_norm": 0.4880791902542114, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.0253, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 4.303329223181258, | |
| "grad_norm": 0.2648777663707733, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.028, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 4.315659679408138, | |
| "grad_norm": 0.30733415484428406, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.0304, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 4.3279901356350186, | |
| "grad_norm": 0.2258274108171463, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0267, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 4.340320591861899, | |
| "grad_norm": 0.3003375828266144, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.0237, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 4.352651048088779, | |
| "grad_norm": 0.28849828243255615, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.024, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 4.36498150431566, | |
| "grad_norm": 0.2744177281856537, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.0257, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 4.37731196054254, | |
| "grad_norm": 0.191752091050148, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0309, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 4.38964241676942, | |
| "grad_norm": 0.22608980536460876, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.0258, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 4.401972872996301, | |
| "grad_norm": 0.23972775042057037, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.026, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 4.4143033292231815, | |
| "grad_norm": 0.2698058485984802, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.0251, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 4.426633785450061, | |
| "grad_norm": 0.22463522851467133, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.0334, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 4.438964241676942, | |
| "grad_norm": 0.29725492000579834, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0267, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 4.451294697903823, | |
| "grad_norm": 0.4111051857471466, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.0269, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 4.463625154130703, | |
| "grad_norm": 0.31639209389686584, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.0261, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 4.475955610357583, | |
| "grad_norm": 0.27205532789230347, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.0279, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 4.488286066584464, | |
| "grad_norm": 0.25912031531333923, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0331, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 4.500616522811344, | |
| "grad_norm": 0.39170870184898376, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0271, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 4.512946979038224, | |
| "grad_norm": 0.26685839891433716, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.0261, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 4.525277435265105, | |
| "grad_norm": 0.59464430809021, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.0289, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 4.5376078914919855, | |
| "grad_norm": 0.3193068206310272, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0275, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 4.549938347718865, | |
| "grad_norm": 0.21632061898708344, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.0327, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 4.562268803945746, | |
| "grad_norm": 0.3070428967475891, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0287, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 4.574599260172627, | |
| "grad_norm": 0.28113073110580444, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0261, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 4.586929716399506, | |
| "grad_norm": 0.3845267593860626, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0344, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 4.599260172626387, | |
| "grad_norm": 0.32508736848831177, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0267, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 4.611590628853268, | |
| "grad_norm": 0.49549639225006104, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0257, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 4.623921085080148, | |
| "grad_norm": 0.2788389027118683, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0298, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 4.636251541307028, | |
| "grad_norm": 0.2841396629810333, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0254, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 4.648581997533909, | |
| "grad_norm": 0.17385002970695496, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.0297, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 4.660912453760789, | |
| "grad_norm": 0.26165494322776794, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0293, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 4.673242909987669, | |
| "grad_norm": 0.2686731219291687, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.0229, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 4.68557336621455, | |
| "grad_norm": 0.25602030754089355, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0277, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 4.697903822441431, | |
| "grad_norm": 0.26638665795326233, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0259, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 4.7102342786683105, | |
| "grad_norm": 0.2751389443874359, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0285, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 4.722564734895191, | |
| "grad_norm": 0.35715851187705994, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0368, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 4.734895191122072, | |
| "grad_norm": 0.241239994764328, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0242, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 4.747225647348952, | |
| "grad_norm": 0.2586405575275421, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0252, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 4.759556103575832, | |
| "grad_norm": 0.27252063155174255, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.026, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 4.771886559802713, | |
| "grad_norm": 0.2868618965148926, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0268, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 4.784217016029594, | |
| "grad_norm": 0.28082075715065, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0242, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 4.796547472256473, | |
| "grad_norm": 0.4589066207408905, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.0217, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 4.808877928483354, | |
| "grad_norm": 0.25454071164131165, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.027, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 4.821208384710234, | |
| "grad_norm": 0.3064049482345581, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.0236, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 4.8335388409371145, | |
| "grad_norm": 0.29005786776542664, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.0274, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 4.845869297163995, | |
| "grad_norm": 0.3328990638256073, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0251, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 4.858199753390876, | |
| "grad_norm": 0.2889794111251831, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0261, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 4.870530209617756, | |
| "grad_norm": 0.4047600030899048, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0289, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 4.882860665844636, | |
| "grad_norm": 0.3704594671726227, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0274, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 4.895191122071517, | |
| "grad_norm": 0.34426748752593994, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.0225, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 4.907521578298397, | |
| "grad_norm": 0.3096552789211273, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.0269, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 4.919852034525277, | |
| "grad_norm": 0.2392643392086029, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0266, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 4.932182490752158, | |
| "grad_norm": 0.3062688112258911, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0275, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 4.944512946979038, | |
| "grad_norm": 0.26748013496398926, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0267, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 4.9568434032059185, | |
| "grad_norm": 0.16465726494789124, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0249, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 4.969173859432799, | |
| "grad_norm": 0.29066964983940125, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.0282, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 4.981504315659679, | |
| "grad_norm": 0.2784776985645294, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.027, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 4.99383477188656, | |
| "grad_norm": 0.19568200409412384, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0228, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 5.00616522811344, | |
| "grad_norm": 0.37055474519729614, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0252, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 5.018495684340321, | |
| "grad_norm": 0.32663726806640625, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0274, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 5.030826140567201, | |
| "grad_norm": 0.2959806025028229, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.023, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 5.0431565967940815, | |
| "grad_norm": 0.3356919586658478, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0239, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 5.055487053020962, | |
| "grad_norm": 0.2019459307193756, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0253, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 5.067817509247842, | |
| "grad_norm": 0.21574294567108154, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0243, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 5.080147965474723, | |
| "grad_norm": 0.2918378710746765, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.0229, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 5.092478421701603, | |
| "grad_norm": 0.3412553071975708, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0258, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 5.104808877928483, | |
| "grad_norm": 0.2434139996767044, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0233, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 5.117139334155364, | |
| "grad_norm": 0.1951841562986374, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.0276, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 5.129469790382244, | |
| "grad_norm": 0.3163956105709076, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.0177, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 5.141800246609124, | |
| "grad_norm": 0.23022493720054626, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0237, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 5.154130702836005, | |
| "grad_norm": 0.3176528811454773, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.0277, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 5.1664611590628855, | |
| "grad_norm": 0.3909565508365631, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0299, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 5.178791615289766, | |
| "grad_norm": 0.21056251227855682, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0223, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 5.191122071516646, | |
| "grad_norm": 0.3032974302768707, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.023, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 5.203452527743527, | |
| "grad_norm": 0.33099058270454407, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.0229, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 5.215782983970407, | |
| "grad_norm": 0.34281712770462036, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.0234, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 5.228113440197287, | |
| "grad_norm": 0.1906030774116516, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0219, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 5.240443896424168, | |
| "grad_norm": 0.24526453018188477, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0297, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 5.252774352651048, | |
| "grad_norm": 0.2897971570491791, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0318, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 5.265104808877928, | |
| "grad_norm": 0.40906140208244324, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0264, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 5.277435265104809, | |
| "grad_norm": 0.3060914874076843, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.0248, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 5.2897657213316895, | |
| "grad_norm": 0.21351800858974457, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.0216, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 5.302096177558569, | |
| "grad_norm": 0.29025885462760925, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0275, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 5.31442663378545, | |
| "grad_norm": 0.24519126117229462, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.0231, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 5.326757090012331, | |
| "grad_norm": 0.3450472056865692, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.0236, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 5.3390875462392104, | |
| "grad_norm": 0.3075282871723175, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0231, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 5.351418002466091, | |
| "grad_norm": 0.48635169863700867, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0212, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 5.363748458692972, | |
| "grad_norm": 0.2588857412338257, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0225, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 5.376078914919852, | |
| "grad_norm": 0.3633054196834564, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0217, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 5.388409371146732, | |
| "grad_norm": 0.28437089920043945, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0234, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 5.400739827373613, | |
| "grad_norm": 0.3541273772716522, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0201, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 5.413070283600494, | |
| "grad_norm": 0.23156724870204926, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0262, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 5.425400739827373, | |
| "grad_norm": 0.26685383915901184, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0255, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 5.437731196054254, | |
| "grad_norm": 0.32229578495025635, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0263, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 5.450061652281135, | |
| "grad_norm": 0.28889018297195435, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0238, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 5.4623921085080145, | |
| "grad_norm": 0.22566655278205872, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.025, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 5.474722564734895, | |
| "grad_norm": 0.33604100346565247, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.0246, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 5.487053020961776, | |
| "grad_norm": 0.3482617139816284, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.0304, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 5.499383477188656, | |
| "grad_norm": 0.31966832280158997, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.023, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 5.511713933415536, | |
| "grad_norm": 0.21755751967430115, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.0203, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 5.524044389642417, | |
| "grad_norm": 0.21764443814754486, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0235, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 5.536374845869297, | |
| "grad_norm": 0.3675846755504608, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0247, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 5.548705302096177, | |
| "grad_norm": 0.3585027754306793, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0269, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 5.561035758323058, | |
| "grad_norm": 0.21989168226718903, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.0246, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 5.573366214549939, | |
| "grad_norm": 0.2681444585323334, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0268, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 5.5856966707768185, | |
| "grad_norm": 0.3481389284133911, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.0251, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 5.598027127003699, | |
| "grad_norm": 0.24546685814857483, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.0239, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 5.61035758323058, | |
| "grad_norm": 0.36851373314857483, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0229, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 5.62268803945746, | |
| "grad_norm": 0.20858986675739288, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0229, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 5.63501849568434, | |
| "grad_norm": 0.25986698269844055, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0272, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 5.647348951911221, | |
| "grad_norm": 0.27059510350227356, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.0205, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 5.659679408138101, | |
| "grad_norm": 0.21684052050113678, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0259, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 5.6720098643649814, | |
| "grad_norm": 0.23955918848514557, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0238, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 5.684340320591862, | |
| "grad_norm": 0.27122101187705994, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0268, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 5.696670776818742, | |
| "grad_norm": 0.36009183526039124, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0249, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 5.709001233045623, | |
| "grad_norm": 0.26205703616142273, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0231, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 5.721331689272503, | |
| "grad_norm": 0.3562557101249695, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.023, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 5.733662145499384, | |
| "grad_norm": 0.26748237013816833, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.0259, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 5.745992601726264, | |
| "grad_norm": 0.2844463586807251, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0255, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 5.758323057953144, | |
| "grad_norm": 0.30506637692451477, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0305, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 5.770653514180025, | |
| "grad_norm": 0.25649866461753845, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0221, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 5.782983970406905, | |
| "grad_norm": 0.28870755434036255, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0261, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 5.7953144266337855, | |
| "grad_norm": 0.31868359446525574, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0267, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 5.807644882860666, | |
| "grad_norm": 0.26874470710754395, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0224, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 5.819975339087546, | |
| "grad_norm": 0.17658057808876038, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.0225, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 5.832305795314427, | |
| "grad_norm": 0.22887958586215973, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.0214, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 5.844636251541307, | |
| "grad_norm": 0.24907585978507996, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.021, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 5.856966707768187, | |
| "grad_norm": 0.3161385655403137, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0208, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 5.869297163995068, | |
| "grad_norm": 0.21596252918243408, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0227, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 5.881627620221948, | |
| "grad_norm": 0.20386070013046265, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0252, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 5.893958076448829, | |
| "grad_norm": 0.34854716062545776, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0245, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 5.906288532675709, | |
| "grad_norm": 0.20014896988868713, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.0219, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 5.9186189889025895, | |
| "grad_norm": 0.2527239918708801, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0186, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 5.930949445129469, | |
| "grad_norm": 0.20940017700195312, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0266, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 5.94327990135635, | |
| "grad_norm": 0.3266250789165497, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.025, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 5.955610357583231, | |
| "grad_norm": 0.2225354164838791, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.0261, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 5.967940813810111, | |
| "grad_norm": 0.40273720026016235, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0342, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 5.980271270036991, | |
| "grad_norm": 0.2384943664073944, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0259, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 5.992601726263872, | |
| "grad_norm": 0.17832358181476593, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0215, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 6.0049321824907524, | |
| "grad_norm": 0.2760484218597412, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.0277, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 6.017262638717632, | |
| "grad_norm": 0.2816040813922882, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0258, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 6.029593094944513, | |
| "grad_norm": 0.19857008755207062, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.024, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 6.041923551171394, | |
| "grad_norm": 0.27289700508117676, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.024, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 6.054254007398273, | |
| "grad_norm": 0.18250992894172668, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.0229, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 6.066584463625154, | |
| "grad_norm": 0.16475291550159454, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0206, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 6.078914919852035, | |
| "grad_norm": 0.2761680781841278, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0223, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 6.0912453760789145, | |
| "grad_norm": 0.2531130015850067, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0251, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 6.103575832305795, | |
| "grad_norm": 0.3336329162120819, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.0226, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 6.115906288532676, | |
| "grad_norm": 0.21103692054748535, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0211, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 6.1282367447595565, | |
| "grad_norm": 0.24651874601840973, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.0218, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 6.140567200986436, | |
| "grad_norm": 0.2976286709308624, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.0281, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 6.152897657213317, | |
| "grad_norm": 0.31618136167526245, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.026, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 6.165228113440198, | |
| "grad_norm": 0.21486596763134003, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.022, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 6.177558569667077, | |
| "grad_norm": 0.22519131004810333, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0217, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 6.189889025893958, | |
| "grad_norm": 0.2845526933670044, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0241, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 6.202219482120839, | |
| "grad_norm": 0.23237572610378265, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0218, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 6.2145499383477185, | |
| "grad_norm": 0.2708719074726105, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.0212, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 6.226880394574599, | |
| "grad_norm": 0.2959873676300049, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.0222, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 6.23921085080148, | |
| "grad_norm": 0.15844032168388367, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.0197, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 6.25154130702836, | |
| "grad_norm": 0.20172832906246185, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0212, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 6.26387176325524, | |
| "grad_norm": 0.25829359889030457, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.0221, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 6.276202219482121, | |
| "grad_norm": 0.3920601010322571, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0199, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 6.288532675709002, | |
| "grad_norm": 0.199340358376503, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0202, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 6.300863131935881, | |
| "grad_norm": 0.3202466070652008, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.022, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 6.313193588162762, | |
| "grad_norm": 0.36835238337516785, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.024, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 6.325524044389643, | |
| "grad_norm": 0.20573300123214722, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0192, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 6.337854500616523, | |
| "grad_norm": 0.17183108627796173, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0182, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 6.350184956843403, | |
| "grad_norm": 0.26490992307662964, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0203, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 6.362515413070284, | |
| "grad_norm": 0.3476687967777252, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0231, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 6.374845869297164, | |
| "grad_norm": 0.36158332228660583, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0188, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 6.387176325524044, | |
| "grad_norm": 0.307809054851532, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0175, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 6.399506781750925, | |
| "grad_norm": 0.1964550018310547, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.0251, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 6.411837237977805, | |
| "grad_norm": 0.22386327385902405, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0237, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 6.4241676942046855, | |
| "grad_norm": 0.26394808292388916, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.0225, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 6.436498150431566, | |
| "grad_norm": 0.26697394251823425, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0216, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 6.448828606658447, | |
| "grad_norm": 0.2529113292694092, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0222, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 6.461159062885327, | |
| "grad_norm": 0.2774154245853424, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.0181, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 6.473489519112207, | |
| "grad_norm": 0.28620442748069763, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.02, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 6.485819975339088, | |
| "grad_norm": 0.31283897161483765, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.0201, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 6.498150431565968, | |
| "grad_norm": 0.3378382623195648, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.0224, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 6.510480887792848, | |
| "grad_norm": 0.3042767345905304, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0229, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 6.522811344019729, | |
| "grad_norm": 0.31953081488609314, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0208, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 6.535141800246609, | |
| "grad_norm": 0.18668696284294128, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.0203, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 6.5474722564734895, | |
| "grad_norm": 0.27435287833213806, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.0226, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 6.55980271270037, | |
| "grad_norm": 0.23059211671352386, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0211, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 6.57213316892725, | |
| "grad_norm": 0.26881295442581177, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.0198, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 6.584463625154131, | |
| "grad_norm": 0.19851483404636383, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0216, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 6.596794081381011, | |
| "grad_norm": 0.29459458589553833, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0206, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 6.609124537607892, | |
| "grad_norm": 0.24166008830070496, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0223, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 6.621454993834772, | |
| "grad_norm": 0.2806544899940491, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0223, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 6.633785450061652, | |
| "grad_norm": 0.2812330424785614, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0193, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 6.646115906288532, | |
| "grad_norm": 0.31820350885391235, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0202, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 6.658446362515413, | |
| "grad_norm": 0.20299507677555084, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0234, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 6.670776818742294, | |
| "grad_norm": 0.18802104890346527, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0221, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 6.683107274969174, | |
| "grad_norm": 0.34215790033340454, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0189, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 6.695437731196054, | |
| "grad_norm": 0.24965830147266388, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0205, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 6.707768187422935, | |
| "grad_norm": 0.3378036320209503, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0199, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 6.720098643649815, | |
| "grad_norm": 0.21934591233730316, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0205, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 6.732429099876695, | |
| "grad_norm": 0.2336743324995041, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0208, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 6.744759556103576, | |
| "grad_norm": 0.2497650682926178, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.0197, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 6.7570900123304565, | |
| "grad_norm": 0.28810304403305054, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.0152, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 6.769420468557336, | |
| "grad_norm": 0.292516827583313, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0206, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 6.781750924784217, | |
| "grad_norm": 0.18946313858032227, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.016, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 6.794081381011098, | |
| "grad_norm": 0.2844982445240021, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.0178, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 6.806411837237977, | |
| "grad_norm": 0.15547184646129608, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0185, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 6.818742293464858, | |
| "grad_norm": 0.21672311425209045, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.0195, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 6.831072749691739, | |
| "grad_norm": 0.21264362335205078, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.027, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 6.843403205918619, | |
| "grad_norm": 0.19885507225990295, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.017, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 6.855733662145499, | |
| "grad_norm": 0.17071495950222015, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0212, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 6.86806411837238, | |
| "grad_norm": 0.2471088021993637, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.0165, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 6.8803945745992605, | |
| "grad_norm": 0.29187387228012085, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.018, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 6.89272503082614, | |
| "grad_norm": 0.25299355387687683, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.0233, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 6.905055487053021, | |
| "grad_norm": 0.3724147081375122, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0208, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 6.917385943279902, | |
| "grad_norm": 0.22367283701896667, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0231, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 6.929716399506781, | |
| "grad_norm": 0.2505429685115814, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0203, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 6.942046855733662, | |
| "grad_norm": 0.26013052463531494, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0198, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 6.954377311960543, | |
| "grad_norm": 0.2301289290189743, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0201, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 6.9667077681874225, | |
| "grad_norm": 0.1870003193616867, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.02, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 6.979038224414303, | |
| "grad_norm": 0.24088424444198608, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.018, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 6.991368680641184, | |
| "grad_norm": 0.17567826807498932, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.0206, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 7.003699136868065, | |
| "grad_norm": 0.2931348979473114, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0183, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 7.016029593094944, | |
| "grad_norm": 0.4159621298313141, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.0222, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 7.028360049321825, | |
| "grad_norm": 0.24496355652809143, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.0234, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 7.040690505548706, | |
| "grad_norm": 0.22245050966739655, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.017, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 7.0530209617755855, | |
| "grad_norm": 0.25052809715270996, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0251, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 7.065351418002466, | |
| "grad_norm": 0.22843345999717712, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0206, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 7.077681874229347, | |
| "grad_norm": 0.21196547150611877, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0244, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 7.090012330456227, | |
| "grad_norm": 0.28079167008399963, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0175, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 7.102342786683107, | |
| "grad_norm": 0.17624829709529877, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0171, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 7.114673242909988, | |
| "grad_norm": 0.21200713515281677, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0233, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 7.127003699136868, | |
| "grad_norm": 0.3544151782989502, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.018, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 7.139334155363748, | |
| "grad_norm": 0.23281414806842804, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0201, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 7.151664611590629, | |
| "grad_norm": 0.23365364968776703, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.0194, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 7.163995067817509, | |
| "grad_norm": 0.24543672800064087, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0156, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 7.1763255240443895, | |
| "grad_norm": 0.23045861721038818, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0172, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 7.18865598027127, | |
| "grad_norm": 0.24501411616802216, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.0212, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 7.200986436498151, | |
| "grad_norm": 0.24463316798210144, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.021, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 7.213316892725031, | |
| "grad_norm": 0.2964494526386261, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0204, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 7.225647348951911, | |
| "grad_norm": 0.20629799365997314, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.0229, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 7.237977805178792, | |
| "grad_norm": 0.15498551726341248, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.0162, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 7.250308261405672, | |
| "grad_norm": 0.15830431878566742, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.016, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 7.262638717632552, | |
| "grad_norm": 0.29659491777420044, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.0232, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 7.274969173859433, | |
| "grad_norm": 0.30270400643348694, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.0189, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 7.287299630086313, | |
| "grad_norm": 0.17669810354709625, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.0192, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 7.2996300863131935, | |
| "grad_norm": 0.21936142444610596, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.0169, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 7.311960542540074, | |
| "grad_norm": 0.26419296860694885, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0182, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 7.324290998766954, | |
| "grad_norm": 0.1880316287279129, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.017, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 7.336621454993835, | |
| "grad_norm": 0.23106351494789124, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.016, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 7.348951911220715, | |
| "grad_norm": 0.2518641948699951, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.0184, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 7.361282367447595, | |
| "grad_norm": 0.12708133459091187, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0163, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 7.373612823674476, | |
| "grad_norm": 0.19658632576465607, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0159, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 7.3859432799013565, | |
| "grad_norm": 0.14912943542003632, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.0174, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 7.398273736128237, | |
| "grad_norm": 0.21809303760528564, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0189, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 7.410604192355117, | |
| "grad_norm": 0.16916091740131378, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0155, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 7.422934648581998, | |
| "grad_norm": 0.19845576584339142, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.0166, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 7.435265104808878, | |
| "grad_norm": 0.2885666787624359, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0181, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 7.447595561035758, | |
| "grad_norm": 0.16880282759666443, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.0184, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 7.459926017262639, | |
| "grad_norm": 0.27704015374183655, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.0186, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 7.472256473489519, | |
| "grad_norm": 0.19714327156543732, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0138, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 7.484586929716399, | |
| "grad_norm": 0.2862388491630554, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.0184, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 7.49691738594328, | |
| "grad_norm": 0.2042715698480606, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.0217, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 7.5092478421701605, | |
| "grad_norm": 0.2108624428510666, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0161, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 7.52157829839704, | |
| "grad_norm": 0.21481405198574066, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0174, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 7.533908754623921, | |
| "grad_norm": 0.14143317937850952, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.013, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 7.546239210850802, | |
| "grad_norm": 0.22384566068649292, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.0207, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 7.558569667077682, | |
| "grad_norm": 0.2333938181400299, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0179, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 7.570900123304562, | |
| "grad_norm": 0.3189919888973236, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0198, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 7.583230579531443, | |
| "grad_norm": 0.15587118268013, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0143, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 7.595561035758323, | |
| "grad_norm": 0.21738408505916595, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.016, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 7.607891491985203, | |
| "grad_norm": 0.16509151458740234, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0186, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 7.620221948212084, | |
| "grad_norm": 0.27286073565483093, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0176, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 7.6325524044389645, | |
| "grad_norm": 0.20789702236652374, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.0194, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 7.644882860665844, | |
| "grad_norm": 0.2709747552871704, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0231, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 7.657213316892725, | |
| "grad_norm": 0.17928388714790344, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0178, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 7.669543773119606, | |
| "grad_norm": 0.1535818874835968, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.015, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 7.6818742293464854, | |
| "grad_norm": 0.30152714252471924, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0159, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 7.694204685573366, | |
| "grad_norm": 0.2273498922586441, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0152, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 7.706535141800247, | |
| "grad_norm": 0.20150801539421082, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0166, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 7.7188655980271275, | |
| "grad_norm": 0.16808821260929108, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0189, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 7.731196054254007, | |
| "grad_norm": 0.17800591886043549, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0218, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 7.743526510480888, | |
| "grad_norm": 0.2024526745080948, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0165, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 7.755856966707768, | |
| "grad_norm": 0.13782727718353271, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.0198, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 7.768187422934648, | |
| "grad_norm": 0.25474774837493896, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.018, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 7.780517879161529, | |
| "grad_norm": 0.25358206033706665, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.019, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 7.79284833538841, | |
| "grad_norm": 0.21187220513820648, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0151, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 7.8051787916152895, | |
| "grad_norm": 0.33573442697525024, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.0163, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 7.81750924784217, | |
| "grad_norm": 0.20695525407791138, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0181, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 7.829839704069051, | |
| "grad_norm": 0.18595805764198303, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.0171, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 7.842170160295931, | |
| "grad_norm": 0.19523392617702484, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0141, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 7.854500616522811, | |
| "grad_norm": 0.13170596957206726, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.0183, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 7.866831072749692, | |
| "grad_norm": 0.21886803209781647, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0172, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 7.879161528976573, | |
| "grad_norm": 0.21515388786792755, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.0189, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 7.891491985203452, | |
| "grad_norm": 0.31785643100738525, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0173, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 7.903822441430333, | |
| "grad_norm": 0.14406168460845947, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0159, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 7.916152897657213, | |
| "grad_norm": 0.18788745999336243, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.0152, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 7.9284833538840935, | |
| "grad_norm": 0.19854816794395447, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0182, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 7.940813810110974, | |
| "grad_norm": 0.3562459647655487, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.0152, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 7.953144266337855, | |
| "grad_norm": 0.16023540496826172, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0143, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 7.965474722564735, | |
| "grad_norm": 0.1785842925310135, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0161, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 7.977805178791615, | |
| "grad_norm": 0.2162104696035385, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.0171, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 7.990135635018496, | |
| "grad_norm": 0.14766299724578857, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.0158, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 8.002466091245376, | |
| "grad_norm": 0.19538649916648865, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0144, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 8.014796547472256, | |
| "grad_norm": 0.1957716941833496, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.0152, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 8.027127003699137, | |
| "grad_norm": 0.22792336344718933, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0157, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 8.039457459926018, | |
| "grad_norm": 0.3002369701862335, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0183, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 8.051787916152898, | |
| "grad_norm": 0.289244145154953, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.0206, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 8.064118372379777, | |
| "grad_norm": 0.210015669465065, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.0185, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 8.076448828606658, | |
| "grad_norm": 0.16126251220703125, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.0177, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 8.088779284833539, | |
| "grad_norm": 0.20670470595359802, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0145, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 8.10110974106042, | |
| "grad_norm": 0.17739975452423096, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0179, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 8.1134401972873, | |
| "grad_norm": 0.26979953050613403, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0203, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 8.12577065351418, | |
| "grad_norm": 0.244442880153656, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0219, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 8.13810110974106, | |
| "grad_norm": 0.29723724722862244, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0181, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 8.15043156596794, | |
| "grad_norm": 0.21775288879871368, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.0152, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 8.162762022194821, | |
| "grad_norm": 0.20183885097503662, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0181, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 8.175092478421702, | |
| "grad_norm": 0.19183766841888428, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.016, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 8.187422934648582, | |
| "grad_norm": 0.2394537478685379, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.0206, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 8.199753390875463, | |
| "grad_norm": 0.198512002825737, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0167, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 8.212083847102344, | |
| "grad_norm": 0.2386707067489624, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.015, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 8.224414303329223, | |
| "grad_norm": 0.1818169355392456, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0158, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 8.236744759556103, | |
| "grad_norm": 0.2665708065032959, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.0162, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 8.249075215782984, | |
| "grad_norm": 0.2305258959531784, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.0158, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 8.261405672009865, | |
| "grad_norm": 0.28788959980010986, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0178, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 8.273736128236745, | |
| "grad_norm": 0.22098831832408905, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0162, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 8.286066584463626, | |
| "grad_norm": 0.20243878662586212, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.0177, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 8.298397040690505, | |
| "grad_norm": 0.24761496484279633, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0177, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 8.310727496917385, | |
| "grad_norm": 0.31881335377693176, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0162, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 8.323057953144266, | |
| "grad_norm": 0.2624727487564087, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0184, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 8.335388409371147, | |
| "grad_norm": 0.15907232463359833, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.013, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 8.347718865598027, | |
| "grad_norm": 0.17582719027996063, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.0158, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 8.360049321824908, | |
| "grad_norm": 0.20458853244781494, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0132, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 8.372379778051787, | |
| "grad_norm": 0.23957616090774536, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0138, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 8.384710234278668, | |
| "grad_norm": 0.24291783571243286, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.016, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 8.397040690505548, | |
| "grad_norm": 0.2142944186925888, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0168, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 8.409371146732429, | |
| "grad_norm": 0.16882851719856262, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0147, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 8.42170160295931, | |
| "grad_norm": 0.2075236439704895, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0113, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 8.43403205918619, | |
| "grad_norm": 0.21113787591457367, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.0132, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 8.446362515413071, | |
| "grad_norm": 0.2373654842376709, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.0137, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 8.45869297163995, | |
| "grad_norm": 0.21145884692668915, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.0129, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 8.47102342786683, | |
| "grad_norm": 0.17757846415042877, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.0168, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 8.483353884093711, | |
| "grad_norm": 0.24146784842014313, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0147, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 8.495684340320592, | |
| "grad_norm": 0.1981785148382187, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0205, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 8.508014796547473, | |
| "grad_norm": 0.2341061383485794, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0148, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 8.520345252774353, | |
| "grad_norm": 0.225665882229805, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.0158, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 8.532675709001232, | |
| "grad_norm": 0.23870795965194702, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0163, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 8.545006165228113, | |
| "grad_norm": 0.21512368321418762, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0167, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 8.557336621454994, | |
| "grad_norm": 0.15514829754829407, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.0128, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 8.569667077681874, | |
| "grad_norm": 0.29986897110939026, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.015, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 8.581997533908755, | |
| "grad_norm": 0.24149705469608307, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0149, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 8.594327990135636, | |
| "grad_norm": 0.1457284688949585, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0145, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 8.606658446362516, | |
| "grad_norm": 0.149446502327919, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.0155, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 8.618988902589395, | |
| "grad_norm": 0.17576131224632263, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0151, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 8.631319358816276, | |
| "grad_norm": 0.1714191883802414, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.0155, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 8.643649815043156, | |
| "grad_norm": 0.23094867169857025, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.014, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 8.655980271270037, | |
| "grad_norm": 0.21593840420246124, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0163, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 8.668310727496918, | |
| "grad_norm": 0.24063308537006378, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0123, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 8.680641183723798, | |
| "grad_norm": 0.20024220645427704, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0154, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 8.692971639950677, | |
| "grad_norm": 0.19818070530891418, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0169, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 8.705302096177558, | |
| "grad_norm": 0.17557866871356964, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.0196, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 8.717632552404439, | |
| "grad_norm": 0.26247546076774597, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0129, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 8.72996300863132, | |
| "grad_norm": 0.1336849331855774, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.0132, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 8.7422934648582, | |
| "grad_norm": 0.1905038058757782, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0141, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 8.75462392108508, | |
| "grad_norm": 0.1590624898672104, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0131, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 8.766954377311961, | |
| "grad_norm": 0.15904150903224945, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0135, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 8.77928483353884, | |
| "grad_norm": 0.18446631729602814, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0163, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 8.791615289765721, | |
| "grad_norm": 0.2694113254547119, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0148, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 8.803945745992602, | |
| "grad_norm": 0.21019041538238525, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0152, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 8.816276202219482, | |
| "grad_norm": 0.19869400560855865, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.0175, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 8.828606658446363, | |
| "grad_norm": 0.2876332998275757, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0178, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 8.840937114673244, | |
| "grad_norm": 0.23244088888168335, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.0148, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 8.853267570900123, | |
| "grad_norm": 0.22310832142829895, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0137, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 8.865598027127003, | |
| "grad_norm": 0.3229195773601532, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.0173, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 8.877928483353884, | |
| "grad_norm": 0.15355561673641205, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.016, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 8.890258939580765, | |
| "grad_norm": 0.13414426147937775, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0122, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 8.902589395807645, | |
| "grad_norm": 0.21771904826164246, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.012, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 8.914919852034526, | |
| "grad_norm": 0.11459210515022278, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.015, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 8.927250308261407, | |
| "grad_norm": 0.1791151762008667, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.0126, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 8.939580764488285, | |
| "grad_norm": 0.177819162607193, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0144, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 8.951911220715166, | |
| "grad_norm": 0.14514821767807007, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0111, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 8.964241676942047, | |
| "grad_norm": 0.24837376177310944, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.0148, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 8.976572133168927, | |
| "grad_norm": 0.14810924232006073, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.0127, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 8.988902589395808, | |
| "grad_norm": 0.1758640706539154, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0118, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 9.001233045622689, | |
| "grad_norm": 0.15091773867607117, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.014, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 9.013563501849568, | |
| "grad_norm": 0.20095612108707428, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0162, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 9.025893958076448, | |
| "grad_norm": 0.11367142200469971, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.0167, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 9.038224414303329, | |
| "grad_norm": 0.24351157248020172, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.0152, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 9.05055487053021, | |
| "grad_norm": 0.27105528116226196, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0171, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 9.06288532675709, | |
| "grad_norm": 0.13679392635822296, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.012, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 9.075215782983971, | |
| "grad_norm": 0.15514865517616272, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.0124, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 9.087546239210852, | |
| "grad_norm": 0.17450058460235596, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0124, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 9.09987669543773, | |
| "grad_norm": 0.13302433490753174, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.0138, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 9.112207151664611, | |
| "grad_norm": 0.21018704771995544, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0129, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 9.124537607891492, | |
| "grad_norm": 0.20799696445465088, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0161, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 9.136868064118373, | |
| "grad_norm": 0.19017937779426575, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0122, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 9.149198520345253, | |
| "grad_norm": 0.1649792641401291, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.0129, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 9.161528976572134, | |
| "grad_norm": 0.10437487810850143, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.0144, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 9.173859432799013, | |
| "grad_norm": 0.1570027619600296, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0133, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 9.186189889025894, | |
| "grad_norm": 0.17987479269504547, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0142, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 9.198520345252774, | |
| "grad_norm": 0.12210801988840103, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0098, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 9.210850801479655, | |
| "grad_norm": 0.254069060087204, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.0182, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 9.223181257706536, | |
| "grad_norm": 0.22524644434452057, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0135, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 9.235511713933416, | |
| "grad_norm": 0.2430953085422516, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0134, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 9.247842170160295, | |
| "grad_norm": 0.16173475980758667, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0143, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 9.260172626387176, | |
| "grad_norm": 0.21961258351802826, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0148, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 9.272503082614056, | |
| "grad_norm": 0.1678265631198883, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0113, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 9.284833538840937, | |
| "grad_norm": 0.16909259557724, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0161, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 9.297163995067818, | |
| "grad_norm": 0.18035666644573212, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0133, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 9.309494451294698, | |
| "grad_norm": 0.19551506638526917, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0108, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 9.321824907521579, | |
| "grad_norm": 0.11349731683731079, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0112, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 9.334155363748458, | |
| "grad_norm": 0.1814197450876236, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0134, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 9.346485819975339, | |
| "grad_norm": 0.16847750544548035, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0134, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 9.35881627620222, | |
| "grad_norm": 0.13412600755691528, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.0148, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 9.3711467324291, | |
| "grad_norm": 0.1541246920824051, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0162, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 9.38347718865598, | |
| "grad_norm": 0.14065852761268616, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0151, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 9.395807644882861, | |
| "grad_norm": 0.24035611748695374, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.0135, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 9.40813810110974, | |
| "grad_norm": 0.1955443173646927, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0113, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 9.420468557336621, | |
| "grad_norm": 0.2768889367580414, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0135, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 9.432799013563502, | |
| "grad_norm": 0.20212136209011078, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0126, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 9.445129469790382, | |
| "grad_norm": 0.294341117143631, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.0151, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 9.457459926017263, | |
| "grad_norm": 0.12416042387485504, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0146, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 9.469790382244144, | |
| "grad_norm": 0.14236067235469818, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.0123, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 9.482120838471024, | |
| "grad_norm": 0.15881061553955078, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0109, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 9.494451294697903, | |
| "grad_norm": 0.11476548761129379, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.0101, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 9.506781750924784, | |
| "grad_norm": 0.19930553436279297, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.0123, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 9.519112207151665, | |
| "grad_norm": 0.1265823096036911, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.0149, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 9.531442663378545, | |
| "grad_norm": 0.28434011340141296, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.0135, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 9.543773119605426, | |
| "grad_norm": 0.1804695427417755, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0152, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 9.556103575832307, | |
| "grad_norm": 0.18090833723545074, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.013, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 9.568434032059185, | |
| "grad_norm": 0.17950284481048584, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.0147, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 9.580764488286066, | |
| "grad_norm": 0.14134123921394348, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0126, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 9.593094944512947, | |
| "grad_norm": 0.25070080161094666, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.01, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 9.605425400739827, | |
| "grad_norm": 0.25582218170166016, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.0122, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 9.617755856966708, | |
| "grad_norm": 0.1508312225341797, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0129, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 9.630086313193589, | |
| "grad_norm": 0.1487092673778534, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0128, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 9.642416769420468, | |
| "grad_norm": 0.13612839579582214, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.0124, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 9.654747225647348, | |
| "grad_norm": 0.27921780943870544, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0177, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 9.667077681874229, | |
| "grad_norm": 0.16460204124450684, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.0195, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 9.67940813810111, | |
| "grad_norm": 0.20853613317012787, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.0132, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 9.69173859432799, | |
| "grad_norm": 0.24421434104442596, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.013, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 9.704069050554871, | |
| "grad_norm": 0.1668795496225357, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0114, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 9.716399506781752, | |
| "grad_norm": 0.20450633764266968, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.012, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 9.72872996300863, | |
| "grad_norm": 0.304129034280777, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0144, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 9.741060419235511, | |
| "grad_norm": 0.25662940740585327, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0132, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 9.753390875462392, | |
| "grad_norm": 0.23272952437400818, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.0105, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 9.765721331689273, | |
| "grad_norm": 0.1865607351064682, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.015, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 9.778051787916153, | |
| "grad_norm": 0.14360679686069489, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.0099, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 9.790382244143034, | |
| "grad_norm": 0.12644459307193756, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.0136, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 9.802712700369913, | |
| "grad_norm": 0.1347009688615799, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0113, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 9.815043156596793, | |
| "grad_norm": 0.23196075856685638, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.019, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 9.827373612823674, | |
| "grad_norm": 0.22510278224945068, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0146, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 9.839704069050555, | |
| "grad_norm": 0.13115385174751282, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0141, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 9.852034525277436, | |
| "grad_norm": 0.11301623284816742, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0176, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 9.864364981504316, | |
| "grad_norm": 0.160536527633667, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0137, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 9.876695437731197, | |
| "grad_norm": 0.1591915637254715, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0121, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 9.889025893958076, | |
| "grad_norm": 0.1496218591928482, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0134, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 9.901356350184956, | |
| "grad_norm": 0.21466733515262604, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0132, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 9.913686806411837, | |
| "grad_norm": 0.1314503401517868, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0107, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 9.926017262638718, | |
| "grad_norm": 0.18860751390457153, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.0098, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 9.938347718865598, | |
| "grad_norm": 0.21014855802059174, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0107, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 9.950678175092479, | |
| "grad_norm": 0.1975817084312439, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0117, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 9.963008631319358, | |
| "grad_norm": 0.1471155881881714, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0093, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 9.975339087546239, | |
| "grad_norm": 0.16085171699523926, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.013, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 9.98766954377312, | |
| "grad_norm": 0.12788863480091095, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0106, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 10.0, | |
| "grad_norm": 0.7229554057121277, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0107, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 10.01233045622688, | |
| "grad_norm": 0.23828713595867157, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.0124, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 10.024660912453761, | |
| "grad_norm": 0.15040181577205658, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0121, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 10.036991368680642, | |
| "grad_norm": 0.16020682454109192, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.0101, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 10.049321824907521, | |
| "grad_norm": 0.16651003062725067, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.0132, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 10.061652281134402, | |
| "grad_norm": 0.22436489164829254, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.013, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 10.073982737361282, | |
| "grad_norm": 0.16644471883773804, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.0125, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 10.086313193588163, | |
| "grad_norm": 0.2150995433330536, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0114, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 10.098643649815044, | |
| "grad_norm": 0.15884028375148773, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0138, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 10.110974106041924, | |
| "grad_norm": 0.20107415318489075, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0124, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 10.123304562268803, | |
| "grad_norm": 0.3725082576274872, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0122, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 10.135635018495684, | |
| "grad_norm": 0.23206597566604614, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.0144, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 10.147965474722564, | |
| "grad_norm": 0.10063593089580536, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.011, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 10.160295930949445, | |
| "grad_norm": 0.1741970330476761, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0101, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 10.172626387176326, | |
| "grad_norm": 0.14686159789562225, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.0118, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 10.184956843403207, | |
| "grad_norm": 0.14596395194530487, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.0099, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 10.197287299630087, | |
| "grad_norm": 0.12668916583061218, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0111, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 10.209617755856966, | |
| "grad_norm": 0.1675330251455307, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0172, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 10.221948212083847, | |
| "grad_norm": 0.25428757071495056, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0135, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 10.234278668310727, | |
| "grad_norm": 0.22335296869277954, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0121, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 10.246609124537608, | |
| "grad_norm": 0.16054800152778625, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.0096, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 10.258939580764489, | |
| "grad_norm": 0.1781606674194336, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.013, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 10.27127003699137, | |
| "grad_norm": 0.13309413194656372, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.0127, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 10.283600493218248, | |
| "grad_norm": 0.11706002801656723, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0122, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 10.295930949445129, | |
| "grad_norm": 0.13000087440013885, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0086, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 10.30826140567201, | |
| "grad_norm": 0.14367519319057465, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0115, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 10.32059186189889, | |
| "grad_norm": 0.12623301148414612, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0105, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 10.332922318125771, | |
| "grad_norm": 0.10462573915719986, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0097, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 10.345252774352652, | |
| "grad_norm": 0.09258235991001129, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0118, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 10.357583230579532, | |
| "grad_norm": 0.13510167598724365, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.0132, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 10.369913686806411, | |
| "grad_norm": 0.12138457596302032, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0095, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 10.382244143033292, | |
| "grad_norm": 0.15477916598320007, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0113, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 10.394574599260173, | |
| "grad_norm": 0.13899020850658417, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.01, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 10.406905055487053, | |
| "grad_norm": 0.2337149679660797, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0105, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 10.419235511713934, | |
| "grad_norm": 0.1435757577419281, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0096, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 10.431565967940815, | |
| "grad_norm": 0.18410523235797882, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.0121, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 10.443896424167693, | |
| "grad_norm": 0.14590094983577728, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.0082, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 10.456226880394574, | |
| "grad_norm": 0.18835172057151794, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.011, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 10.468557336621455, | |
| "grad_norm": 0.16702938079833984, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0151, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 10.480887792848335, | |
| "grad_norm": 0.16198301315307617, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0134, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 10.493218249075216, | |
| "grad_norm": 0.14920571446418762, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0102, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 10.505548705302097, | |
| "grad_norm": 0.10073720663785934, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0107, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 10.517879161528978, | |
| "grad_norm": 0.11474102735519409, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.0107, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 10.530209617755856, | |
| "grad_norm": 0.12482413649559021, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0089, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 10.542540073982737, | |
| "grad_norm": 0.19597290456295013, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0094, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 10.554870530209618, | |
| "grad_norm": 0.15705586969852448, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0124, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 10.567200986436498, | |
| "grad_norm": 0.2601618468761444, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.011, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 10.579531442663379, | |
| "grad_norm": 0.1845393180847168, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0141, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 10.59186189889026, | |
| "grad_norm": 0.14301864802837372, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0102, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 10.604192355117139, | |
| "grad_norm": 0.14288154244422913, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.01, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 10.61652281134402, | |
| "grad_norm": 0.16864368319511414, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0133, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 10.6288532675709, | |
| "grad_norm": 0.19017158448696136, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.0129, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 10.64118372379778, | |
| "grad_norm": 0.19441142678260803, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0137, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 10.653514180024661, | |
| "grad_norm": 0.17680735886096954, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.0113, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 10.665844636251542, | |
| "grad_norm": 0.18083566427230835, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0105, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 10.678175092478421, | |
| "grad_norm": 0.18918916583061218, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.0132, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 10.690505548705302, | |
| "grad_norm": 0.23712395131587982, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.0112, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 10.702836004932182, | |
| "grad_norm": 0.1545598953962326, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.0084, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 10.715166461159063, | |
| "grad_norm": 0.20529277622699738, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.0135, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 10.727496917385944, | |
| "grad_norm": 0.15666550397872925, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.0113, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 10.739827373612824, | |
| "grad_norm": 0.17934800684452057, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0125, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 10.752157829839703, | |
| "grad_norm": 0.15567588806152344, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0096, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 10.764488286066584, | |
| "grad_norm": 0.2025386244058609, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0104, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 10.776818742293464, | |
| "grad_norm": 0.1831253468990326, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0118, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 10.789149198520345, | |
| "grad_norm": 0.2569693326950073, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.0135, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 10.801479654747226, | |
| "grad_norm": 0.1676090657711029, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0116, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 10.813810110974106, | |
| "grad_norm": 0.233047753572464, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.0121, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 10.826140567200987, | |
| "grad_norm": 0.12883184850215912, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.011, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 10.838471023427866, | |
| "grad_norm": 0.20078858733177185, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.0119, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 10.850801479654747, | |
| "grad_norm": 0.2572806477546692, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0115, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 10.863131935881627, | |
| "grad_norm": 0.2080538123846054, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0135, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 10.875462392108508, | |
| "grad_norm": 0.1151924729347229, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.0098, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 10.887792848335389, | |
| "grad_norm": 0.27229616045951843, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0129, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 10.90012330456227, | |
| "grad_norm": 0.26446396112442017, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0102, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 10.912453760789148, | |
| "grad_norm": 0.1061343401670456, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0095, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 10.924784217016029, | |
| "grad_norm": 0.14548137784004211, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0116, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 10.93711467324291, | |
| "grad_norm": 0.14597511291503906, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0132, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 10.94944512946979, | |
| "grad_norm": 0.15564022958278656, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.009, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 10.961775585696671, | |
| "grad_norm": 0.1500059813261032, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0132, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 10.974106041923552, | |
| "grad_norm": 0.1250656545162201, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.009, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 10.986436498150432, | |
| "grad_norm": 0.1907559186220169, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.0129, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 10.998766954377311, | |
| "grad_norm": 0.2593444585800171, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0102, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 11.011097410604192, | |
| "grad_norm": 0.1587124913930893, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0093, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 11.023427866831073, | |
| "grad_norm": 0.09678517282009125, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0085, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 11.035758323057953, | |
| "grad_norm": 0.061571184545755386, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0098, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 11.048088779284834, | |
| "grad_norm": 0.1364486664533615, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0121, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 11.060419235511715, | |
| "grad_norm": 0.18868908286094666, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.0111, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 11.072749691738593, | |
| "grad_norm": 0.17835724353790283, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.0084, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 11.085080147965474, | |
| "grad_norm": 0.19594304263591766, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.0145, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 11.097410604192355, | |
| "grad_norm": 0.12442103773355484, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.0107, | |
| "step": 9000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 13, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |