| { | |
| "best_global_step": null, | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 5.659309564233164, | |
| "eval_steps": 500, | |
| "global_step": 10000, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.005659309564233163, | |
| "grad_norm": 14.506192207336426, | |
| "learning_rate": 1.8e-06, | |
| "loss": 1.1688, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.011318619128466326, | |
| "grad_norm": 8.839621543884277, | |
| "learning_rate": 3.8e-06, | |
| "loss": 0.9318, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.01697792869269949, | |
| "grad_norm": 3.10554838180542, | |
| "learning_rate": 5.8e-06, | |
| "loss": 0.4084, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.022637238256932653, | |
| "grad_norm": 4.209934234619141, | |
| "learning_rate": 7.8e-06, | |
| "loss": 0.289, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.028296547821165818, | |
| "grad_norm": 3.8156583309173584, | |
| "learning_rate": 9.800000000000001e-06, | |
| "loss": 0.2076, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.03395585738539898, | |
| "grad_norm": 1.7666521072387695, | |
| "learning_rate": 1.18e-05, | |
| "loss": 0.1527, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.039615166949632144, | |
| "grad_norm": 2.237152099609375, | |
| "learning_rate": 1.3800000000000002e-05, | |
| "loss": 0.1597, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.045274476513865305, | |
| "grad_norm": 2.67939829826355, | |
| "learning_rate": 1.58e-05, | |
| "loss": 0.1255, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.050933786078098474, | |
| "grad_norm": 1.1937289237976074, | |
| "learning_rate": 1.78e-05, | |
| "loss": 0.119, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.056593095642331635, | |
| "grad_norm": 1.8054336309432983, | |
| "learning_rate": 1.9800000000000004e-05, | |
| "loss": 0.1256, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.0622524052065648, | |
| "grad_norm": 1.6763436794281006, | |
| "learning_rate": 2.18e-05, | |
| "loss": 0.1201, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.06791171477079797, | |
| "grad_norm": 2.8526389598846436, | |
| "learning_rate": 2.38e-05, | |
| "loss": 0.1375, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.07357102433503113, | |
| "grad_norm": 3.1760990619659424, | |
| "learning_rate": 2.58e-05, | |
| "loss": 0.1141, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.07923033389926429, | |
| "grad_norm": 2.534616231918335, | |
| "learning_rate": 2.7800000000000005e-05, | |
| "loss": 0.1632, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.08488964346349745, | |
| "grad_norm": 1.8274834156036377, | |
| "learning_rate": 2.98e-05, | |
| "loss": 0.1228, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.09054895302773061, | |
| "grad_norm": 1.4464740753173828, | |
| "learning_rate": 3.18e-05, | |
| "loss": 0.1255, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.09620826259196379, | |
| "grad_norm": 2.235652208328247, | |
| "learning_rate": 3.38e-05, | |
| "loss": 0.1048, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.10186757215619695, | |
| "grad_norm": 2.356511116027832, | |
| "learning_rate": 3.58e-05, | |
| "loss": 0.1135, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.10752688172043011, | |
| "grad_norm": 3.055460214614868, | |
| "learning_rate": 3.7800000000000004e-05, | |
| "loss": 0.1326, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.11318619128466327, | |
| "grad_norm": 1.6213253736495972, | |
| "learning_rate": 3.9800000000000005e-05, | |
| "loss": 0.1094, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.11884550084889643, | |
| "grad_norm": 3.7455012798309326, | |
| "learning_rate": 4.18e-05, | |
| "loss": 0.1197, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.1245048104131296, | |
| "grad_norm": 1.7306926250457764, | |
| "learning_rate": 4.38e-05, | |
| "loss": 0.1331, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.13016411997736277, | |
| "grad_norm": 1.8728036880493164, | |
| "learning_rate": 4.58e-05, | |
| "loss": 0.102, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.13582342954159593, | |
| "grad_norm": 1.3134273290634155, | |
| "learning_rate": 4.78e-05, | |
| "loss": 0.1013, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.1414827391058291, | |
| "grad_norm": 1.5936145782470703, | |
| "learning_rate": 4.9800000000000004e-05, | |
| "loss": 0.0903, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.14714204867006225, | |
| "grad_norm": 1.9866862297058105, | |
| "learning_rate": 5.1800000000000005e-05, | |
| "loss": 0.0755, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.15280135823429541, | |
| "grad_norm": 1.147523045539856, | |
| "learning_rate": 5.380000000000001e-05, | |
| "loss": 0.0921, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.15846066779852858, | |
| "grad_norm": 0.8689554333686829, | |
| "learning_rate": 5.580000000000001e-05, | |
| "loss": 0.0637, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.16411997736276174, | |
| "grad_norm": 2.5970981121063232, | |
| "learning_rate": 5.7799999999999995e-05, | |
| "loss": 0.1011, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.1697792869269949, | |
| "grad_norm": 1.6930752992630005, | |
| "learning_rate": 5.9800000000000003e-05, | |
| "loss": 0.1222, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.17543859649122806, | |
| "grad_norm": 1.0483814477920532, | |
| "learning_rate": 6.18e-05, | |
| "loss": 0.1165, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.18109790605546122, | |
| "grad_norm": 0.8814326524734497, | |
| "learning_rate": 6.38e-05, | |
| "loss": 0.0794, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.1867572156196944, | |
| "grad_norm": 0.8466112017631531, | |
| "learning_rate": 6.58e-05, | |
| "loss": 0.1113, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.19241652518392757, | |
| "grad_norm": 1.5122456550598145, | |
| "learning_rate": 6.780000000000001e-05, | |
| "loss": 0.1026, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.19807583474816073, | |
| "grad_norm": 2.2806904315948486, | |
| "learning_rate": 6.98e-05, | |
| "loss": 0.1015, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.2037351443123939, | |
| "grad_norm": 1.844056487083435, | |
| "learning_rate": 7.18e-05, | |
| "loss": 0.072, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.20939445387662706, | |
| "grad_norm": 1.286731481552124, | |
| "learning_rate": 7.38e-05, | |
| "loss": 0.0841, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.21505376344086022, | |
| "grad_norm": 1.5619851350784302, | |
| "learning_rate": 7.58e-05, | |
| "loss": 0.0916, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.22071307300509338, | |
| "grad_norm": 1.0341761112213135, | |
| "learning_rate": 7.780000000000001e-05, | |
| "loss": 0.0915, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.22637238256932654, | |
| "grad_norm": 1.0657052993774414, | |
| "learning_rate": 7.98e-05, | |
| "loss": 0.1015, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.2320316921335597, | |
| "grad_norm": 1.3276731967926025, | |
| "learning_rate": 8.18e-05, | |
| "loss": 0.0945, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.23769100169779286, | |
| "grad_norm": 1.2397652864456177, | |
| "learning_rate": 8.38e-05, | |
| "loss": 0.0741, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.24335031126202603, | |
| "grad_norm": 1.7711883783340454, | |
| "learning_rate": 8.58e-05, | |
| "loss": 0.0996, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.2490096208262592, | |
| "grad_norm": 1.4832528829574585, | |
| "learning_rate": 8.78e-05, | |
| "loss": 0.1061, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.2546689303904924, | |
| "grad_norm": 1.2473294734954834, | |
| "learning_rate": 8.98e-05, | |
| "loss": 0.0826, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.26032823995472554, | |
| "grad_norm": 1.1587681770324707, | |
| "learning_rate": 9.180000000000001e-05, | |
| "loss": 0.0941, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.2659875495189587, | |
| "grad_norm": 1.194387674331665, | |
| "learning_rate": 9.38e-05, | |
| "loss": 0.0918, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.27164685908319186, | |
| "grad_norm": 1.5423600673675537, | |
| "learning_rate": 9.58e-05, | |
| "loss": 0.0927, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.277306168647425, | |
| "grad_norm": 0.9861327409744263, | |
| "learning_rate": 9.78e-05, | |
| "loss": 0.1115, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.2829654782116582, | |
| "grad_norm": 1.40666663646698, | |
| "learning_rate": 9.98e-05, | |
| "loss": 0.0966, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.28862478777589134, | |
| "grad_norm": 1.3362163305282593, | |
| "learning_rate": 9.9999778549206e-05, | |
| "loss": 0.1125, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.2942840973401245, | |
| "grad_norm": 1.251626968383789, | |
| "learning_rate": 9.999901304280685e-05, | |
| "loss": 0.0908, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.29994340690435767, | |
| "grad_norm": 1.2477635145187378, | |
| "learning_rate": 9.999770075521164e-05, | |
| "loss": 0.0834, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.30560271646859083, | |
| "grad_norm": 0.7544317245483398, | |
| "learning_rate": 9.99958417007713e-05, | |
| "loss": 0.0707, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.311262026032824, | |
| "grad_norm": 1.800075888633728, | |
| "learning_rate": 9.999343589981615e-05, | |
| "loss": 0.1028, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.31692133559705715, | |
| "grad_norm": 1.1727581024169922, | |
| "learning_rate": 9.999048337865568e-05, | |
| "loss": 0.1114, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.3225806451612903, | |
| "grad_norm": 1.3092683553695679, | |
| "learning_rate": 9.998698416957815e-05, | |
| "loss": 0.0754, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.3282399547255235, | |
| "grad_norm": 1.2468684911727905, | |
| "learning_rate": 9.998293831085037e-05, | |
| "loss": 0.0945, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.33389926428975664, | |
| "grad_norm": 1.1072725057601929, | |
| "learning_rate": 9.997834584671719e-05, | |
| "loss": 0.0827, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.3395585738539898, | |
| "grad_norm": 1.7513043880462646, | |
| "learning_rate": 9.997320682740107e-05, | |
| "loss": 0.1094, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.34521788341822296, | |
| "grad_norm": 1.4385285377502441, | |
| "learning_rate": 9.996752130910149e-05, | |
| "loss": 0.1009, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.3508771929824561, | |
| "grad_norm": 0.9453766942024231, | |
| "learning_rate": 9.99612893539944e-05, | |
| "loss": 0.1067, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.3565365025466893, | |
| "grad_norm": 1.1654232740402222, | |
| "learning_rate": 9.995451103023144e-05, | |
| "loss": 0.0853, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.36219581211092244, | |
| "grad_norm": 1.4993220567703247, | |
| "learning_rate": 9.994718641193928e-05, | |
| "loss": 0.108, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.3678551216751556, | |
| "grad_norm": 1.2022749185562134, | |
| "learning_rate": 9.993931557921874e-05, | |
| "loss": 0.0941, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.3735144312393888, | |
| "grad_norm": 1.6157125234603882, | |
| "learning_rate": 9.993089861814402e-05, | |
| "loss": 0.1124, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.379173740803622, | |
| "grad_norm": 0.8949654698371887, | |
| "learning_rate": 9.992193562076166e-05, | |
| "loss": 0.0776, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.38483305036785515, | |
| "grad_norm": 1.2268325090408325, | |
| "learning_rate": 9.991242668508954e-05, | |
| "loss": 0.0925, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.3904923599320883, | |
| "grad_norm": 1.0547950267791748, | |
| "learning_rate": 9.990237191511587e-05, | |
| "loss": 0.0786, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.39615166949632147, | |
| "grad_norm": 1.033515214920044, | |
| "learning_rate": 9.989177142079802e-05, | |
| "loss": 0.085, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.40181097906055463, | |
| "grad_norm": 0.7889791131019592, | |
| "learning_rate": 9.988062531806126e-05, | |
| "loss": 0.0634, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.4074702886247878, | |
| "grad_norm": 1.0996527671813965, | |
| "learning_rate": 9.986893372879762e-05, | |
| "loss": 0.1017, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.41312959818902095, | |
| "grad_norm": 1.2688416242599487, | |
| "learning_rate": 9.985669678086443e-05, | |
| "loss": 0.0874, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.4187889077532541, | |
| "grad_norm": 1.4887137413024902, | |
| "learning_rate": 9.984391460808298e-05, | |
| "loss": 0.0614, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.4244482173174873, | |
| "grad_norm": 1.156822681427002, | |
| "learning_rate": 9.983058735023709e-05, | |
| "loss": 0.083, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.43010752688172044, | |
| "grad_norm": 0.937840461730957, | |
| "learning_rate": 9.98167151530715e-05, | |
| "loss": 0.1015, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.4357668364459536, | |
| "grad_norm": 1.1325842142105103, | |
| "learning_rate": 9.980229816829034e-05, | |
| "loss": 0.077, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.44142614601018676, | |
| "grad_norm": 1.1565141677856445, | |
| "learning_rate": 9.978733655355544e-05, | |
| "loss": 0.0907, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.4470854555744199, | |
| "grad_norm": 0.8821191191673279, | |
| "learning_rate": 9.977183047248464e-05, | |
| "loss": 0.0761, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.4527447651386531, | |
| "grad_norm": 1.1629949808120728, | |
| "learning_rate": 9.975578009464992e-05, | |
| "loss": 0.0747, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.45840407470288624, | |
| "grad_norm": 0.719484806060791, | |
| "learning_rate": 9.97391855955757e-05, | |
| "loss": 0.0823, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.4640633842671194, | |
| "grad_norm": 1.6020772457122803, | |
| "learning_rate": 9.972204715673669e-05, | |
| "loss": 0.0937, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.46972269383135257, | |
| "grad_norm": 0.9074944257736206, | |
| "learning_rate": 9.970436496555617e-05, | |
| "loss": 0.1057, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.47538200339558573, | |
| "grad_norm": 0.8391576409339905, | |
| "learning_rate": 9.968613921540373e-05, | |
| "loss": 0.0726, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.4810413129598189, | |
| "grad_norm": 1.3409184217453003, | |
| "learning_rate": 9.966737010559326e-05, | |
| "loss": 0.0928, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.48670062252405205, | |
| "grad_norm": 0.6904551982879639, | |
| "learning_rate": 9.964805784138072e-05, | |
| "loss": 0.0677, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.4923599320882852, | |
| "grad_norm": 1.3853803873062134, | |
| "learning_rate": 9.962820263396195e-05, | |
| "loss": 0.0811, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.4980192416525184, | |
| "grad_norm": 1.2435743808746338, | |
| "learning_rate": 9.960780470047033e-05, | |
| "loss": 0.0846, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.5036785512167515, | |
| "grad_norm": 0.7919995784759521, | |
| "learning_rate": 9.958686426397437e-05, | |
| "loss": 0.0672, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.5093378607809848, | |
| "grad_norm": 1.1187522411346436, | |
| "learning_rate": 9.956538155347534e-05, | |
| "loss": 0.0602, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.5149971703452179, | |
| "grad_norm": 1.1229113340377808, | |
| "learning_rate": 9.95433568039047e-05, | |
| "loss": 0.0688, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.5206564799094511, | |
| "grad_norm": 1.0347950458526611, | |
| "learning_rate": 9.952079025612162e-05, | |
| "loss": 0.0591, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.5263157894736842, | |
| "grad_norm": 1.2555299997329712, | |
| "learning_rate": 9.949768215691022e-05, | |
| "loss": 0.0733, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.5319750990379174, | |
| "grad_norm": 0.9478985667228699, | |
| "learning_rate": 9.9474032758977e-05, | |
| "loss": 0.0681, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.5376344086021505, | |
| "grad_norm": 0.9187138676643372, | |
| "learning_rate": 9.944984232094794e-05, | |
| "loss": 0.066, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.5432937181663837, | |
| "grad_norm": 0.7259000539779663, | |
| "learning_rate": 9.942511110736584e-05, | |
| "loss": 0.0796, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.5489530277306168, | |
| "grad_norm": 0.6589801907539368, | |
| "learning_rate": 9.939983938868726e-05, | |
| "loss": 0.0728, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.55461233729485, | |
| "grad_norm": 1.0876269340515137, | |
| "learning_rate": 9.93740274412797e-05, | |
| "loss": 0.0637, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.5602716468590832, | |
| "grad_norm": 0.8798189759254456, | |
| "learning_rate": 9.934767554741846e-05, | |
| "loss": 0.0596, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.5659309564233164, | |
| "grad_norm": 0.92113196849823, | |
| "learning_rate": 9.932078399528361e-05, | |
| "loss": 0.0877, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.5715902659875495, | |
| "grad_norm": 0.9167202711105347, | |
| "learning_rate": 9.929335307895689e-05, | |
| "loss": 0.0652, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.5772495755517827, | |
| "grad_norm": 1.027732014656067, | |
| "learning_rate": 9.926538309841839e-05, | |
| "loss": 0.0778, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.5829088851160158, | |
| "grad_norm": 0.6727028489112854, | |
| "learning_rate": 9.923687435954334e-05, | |
| "loss": 0.0525, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.588568194680249, | |
| "grad_norm": 1.1351747512817383, | |
| "learning_rate": 9.920782717409873e-05, | |
| "loss": 0.0598, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.5942275042444821, | |
| "grad_norm": 1.283753752708435, | |
| "learning_rate": 9.917824185973994e-05, | |
| "loss": 0.0719, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.5998868138087153, | |
| "grad_norm": 0.9144543409347534, | |
| "learning_rate": 9.914811874000723e-05, | |
| "loss": 0.0625, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.6055461233729486, | |
| "grad_norm": 0.7145938277244568, | |
| "learning_rate": 9.911745814432218e-05, | |
| "loss": 0.0595, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.6112054329371817, | |
| "grad_norm": 0.742757260799408, | |
| "learning_rate": 9.90862604079842e-05, | |
| "loss": 0.0681, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.6168647425014149, | |
| "grad_norm": 0.6105904579162598, | |
| "learning_rate": 9.90545258721667e-05, | |
| "loss": 0.0638, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.622524052065648, | |
| "grad_norm": 0.6754553914070129, | |
| "learning_rate": 9.90222548839135e-05, | |
| "loss": 0.0605, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.6281833616298812, | |
| "grad_norm": 0.9949795603752136, | |
| "learning_rate": 9.898944779613495e-05, | |
| "loss": 0.101, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.6338426711941143, | |
| "grad_norm": 0.4132203459739685, | |
| "learning_rate": 9.89561049676041e-05, | |
| "loss": 0.0607, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.6395019807583475, | |
| "grad_norm": 1.0556095838546753, | |
| "learning_rate": 9.89222267629528e-05, | |
| "loss": 0.0657, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.6451612903225806, | |
| "grad_norm": 0.6254858374595642, | |
| "learning_rate": 9.888781355266763e-05, | |
| "loss": 0.0659, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.6508205998868138, | |
| "grad_norm": 0.676054060459137, | |
| "learning_rate": 9.885286571308598e-05, | |
| "loss": 0.0764, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.656479909451047, | |
| "grad_norm": 0.6152122020721436, | |
| "learning_rate": 9.881738362639182e-05, | |
| "loss": 0.0581, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.6621392190152802, | |
| "grad_norm": 0.9762745499610901, | |
| "learning_rate": 9.878136768061154e-05, | |
| "loss": 0.0695, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.6677985285795133, | |
| "grad_norm": 0.7954534292221069, | |
| "learning_rate": 9.874481826960979e-05, | |
| "loss": 0.0556, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.6734578381437465, | |
| "grad_norm": 0.7913191914558411, | |
| "learning_rate": 9.870773579308503e-05, | |
| "loss": 0.0713, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.6791171477079796, | |
| "grad_norm": 1.2579820156097412, | |
| "learning_rate": 9.867012065656533e-05, | |
| "loss": 0.0762, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.6847764572722128, | |
| "grad_norm": 1.192491888999939, | |
| "learning_rate": 9.863197327140376e-05, | |
| "loss": 0.0905, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.6904357668364459, | |
| "grad_norm": 1.1196470260620117, | |
| "learning_rate": 9.859329405477403e-05, | |
| "loss": 0.0679, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.6960950764006791, | |
| "grad_norm": 0.9146842956542969, | |
| "learning_rate": 9.855408342966585e-05, | |
| "loss": 0.0698, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.7017543859649122, | |
| "grad_norm": 0.5354324579238892, | |
| "learning_rate": 9.851434182488033e-05, | |
| "loss": 0.0731, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.7074136955291455, | |
| "grad_norm": 0.7416520714759827, | |
| "learning_rate": 9.84740696750253e-05, | |
| "loss": 0.0809, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.7130730050933786, | |
| "grad_norm": 0.7727601528167725, | |
| "learning_rate": 9.843326742051055e-05, | |
| "loss": 0.0729, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.7187323146576118, | |
| "grad_norm": 0.7373871207237244, | |
| "learning_rate": 9.839193550754297e-05, | |
| "loss": 0.0395, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.7243916242218449, | |
| "grad_norm": 0.9921310544013977, | |
| "learning_rate": 9.835007438812177e-05, | |
| "loss": 0.062, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.7300509337860781, | |
| "grad_norm": 0.685374915599823, | |
| "learning_rate": 9.830768452003341e-05, | |
| "loss": 0.0563, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.7357102433503112, | |
| "grad_norm": 0.997697651386261, | |
| "learning_rate": 9.826476636684671e-05, | |
| "loss": 0.0616, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.7413695529145444, | |
| "grad_norm": 1.0013808012008667, | |
| "learning_rate": 9.822132039790773e-05, | |
| "loss": 0.0662, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.7470288624787776, | |
| "grad_norm": 0.8838229179382324, | |
| "learning_rate": 9.817734708833461e-05, | |
| "loss": 0.0642, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.7526881720430108, | |
| "grad_norm": 0.9542595744132996, | |
| "learning_rate": 9.813284691901243e-05, | |
| "loss": 0.0553, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.758347481607244, | |
| "grad_norm": 0.5350849628448486, | |
| "learning_rate": 9.808782037658792e-05, | |
| "loss": 0.0431, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.7640067911714771, | |
| "grad_norm": 0.8132240772247314, | |
| "learning_rate": 9.804226795346411e-05, | |
| "loss": 0.0717, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.7696661007357103, | |
| "grad_norm": 0.9164685010910034, | |
| "learning_rate": 9.799619014779503e-05, | |
| "loss": 0.0692, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.7753254102999434, | |
| "grad_norm": 0.9166828393936157, | |
| "learning_rate": 9.794958746348013e-05, | |
| "loss": 0.0525, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.7809847198641766, | |
| "grad_norm": 0.5645913481712341, | |
| "learning_rate": 9.790246041015896e-05, | |
| "loss": 0.0488, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.7866440294284097, | |
| "grad_norm": 0.8486982583999634, | |
| "learning_rate": 9.785480950320538e-05, | |
| "loss": 0.0623, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.7923033389926429, | |
| "grad_norm": 0.9856374859809875, | |
| "learning_rate": 9.78066352637221e-05, | |
| "loss": 0.0427, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.797962648556876, | |
| "grad_norm": 0.9763184189796448, | |
| "learning_rate": 9.775793821853488e-05, | |
| "loss": 0.0662, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.8036219581211093, | |
| "grad_norm": 0.5652852654457092, | |
| "learning_rate": 9.77087189001868e-05, | |
| "loss": 0.0624, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.8092812676853424, | |
| "grad_norm": 0.8615242838859558, | |
| "learning_rate": 9.765897784693243e-05, | |
| "loss": 0.0575, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.8149405772495756, | |
| "grad_norm": 1.01888108253479, | |
| "learning_rate": 9.760871560273197e-05, | |
| "loss": 0.0723, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.8205998868138087, | |
| "grad_norm": 0.571245551109314, | |
| "learning_rate": 9.755793271724526e-05, | |
| "loss": 0.0723, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.8262591963780419, | |
| "grad_norm": 0.5587224960327148, | |
| "learning_rate": 9.750662974582584e-05, | |
| "loss": 0.0567, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.831918505942275, | |
| "grad_norm": 0.983090877532959, | |
| "learning_rate": 9.745480724951473e-05, | |
| "loss": 0.0614, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.8375778155065082, | |
| "grad_norm": 0.7602547407150269, | |
| "learning_rate": 9.740246579503447e-05, | |
| "loss": 0.0452, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.8432371250707413, | |
| "grad_norm": 0.7132800817489624, | |
| "learning_rate": 9.734960595478284e-05, | |
| "loss": 0.0544, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.8488964346349746, | |
| "grad_norm": 0.4686054289340973, | |
| "learning_rate": 9.729622830682657e-05, | |
| "loss": 0.0565, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.8545557441992077, | |
| "grad_norm": 1.0880781412124634, | |
| "learning_rate": 9.724233343489504e-05, | |
| "loss": 0.0585, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.8602150537634409, | |
| "grad_norm": 1.143423318862915, | |
| "learning_rate": 9.718792192837396e-05, | |
| "loss": 0.0611, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.865874363327674, | |
| "grad_norm": 0.7778660655021667, | |
| "learning_rate": 9.713299438229886e-05, | |
| "loss": 0.0582, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.8715336728919072, | |
| "grad_norm": 0.733134925365448, | |
| "learning_rate": 9.707755139734855e-05, | |
| "loss": 0.0616, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.8771929824561403, | |
| "grad_norm": 1.2035789489746094, | |
| "learning_rate": 9.702159357983866e-05, | |
| "loss": 0.0646, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.8828522920203735, | |
| "grad_norm": 1.4871290922164917, | |
| "learning_rate": 9.696512154171492e-05, | |
| "loss": 0.0747, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.8885116015846066, | |
| "grad_norm": 1.6757930517196655, | |
| "learning_rate": 9.690813590054645e-05, | |
| "loss": 0.0632, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.8941709111488398, | |
| "grad_norm": 0.7295994758605957, | |
| "learning_rate": 9.685063727951914e-05, | |
| "loss": 0.0537, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.8998302207130731, | |
| "grad_norm": 0.8169795870780945, | |
| "learning_rate": 9.679262630742865e-05, | |
| "loss": 0.0697, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.9054895302773062, | |
| "grad_norm": 0.8239006400108337, | |
| "learning_rate": 9.673410361867373e-05, | |
| "loss": 0.0714, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.9111488398415394, | |
| "grad_norm": 0.7182111144065857, | |
| "learning_rate": 9.667506985324909e-05, | |
| "loss": 0.0434, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.9168081494057725, | |
| "grad_norm": 0.7491622567176819, | |
| "learning_rate": 9.661552565673855e-05, | |
| "loss": 0.0551, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.9224674589700057, | |
| "grad_norm": 0.7354273796081543, | |
| "learning_rate": 9.655547168030789e-05, | |
| "loss": 0.1136, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.9281267685342388, | |
| "grad_norm": 0.7655503153800964, | |
| "learning_rate": 9.649490858069777e-05, | |
| "loss": 0.0772, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.933786078098472, | |
| "grad_norm": 1.1188385486602783, | |
| "learning_rate": 9.643383702021658e-05, | |
| "loss": 0.0753, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.9394453876627051, | |
| "grad_norm": 0.9626965522766113, | |
| "learning_rate": 9.637225766673307e-05, | |
| "loss": 0.0622, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.9451046972269384, | |
| "grad_norm": 0.7576481699943542, | |
| "learning_rate": 9.631017119366922e-05, | |
| "loss": 0.0546, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.9507640067911715, | |
| "grad_norm": 0.6366353034973145, | |
| "learning_rate": 9.624757827999273e-05, | |
| "loss": 0.0536, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.9564233163554047, | |
| "grad_norm": 0.6421340703964233, | |
| "learning_rate": 9.618447961020971e-05, | |
| "loss": 0.0568, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.9620826259196378, | |
| "grad_norm": 0.6514986753463745, | |
| "learning_rate": 9.612087587435707e-05, | |
| "loss": 0.0519, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.967741935483871, | |
| "grad_norm": 1.2975069284439087, | |
| "learning_rate": 9.605676776799508e-05, | |
| "loss": 0.065, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.9734012450481041, | |
| "grad_norm": 0.791313648223877, | |
| "learning_rate": 9.599215599219973e-05, | |
| "loss": 0.058, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.9790605546123373, | |
| "grad_norm": 0.9186238050460815, | |
| "learning_rate": 9.592704125355505e-05, | |
| "loss": 0.0575, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.9847198641765704, | |
| "grad_norm": 0.6246191263198853, | |
| "learning_rate": 9.586142426414538e-05, | |
| "loss": 0.0476, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.9903791737408036, | |
| "grad_norm": 0.6215393543243408, | |
| "learning_rate": 9.57953057415476e-05, | |
| "loss": 0.0735, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.9960384833050367, | |
| "grad_norm": 0.6971435546875, | |
| "learning_rate": 9.572868640882328e-05, | |
| "loss": 0.0547, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 1.0016977928692699, | |
| "grad_norm": 0.7553586959838867, | |
| "learning_rate": 9.56615669945108e-05, | |
| "loss": 0.0457, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 1.007357102433503, | |
| "grad_norm": 0.7966594099998474, | |
| "learning_rate": 9.55939482326173e-05, | |
| "loss": 0.0611, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 1.0130164119977363, | |
| "grad_norm": 0.9479690194129944, | |
| "learning_rate": 9.552583086261069e-05, | |
| "loss": 0.0422, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 1.0186757215619695, | |
| "grad_norm": 0.8228363990783691, | |
| "learning_rate": 9.545721562941168e-05, | |
| "loss": 0.0734, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 1.0243350311262025, | |
| "grad_norm": 0.9321158528327942, | |
| "learning_rate": 9.538810328338543e-05, | |
| "loss": 0.0504, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 1.0299943406904357, | |
| "grad_norm": 0.6621798276901245, | |
| "learning_rate": 9.531849458033349e-05, | |
| "loss": 0.0732, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 1.035653650254669, | |
| "grad_norm": 0.7835595011711121, | |
| "learning_rate": 9.524839028148547e-05, | |
| "loss": 0.0595, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 1.0413129598189022, | |
| "grad_norm": 0.6544864773750305, | |
| "learning_rate": 9.517779115349077e-05, | |
| "loss": 0.0849, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 1.0469722693831351, | |
| "grad_norm": 1.661457896232605, | |
| "learning_rate": 9.510669796841014e-05, | |
| "loss": 0.0986, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 1.0526315789473684, | |
| "grad_norm": 0.8697350025177002, | |
| "learning_rate": 9.503511150370727e-05, | |
| "loss": 0.0744, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 1.0582908885116016, | |
| "grad_norm": 0.8535569906234741, | |
| "learning_rate": 9.496303254224024e-05, | |
| "loss": 0.0646, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 1.0639501980758348, | |
| "grad_norm": 0.5321581959724426, | |
| "learning_rate": 9.489046187225306e-05, | |
| "loss": 0.0447, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 1.069609507640068, | |
| "grad_norm": 0.6567709445953369, | |
| "learning_rate": 9.481740028736692e-05, | |
| "loss": 0.0466, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 1.075268817204301, | |
| "grad_norm": 0.6248086094856262, | |
| "learning_rate": 9.474384858657164e-05, | |
| "loss": 0.0517, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 1.0809281267685342, | |
| "grad_norm": 0.7937766313552856, | |
| "learning_rate": 9.466980757421679e-05, | |
| "loss": 0.0683, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 1.0865874363327674, | |
| "grad_norm": 1.025101900100708, | |
| "learning_rate": 9.459527806000305e-05, | |
| "loss": 0.0595, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 1.0922467458970007, | |
| "grad_norm": 0.7608640789985657, | |
| "learning_rate": 9.452026085897325e-05, | |
| "loss": 0.0428, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 1.0979060554612337, | |
| "grad_norm": 0.49536848068237305, | |
| "learning_rate": 9.444475679150348e-05, | |
| "loss": 0.0453, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 1.1035653650254669, | |
| "grad_norm": 0.609743595123291, | |
| "learning_rate": 9.436876668329411e-05, | |
| "loss": 0.0376, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 1.1092246745897, | |
| "grad_norm": 0.5043293833732605, | |
| "learning_rate": 9.429229136536079e-05, | |
| "loss": 0.0579, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 1.1148839841539333, | |
| "grad_norm": 0.5044483542442322, | |
| "learning_rate": 9.421533167402534e-05, | |
| "loss": 0.045, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 1.1205432937181663, | |
| "grad_norm": 0.5287529826164246, | |
| "learning_rate": 9.413788845090666e-05, | |
| "loss": 0.047, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 1.1262026032823995, | |
| "grad_norm": 0.5549773573875427, | |
| "learning_rate": 9.405996254291136e-05, | |
| "loss": 0.0429, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 1.1318619128466327, | |
| "grad_norm": 0.4656256139278412, | |
| "learning_rate": 9.398155480222474e-05, | |
| "loss": 0.0604, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 1.137521222410866, | |
| "grad_norm": 0.6236544847488403, | |
| "learning_rate": 9.390266608630128e-05, | |
| "loss": 0.0763, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 1.143180531975099, | |
| "grad_norm": 0.4793378710746765, | |
| "learning_rate": 9.38232972578553e-05, | |
| "loss": 0.054, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 1.1488398415393322, | |
| "grad_norm": 0.6340910792350769, | |
| "learning_rate": 9.374344918485164e-05, | |
| "loss": 0.0424, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 1.1544991511035654, | |
| "grad_norm": 0.6548799276351929, | |
| "learning_rate": 9.366312274049602e-05, | |
| "loss": 0.0628, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 1.1601584606677986, | |
| "grad_norm": 0.646457850933075, | |
| "learning_rate": 9.358231880322554e-05, | |
| "loss": 0.0623, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 1.1658177702320316, | |
| "grad_norm": 0.7396799921989441, | |
| "learning_rate": 9.350103825669916e-05, | |
| "loss": 0.0516, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 1.1714770797962648, | |
| "grad_norm": 0.5293351411819458, | |
| "learning_rate": 9.341928198978787e-05, | |
| "loss": 0.0464, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 1.177136389360498, | |
| "grad_norm": 0.8705312609672546, | |
| "learning_rate": 9.333705089656512e-05, | |
| "loss": 0.0511, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 1.1827956989247312, | |
| "grad_norm": 0.7432714104652405, | |
| "learning_rate": 9.325434587629698e-05, | |
| "loss": 0.0481, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 1.1884550084889645, | |
| "grad_norm": 0.7466390132904053, | |
| "learning_rate": 9.31711678334323e-05, | |
| "loss": 0.0553, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 1.1941143180531975, | |
| "grad_norm": 0.9503251314163208, | |
| "learning_rate": 9.308751767759282e-05, | |
| "loss": 0.0679, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 1.1997736276174307, | |
| "grad_norm": 0.770002007484436, | |
| "learning_rate": 9.300339632356325e-05, | |
| "loss": 0.049, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 1.2054329371816639, | |
| "grad_norm": 1.1000735759735107, | |
| "learning_rate": 9.291880469128124e-05, | |
| "loss": 0.0561, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 1.2110922467458969, | |
| "grad_norm": 0.695549488067627, | |
| "learning_rate": 9.283374370582732e-05, | |
| "loss": 0.0378, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 1.21675155631013, | |
| "grad_norm": 1.4372471570968628, | |
| "learning_rate": 9.274821429741482e-05, | |
| "loss": 0.062, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 1.2224108658743633, | |
| "grad_norm": 0.7232362031936646, | |
| "learning_rate": 9.266221740137961e-05, | |
| "loss": 0.0404, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 1.2280701754385965, | |
| "grad_norm": 0.6811502575874329, | |
| "learning_rate": 9.257575395817001e-05, | |
| "loss": 0.0485, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 1.2337294850028298, | |
| "grad_norm": 1.0631557703018188, | |
| "learning_rate": 9.248882491333637e-05, | |
| "loss": 0.056, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 1.2393887945670627, | |
| "grad_norm": 0.5628013014793396, | |
| "learning_rate": 9.240143121752076e-05, | |
| "loss": 0.0463, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 1.245048104131296, | |
| "grad_norm": 0.42298072576522827, | |
| "learning_rate": 9.23135738264467e-05, | |
| "loss": 0.0403, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 1.2507074136955292, | |
| "grad_norm": 0.7429174184799194, | |
| "learning_rate": 9.222525370090849e-05, | |
| "loss": 0.0484, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 1.2563667232597622, | |
| "grad_norm": 0.6854789853096008, | |
| "learning_rate": 9.213647180676088e-05, | |
| "loss": 0.0431, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 1.2620260328239954, | |
| "grad_norm": 0.680793285369873, | |
| "learning_rate": 9.204722911490846e-05, | |
| "loss": 0.0527, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 1.2676853423882286, | |
| "grad_norm": 0.6899171471595764, | |
| "learning_rate": 9.1957526601295e-05, | |
| "loss": 0.0448, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 1.2733446519524618, | |
| "grad_norm": 0.6361444592475891, | |
| "learning_rate": 9.186736524689281e-05, | |
| "loss": 0.0461, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 1.279003961516695, | |
| "grad_norm": 0.7933545112609863, | |
| "learning_rate": 9.177674603769204e-05, | |
| "loss": 0.0461, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 1.284663271080928, | |
| "grad_norm": 0.8648480772972107, | |
| "learning_rate": 9.168566996468983e-05, | |
| "loss": 0.043, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 1.2903225806451613, | |
| "grad_norm": 0.685413658618927, | |
| "learning_rate": 9.159413802387951e-05, | |
| "loss": 0.0542, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 1.2959818902093945, | |
| "grad_norm": 0.8425791263580322, | |
| "learning_rate": 9.150215121623974e-05, | |
| "loss": 0.0691, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 1.3016411997736277, | |
| "grad_norm": 0.5436529517173767, | |
| "learning_rate": 9.140971054772349e-05, | |
| "loss": 0.05, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 1.307300509337861, | |
| "grad_norm": 0.9092062711715698, | |
| "learning_rate": 9.131681702924713e-05, | |
| "loss": 0.0503, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 1.312959818902094, | |
| "grad_norm": 0.602950394153595, | |
| "learning_rate": 9.122347167667926e-05, | |
| "loss": 0.042, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 1.3186191284663271, | |
| "grad_norm": 0.7941600680351257, | |
| "learning_rate": 9.112967551082973e-05, | |
| "loss": 0.0499, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 1.3242784380305603, | |
| "grad_norm": 0.786613941192627, | |
| "learning_rate": 9.103542955743835e-05, | |
| "loss": 0.0487, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 1.3299377475947933, | |
| "grad_norm": 0.6765789985656738, | |
| "learning_rate": 9.094073484716381e-05, | |
| "loss": 0.0533, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 1.3355970571590265, | |
| "grad_norm": 0.7006556987762451, | |
| "learning_rate": 9.084559241557226e-05, | |
| "loss": 0.0528, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 1.3412563667232598, | |
| "grad_norm": 0.7602994441986084, | |
| "learning_rate": 9.075000330312608e-05, | |
| "loss": 0.0528, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 1.346915676287493, | |
| "grad_norm": 1.3614463806152344, | |
| "learning_rate": 9.065396855517253e-05, | |
| "loss": 0.0578, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 1.3525749858517262, | |
| "grad_norm": 0.7470871210098267, | |
| "learning_rate": 9.055748922193219e-05, | |
| "loss": 0.0562, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 1.3582342954159592, | |
| "grad_norm": 0.7833662629127502, | |
| "learning_rate": 9.046056635848761e-05, | |
| "loss": 0.0514, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 1.3638936049801924, | |
| "grad_norm": 0.6521514058113098, | |
| "learning_rate": 9.036320102477169e-05, | |
| "loss": 0.0432, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 1.3695529145444256, | |
| "grad_norm": 0.6200571060180664, | |
| "learning_rate": 9.02653942855561e-05, | |
| "loss": 0.0732, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 1.3752122241086586, | |
| "grad_norm": 0.7283011078834534, | |
| "learning_rate": 9.016714721043971e-05, | |
| "loss": 0.0452, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 1.3808715336728918, | |
| "grad_norm": 0.7621099948883057, | |
| "learning_rate": 9.006846087383675e-05, | |
| "loss": 0.0515, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 1.386530843237125, | |
| "grad_norm": 0.7514267563819885, | |
| "learning_rate": 8.996933635496523e-05, | |
| "loss": 0.0394, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 1.3921901528013583, | |
| "grad_norm": 0.7734144926071167, | |
| "learning_rate": 8.986977473783498e-05, | |
| "loss": 0.0486, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 1.3978494623655915, | |
| "grad_norm": 0.6210748553276062, | |
| "learning_rate": 8.97697771112359e-05, | |
| "loss": 0.039, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 1.4035087719298245, | |
| "grad_norm": 0.5797284245491028, | |
| "learning_rate": 8.966934456872602e-05, | |
| "loss": 0.0456, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 1.4091680814940577, | |
| "grad_norm": 0.626136064529419, | |
| "learning_rate": 8.95684782086195e-05, | |
| "loss": 0.0541, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 1.414827391058291, | |
| "grad_norm": 0.2991216778755188, | |
| "learning_rate": 8.946717913397476e-05, | |
| "loss": 0.036, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 1.4204867006225241, | |
| "grad_norm": 0.5612489581108093, | |
| "learning_rate": 8.93654484525822e-05, | |
| "loss": 0.0338, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 1.4261460101867574, | |
| "grad_norm": 0.4333272874355316, | |
| "learning_rate": 8.926328727695226e-05, | |
| "loss": 0.0444, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 1.4318053197509903, | |
| "grad_norm": 0.5372508764266968, | |
| "learning_rate": 8.916069672430319e-05, | |
| "loss": 0.0377, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 1.4374646293152236, | |
| "grad_norm": 0.5010121464729309, | |
| "learning_rate": 8.905767791654884e-05, | |
| "loss": 0.0353, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 1.4431239388794568, | |
| "grad_norm": 0.5633225440979004, | |
| "learning_rate": 8.895423198028638e-05, | |
| "loss": 0.0481, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 1.4487832484436898, | |
| "grad_norm": 0.6071559190750122, | |
| "learning_rate": 8.885036004678402e-05, | |
| "loss": 0.049, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 1.454442558007923, | |
| "grad_norm": 0.8190767765045166, | |
| "learning_rate": 8.874606325196857e-05, | |
| "loss": 0.0511, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 1.4601018675721562, | |
| "grad_norm": 0.6717620491981506, | |
| "learning_rate": 8.864134273641304e-05, | |
| "loss": 0.0446, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 1.4657611771363894, | |
| "grad_norm": 0.9050039052963257, | |
| "learning_rate": 8.853619964532427e-05, | |
| "loss": 0.0454, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 1.4714204867006226, | |
| "grad_norm": 0.6766312122344971, | |
| "learning_rate": 8.843063512853019e-05, | |
| "loss": 0.0462, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 1.4770797962648556, | |
| "grad_norm": 0.4999978244304657, | |
| "learning_rate": 8.832465034046749e-05, | |
| "loss": 0.0533, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 1.4827391058290889, | |
| "grad_norm": 0.4319976568222046, | |
| "learning_rate": 8.821824644016882e-05, | |
| "loss": 0.0442, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 1.488398415393322, | |
| "grad_norm": 0.7079813480377197, | |
| "learning_rate": 8.811142459125019e-05, | |
| "loss": 0.0556, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 1.494057724957555, | |
| "grad_norm": 0.9638972282409668, | |
| "learning_rate": 8.800418596189822e-05, | |
| "loss": 0.0527, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 1.4997170345217883, | |
| "grad_norm": 0.7943388223648071, | |
| "learning_rate": 8.789653172485737e-05, | |
| "loss": 0.0585, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 1.5053763440860215, | |
| "grad_norm": 0.629218339920044, | |
| "learning_rate": 8.778846305741715e-05, | |
| "loss": 0.0497, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 1.5110356536502547, | |
| "grad_norm": 0.5880003571510315, | |
| "learning_rate": 8.767998114139918e-05, | |
| "loss": 0.0573, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 1.516694963214488, | |
| "grad_norm": 0.6889175176620483, | |
| "learning_rate": 8.757108716314429e-05, | |
| "loss": 0.0681, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 1.522354272778721, | |
| "grad_norm": 0.3247281312942505, | |
| "learning_rate": 8.746178231349962e-05, | |
| "loss": 0.0431, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 1.5280135823429541, | |
| "grad_norm": 0.6539881229400635, | |
| "learning_rate": 8.735206778780549e-05, | |
| "loss": 0.0343, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 1.5336728919071874, | |
| "grad_norm": 0.6488775610923767, | |
| "learning_rate": 8.724194478588234e-05, | |
| "loss": 0.047, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 1.5393322014714204, | |
| "grad_norm": 0.8400557041168213, | |
| "learning_rate": 8.713141451201772e-05, | |
| "loss": 0.0754, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 1.5449915110356538, | |
| "grad_norm": 0.5844991207122803, | |
| "learning_rate": 8.702047817495295e-05, | |
| "loss": 0.0409, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 1.5506508205998868, | |
| "grad_norm": 1.1271346807479858, | |
| "learning_rate": 8.69091369878701e-05, | |
| "loss": 0.0522, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 1.55631013016412, | |
| "grad_norm": 0.8452271223068237, | |
| "learning_rate": 8.679739216837849e-05, | |
| "loss": 0.0551, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 1.5619694397283532, | |
| "grad_norm": 0.6306042075157166, | |
| "learning_rate": 8.66852449385016e-05, | |
| "loss": 0.0467, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 1.5676287492925862, | |
| "grad_norm": 0.616023600101471, | |
| "learning_rate": 8.657269652466356e-05, | |
| "loss": 0.0477, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 1.5732880588568194, | |
| "grad_norm": 0.9345172047615051, | |
| "learning_rate": 8.645974815767577e-05, | |
| "loss": 0.0502, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 1.5789473684210527, | |
| "grad_norm": 0.723246157169342, | |
| "learning_rate": 8.634640107272351e-05, | |
| "loss": 0.0545, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 1.5846066779852856, | |
| "grad_norm": 0.49784278869628906, | |
| "learning_rate": 8.623265650935234e-05, | |
| "loss": 0.0518, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 1.590265987549519, | |
| "grad_norm": 0.5574387907981873, | |
| "learning_rate": 8.611851571145456e-05, | |
| "loss": 0.0627, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 1.595925297113752, | |
| "grad_norm": 0.6356534957885742, | |
| "learning_rate": 8.600397992725566e-05, | |
| "loss": 0.0616, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 1.6015846066779853, | |
| "grad_norm": 0.6085881590843201, | |
| "learning_rate": 8.588905040930061e-05, | |
| "loss": 0.0573, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 1.6072439162422185, | |
| "grad_norm": 0.49020498991012573, | |
| "learning_rate": 8.577372841444022e-05, | |
| "loss": 0.0544, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 1.6129032258064515, | |
| "grad_norm": 0.8643253445625305, | |
| "learning_rate": 8.565801520381736e-05, | |
| "loss": 0.0648, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 1.6185625353706847, | |
| "grad_norm": 0.626186728477478, | |
| "learning_rate": 8.554191204285313e-05, | |
| "loss": 0.0479, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 1.624221844934918, | |
| "grad_norm": 0.4494522511959076, | |
| "learning_rate": 8.542542020123315e-05, | |
| "loss": 0.042, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 1.629881154499151, | |
| "grad_norm": 0.766018807888031, | |
| "learning_rate": 8.530854095289347e-05, | |
| "loss": 0.0475, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 1.6355404640633844, | |
| "grad_norm": 0.5061770081520081, | |
| "learning_rate": 8.519127557600688e-05, | |
| "loss": 0.052, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 1.6411997736276174, | |
| "grad_norm": 0.7420628666877747, | |
| "learning_rate": 8.507362535296871e-05, | |
| "loss": 0.0575, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 1.6468590831918506, | |
| "grad_norm": 0.5609028339385986, | |
| "learning_rate": 8.495559157038299e-05, | |
| "loss": 0.045, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 1.6525183927560838, | |
| "grad_norm": 0.3088698387145996, | |
| "learning_rate": 8.483717551904823e-05, | |
| "loss": 0.0527, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 1.6581777023203168, | |
| "grad_norm": 0.43243131041526794, | |
| "learning_rate": 8.47183784939434e-05, | |
| "loss": 0.0353, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 1.6638370118845502, | |
| "grad_norm": 0.8786044716835022, | |
| "learning_rate": 8.459920179421374e-05, | |
| "loss": 0.0464, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 1.6694963214487832, | |
| "grad_norm": 0.441781610250473, | |
| "learning_rate": 8.447964672315656e-05, | |
| "loss": 0.0365, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 1.6751556310130165, | |
| "grad_norm": 0.5830533504486084, | |
| "learning_rate": 8.435971458820692e-05, | |
| "loss": 0.039, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 1.6808149405772497, | |
| "grad_norm": 0.4785899817943573, | |
| "learning_rate": 8.423940670092345e-05, | |
| "loss": 0.0406, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 1.6864742501414827, | |
| "grad_norm": 0.9931580424308777, | |
| "learning_rate": 8.411872437697394e-05, | |
| "loss": 0.0501, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 1.6921335597057159, | |
| "grad_norm": 0.41342973709106445, | |
| "learning_rate": 8.399766893612096e-05, | |
| "loss": 0.041, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 1.697792869269949, | |
| "grad_norm": 0.47967806458473206, | |
| "learning_rate": 8.38762417022074e-05, | |
| "loss": 0.0538, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 1.703452178834182, | |
| "grad_norm": 0.76365065574646, | |
| "learning_rate": 8.375444400314204e-05, | |
| "loss": 0.0553, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 1.7091114883984155, | |
| "grad_norm": 0.6275987029075623, | |
| "learning_rate": 8.3632277170885e-05, | |
| "loss": 0.0313, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 1.7147707979626485, | |
| "grad_norm": 0.8967359662055969, | |
| "learning_rate": 8.350974254143318e-05, | |
| "loss": 0.0444, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 1.7204301075268817, | |
| "grad_norm": 0.43494531512260437, | |
| "learning_rate": 8.338684145480566e-05, | |
| "loss": 0.0392, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 1.726089417091115, | |
| "grad_norm": 0.6516427397727966, | |
| "learning_rate": 8.326357525502904e-05, | |
| "loss": 0.0452, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 1.731748726655348, | |
| "grad_norm": 0.5176613926887512, | |
| "learning_rate": 8.313994529012273e-05, | |
| "loss": 0.0395, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 1.7374080362195812, | |
| "grad_norm": 0.5854260325431824, | |
| "learning_rate": 8.301595291208422e-05, | |
| "loss": 0.0338, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 1.7430673457838144, | |
| "grad_norm": 0.9170899987220764, | |
| "learning_rate": 8.289159947687427e-05, | |
| "loss": 0.0522, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 1.7487266553480474, | |
| "grad_norm": 0.5007301568984985, | |
| "learning_rate": 8.276688634440216e-05, | |
| "loss": 0.0458, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 1.7543859649122808, | |
| "grad_norm": 0.5097112059593201, | |
| "learning_rate": 8.26418148785107e-05, | |
| "loss": 0.0467, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 1.7600452744765138, | |
| "grad_norm": 0.5052613019943237, | |
| "learning_rate": 8.251638644696141e-05, | |
| "loss": 0.0349, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 1.765704584040747, | |
| "grad_norm": 0.35661929845809937, | |
| "learning_rate": 8.23906024214195e-05, | |
| "loss": 0.0303, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 1.7713638936049803, | |
| "grad_norm": 0.23859961330890656, | |
| "learning_rate": 8.226446417743897e-05, | |
| "loss": 0.0343, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 1.7770232031692133, | |
| "grad_norm": 0.5440965294837952, | |
| "learning_rate": 8.213797309444742e-05, | |
| "loss": 0.0464, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 1.7826825127334465, | |
| "grad_norm": 0.7961117625236511, | |
| "learning_rate": 8.201113055573105e-05, | |
| "loss": 0.0406, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 1.7883418222976797, | |
| "grad_norm": 0.508758544921875, | |
| "learning_rate": 8.188393794841958e-05, | |
| "loss": 0.0442, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 1.794001131861913, | |
| "grad_norm": 0.5264890193939209, | |
| "learning_rate": 8.175639666347094e-05, | |
| "loss": 0.0336, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 1.7996604414261461, | |
| "grad_norm": 0.7343180179595947, | |
| "learning_rate": 8.162850809565623e-05, | |
| "loss": 0.0552, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 1.8053197509903791, | |
| "grad_norm": 0.6147553324699402, | |
| "learning_rate": 8.150027364354431e-05, | |
| "loss": 0.0489, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 1.8109790605546123, | |
| "grad_norm": 0.6342501640319824, | |
| "learning_rate": 8.137169470948662e-05, | |
| "loss": 0.0514, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 1.8166383701188455, | |
| "grad_norm": 0.6614866256713867, | |
| "learning_rate": 8.124277269960179e-05, | |
| "loss": 0.0385, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 1.8222976796830785, | |
| "grad_norm": 0.5475081205368042, | |
| "learning_rate": 8.111350902376023e-05, | |
| "loss": 0.0471, | |
| "step": 3220 | |
| }, | |
| { | |
| "epoch": 1.827956989247312, | |
| "grad_norm": 0.37897610664367676, | |
| "learning_rate": 8.098390509556883e-05, | |
| "loss": 0.0465, | |
| "step": 3230 | |
| }, | |
| { | |
| "epoch": 1.833616298811545, | |
| "grad_norm": 0.5038880705833435, | |
| "learning_rate": 8.085396233235536e-05, | |
| "loss": 0.0451, | |
| "step": 3240 | |
| }, | |
| { | |
| "epoch": 1.8392756083757782, | |
| "grad_norm": 0.6834940910339355, | |
| "learning_rate": 8.072368215515306e-05, | |
| "loss": 0.0601, | |
| "step": 3250 | |
| }, | |
| { | |
| "epoch": 1.8449349179400114, | |
| "grad_norm": 0.7319331169128418, | |
| "learning_rate": 8.059306598868506e-05, | |
| "loss": 0.033, | |
| "step": 3260 | |
| }, | |
| { | |
| "epoch": 1.8505942275042444, | |
| "grad_norm": 0.8380953669548035, | |
| "learning_rate": 8.046211526134888e-05, | |
| "loss": 0.0464, | |
| "step": 3270 | |
| }, | |
| { | |
| "epoch": 1.8562535370684776, | |
| "grad_norm": 0.4649491310119629, | |
| "learning_rate": 8.033083140520065e-05, | |
| "loss": 0.0506, | |
| "step": 3280 | |
| }, | |
| { | |
| "epoch": 1.8619128466327108, | |
| "grad_norm": 0.4398675858974457, | |
| "learning_rate": 8.019921585593962e-05, | |
| "loss": 0.0372, | |
| "step": 3290 | |
| }, | |
| { | |
| "epoch": 1.8675721561969438, | |
| "grad_norm": 0.6413957476615906, | |
| "learning_rate": 8.006727005289232e-05, | |
| "loss": 0.0414, | |
| "step": 3300 | |
| }, | |
| { | |
| "epoch": 1.8732314657611773, | |
| "grad_norm": 0.4823025166988373, | |
| "learning_rate": 7.993499543899692e-05, | |
| "loss": 0.0424, | |
| "step": 3310 | |
| }, | |
| { | |
| "epoch": 1.8788907753254103, | |
| "grad_norm": 0.47241145372390747, | |
| "learning_rate": 7.980239346078742e-05, | |
| "loss": 0.0644, | |
| "step": 3320 | |
| }, | |
| { | |
| "epoch": 1.8845500848896435, | |
| "grad_norm": 1.091975212097168, | |
| "learning_rate": 7.966946556837778e-05, | |
| "loss": 0.0576, | |
| "step": 3330 | |
| }, | |
| { | |
| "epoch": 1.8902093944538767, | |
| "grad_norm": 0.5624361038208008, | |
| "learning_rate": 7.953621321544616e-05, | |
| "loss": 0.0348, | |
| "step": 3340 | |
| }, | |
| { | |
| "epoch": 1.8958687040181097, | |
| "grad_norm": 0.5308264493942261, | |
| "learning_rate": 7.940263785921896e-05, | |
| "loss": 0.0547, | |
| "step": 3350 | |
| }, | |
| { | |
| "epoch": 1.901528013582343, | |
| "grad_norm": 0.5924705862998962, | |
| "learning_rate": 7.926874096045482e-05, | |
| "loss": 0.0311, | |
| "step": 3360 | |
| }, | |
| { | |
| "epoch": 1.9071873231465761, | |
| "grad_norm": 0.671329140663147, | |
| "learning_rate": 7.913452398342881e-05, | |
| "loss": 0.0432, | |
| "step": 3370 | |
| }, | |
| { | |
| "epoch": 1.9128466327108091, | |
| "grad_norm": 0.42409855127334595, | |
| "learning_rate": 7.89999883959163e-05, | |
| "loss": 0.0627, | |
| "step": 3380 | |
| }, | |
| { | |
| "epoch": 1.9185059422750426, | |
| "grad_norm": 0.8025627732276917, | |
| "learning_rate": 7.886513566917687e-05, | |
| "loss": 0.0433, | |
| "step": 3390 | |
| }, | |
| { | |
| "epoch": 1.9241652518392756, | |
| "grad_norm": 0.8151332139968872, | |
| "learning_rate": 7.872996727793838e-05, | |
| "loss": 0.0464, | |
| "step": 3400 | |
| }, | |
| { | |
| "epoch": 1.9298245614035088, | |
| "grad_norm": 0.4845048487186432, | |
| "learning_rate": 7.859448470038069e-05, | |
| "loss": 0.0454, | |
| "step": 3410 | |
| }, | |
| { | |
| "epoch": 1.935483870967742, | |
| "grad_norm": 0.37321311235427856, | |
| "learning_rate": 7.845868941811956e-05, | |
| "loss": 0.034, | |
| "step": 3420 | |
| }, | |
| { | |
| "epoch": 1.941143180531975, | |
| "grad_norm": 0.7626938223838806, | |
| "learning_rate": 7.832258291619043e-05, | |
| "loss": 0.038, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 1.9468024900962084, | |
| "grad_norm": 0.7977511882781982, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0457, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 1.9524617996604414, | |
| "grad_norm": 0.807888388633728, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0386, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 1.9581211092246746, | |
| "grad_norm": 0.6336291432380676, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0399, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 1.9637804187889079, | |
| "grad_norm": 0.8386486768722534, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.0588, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 1.9694397283531409, | |
| "grad_norm": 0.736029326915741, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.0427, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 1.975099037917374, | |
| "grad_norm": 0.6496866941452026, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.0469, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 1.9807583474816073, | |
| "grad_norm": 0.4239360988140106, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.0332, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 1.9864176570458403, | |
| "grad_norm": 0.7490051984786987, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0424, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 1.9920769666100737, | |
| "grad_norm": 0.8489423394203186, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.0427, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 1.9977362761743067, | |
| "grad_norm": 0.4871460795402527, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.0413, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 2.0033955857385397, | |
| "grad_norm": 0.5171214938163757, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.0497, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 2.009054895302773, | |
| "grad_norm": 0.49935320019721985, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0428, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 2.014714204867006, | |
| "grad_norm": 0.6899267435073853, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.0393, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 2.0203735144312396, | |
| "grad_norm": 0.6978103518486023, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.039, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 2.0260328239954726, | |
| "grad_norm": 0.4505501091480255, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.0379, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 2.0316921335597056, | |
| "grad_norm": 0.6588614583015442, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.0468, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 2.037351443123939, | |
| "grad_norm": 0.47646525502204895, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0314, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 2.043010752688172, | |
| "grad_norm": 0.5849595665931702, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.0423, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 2.048670062252405, | |
| "grad_norm": 0.7265652418136597, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.0428, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 2.0543293718166384, | |
| "grad_norm": 0.6475759148597717, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.035, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 2.0599886813808714, | |
| "grad_norm": 1.025071382522583, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0364, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 2.065647990945105, | |
| "grad_norm": 0.7076042294502258, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0292, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 2.071307300509338, | |
| "grad_norm": 0.5352051258087158, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.0381, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 2.076966610073571, | |
| "grad_norm": 0.6327780485153198, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.0351, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 2.0826259196378043, | |
| "grad_norm": 0.4013051986694336, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0329, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 2.0882852292020373, | |
| "grad_norm": 0.3340098261833191, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.0374, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 2.0939445387662703, | |
| "grad_norm": 0.5839859843254089, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0418, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 2.0996038483305037, | |
| "grad_norm": 1.155350923538208, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0321, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 2.1052631578947367, | |
| "grad_norm": 0.4015667140483856, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0283, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 2.11092246745897, | |
| "grad_norm": 0.424580454826355, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0344, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 2.116581777023203, | |
| "grad_norm": 0.37337833642959595, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0271, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 2.122241086587436, | |
| "grad_norm": 0.41936591267585754, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0354, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 2.1279003961516696, | |
| "grad_norm": 0.621281087398529, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0377, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 2.1335597057159026, | |
| "grad_norm": 0.5020797848701477, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.0533, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 2.139219015280136, | |
| "grad_norm": 0.8350841403007507, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0557, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 2.144878324844369, | |
| "grad_norm": 0.8346255421638489, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.047, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 2.150537634408602, | |
| "grad_norm": 0.881129264831543, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0726, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 2.1561969439728355, | |
| "grad_norm": 0.6044751405715942, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0345, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 2.1618562535370685, | |
| "grad_norm": 0.5880212187767029, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0498, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 2.1675155631013014, | |
| "grad_norm": 0.8480750322341919, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.049, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 2.173174872665535, | |
| "grad_norm": 0.6445808410644531, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0352, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 2.178834182229768, | |
| "grad_norm": 0.5758914351463318, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0426, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 2.1844934917940013, | |
| "grad_norm": 0.6860023736953735, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.04, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 2.1901528013582343, | |
| "grad_norm": 0.46748650074005127, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0379, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 2.1958121109224673, | |
| "grad_norm": 0.7767139077186584, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0309, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 2.2014714204867007, | |
| "grad_norm": 0.5371547937393188, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.0415, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 2.2071307300509337, | |
| "grad_norm": 0.7050542831420898, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.0384, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 2.212790039615167, | |
| "grad_norm": 0.966188907623291, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.0434, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 2.2184493491794, | |
| "grad_norm": 0.794049859046936, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.039, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 2.224108658743633, | |
| "grad_norm": 0.5228994488716125, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0487, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 2.2297679683078666, | |
| "grad_norm": 0.6201066374778748, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0588, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 2.2354272778720996, | |
| "grad_norm": 0.4783194661140442, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0476, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 2.2410865874363326, | |
| "grad_norm": 0.5686235427856445, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0491, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 2.246745897000566, | |
| "grad_norm": 0.38256895542144775, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.0291, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 2.252405206564799, | |
| "grad_norm": 0.5158995985984802, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.0356, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 2.258064516129032, | |
| "grad_norm": 0.5597074627876282, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0312, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 2.2637238256932655, | |
| "grad_norm": 0.5796484351158142, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0403, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 2.2693831352574985, | |
| "grad_norm": 0.43213656544685364, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0372, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 2.275042444821732, | |
| "grad_norm": 0.8799070715904236, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0287, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 2.280701754385965, | |
| "grad_norm": 0.5011523962020874, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.0357, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 2.286361063950198, | |
| "grad_norm": 0.4229520261287689, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0246, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 2.2920203735144313, | |
| "grad_norm": 0.8594684600830078, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0415, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 2.2976796830786643, | |
| "grad_norm": 0.5908093452453613, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0328, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 2.3033389926428978, | |
| "grad_norm": 0.239020437002182, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0266, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 2.3089983022071308, | |
| "grad_norm": 0.7022054195404053, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.0464, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 2.3146576117713638, | |
| "grad_norm": 0.8178216218948364, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0446, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 2.320316921335597, | |
| "grad_norm": 0.5563772320747375, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0273, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 2.32597623089983, | |
| "grad_norm": 1.1647361516952515, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0343, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 2.331635540464063, | |
| "grad_norm": 0.5406816005706787, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.0335, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 2.3372948500282966, | |
| "grad_norm": 0.5563187003135681, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0402, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 2.3429541595925296, | |
| "grad_norm": 0.5906765460968018, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0461, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 2.348613469156763, | |
| "grad_norm": 0.4335680603981018, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.0369, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 2.354272778720996, | |
| "grad_norm": 0.38988831639289856, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.0363, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 2.359932088285229, | |
| "grad_norm": 0.39675620198249817, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0384, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 2.3655913978494625, | |
| "grad_norm": 0.4085799753665924, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.0324, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 2.3712507074136955, | |
| "grad_norm": 0.4118804931640625, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0302, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 2.376910016977929, | |
| "grad_norm": 0.4113968312740326, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0238, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 2.382569326542162, | |
| "grad_norm": 0.4689813554286957, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.0374, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 2.388228636106395, | |
| "grad_norm": 0.6826220750808716, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.0393, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 2.3938879456706283, | |
| "grad_norm": 0.9193120002746582, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.0447, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 2.3995472552348613, | |
| "grad_norm": 0.62978595495224, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0501, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 2.4052065647990943, | |
| "grad_norm": 0.6345353126525879, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0448, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 2.4108658743633278, | |
| "grad_norm": 0.8327631950378418, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0466, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 2.4165251839275608, | |
| "grad_norm": 0.4961281716823578, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0396, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 2.4221844934917938, | |
| "grad_norm": 0.47766831517219543, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.0425, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 2.427843803056027, | |
| "grad_norm": 0.5118998885154724, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.038, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 2.43350311262026, | |
| "grad_norm": 0.5439344048500061, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0358, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 2.4391624221844936, | |
| "grad_norm": 0.8164300918579102, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.0281, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 2.4448217317487266, | |
| "grad_norm": 0.6689225435256958, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.0587, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 2.4504810413129596, | |
| "grad_norm": 0.4825805425643921, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0347, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 2.456140350877193, | |
| "grad_norm": 0.5034956336021423, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0274, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 2.461799660441426, | |
| "grad_norm": 0.45087069272994995, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0291, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 2.4674589700056595, | |
| "grad_norm": 0.7518239617347717, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0311, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 2.4731182795698925, | |
| "grad_norm": 0.3112383782863617, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0286, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 2.4787775891341255, | |
| "grad_norm": 0.6246465444564819, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0332, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 2.484436898698359, | |
| "grad_norm": 0.4958966076374054, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0274, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 2.490096208262592, | |
| "grad_norm": 0.46587511897087097, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0361, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 2.495755517826825, | |
| "grad_norm": 0.7619835138320923, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0284, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 2.5014148273910584, | |
| "grad_norm": 0.3014388084411621, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0408, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 2.5070741369552914, | |
| "grad_norm": 0.5010896921157837, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.0377, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 2.5127334465195243, | |
| "grad_norm": 0.47420522570610046, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.0406, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 2.518392756083758, | |
| "grad_norm": 0.7290257811546326, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.0452, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 2.524052065647991, | |
| "grad_norm": 0.5837309956550598, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.0405, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 2.5297113752122242, | |
| "grad_norm": 0.6096610426902771, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.0339, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 2.535370684776457, | |
| "grad_norm": 0.673401951789856, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0339, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 2.5410299943406907, | |
| "grad_norm": 0.726558268070221, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0326, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 2.5466893039049237, | |
| "grad_norm": 0.8387537002563477, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0391, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 2.5523486134691566, | |
| "grad_norm": 0.904774010181427, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.0531, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 2.55800792303339, | |
| "grad_norm": 0.5178611874580383, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0339, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 2.563667232597623, | |
| "grad_norm": 0.5299800038337708, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.0364, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 2.569326542161856, | |
| "grad_norm": 0.515885591506958, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.0358, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 2.5749858517260895, | |
| "grad_norm": 0.5696628093719482, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0407, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 2.5806451612903225, | |
| "grad_norm": 0.45487692952156067, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0294, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 2.5863044708545555, | |
| "grad_norm": 0.5714346170425415, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0322, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 2.591963780418789, | |
| "grad_norm": 0.3328671455383301, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.0379, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 2.597623089983022, | |
| "grad_norm": 0.5089686512947083, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0393, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 2.6032823995472554, | |
| "grad_norm": 0.4880335330963135, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0346, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 2.6089417091114884, | |
| "grad_norm": 0.3896096348762512, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0328, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 2.614601018675722, | |
| "grad_norm": 0.8328500390052795, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0486, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 2.620260328239955, | |
| "grad_norm": 0.8080193400382996, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0339, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 2.625919637804188, | |
| "grad_norm": 0.5415770411491394, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0391, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 2.6315789473684212, | |
| "grad_norm": 0.6859717965126038, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.031, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 2.6372382569326542, | |
| "grad_norm": 0.6185169816017151, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0375, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 2.6428975664968872, | |
| "grad_norm": 0.5620154142379761, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0308, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 2.6485568760611207, | |
| "grad_norm": 0.3828488886356354, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0374, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 2.6542161856253537, | |
| "grad_norm": 0.5923011898994446, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0322, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 2.6598754951895867, | |
| "grad_norm": 0.4397177994251251, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0413, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 2.66553480475382, | |
| "grad_norm": 0.4128766357898712, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0279, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 2.671194114318053, | |
| "grad_norm": 0.47550642490386963, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.0363, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 2.676853423882286, | |
| "grad_norm": 0.6374404430389404, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.0366, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 2.6825127334465195, | |
| "grad_norm": 0.3073052763938904, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.025, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 2.688172043010753, | |
| "grad_norm": 0.5139605402946472, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0363, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 2.693831352574986, | |
| "grad_norm": 0.6438497304916382, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0336, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 2.699490662139219, | |
| "grad_norm": 0.8812306523323059, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0401, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 2.7051499717034524, | |
| "grad_norm": 0.35547712445259094, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0545, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 2.7108092812676854, | |
| "grad_norm": 0.3681725561618805, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.033, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 2.7164685908319184, | |
| "grad_norm": 0.3613463044166565, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0247, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 2.722127900396152, | |
| "grad_norm": 0.37430834770202637, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0276, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 2.727787209960385, | |
| "grad_norm": 0.763028621673584, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.0489, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 2.733446519524618, | |
| "grad_norm": 0.9758390784263611, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.0353, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 2.7391058290888513, | |
| "grad_norm": 0.6927124261856079, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0402, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 2.7447651386530842, | |
| "grad_norm": 0.4335505962371826, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0323, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 2.7504244482173172, | |
| "grad_norm": 0.642966091632843, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0328, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 2.7560837577815507, | |
| "grad_norm": 0.4464760422706604, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.0293, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 2.7617430673457837, | |
| "grad_norm": 0.4965338110923767, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0284, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 2.767402376910017, | |
| "grad_norm": 0.7036542892456055, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.0504, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 2.77306168647425, | |
| "grad_norm": 0.29114800691604614, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.0285, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 2.7787209960384835, | |
| "grad_norm": 0.6166044473648071, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.026, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 2.7843803056027165, | |
| "grad_norm": 0.637798011302948, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0361, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 2.7900396151669495, | |
| "grad_norm": 0.3737649917602539, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0255, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 2.795698924731183, | |
| "grad_norm": 0.5660009980201721, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0311, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 2.801358234295416, | |
| "grad_norm": 0.40252718329429626, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.0305, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 2.807017543859649, | |
| "grad_norm": 0.6519100666046143, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0279, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 2.8126768534238824, | |
| "grad_norm": 0.3294301927089691, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.0375, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 2.8183361629881154, | |
| "grad_norm": 0.3593049645423889, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.0304, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 2.8239954725523484, | |
| "grad_norm": 0.3873436152935028, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.031, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 2.829654782116582, | |
| "grad_norm": 0.7387383580207825, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.0448, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 2.835314091680815, | |
| "grad_norm": 0.5776609182357788, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0412, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 2.8409734012450483, | |
| "grad_norm": 0.34580954909324646, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0363, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 2.8466327108092813, | |
| "grad_norm": 0.3362334072589874, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0291, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 2.8522920203735147, | |
| "grad_norm": 0.320499986410141, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.0213, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 2.8579513299377477, | |
| "grad_norm": 0.5263609290122986, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.0308, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 2.8636106395019807, | |
| "grad_norm": 0.34841442108154297, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.027, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 2.869269949066214, | |
| "grad_norm": 0.47967901825904846, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0405, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 2.874929258630447, | |
| "grad_norm": 0.8032472729682922, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.0363, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 2.88058856819468, | |
| "grad_norm": 0.5181282758712769, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0348, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 2.8862478777589136, | |
| "grad_norm": 0.4246527850627899, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0375, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 2.8919071873231466, | |
| "grad_norm": 0.9782278537750244, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.041, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 2.8975664968873796, | |
| "grad_norm": 0.8772940039634705, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.0389, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 2.903225806451613, | |
| "grad_norm": 0.5722554326057434, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0335, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 2.908885116015846, | |
| "grad_norm": 0.5590901970863342, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0343, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 2.914544425580079, | |
| "grad_norm": 0.4556905925273895, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0328, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 2.9202037351443124, | |
| "grad_norm": 0.7099003791809082, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0374, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 2.9258630447085454, | |
| "grad_norm": 0.518718957901001, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0264, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 2.931522354272779, | |
| "grad_norm": 0.32491767406463623, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0258, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 2.937181663837012, | |
| "grad_norm": 0.5351004004478455, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.0417, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 2.9428409734012453, | |
| "grad_norm": 0.4286375343799591, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0273, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 2.9485002829654783, | |
| "grad_norm": 0.6190813779830933, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.0329, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 2.9541595925297113, | |
| "grad_norm": 0.6987380981445312, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0241, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 2.9598189020939447, | |
| "grad_norm": 0.3838080167770386, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0282, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 2.9654782116581777, | |
| "grad_norm": 0.6405492424964905, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.0351, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 2.9711375212224107, | |
| "grad_norm": 0.5722482800483704, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.0315, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 2.976796830786644, | |
| "grad_norm": 0.5997118949890137, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.0244, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 2.982456140350877, | |
| "grad_norm": 0.3658483028411865, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.0302, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 2.98811544991511, | |
| "grad_norm": 0.43296298384666443, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0369, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 2.9937747594793436, | |
| "grad_norm": 0.26728492975234985, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0343, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 2.9994340690435766, | |
| "grad_norm": 0.6191973090171814, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.025, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 3.00509337860781, | |
| "grad_norm": 0.4005588889122009, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.02, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 3.010752688172043, | |
| "grad_norm": 0.4203624725341797, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0254, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 3.016411997736276, | |
| "grad_norm": 0.41406959295272827, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.0389, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 3.0220713073005094, | |
| "grad_norm": 0.3125941753387451, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0284, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 3.0277306168647424, | |
| "grad_norm": 0.6467152237892151, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0294, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 3.033389926428976, | |
| "grad_norm": 0.46983492374420166, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0271, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 3.039049235993209, | |
| "grad_norm": 0.31741878390312195, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0267, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 3.044708545557442, | |
| "grad_norm": 0.28420332074165344, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0265, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 3.0503678551216753, | |
| "grad_norm": 0.5284949541091919, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0208, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 3.0560271646859083, | |
| "grad_norm": 0.5990930795669556, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0288, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 3.0616864742501413, | |
| "grad_norm": 0.4833022654056549, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0245, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 3.0673457838143747, | |
| "grad_norm": 0.4014180600643158, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0263, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 3.0730050933786077, | |
| "grad_norm": 0.2624795138835907, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0203, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 3.078664402942841, | |
| "grad_norm": 0.5820720791816711, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0372, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 3.084323712507074, | |
| "grad_norm": 0.4845047891139984, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0205, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 3.089983022071307, | |
| "grad_norm": 0.3671550750732422, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0252, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 3.0956423316355406, | |
| "grad_norm": 0.645203173160553, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.0296, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 3.1013016411997736, | |
| "grad_norm": 0.2925666272640228, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.0213, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 3.1069609507640066, | |
| "grad_norm": 0.40954533219337463, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0284, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 3.11262026032824, | |
| "grad_norm": 0.377370685338974, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.0331, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 3.118279569892473, | |
| "grad_norm": 0.4786716103553772, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.033, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 3.1239388794567065, | |
| "grad_norm": 0.2982279360294342, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0236, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 3.1295981890209394, | |
| "grad_norm": 0.3954036235809326, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.026, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 3.1352574985851724, | |
| "grad_norm": 0.46236851811408997, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.0202, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 3.140916808149406, | |
| "grad_norm": 0.5871313214302063, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.029, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 3.146576117713639, | |
| "grad_norm": 0.3890996277332306, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0233, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 3.152235427277872, | |
| "grad_norm": 0.6300520300865173, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.0327, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 3.1578947368421053, | |
| "grad_norm": 0.3230230510234833, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0234, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 3.1635540464063383, | |
| "grad_norm": 0.5044695734977722, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.0263, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 3.1692133559705717, | |
| "grad_norm": 0.42241913080215454, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0238, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 3.1748726655348047, | |
| "grad_norm": 0.506126880645752, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0278, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 3.1805319750990377, | |
| "grad_norm": 0.3469611406326294, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0186, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 3.186191284663271, | |
| "grad_norm": 0.3190121054649353, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0229, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 3.191850594227504, | |
| "grad_norm": 0.224530428647995, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0218, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 3.1975099037917376, | |
| "grad_norm": 0.583054780960083, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.0362, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 3.2031692133559706, | |
| "grad_norm": 0.5004111528396606, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.0223, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 3.2088285229202036, | |
| "grad_norm": 0.4681668281555176, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.0257, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 3.214487832484437, | |
| "grad_norm": 0.25892138481140137, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0309, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 3.22014714204867, | |
| "grad_norm": 0.659767210483551, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.0349, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 3.225806451612903, | |
| "grad_norm": 0.5110019445419312, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.032, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 3.2314657611771365, | |
| "grad_norm": 0.8555366396903992, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.0266, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 3.2371250707413695, | |
| "grad_norm": 0.37470516562461853, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0279, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 3.242784380305603, | |
| "grad_norm": 0.7342115044593811, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0273, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 3.248443689869836, | |
| "grad_norm": 0.6888209581375122, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0249, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 3.254102999434069, | |
| "grad_norm": 0.5219807624816895, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0232, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 3.2597623089983023, | |
| "grad_norm": 0.8390067219734192, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0256, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 3.2654216185625353, | |
| "grad_norm": 0.6247023940086365, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0281, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 3.2710809281267688, | |
| "grad_norm": 0.4523935616016388, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.0286, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 3.2767402376910018, | |
| "grad_norm": 0.5269297361373901, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0268, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 3.2823995472552348, | |
| "grad_norm": 0.48714950680732727, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.0321, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 3.288058856819468, | |
| "grad_norm": 0.5200141072273254, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0192, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 3.293718166383701, | |
| "grad_norm": 0.5562568306922913, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0223, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 3.299377475947934, | |
| "grad_norm": 0.5858548879623413, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.0297, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 3.3050367855121676, | |
| "grad_norm": 0.33869001269340515, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.0251, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 3.3106960950764006, | |
| "grad_norm": 0.45930442214012146, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0187, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 3.3163554046406336, | |
| "grad_norm": 0.5437840819358826, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.0303, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 3.322014714204867, | |
| "grad_norm": 0.5779958367347717, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.0327, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 3.3276740237691, | |
| "grad_norm": 0.8774251937866211, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.0276, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 3.3333333333333335, | |
| "grad_norm": 0.6216500401496887, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.033, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 3.3389926428975665, | |
| "grad_norm": 0.5506871342658997, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.0454, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 3.3446519524617995, | |
| "grad_norm": 0.7217140793800354, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.0332, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 3.350311262026033, | |
| "grad_norm": 0.6366479992866516, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.026, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 3.355970571590266, | |
| "grad_norm": 0.4815925657749176, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0232, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 3.3616298811544993, | |
| "grad_norm": 0.9379783272743225, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0277, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 3.3672891907187323, | |
| "grad_norm": 0.4577707052230835, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.0233, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 3.3729485002829653, | |
| "grad_norm": 0.36198124289512634, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.0228, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 3.3786078098471988, | |
| "grad_norm": 0.3477252423763275, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0193, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 3.3842671194114318, | |
| "grad_norm": 0.5713914036750793, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0276, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 3.3899264289756648, | |
| "grad_norm": 0.6365956664085388, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.0328, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 3.395585738539898, | |
| "grad_norm": 0.7436591386795044, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0343, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 3.401245048104131, | |
| "grad_norm": 1.1299272775650024, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0435, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 3.4069043576683646, | |
| "grad_norm": 0.3610575497150421, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.023, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 3.4125636672325976, | |
| "grad_norm": 0.5312737226486206, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0263, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 3.4182229767968306, | |
| "grad_norm": 0.5494548678398132, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.032, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 3.423882286361064, | |
| "grad_norm": 0.4840650260448456, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.0264, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 3.429541595925297, | |
| "grad_norm": 0.5452207922935486, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0278, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 3.4352009054895305, | |
| "grad_norm": 0.3490133285522461, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.0211, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 3.4408602150537635, | |
| "grad_norm": 0.39896267652511597, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.0354, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 3.4465195246179965, | |
| "grad_norm": 0.5362284779548645, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0293, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 3.45217883418223, | |
| "grad_norm": 0.4318121075630188, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0317, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 3.457838143746463, | |
| "grad_norm": 0.5870428681373596, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.0269, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 3.463497453310696, | |
| "grad_norm": 0.5336974263191223, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.0257, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 3.4691567628749294, | |
| "grad_norm": 0.406678169965744, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0183, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 3.4748160724391624, | |
| "grad_norm": 0.634650707244873, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0249, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 3.4804753820033953, | |
| "grad_norm": 0.27335265278816223, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.031, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 3.486134691567629, | |
| "grad_norm": 0.3521565794944763, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.0414, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 3.491794001131862, | |
| "grad_norm": 0.3947822153568268, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0339, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 3.497453310696095, | |
| "grad_norm": 0.4416903257369995, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.027, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 3.503112620260328, | |
| "grad_norm": 0.4432899057865143, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.0288, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 3.5087719298245617, | |
| "grad_norm": 0.45889851450920105, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0241, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 3.5144312393887946, | |
| "grad_norm": 0.4277569353580475, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0254, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 3.5200905489530276, | |
| "grad_norm": 0.47739022970199585, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0268, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 3.525749858517261, | |
| "grad_norm": 0.789215087890625, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0303, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 3.531409168081494, | |
| "grad_norm": 0.2821437418460846, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0258, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 3.537068477645727, | |
| "grad_norm": 0.4800223410129547, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0338, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 3.5427277872099605, | |
| "grad_norm": 0.3567802608013153, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0243, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 3.5483870967741935, | |
| "grad_norm": 0.4538363814353943, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0249, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 3.5540464063384265, | |
| "grad_norm": 0.36534032225608826, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.028, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 3.55970571590266, | |
| "grad_norm": 0.4637555778026581, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.0309, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 3.565365025466893, | |
| "grad_norm": 0.31388649344444275, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.0321, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 3.571024335031126, | |
| "grad_norm": 0.5784611105918884, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0261, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 3.5766836445953594, | |
| "grad_norm": 0.4061717092990875, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0224, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 3.582342954159593, | |
| "grad_norm": 0.34512901306152344, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.0274, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 3.588002263723826, | |
| "grad_norm": 0.2854452133178711, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.031, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 3.593661573288059, | |
| "grad_norm": 0.4145554304122925, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.0248, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 3.5993208828522922, | |
| "grad_norm": 0.4759776294231415, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0341, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 3.6049801924165252, | |
| "grad_norm": 0.39346179366111755, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.0162, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 3.6106395019807582, | |
| "grad_norm": 0.25690701603889465, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0231, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 3.6162988115449917, | |
| "grad_norm": 0.5031017065048218, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.033, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 3.6219581211092247, | |
| "grad_norm": 0.4231465458869934, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0217, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 3.6276174306734577, | |
| "grad_norm": 0.5258224606513977, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0222, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 3.633276740237691, | |
| "grad_norm": 0.3988799750804901, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.0264, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 3.638936049801924, | |
| "grad_norm": 0.33015578985214233, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0226, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 3.644595359366157, | |
| "grad_norm": 0.30468347668647766, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.0223, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 3.6502546689303905, | |
| "grad_norm": 0.31607723236083984, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0143, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 3.6559139784946235, | |
| "grad_norm": 0.31934604048728943, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0251, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 3.661573288058857, | |
| "grad_norm": 0.34353768825531006, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.0223, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 3.66723259762309, | |
| "grad_norm": 0.4591050446033478, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.0171, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 3.6728919071873234, | |
| "grad_norm": 0.4569452106952667, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0262, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 3.6785512167515564, | |
| "grad_norm": 0.3188132643699646, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.0209, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 3.6842105263157894, | |
| "grad_norm": 0.3605244755744934, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0196, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 3.689869835880023, | |
| "grad_norm": 0.39840972423553467, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0207, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 3.695529145444256, | |
| "grad_norm": 0.32871973514556885, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.0223, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 3.701188455008489, | |
| "grad_norm": 0.6761415004730225, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.0324, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 3.7068477645727222, | |
| "grad_norm": 0.3579584062099457, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.0177, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 3.7125070741369552, | |
| "grad_norm": 0.8756101131439209, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0329, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 3.7181663837011882, | |
| "grad_norm": 0.5549876093864441, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0207, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 3.7238256932654217, | |
| "grad_norm": 0.3724808394908905, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0341, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 3.7294850028296547, | |
| "grad_norm": 0.6460519433021545, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0311, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 3.735144312393888, | |
| "grad_norm": 0.48827290534973145, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0302, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 3.740803621958121, | |
| "grad_norm": 0.33132031559944153, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.016, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 3.7464629315223545, | |
| "grad_norm": 0.3171054720878601, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0175, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 3.7521222410865875, | |
| "grad_norm": 0.4657711684703827, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0199, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 3.7577815506508205, | |
| "grad_norm": 0.288437157869339, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.0194, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 3.763440860215054, | |
| "grad_norm": 0.44233739376068115, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.024, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 3.769100169779287, | |
| "grad_norm": 0.32472509145736694, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.0171, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 3.77475947934352, | |
| "grad_norm": 0.3336578607559204, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0194, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 3.7804187889077534, | |
| "grad_norm": 0.6103697419166565, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.0206, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 3.7860780984719864, | |
| "grad_norm": 0.3058294653892517, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.0187, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 3.7917374080362194, | |
| "grad_norm": 0.3847391903400421, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0269, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 3.797396717600453, | |
| "grad_norm": 0.4608091115951538, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.024, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 3.803056027164686, | |
| "grad_norm": 0.23236976563930511, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.0289, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 3.808715336728919, | |
| "grad_norm": 0.3241671919822693, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0226, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 3.8143746462931523, | |
| "grad_norm": 0.2845827043056488, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0358, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 3.8200339558573853, | |
| "grad_norm": 0.8334740400314331, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0259, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 3.8256932654216187, | |
| "grad_norm": 0.3732583522796631, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.0216, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 3.8313525749858517, | |
| "grad_norm": 0.6674580574035645, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.025, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 3.837011884550085, | |
| "grad_norm": 0.4493323564529419, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0212, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 3.842671194114318, | |
| "grad_norm": 0.40532025694847107, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0287, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 3.848330503678551, | |
| "grad_norm": 0.5855029225349426, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0211, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 3.8539898132427846, | |
| "grad_norm": 0.642335832118988, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.02, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 3.8596491228070176, | |
| "grad_norm": 0.24802717566490173, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0181, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 3.8653084323712505, | |
| "grad_norm": 0.4547887146472931, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0179, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 3.870967741935484, | |
| "grad_norm": 0.39535802602767944, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.0347, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 3.876627051499717, | |
| "grad_norm": 0.6071020364761353, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.0314, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 3.88228636106395, | |
| "grad_norm": 0.44565150141716003, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.022, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 3.8879456706281834, | |
| "grad_norm": 0.6935356259346008, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.0303, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 3.8936049801924164, | |
| "grad_norm": 0.406706303358078, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0307, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 3.89926428975665, | |
| "grad_norm": 0.33041876554489136, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.023, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 3.904923599320883, | |
| "grad_norm": 0.4668940007686615, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0328, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 3.9105829088851163, | |
| "grad_norm": 0.4430356025695801, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.0231, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 3.9162422184493493, | |
| "grad_norm": 0.9641812443733215, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0229, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 3.9219015280135823, | |
| "grad_norm": 0.3597238063812256, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0234, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 3.9275608375778157, | |
| "grad_norm": 0.3482387661933899, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.0261, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 3.9332201471420487, | |
| "grad_norm": 0.348222017288208, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0198, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 3.9388794567062817, | |
| "grad_norm": 0.4402678608894348, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0179, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 3.944538766270515, | |
| "grad_norm": 0.3544287085533142, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0204, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 3.950198075834748, | |
| "grad_norm": 0.7409390211105347, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.0246, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 3.955857385398981, | |
| "grad_norm": 0.6544904708862305, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0347, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 3.9615166949632146, | |
| "grad_norm": 0.5040102601051331, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.02, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 3.9671760045274476, | |
| "grad_norm": 0.2518904209136963, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.0354, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 3.9728353140916806, | |
| "grad_norm": 0.4520137310028076, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0186, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 3.978494623655914, | |
| "grad_norm": 0.276895672082901, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0236, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 3.984153933220147, | |
| "grad_norm": 0.27124443650245667, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0199, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 3.9898132427843804, | |
| "grad_norm": 0.3213484287261963, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0176, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 3.9954725523486134, | |
| "grad_norm": 0.5860376954078674, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.0247, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 4.001131861912847, | |
| "grad_norm": 0.2233385443687439, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0227, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 4.006791171477079, | |
| "grad_norm": 0.3871348798274994, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.028, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 4.012450481041313, | |
| "grad_norm": 0.6563515663146973, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0277, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 4.018109790605546, | |
| "grad_norm": 0.545616090297699, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0202, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 4.023769100169779, | |
| "grad_norm": 0.2558697462081909, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0228, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 4.029428409734012, | |
| "grad_norm": 0.31855130195617676, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0203, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 4.035087719298246, | |
| "grad_norm": 0.23440046608448029, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0154, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 4.040747028862479, | |
| "grad_norm": 0.33446216583251953, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0322, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 4.046406338426712, | |
| "grad_norm": 0.2720486521720886, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.032, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 4.052065647990945, | |
| "grad_norm": 0.4796851575374603, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0177, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 4.057724957555179, | |
| "grad_norm": 0.3703482151031494, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.0232, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 4.063384267119411, | |
| "grad_norm": 0.5428142547607422, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0206, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 4.069043576683645, | |
| "grad_norm": 0.4209301769733429, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.0294, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 4.074702886247878, | |
| "grad_norm": 0.21641351282596588, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0141, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 4.080362195812111, | |
| "grad_norm": 0.5476838946342468, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0164, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 4.086021505376344, | |
| "grad_norm": 0.4869919717311859, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0238, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 4.0916808149405774, | |
| "grad_norm": 0.164511039853096, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0179, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 4.09734012450481, | |
| "grad_norm": 0.3579815924167633, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.0236, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 4.102999434069043, | |
| "grad_norm": 0.46660980582237244, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0237, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 4.108658743633277, | |
| "grad_norm": 0.4709692597389221, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0229, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 4.11431805319751, | |
| "grad_norm": 0.21272122859954834, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.0152, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 4.119977362761743, | |
| "grad_norm": 0.3281286060810089, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.013, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 4.125636672325976, | |
| "grad_norm": 0.43052250146865845, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0164, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 4.13129598189021, | |
| "grad_norm": 0.37970268726348877, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0232, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 4.136955291454442, | |
| "grad_norm": 0.46434861421585083, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0248, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 4.142614601018676, | |
| "grad_norm": 0.28842875361442566, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.0107, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 4.148273910582909, | |
| "grad_norm": 0.36098814010620117, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.0188, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 4.153933220147142, | |
| "grad_norm": 0.35521721839904785, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.018, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 4.159592529711375, | |
| "grad_norm": 0.36936667561531067, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.015, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 4.165251839275609, | |
| "grad_norm": 0.2968006134033203, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.0161, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 4.170911148839841, | |
| "grad_norm": 0.6336629390716553, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0235, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 4.176570458404075, | |
| "grad_norm": 0.4255976378917694, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.017, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 4.182229767968308, | |
| "grad_norm": 0.3603611886501312, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.023, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 4.187889077532541, | |
| "grad_norm": 0.49528074264526367, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0178, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 4.193548387096774, | |
| "grad_norm": 0.28436949849128723, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0181, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 4.1992076966610075, | |
| "grad_norm": 0.497397243976593, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.0202, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 4.204867006225241, | |
| "grad_norm": 0.48627689480781555, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.0291, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 4.2105263157894735, | |
| "grad_norm": 0.2106613963842392, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0136, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 4.216185625353707, | |
| "grad_norm": 0.24168846011161804, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0249, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 4.22184493491794, | |
| "grad_norm": 0.46466273069381714, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0195, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 4.227504244482173, | |
| "grad_norm": 0.6745490431785583, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.0219, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 4.233163554046406, | |
| "grad_norm": 0.21338589489459991, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0214, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 4.23882286361064, | |
| "grad_norm": 0.39897558093070984, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0239, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 4.244482173174872, | |
| "grad_norm": 0.4335300326347351, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0185, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 4.250141482739106, | |
| "grad_norm": 0.3278849422931671, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0162, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 4.255800792303339, | |
| "grad_norm": 0.2869303524494171, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0155, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 4.261460101867572, | |
| "grad_norm": 0.43340399861335754, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0146, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 4.267119411431805, | |
| "grad_norm": 0.4681690037250519, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0151, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 4.272778720996039, | |
| "grad_norm": 0.39153608679771423, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0222, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 4.278438030560272, | |
| "grad_norm": 0.2339085340499878, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0152, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 4.284097340124505, | |
| "grad_norm": 0.30296626687049866, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0287, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 4.289756649688738, | |
| "grad_norm": 0.40544393658638, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0166, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 4.2954159592529715, | |
| "grad_norm": 0.38694241642951965, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.0278, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 4.301075268817204, | |
| "grad_norm": 0.27713772654533386, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0256, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 4.3067345783814375, | |
| "grad_norm": 0.5354537963867188, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0268, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 4.312393887945671, | |
| "grad_norm": 0.27956539392471313, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.0207, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 4.3180531975099035, | |
| "grad_norm": 0.3939666450023651, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0186, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 4.323712507074137, | |
| "grad_norm": 0.3555355668067932, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0126, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 4.32937181663837, | |
| "grad_norm": 0.45406582951545715, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0212, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 4.335031126202603, | |
| "grad_norm": 0.41438305377960205, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.0231, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 4.340690435766836, | |
| "grad_norm": 0.1785048395395279, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0157, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 4.34634974533107, | |
| "grad_norm": 0.37624722719192505, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.021, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 4.352009054895303, | |
| "grad_norm": 0.3121233582496643, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0203, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 4.357668364459536, | |
| "grad_norm": 0.4113762676715851, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.0126, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 4.363327674023769, | |
| "grad_norm": 0.4757934510707855, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.0165, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 4.368986983588003, | |
| "grad_norm": 0.4884026348590851, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.028, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 4.374646293152235, | |
| "grad_norm": 0.44030284881591797, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.0168, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 4.380305602716469, | |
| "grad_norm": 0.2994650602340698, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0135, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 4.385964912280702, | |
| "grad_norm": 0.2755189538002014, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.0191, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 4.391624221844935, | |
| "grad_norm": 0.434190958738327, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.0177, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 4.397283531409168, | |
| "grad_norm": 0.4349735677242279, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0201, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 4.4029428409734015, | |
| "grad_norm": 0.30483144521713257, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0233, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 4.408602150537634, | |
| "grad_norm": 0.2641529440879822, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.0168, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 4.4142614601018675, | |
| "grad_norm": 0.28446123003959656, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0137, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 4.419920769666101, | |
| "grad_norm": 0.43393540382385254, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0202, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 4.425580079230334, | |
| "grad_norm": 0.39954882860183716, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.0166, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 4.431239388794567, | |
| "grad_norm": 0.4561370313167572, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0174, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 4.4368986983588, | |
| "grad_norm": 0.2922416031360626, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.0119, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 4.442558007923034, | |
| "grad_norm": 0.3477330207824707, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.0217, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 4.448217317487266, | |
| "grad_norm": 0.381676584482193, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0209, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 4.4538766270515, | |
| "grad_norm": 0.5662440657615662, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0275, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 4.459535936615733, | |
| "grad_norm": 0.22791215777397156, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0197, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 4.465195246179966, | |
| "grad_norm": 0.36128297448158264, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0152, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 4.470854555744199, | |
| "grad_norm": 0.5986441373825073, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0228, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 4.476513865308433, | |
| "grad_norm": 0.37303632497787476, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.0278, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 4.482173174872665, | |
| "grad_norm": 0.5574079751968384, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.0296, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 4.487832484436899, | |
| "grad_norm": 0.4958466589450836, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.022, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 4.493491794001132, | |
| "grad_norm": 0.6211679577827454, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.0199, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 4.499151103565365, | |
| "grad_norm": 0.2561228573322296, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0204, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 4.504810413129598, | |
| "grad_norm": 0.4060359001159668, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.016, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 4.5104697226938315, | |
| "grad_norm": 0.47181347012519836, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0199, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 4.516129032258064, | |
| "grad_norm": 0.22250010073184967, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0174, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 4.5217883418222975, | |
| "grad_norm": 0.3580704629421234, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0125, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 4.527447651386531, | |
| "grad_norm": 0.21652798354625702, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0194, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 4.533106960950764, | |
| "grad_norm": 0.3229580223560333, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0155, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 4.538766270514997, | |
| "grad_norm": 0.31920507550239563, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0187, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 4.54442558007923, | |
| "grad_norm": 0.17051509022712708, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0147, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 4.550084889643464, | |
| "grad_norm": 0.2453758269548416, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0127, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 4.555744199207696, | |
| "grad_norm": 0.27052152156829834, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.0159, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 4.56140350877193, | |
| "grad_norm": 0.2720597982406616, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.018, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 4.567062818336163, | |
| "grad_norm": 0.20707684755325317, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0143, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 4.572722127900396, | |
| "grad_norm": 0.37861260771751404, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.018, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 4.578381437464629, | |
| "grad_norm": 0.26799649000167847, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.0273, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 4.584040747028863, | |
| "grad_norm": 0.2633807361125946, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0293, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 4.589700056593095, | |
| "grad_norm": 0.13268502056598663, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0191, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 4.595359366157329, | |
| "grad_norm": 0.32166966795921326, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.0165, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 4.601018675721562, | |
| "grad_norm": 0.42073673009872437, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0215, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 4.6066779852857955, | |
| "grad_norm": 0.2821817398071289, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.0296, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 4.612337294850028, | |
| "grad_norm": 0.42356571555137634, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.014, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 4.6179966044142615, | |
| "grad_norm": 0.313291072845459, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.0217, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 4.623655913978495, | |
| "grad_norm": 0.6416903734207153, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.0185, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 4.6293152235427275, | |
| "grad_norm": 0.26457569003105164, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0193, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 4.634974533106961, | |
| "grad_norm": 0.1700776219367981, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0153, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 4.640633842671194, | |
| "grad_norm": 0.1664831042289734, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0151, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 4.646293152235427, | |
| "grad_norm": 0.13171158730983734, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0118, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 4.65195246179966, | |
| "grad_norm": 0.17852510511875153, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.0153, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 4.657611771363894, | |
| "grad_norm": 0.3008864223957062, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.0133, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 4.663271080928126, | |
| "grad_norm": 0.3318842053413391, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0175, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 4.66893039049236, | |
| "grad_norm": 0.16649554669857025, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.0133, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 4.674589700056593, | |
| "grad_norm": 0.23691275715827942, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.0087, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 4.680249009620827, | |
| "grad_norm": 0.2552962303161621, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0129, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 4.685908319185059, | |
| "grad_norm": 0.2878318428993225, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0166, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 4.691567628749293, | |
| "grad_norm": 0.21222403645515442, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0207, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 4.697226938313526, | |
| "grad_norm": 0.3449961245059967, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0188, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 4.702886247877759, | |
| "grad_norm": 0.12952852249145508, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.0116, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 4.708545557441992, | |
| "grad_norm": 0.17324061691761017, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.0144, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 4.7142048670062255, | |
| "grad_norm": 0.7605016231536865, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.019, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 4.719864176570458, | |
| "grad_norm": 0.23774351179599762, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0102, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 4.7255234861346915, | |
| "grad_norm": 0.5098357796669006, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0187, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 4.731182795698925, | |
| "grad_norm": 0.48201507329940796, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.018, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 4.7368421052631575, | |
| "grad_norm": 0.3019934594631195, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0113, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 4.742501414827391, | |
| "grad_norm": 0.495006263256073, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0218, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 4.748160724391624, | |
| "grad_norm": 0.33595865964889526, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0187, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 4.753820033955858, | |
| "grad_norm": 0.44678542017936707, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.0146, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 4.75947934352009, | |
| "grad_norm": 0.15218102931976318, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0187, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 4.765138653084324, | |
| "grad_norm": 0.22666513919830322, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0135, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 4.770797962648556, | |
| "grad_norm": 0.3472532331943512, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.015, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 4.77645727221279, | |
| "grad_norm": 0.4426356852054596, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0174, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 4.782116581777023, | |
| "grad_norm": 0.177797332406044, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0205, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 4.787775891341257, | |
| "grad_norm": 0.4254482388496399, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.0147, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 4.793435200905489, | |
| "grad_norm": 0.3482140898704529, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.0144, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 4.799094510469723, | |
| "grad_norm": 0.36761054396629333, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.0127, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 4.804753820033956, | |
| "grad_norm": 0.17115193605422974, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0169, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 4.810413129598189, | |
| "grad_norm": 0.3664368987083435, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0199, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 4.816072439162422, | |
| "grad_norm": 0.25585654377937317, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0218, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 4.8217317487266556, | |
| "grad_norm": 0.1754721999168396, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0108, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 4.827391058290889, | |
| "grad_norm": 0.5733800530433655, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.0144, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 4.8330503678551215, | |
| "grad_norm": 0.23090040683746338, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0163, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 4.838709677419355, | |
| "grad_norm": 0.179311603307724, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0226, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 4.8443689869835875, | |
| "grad_norm": 0.3688429892063141, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0187, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 4.850028296547821, | |
| "grad_norm": 0.24394282698631287, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0364, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 4.855687606112054, | |
| "grad_norm": 0.3167862296104431, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0197, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 4.861346915676288, | |
| "grad_norm": 0.5820156335830688, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.025, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 4.86700622524052, | |
| "grad_norm": 0.226038858294487, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.0093, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 4.872665534804754, | |
| "grad_norm": 0.12436351925134659, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0096, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 4.878324844368987, | |
| "grad_norm": 0.23296402394771576, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.0182, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 4.88398415393322, | |
| "grad_norm": 0.4382390081882477, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0236, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 4.889643463497453, | |
| "grad_norm": 0.21248957514762878, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.013, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 4.895302773061687, | |
| "grad_norm": 0.37292659282684326, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0181, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 4.900962082625919, | |
| "grad_norm": 0.14314575493335724, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.0185, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 4.906621392190153, | |
| "grad_norm": 0.25116947293281555, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.0107, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 4.912280701754386, | |
| "grad_norm": 0.24996954202651978, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.0128, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 4.917940011318619, | |
| "grad_norm": 0.19975966215133667, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.0111, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 4.923599320882852, | |
| "grad_norm": 0.32479751110076904, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.0158, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 4.929258630447086, | |
| "grad_norm": 0.14419406652450562, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0128, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 4.934917940011319, | |
| "grad_norm": 0.31757551431655884, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0224, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 4.9405772495755516, | |
| "grad_norm": 0.38625890016555786, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0171, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 4.946236559139785, | |
| "grad_norm": 0.28305289149284363, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0105, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 4.951895868704018, | |
| "grad_norm": 0.5939130187034607, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.0206, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 4.957555178268251, | |
| "grad_norm": 0.45923322439193726, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0139, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 4.963214487832484, | |
| "grad_norm": 0.17273946106433868, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.017, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 4.968873797396718, | |
| "grad_norm": 0.32904428243637085, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.0125, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 4.97453310696095, | |
| "grad_norm": 0.5931819081306458, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.0224, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 4.980192416525184, | |
| "grad_norm": 0.5407785773277283, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0147, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 4.985851726089417, | |
| "grad_norm": 0.19974103569984436, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0141, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 4.99151103565365, | |
| "grad_norm": 0.33167314529418945, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.014, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 4.997170345217883, | |
| "grad_norm": 0.21541465818881989, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0158, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 5.002829654782117, | |
| "grad_norm": 0.15989074110984802, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0123, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 5.00848896434635, | |
| "grad_norm": 0.38332706689834595, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0101, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 5.014148273910583, | |
| "grad_norm": 0.25430190563201904, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0146, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 5.019807583474816, | |
| "grad_norm": 0.3547205626964569, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0122, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 5.02546689303905, | |
| "grad_norm": 0.21467237174510956, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.0194, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 5.031126202603282, | |
| "grad_norm": 0.2861272096633911, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0182, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 5.036785512167516, | |
| "grad_norm": 0.40209969878196716, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.0146, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 5.042444821731749, | |
| "grad_norm": 0.38302555680274963, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.0146, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 5.048104131295982, | |
| "grad_norm": 0.34767237305641174, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.016, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 5.053763440860215, | |
| "grad_norm": 0.187392920255661, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0129, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 5.0594227504244484, | |
| "grad_norm": 0.3076670169830322, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0142, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 5.065082059988681, | |
| "grad_norm": 0.24371568858623505, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0177, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 5.070741369552914, | |
| "grad_norm": 0.20141568779945374, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0088, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 5.076400679117148, | |
| "grad_norm": 0.18076495826244354, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.01, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 5.082059988681381, | |
| "grad_norm": 0.16600769758224487, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.0113, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 5.087719298245614, | |
| "grad_norm": 0.26359665393829346, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.0168, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 5.093378607809847, | |
| "grad_norm": 0.32241716980934143, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.032, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 5.099037917374081, | |
| "grad_norm": 0.6367690563201904, | |
| "learning_rate": 2.661028168789892e-06, | |
| "loss": 0.0136, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 5.104697226938313, | |
| "grad_norm": 0.3206840753555298, | |
| "learning_rate": 2.6080647205706855e-06, | |
| "loss": 0.0093, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 5.110356536502547, | |
| "grad_norm": 0.3884199559688568, | |
| "learning_rate": 2.555619542962834e-06, | |
| "loss": 0.0195, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 5.11601584606678, | |
| "grad_norm": 0.20583972334861755, | |
| "learning_rate": 2.503693209498409e-06, | |
| "loss": 0.0154, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 5.121675155631013, | |
| "grad_norm": 0.3200293481349945, | |
| "learning_rate": 2.452286288035449e-06, | |
| "loss": 0.0163, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 5.127334465195246, | |
| "grad_norm": 0.33171623945236206, | |
| "learning_rate": 2.4013993407518363e-06, | |
| "loss": 0.0187, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 5.13299377475948, | |
| "grad_norm": 0.19206714630126953, | |
| "learning_rate": 2.351032924139063e-06, | |
| "loss": 0.0107, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 5.138653084323712, | |
| "grad_norm": 0.2751140296459198, | |
| "learning_rate": 2.30118758899619e-06, | |
| "loss": 0.0099, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 5.144312393887946, | |
| "grad_norm": 0.24944934248924255, | |
| "learning_rate": 2.2518638804238157e-06, | |
| "loss": 0.0249, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 5.149971703452179, | |
| "grad_norm": 0.5298340916633606, | |
| "learning_rate": 2.203062337818118e-06, | |
| "loss": 0.0208, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 5.155631013016412, | |
| "grad_norm": 0.20584413409233093, | |
| "learning_rate": 2.1547834948649483e-06, | |
| "loss": 0.0105, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 5.161290322580645, | |
| "grad_norm": 0.2957272529602051, | |
| "learning_rate": 2.1070278795340017e-06, | |
| "loss": 0.017, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 5.1669496321448785, | |
| "grad_norm": 0.17460356652736664, | |
| "learning_rate": 2.059796014073029e-06, | |
| "loss": 0.0233, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 5.172608941709112, | |
| "grad_norm": 0.3751950263977051, | |
| "learning_rate": 2.01308841500214e-06, | |
| "loss": 0.01, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 5.1782682512733444, | |
| "grad_norm": 0.16106566786766052, | |
| "learning_rate": 1.9669055931081704e-06, | |
| "loss": 0.0163, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 5.183927560837578, | |
| "grad_norm": 0.3512350022792816, | |
| "learning_rate": 1.9212480534390507e-06, | |
| "loss": 0.0115, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 5.189586870401811, | |
| "grad_norm": 0.09471239149570465, | |
| "learning_rate": 1.8761162952983246e-06, | |
| "loss": 0.0129, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 5.195246179966044, | |
| "grad_norm": 0.3261120915412903, | |
| "learning_rate": 1.8315108122396618e-06, | |
| "loss": 0.0141, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 5.200905489530277, | |
| "grad_norm": 0.14130228757858276, | |
| "learning_rate": 1.787432092061475e-06, | |
| "loss": 0.0105, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 5.206564799094511, | |
| "grad_norm": 0.33845049142837524, | |
| "learning_rate": 1.743880616801602e-06, | |
| "loss": 0.014, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 5.212224108658743, | |
| "grad_norm": 0.5717782974243164, | |
| "learning_rate": 1.7008568627319865e-06, | |
| "loss": 0.0163, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 5.217883418222977, | |
| "grad_norm": 0.43369486927986145, | |
| "learning_rate": 1.6583613003535226e-06, | |
| "loss": 0.0229, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 5.22354272778721, | |
| "grad_norm": 0.24910669028759003, | |
| "learning_rate": 1.6163943943908522e-06, | |
| "loss": 0.0152, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 5.229202037351443, | |
| "grad_norm": 0.25324907898902893, | |
| "learning_rate": 1.5749566037873476e-06, | |
| "loss": 0.0125, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 5.234861346915676, | |
| "grad_norm": 0.1568821221590042, | |
| "learning_rate": 1.5340483817000428e-06, | |
| "loss": 0.0242, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 5.24052065647991, | |
| "grad_norm": 0.31048205494880676, | |
| "learning_rate": 1.4936701754947101e-06, | |
| "loss": 0.0159, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 5.246179966044142, | |
| "grad_norm": 0.5406427979469299, | |
| "learning_rate": 1.4538224267409361e-06, | |
| "loss": 0.0124, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 5.251839275608376, | |
| "grad_norm": 0.4390304386615753, | |
| "learning_rate": 1.414505571207314e-06, | |
| "loss": 0.0129, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 5.257498585172609, | |
| "grad_norm": 0.19887875020503998, | |
| "learning_rate": 1.3757200388566816e-06, | |
| "loss": 0.02, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 5.2631578947368425, | |
| "grad_norm": 0.1655147671699524, | |
| "learning_rate": 1.3374662538414074e-06, | |
| "loss": 0.0154, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 5.268817204301075, | |
| "grad_norm": 0.2915429472923279, | |
| "learning_rate": 1.2997446344987617e-06, | |
| "loss": 0.0239, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 5.2744765138653085, | |
| "grad_norm": 0.384115993976593, | |
| "learning_rate": 1.262555593346315e-06, | |
| "loss": 0.0182, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 5.280135823429542, | |
| "grad_norm": 0.19533221423625946, | |
| "learning_rate": 1.2258995370774685e-06, | |
| "loss": 0.0271, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 5.2857951329937745, | |
| "grad_norm": 0.36194220185279846, | |
| "learning_rate": 1.1897768665569798e-06, | |
| "loss": 0.0177, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 5.291454442558008, | |
| "grad_norm": 0.2928744852542877, | |
| "learning_rate": 1.1541879768165954e-06, | |
| "loss": 0.0097, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 5.297113752122241, | |
| "grad_norm": 0.41840288043022156, | |
| "learning_rate": 1.1191332570507085e-06, | |
| "loss": 0.0134, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 5.302773061686474, | |
| "grad_norm": 0.5431681871414185, | |
| "learning_rate": 1.0846130906121132e-06, | |
| "loss": 0.0227, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 5.308432371250707, | |
| "grad_norm": 0.27649563550949097, | |
| "learning_rate": 1.0506278550078131e-06, | |
| "loss": 0.0139, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 5.314091680814941, | |
| "grad_norm": 0.27125483751296997, | |
| "learning_rate": 1.0171779218949185e-06, | |
| "loss": 0.0144, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 5.319750990379173, | |
| "grad_norm": 0.27272504568099976, | |
| "learning_rate": 9.842636570765174e-07, | |
| "loss": 0.0173, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 5.325410299943407, | |
| "grad_norm": 0.3312033414840698, | |
| "learning_rate": 9.518854204977612e-07, | |
| "loss": 0.0273, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 5.33106960950764, | |
| "grad_norm": 0.29762130975723267, | |
| "learning_rate": 9.200435662418349e-07, | |
| "loss": 0.0111, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 5.336728919071874, | |
| "grad_norm": 0.2990401089191437, | |
| "learning_rate": 8.887384425261658e-07, | |
| "loss": 0.0159, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 5.342388228636106, | |
| "grad_norm": 0.4782230257987976, | |
| "learning_rate": 8.579703916985648e-07, | |
| "loss": 0.0118, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 5.34804753820034, | |
| "grad_norm": 0.21451210975646973, | |
| "learning_rate": 8.277397502335194e-07, | |
| "loss": 0.0146, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 5.353706847764573, | |
| "grad_norm": 0.3441447913646698, | |
| "learning_rate": 7.980468487284675e-07, | |
| "loss": 0.0082, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 5.359366157328806, | |
| "grad_norm": 0.35575538873672485, | |
| "learning_rate": 7.688920119002297e-07, | |
| "loss": 0.0168, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 5.365025466893039, | |
| "grad_norm": 0.27196231484413147, | |
| "learning_rate": 7.402755585814269e-07, | |
| "loss": 0.0205, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 5.3706847764572725, | |
| "grad_norm": 0.23656480014324188, | |
| "learning_rate": 7.121978017170073e-07, | |
| "loss": 0.0081, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 5.376344086021505, | |
| "grad_norm": 0.10927730053663254, | |
| "learning_rate": 6.846590483608306e-07, | |
| "loss": 0.0115, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 5.3820033955857385, | |
| "grad_norm": 0.43316495418548584, | |
| "learning_rate": 6.576595996722834e-07, | |
| "loss": 0.0136, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 5.387662705149972, | |
| "grad_norm": 0.24791935086250305, | |
| "learning_rate": 6.311997509130141e-07, | |
| "loss": 0.0077, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 5.3933220147142045, | |
| "grad_norm": 0.17370861768722534, | |
| "learning_rate": 6.052797914436803e-07, | |
| "loss": 0.0218, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 5.398981324278438, | |
| "grad_norm": 0.561249852180481, | |
| "learning_rate": 5.799000047208181e-07, | |
| "loss": 0.0149, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 5.404640633842671, | |
| "grad_norm": 0.1811702996492386, | |
| "learning_rate": 5.550606682937054e-07, | |
| "loss": 0.025, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 5.410299943406905, | |
| "grad_norm": 0.4653280973434448, | |
| "learning_rate": 5.307620538013481e-07, | |
| "loss": 0.0163, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 5.415959252971137, | |
| "grad_norm": 0.30958282947540283, | |
| "learning_rate": 5.070044269694874e-07, | |
| "loss": 0.0155, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 5.421618562535371, | |
| "grad_norm": 0.3639081120491028, | |
| "learning_rate": 4.837880476077417e-07, | |
| "loss": 0.0216, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 5.427277872099604, | |
| "grad_norm": 0.16603532433509827, | |
| "learning_rate": 4.6111316960670835e-07, | |
| "loss": 0.0133, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 5.432937181663837, | |
| "grad_norm": 0.2216721922159195, | |
| "learning_rate": 4.389800409352218e-07, | |
| "loss": 0.016, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 5.43859649122807, | |
| "grad_norm": 0.27220794558525085, | |
| "learning_rate": 4.173889036376277e-07, | |
| "loss": 0.0154, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 5.444255800792304, | |
| "grad_norm": 0.191203311085701, | |
| "learning_rate": 3.963399938311463e-07, | |
| "loss": 0.0144, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 5.449915110356536, | |
| "grad_norm": 0.17640888690948486, | |
| "learning_rate": 3.7583354170328545e-07, | |
| "loss": 0.0179, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 5.45557441992077, | |
| "grad_norm": 0.09340283274650574, | |
| "learning_rate": 3.558697715093207e-07, | |
| "loss": 0.012, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 5.461233729485003, | |
| "grad_norm": 0.1469435691833496, | |
| "learning_rate": 3.3644890156983576e-07, | |
| "loss": 0.0173, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 5.466893039049236, | |
| "grad_norm": 0.24418553709983826, | |
| "learning_rate": 3.175711442683638e-07, | |
| "loss": 0.015, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 5.472552348613469, | |
| "grad_norm": 0.19976800680160522, | |
| "learning_rate": 2.9923670604902197e-07, | |
| "loss": 0.0095, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 5.4782116581777025, | |
| "grad_norm": 0.18360495567321777, | |
| "learning_rate": 2.814457874143028e-07, | |
| "loss": 0.0151, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 5.483870967741936, | |
| "grad_norm": 0.44020748138427734, | |
| "learning_rate": 2.641985829228366e-07, | |
| "loss": 0.012, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 5.4895302773061685, | |
| "grad_norm": 0.21414652466773987, | |
| "learning_rate": 2.474952811872877e-07, | |
| "loss": 0.0093, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 5.495189586870402, | |
| "grad_norm": 0.29714521765708923, | |
| "learning_rate": 2.3133606487228397e-07, | |
| "loss": 0.0157, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 5.5008488964346345, | |
| "grad_norm": 0.11837577819824219, | |
| "learning_rate": 2.157211106924295e-07, | |
| "loss": 0.0073, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 5.506508205998868, | |
| "grad_norm": 0.1690998524427414, | |
| "learning_rate": 2.006505894103672e-07, | |
| "loss": 0.0169, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 5.512167515563101, | |
| "grad_norm": 0.14515431225299835, | |
| "learning_rate": 1.8612466583489696e-07, | |
| "loss": 0.0161, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 5.517826825127335, | |
| "grad_norm": 0.4104876220226288, | |
| "learning_rate": 1.7214349881918834e-07, | |
| "loss": 0.0221, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 5.523486134691567, | |
| "grad_norm": 0.14933814108371735, | |
| "learning_rate": 1.5870724125904845e-07, | |
| "loss": 0.0093, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 5.529145444255801, | |
| "grad_norm": 0.3486264646053314, | |
| "learning_rate": 1.4581604009124006e-07, | |
| "loss": 0.0169, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 5.534804753820034, | |
| "grad_norm": 0.4453631341457367, | |
| "learning_rate": 1.334700362918717e-07, | |
| "loss": 0.0184, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 5.540464063384267, | |
| "grad_norm": 0.22598163783550262, | |
| "learning_rate": 1.2166936487486015e-07, | |
| "loss": 0.0131, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 5.5461233729485, | |
| "grad_norm": 0.21766339242458344, | |
| "learning_rate": 1.1041415489045914e-07, | |
| "loss": 0.0124, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 5.551782682512734, | |
| "grad_norm": 0.26409411430358887, | |
| "learning_rate": 9.970452942384412e-08, | |
| "loss": 0.0109, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 5.557441992076967, | |
| "grad_norm": 0.3744887113571167, | |
| "learning_rate": 8.954060559375754e-08, | |
| "loss": 0.0242, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 5.5631013016412, | |
| "grad_norm": 0.23755554854869843, | |
| "learning_rate": 7.99224945512489e-08, | |
| "loss": 0.0148, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 5.568760611205433, | |
| "grad_norm": 0.6062955260276794, | |
| "learning_rate": 7.085030147843675e-08, | |
| "loss": 0.02, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 5.574419920769666, | |
| "grad_norm": 0.18671253323554993, | |
| "learning_rate": 6.232412558736523e-08, | |
| "loss": 0.0189, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 5.580079230333899, | |
| "grad_norm": 0.18308652937412262, | |
| "learning_rate": 5.434406011893822e-08, | |
| "loss": 0.0132, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 5.5857385398981325, | |
| "grad_norm": 0.21284526586532593, | |
| "learning_rate": 4.6910192341864664e-08, | |
| "loss": 0.0094, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 5.591397849462366, | |
| "grad_norm": 0.3942609131336212, | |
| "learning_rate": 4.0022603551737035e-08, | |
| "loss": 0.0128, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 5.5970571590265985, | |
| "grad_norm": 0.14613506197929382, | |
| "learning_rate": 3.3681369070120985e-08, | |
| "loss": 0.0106, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 5.602716468590832, | |
| "grad_norm": 0.22569288313388824, | |
| "learning_rate": 2.7886558243744866e-08, | |
| "loss": 0.0161, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 5.608375778155065, | |
| "grad_norm": 0.2921515107154846, | |
| "learning_rate": 2.2638234443722596e-08, | |
| "loss": 0.0161, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 5.614035087719298, | |
| "grad_norm": 0.3103187084197998, | |
| "learning_rate": 1.7936455064887504e-08, | |
| "loss": 0.0131, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 5.619694397283531, | |
| "grad_norm": 0.20665791630744934, | |
| "learning_rate": 1.378127152514841e-08, | |
| "loss": 0.0182, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 5.625353706847765, | |
| "grad_norm": 0.2384548783302307, | |
| "learning_rate": 1.0172729264917857e-08, | |
| "loss": 0.0114, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 5.631013016411997, | |
| "grad_norm": 0.2406599372625351, | |
| "learning_rate": 7.1108677466458215e-09, | |
| "loss": 0.0138, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 5.636672325976231, | |
| "grad_norm": 0.13295477628707886, | |
| "learning_rate": 4.595720454353414e-09, | |
| "loss": 0.0105, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 5.642331635540464, | |
| "grad_norm": 0.2226768136024475, | |
| "learning_rate": 2.627314893294264e-09, | |
| "loss": 0.0097, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 5.647990945104697, | |
| "grad_norm": 0.6699599027633667, | |
| "learning_rate": 1.2056725896270048e-09, | |
| "loss": 0.023, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 5.65365025466893, | |
| "grad_norm": 0.2477726936340332, | |
| "learning_rate": 3.308090902098826e-10, | |
| "loss": 0.0181, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 5.659309564233164, | |
| "grad_norm": 0.2732313871383667, | |
| "learning_rate": 2.7339624120159555e-12, | |
| "loss": 0.014, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 5.659309564233164, | |
| "step": 10000, | |
| "total_flos": 0.0, | |
| "train_loss": 0.04171483232602477, | |
| "train_runtime": 7285.8266, | |
| "train_samples_per_second": 10.98, | |
| "train_steps_per_second": 1.373 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 6, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
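
The record above appears to follow the standard Hugging Face `Trainer` state layout: a `log_history` list of periodic entries (every `logging_steps` = 10 steps) carrying `step`, `loss`, `learning_rate`, and `grad_norm`, followed by a final summary entry with `train_loss`, `train_runtime`, and throughput fields. The sketch below is not part of the original log; it is a minimal example of how such a file could be inspected, assuming the JSON is saved locally under the hypothetical name `trainer_state.json`.

```python
# Minimal sketch (assumption: the JSON above is saved as "trainer_state.json",
# a hypothetical local path, in the standard Hugging Face Trainer layout).
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the periodic logging entries; the last record in "log_history"
# is a run summary and does not carry per-step "loss"/"learning_rate" fields.
logs = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]
summary = state["log_history"][-1]

steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
lrs = [e["learning_rate"] for e in logs]

print(f"logged points  : {len(logs)} (steps {steps[0]}..{steps[-1]})")
print(f"final loss     : {losses[-1]:.4f} at step {steps[-1]}")
print(f"final lr       : {lrs[-1]:.3e}")
print(f"mean train loss: {summary['train_loss']:.4f}")
print(f"runtime (s)    : {summary['train_runtime']:.1f}")
```

On this log, such a script would report the run ending at step 10000 with a learning rate near 2.7e-12, consistent with a decay schedule driven to zero over `max_steps` = 10000, and the reported mean training loss of about 0.0417 over roughly 7286 seconds.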