{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 1022,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0019569471624266144,
      "grad_norm": 11.799306353936723,
      "learning_rate": 9.99997637686018e-06,
      "loss": 0.3921,
      "step": 1
    },
    {
      "epoch": 0.003913894324853229,
      "grad_norm": 9.071400905820305,
      "learning_rate": 9.999905507663936e-06,
      "loss": 0.3715,
      "step": 2
    },
    {
      "epoch": 0.005870841487279843,
      "grad_norm": 8.084328896157814,
      "learning_rate": 9.999787393080931e-06,
      "loss": 0.3395,
      "step": 3
    },
    {
      "epoch": 0.007827788649706457,
      "grad_norm": 7.186674154101679,
      "learning_rate": 9.99962203422726e-06,
      "loss": 0.2708,
      "step": 4
    },
    {
      "epoch": 0.009784735812133072,
      "grad_norm": 6.7559024399645216,
      "learning_rate": 9.999409432665442e-06,
      "loss": 0.2674,
      "step": 5
    },
    {
      "epoch": 0.011741682974559686,
      "grad_norm": 5.556933892748166,
      "learning_rate": 9.9991495904044e-06,
      "loss": 0.2309,
      "step": 6
    },
    {
      "epoch": 0.0136986301369863,
      "grad_norm": 5.420241896018174,
      "learning_rate": 9.998842509899456e-06,
      "loss": 0.2575,
      "step": 7
    },
    {
      "epoch": 0.015655577299412915,
      "grad_norm": 5.938476261444258,
      "learning_rate": 9.998488194052287e-06,
      "loss": 0.2538,
      "step": 8
    },
    {
      "epoch": 0.01761252446183953,
      "grad_norm": 7.336142784715855,
      "learning_rate": 9.998086646210916e-06,
      "loss": 0.2846,
      "step": 9
    },
    {
      "epoch": 0.019569471624266144,
      "grad_norm": 6.775648106874203,
      "learning_rate": 9.997637870169673e-06,
      "loss": 0.3058,
      "step": 10
    },
    {
      "epoch": 0.021526418786692758,
      "grad_norm": 6.50888726389833,
      "learning_rate": 9.997141870169154e-06,
      "loss": 0.2961,
      "step": 11
    },
    {
      "epoch": 0.023483365949119372,
      "grad_norm": 5.4448820703340175,
      "learning_rate": 9.996598650896191e-06,
      "loss": 0.2403,
      "step": 12
    },
    {
      "epoch": 0.025440313111545987,
      "grad_norm": 6.431587971006118,
      "learning_rate": 9.996008217483806e-06,
      "loss": 0.302,
      "step": 13
    },
    {
      "epoch": 0.0273972602739726,
      "grad_norm": 6.324084453217792,
      "learning_rate": 9.995370575511151e-06,
      "loss": 0.3003,
      "step": 14
    },
    {
      "epoch": 0.029354207436399216,
      "grad_norm": 4.930742779407249,
      "learning_rate": 9.994685731003469e-06,
      "loss": 0.2263,
      "step": 15
    },
    {
      "epoch": 0.03131115459882583,
      "grad_norm": 5.5169959262857295,
      "learning_rate": 9.993953690432032e-06,
      "loss": 0.2709,
      "step": 16
    },
    {
      "epoch": 0.033268101761252444,
      "grad_norm": 5.068994374098398,
      "learning_rate": 9.99317446071408e-06,
      "loss": 0.2293,
      "step": 17
    },
    {
      "epoch": 0.03522504892367906,
      "grad_norm": 5.210498384135931,
      "learning_rate": 9.99234804921275e-06,
      "loss": 0.2609,
      "step": 18
    },
    {
      "epoch": 0.03718199608610567,
      "grad_norm": 6.033693173608315,
      "learning_rate": 9.991474463737018e-06,
      "loss": 0.2766,
      "step": 19
    },
    {
      "epoch": 0.03913894324853229,
      "grad_norm": 7.075281920451076,
      "learning_rate": 9.990553712541617e-06,
      "loss": 0.3563,
      "step": 20
    },
    {
      "epoch": 0.0410958904109589,
      "grad_norm": 6.018234510787989,
      "learning_rate": 9.989585804326963e-06,
      "loss": 0.286,
      "step": 21
    },
    {
      "epoch": 0.043052837573385516,
      "grad_norm": 6.50724940764653,
      "learning_rate": 9.988570748239062e-06,
      "loss": 0.299,
      "step": 22
    },
    {
      "epoch": 0.04500978473581213,
      "grad_norm": 6.297952826417235,
      "learning_rate": 9.987508553869444e-06,
      "loss": 0.3047,
      "step": 23
    },
    {
      "epoch": 0.046966731898238745,
      "grad_norm": 5.517644570588827,
      "learning_rate": 9.986399231255057e-06,
      "loss": 0.2504,
      "step": 24
    },
    {
      "epoch": 0.04892367906066536,
      "grad_norm": 5.0625254379789695,
      "learning_rate": 9.985242790878168e-06,
      "loss": 0.2859,
      "step": 25
    },
    {
      "epoch": 0.050880626223091974,
      "grad_norm": 5.718797467422794,
      "learning_rate": 9.984039243666284e-06,
      "loss": 0.2738,
      "step": 26
    },
    {
      "epoch": 0.05283757338551859,
      "grad_norm": 5.29250542490486,
      "learning_rate": 9.982788600992027e-06,
      "loss": 0.2571,
      "step": 27
    },
    {
      "epoch": 0.0547945205479452,
      "grad_norm": 6.637085136667847,
      "learning_rate": 9.98149087467304e-06,
      "loss": 0.2606,
      "step": 28
    },
    {
      "epoch": 0.05675146771037182,
      "grad_norm": 5.221410013166089,
      "learning_rate": 9.980146076971873e-06,
      "loss": 0.3145,
      "step": 29
    },
    {
      "epoch": 0.05870841487279843,
      "grad_norm": 7.925014737492298,
      "learning_rate": 9.978754220595861e-06,
      "loss": 0.3167,
      "step": 30
    },
    {
      "epoch": 0.060665362035225046,
      "grad_norm": 6.1823785798268265,
      "learning_rate": 9.977315318697013e-06,
      "loss": 0.289,
      "step": 31
    },
    {
      "epoch": 0.06262230919765166,
      "grad_norm": 5.1544915998462635,
      "learning_rate": 9.975829384871884e-06,
      "loss": 0.2446,
      "step": 32
    },
    {
      "epoch": 0.06457925636007827,
      "grad_norm": 5.221450488910585,
      "learning_rate": 9.974296433161437e-06,
      "loss": 0.2788,
      "step": 33
    },
    {
      "epoch": 0.06653620352250489,
      "grad_norm": 6.982587328218915,
      "learning_rate": 9.97271647805093e-06,
      "loss": 0.341,
      "step": 34
    },
    {
      "epoch": 0.0684931506849315,
      "grad_norm": 5.85310616310938,
      "learning_rate": 9.97108953446976e-06,
      "loss": 0.2494,
      "step": 35
    },
    {
      "epoch": 0.07045009784735812,
      "grad_norm": 5.914165551401318,
      "learning_rate": 9.969415617791336e-06,
      "loss": 0.2932,
      "step": 36
    },
    {
      "epoch": 0.07240704500978473,
      "grad_norm": 5.593094789000428,
      "learning_rate": 9.967694743832923e-06,
      "loss": 0.2437,
      "step": 37
    },
    {
      "epoch": 0.07436399217221135,
      "grad_norm": 3.9874103242441405,
      "learning_rate": 9.965926928855498e-06,
      "loss": 0.1962,
      "step": 38
    },
    {
      "epoch": 0.07632093933463796,
      "grad_norm": 5.806941694329281,
      "learning_rate": 9.964112189563601e-06,
      "loss": 0.2769,
      "step": 39
    },
    {
      "epoch": 0.07827788649706457,
      "grad_norm": 5.4647885250252495,
      "learning_rate": 9.962250543105167e-06,
      "loss": 0.2631,
      "step": 40
    },
    {
      "epoch": 0.08023483365949119,
      "grad_norm": 5.173210442496862,
      "learning_rate": 9.960342007071368e-06,
      "loss": 0.2181,
      "step": 41
    },
    {
      "epoch": 0.0821917808219178,
      "grad_norm": 6.05092688494635,
      "learning_rate": 9.95838659949645e-06,
      "loss": 0.2831,
      "step": 42
    },
    {
      "epoch": 0.08414872798434442,
      "grad_norm": 5.809824216434075,
      "learning_rate": 9.956384338857561e-06,
      "loss": 0.2261,
      "step": 43
    },
    {
      "epoch": 0.08610567514677103,
      "grad_norm": 5.615317030542591,
      "learning_rate": 9.954335244074575e-06,
      "loss": 0.2499,
      "step": 44
    },
    {
      "epoch": 0.08806262230919765,
      "grad_norm": 4.8179437827185865,
      "learning_rate": 9.952239334509909e-06,
      "loss": 0.2417,
      "step": 45
    },
    {
      "epoch": 0.09001956947162426,
      "grad_norm": 4.939680465287023,
      "learning_rate": 9.950096629968353e-06,
      "loss": 0.2131,
      "step": 46
    },
    {
      "epoch": 0.09197651663405088,
      "grad_norm": 4.746643558286443,
      "learning_rate": 9.947907150696868e-06,
      "loss": 0.2426,
      "step": 47
    },
    {
      "epoch": 0.09393346379647749,
      "grad_norm": 4.90305717851164,
      "learning_rate": 9.945670917384404e-06,
      "loss": 0.1937,
      "step": 48
    },
    {
      "epoch": 0.0958904109589041,
      "grad_norm": 5.225583282829709,
      "learning_rate": 9.943387951161702e-06,
      "loss": 0.2995,
      "step": 49
    },
    {
      "epoch": 0.09784735812133072,
      "grad_norm": 5.318652230849387,
      "learning_rate": 9.941058273601097e-06,
      "loss": 0.2682,
      "step": 50
    },
    {
      "epoch": 0.09980430528375733,
      "grad_norm": 5.678914824755625,
      "learning_rate": 9.938681906716305e-06,
      "loss": 0.2575,
      "step": 51
    },
    {
      "epoch": 0.10176125244618395,
      "grad_norm": 6.541086720852764,
      "learning_rate": 9.936258872962229e-06,
      "loss": 0.2985,
      "step": 52
    },
    {
      "epoch": 0.10371819960861056,
      "grad_norm": 6.612229835184093,
      "learning_rate": 9.93378919523473e-06,
      "loss": 0.2561,
      "step": 53
    },
    {
      "epoch": 0.10567514677103718,
      "grad_norm": 5.499960343426062,
      "learning_rate": 9.931272896870427e-06,
      "loss": 0.2569,
      "step": 54
    },
    {
      "epoch": 0.10763209393346379,
      "grad_norm": 3.8497118841940474,
      "learning_rate": 9.928710001646467e-06,
      "loss": 0.1946,
      "step": 55
    },
    {
      "epoch": 0.1095890410958904,
      "grad_norm": 4.472445126891697,
      "learning_rate": 9.926100533780304e-06,
      "loss": 0.2603,
      "step": 56
    },
    {
      "epoch": 0.11154598825831702,
      "grad_norm": 4.984365114945483,
      "learning_rate": 9.923444517929467e-06,
      "loss": 0.3049,
      "step": 57
    },
    {
      "epoch": 0.11350293542074363,
      "grad_norm": 6.2531682890711595,
      "learning_rate": 9.92074197919133e-06,
      "loss": 0.2758,
      "step": 58
    },
    {
      "epoch": 0.11545988258317025,
      "grad_norm": 6.323712329935732,
      "learning_rate": 9.917992943102872e-06,
      "loss": 0.275,
      "step": 59
    },
    {
      "epoch": 0.11741682974559686,
      "grad_norm": 6.006316558855426,
      "learning_rate": 9.91519743564044e-06,
      "loss": 0.3,
      "step": 60
    },
    {
      "epoch": 0.11937377690802348,
      "grad_norm": 5.237122670475276,
      "learning_rate": 9.912355483219498e-06,
      "loss": 0.256,
      "step": 61
    },
    {
      "epoch": 0.12133072407045009,
      "grad_norm": 5.3434538730468955,
      "learning_rate": 9.909467112694385e-06,
      "loss": 0.2405,
      "step": 62
    },
    {
      "epoch": 0.1232876712328767,
      "grad_norm": 7.64218543279621,
      "learning_rate": 9.906532351358047e-06,
      "loss": 0.3352,
      "step": 63
    },
    {
      "epoch": 0.12524461839530332,
      "grad_norm": 6.306024380048756,
      "learning_rate": 9.903551226941801e-06,
      "loss": 0.2966,
      "step": 64
    },
    {
      "epoch": 0.12720156555772993,
      "grad_norm": 4.951525462105939,
      "learning_rate": 9.900523767615052e-06,
      "loss": 0.2363,
      "step": 65
    },
    {
      "epoch": 0.12915851272015655,
      "grad_norm": 5.712211304687038,
      "learning_rate": 9.897450001985038e-06,
      "loss": 0.2779,
      "step": 66
    },
    {
      "epoch": 0.13111545988258316,
      "grad_norm": 5.369343691263528,
      "learning_rate": 9.894329959096559e-06,
      "loss": 0.2862,
      "step": 67
    },
    {
      "epoch": 0.13307240704500978,
      "grad_norm": 5.999615815494337,
      "learning_rate": 9.891163668431696e-06,
      "loss": 0.264,
      "step": 68
    },
    {
      "epoch": 0.1350293542074364,
      "grad_norm": 5.223665545747361,
      "learning_rate": 9.887951159909541e-06,
      "loss": 0.2482,
      "step": 69
    },
    {
      "epoch": 0.136986301369863,
      "grad_norm": 5.201023958695433,
      "learning_rate": 9.88469246388591e-06,
      "loss": 0.2378,
      "step": 70
    },
    {
      "epoch": 0.13894324853228962,
      "grad_norm": 4.976899927508783,
      "learning_rate": 9.881387611153053e-06,
      "loss": 0.191,
      "step": 71
    },
    {
      "epoch": 0.14090019569471623,
      "grad_norm": 6.731758183608815,
      "learning_rate": 9.878036632939374e-06,
      "loss": 0.2978,
      "step": 72
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 6.241811202289688,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.2728,
      "step": 73
    },
    {
      "epoch": 0.14481409001956946,
      "grad_norm": 6.039613077041295,
      "learning_rate": 9.871196427162094e-06,
      "loss": 0.2532,
      "step": 74
    },
    {
      "epoch": 0.14677103718199608,
      "grad_norm": 5.601214241682582,
      "learning_rate": 9.867707264233349e-06,
      "loss": 0.2381,
      "step": 75
    },
    {
      "epoch": 0.1487279843444227,
      "grad_norm": 4.428546555088426,
      "learning_rate": 9.86417210509288e-06,
      "loss": 0.2135,
      "step": 76
    },
    {
      "epoch": 0.1506849315068493,
      "grad_norm": 5.593003578595402,
      "learning_rate": 9.860590983145307e-06,
      "loss": 0.2943,
      "step": 77
    },
    {
      "epoch": 0.15264187866927592,
      "grad_norm": 5.663196218735621,
      "learning_rate": 9.85696393222957e-06,
      "loss": 0.2448,
      "step": 78
    },
    {
      "epoch": 0.15459882583170254,
      "grad_norm": 6.154278820420972,
      "learning_rate": 9.853290986618604e-06,
      "loss": 0.2325,
      "step": 79
    },
    {
      "epoch": 0.15655577299412915,
      "grad_norm": 5.242696275873486,
      "learning_rate": 9.849572181019008e-06,
      "loss": 0.2776,
      "step": 80
    },
    {
      "epoch": 0.15851272015655576,
      "grad_norm": 6.17430903893526,
      "learning_rate": 9.845807550570727e-06,
      "loss": 0.3252,
      "step": 81
    },
    {
      "epoch": 0.16046966731898238,
      "grad_norm": 5.93388012276641,
      "learning_rate": 9.84199713084672e-06,
      "loss": 0.2422,
      "step": 82
    },
    {
      "epoch": 0.162426614481409,
      "grad_norm": 5.875943289232853,
      "learning_rate": 9.83814095785262e-06,
      "loss": 0.2782,
      "step": 83
    },
    {
      "epoch": 0.1643835616438356,
      "grad_norm": 6.34069274360398,
      "learning_rate": 9.834239068026388e-06,
      "loss": 0.2758,
      "step": 84
    },
    {
      "epoch": 0.16634050880626222,
      "grad_norm": 7.78971985203112,
      "learning_rate": 9.830291498237983e-06,
      "loss": 0.2625,
      "step": 85
    },
    {
      "epoch": 0.16829745596868884,
      "grad_norm": 4.243729570884694,
      "learning_rate": 9.826298285789002e-06,
      "loss": 0.2296,
      "step": 86
    },
    {
      "epoch": 0.17025440313111545,
      "grad_norm": 5.784895019809536,
      "learning_rate": 9.822259468412329e-06,
      "loss": 0.2363,
      "step": 87
    },
    {
      "epoch": 0.17221135029354206,
      "grad_norm": 6.620430938023602,
      "learning_rate": 9.818175084271786e-06,
      "loss": 0.3048,
      "step": 88
    },
    {
      "epoch": 0.17416829745596868,
      "grad_norm": 5.435473945928452,
      "learning_rate": 9.814045171961762e-06,
      "loss": 0.2981,
      "step": 89
    },
    {
      "epoch": 0.1761252446183953,
      "grad_norm": 5.504988408256857,
      "learning_rate": 9.809869770506855e-06,
      "loss": 0.2813,
      "step": 90
    },
    {
      "epoch": 0.1780821917808219,
      "grad_norm": 5.723417251107196,
      "learning_rate": 9.805648919361505e-06,
      "loss": 0.2314,
      "step": 91
    },
    {
      "epoch": 0.18003913894324852,
      "grad_norm": 4.844982045481805,
      "learning_rate": 9.801382658409611e-06,
      "loss": 0.2754,
      "step": 92
    },
    {
      "epoch": 0.18199608610567514,
      "grad_norm": 6.478194419656021,
      "learning_rate": 9.797071027964166e-06,
      "loss": 0.2831,
      "step": 93
    },
    {
      "epoch": 0.18395303326810175,
      "grad_norm": 5.739871469015499,
      "learning_rate": 9.792714068766872e-06,
      "loss": 0.2976,
      "step": 94
    },
    {
      "epoch": 0.18590998043052837,
      "grad_norm": 5.053352378941711,
      "learning_rate": 9.788311821987748e-06,
      "loss": 0.2264,
      "step": 95
    },
    {
      "epoch": 0.18786692759295498,
      "grad_norm": 4.664481662870994,
      "learning_rate": 9.783864329224752e-06,
      "loss": 0.2403,
      "step": 96
    },
    {
      "epoch": 0.1898238747553816,
      "grad_norm": 5.483018961200942,
      "learning_rate": 9.779371632503382e-06,
      "loss": 0.3052,
      "step": 97
    },
    {
      "epoch": 0.1917808219178082,
      "grad_norm": 6.038012132652183,
      "learning_rate": 9.774833774276278e-06,
      "loss": 0.3167,
      "step": 98
    },
    {
      "epoch": 0.19373776908023482,
      "grad_norm": 6.283062136799681,
      "learning_rate": 9.770250797422822e-06,
      "loss": 0.2652,
      "step": 99
    },
    {
      "epoch": 0.19569471624266144,
      "grad_norm": 5.761938553849526,
      "learning_rate": 9.765622745248739e-06,
      "loss": 0.2503,
      "step": 100
    },
    {
      "epoch": 0.19765166340508805,
      "grad_norm": 5.857550971678673,
      "learning_rate": 9.760949661485676e-06,
      "loss": 0.2477,
      "step": 101
    },
    {
      "epoch": 0.19960861056751467,
      "grad_norm": 5.23520330139525,
      "learning_rate": 9.7562315902908e-06,
      "loss": 0.2424,
      "step": 102
    },
    {
      "epoch": 0.20156555772994128,
      "grad_norm": 5.425300843199305,
      "learning_rate": 9.75146857624637e-06,
      "loss": 0.2488,
      "step": 103
    },
    {
      "epoch": 0.2035225048923679,
      "grad_norm": 4.774938800248574,
      "learning_rate": 9.746660664359326e-06,
      "loss": 0.2597,
      "step": 104
    },
    {
      "epoch": 0.2054794520547945,
      "grad_norm": 6.015137146066156,
      "learning_rate": 9.741807900060858e-06,
      "loss": 0.2876,
      "step": 105
    },
    {
      "epoch": 0.20743639921722112,
      "grad_norm": 6.927162355493675,
      "learning_rate": 9.73691032920598e-06,
      "loss": 0.2645,
      "step": 106
    },
    {
      "epoch": 0.20939334637964774,
      "grad_norm": 6.180417248433654,
      "learning_rate": 9.731967998073087e-06,
      "loss": 0.244,
      "step": 107
    },
    {
      "epoch": 0.21135029354207435,
      "grad_norm": 5.299310131247702,
      "learning_rate": 9.726980953363536e-06,
      "loss": 0.2531,
      "step": 108
    },
    {
      "epoch": 0.21330724070450097,
      "grad_norm": 4.998447982850896,
      "learning_rate": 9.721949242201185e-06,
      "loss": 0.2767,
      "step": 109
    },
    {
      "epoch": 0.21526418786692758,
      "grad_norm": 5.444310839375347,
      "learning_rate": 9.716872912131964e-06,
      "loss": 0.2563,
      "step": 110
    },
    {
      "epoch": 0.2172211350293542,
      "grad_norm": 5.837231676548247,
      "learning_rate": 9.711752011123412e-06,
      "loss": 0.2925,
      "step": 111
    },
    {
      "epoch": 0.2191780821917808,
      "grad_norm": 4.717591595402819,
      "learning_rate": 9.706586587564236e-06,
      "loss": 0.2087,
      "step": 112
    },
    {
      "epoch": 0.22113502935420742,
      "grad_norm": 5.701829838804545,
      "learning_rate": 9.701376690263845e-06,
      "loss": 0.2878,
      "step": 113
    },
    {
      "epoch": 0.22309197651663404,
      "grad_norm": 5.724677660646745,
      "learning_rate": 9.696122368451887e-06,
      "loss": 0.2854,
      "step": 114
    },
    {
      "epoch": 0.22504892367906065,
      "grad_norm": 5.118274023754872,
      "learning_rate": 9.690823671777799e-06,
      "loss": 0.256,
      "step": 115
    },
    {
      "epoch": 0.22700587084148727,
      "grad_norm": 3.938130169670896,
      "learning_rate": 9.685480650310319e-06,
      "loss": 0.1557,
      "step": 116
    },
    {
      "epoch": 0.22896281800391388,
      "grad_norm": 5.076858526954786,
      "learning_rate": 9.680093354537027e-06,
      "loss": 0.2422,
      "step": 117
    },
    {
      "epoch": 0.2309197651663405,
      "grad_norm": 6.560356514563373,
      "learning_rate": 9.67466183536386e-06,
      "loss": 0.3129,
      "step": 118
    },
    {
      "epoch": 0.2328767123287671,
      "grad_norm": 4.919654384546219,
      "learning_rate": 9.669186144114627e-06,
      "loss": 0.1956,
      "step": 119
    },
    {
      "epoch": 0.23483365949119372,
      "grad_norm": 6.230884483756757,
      "learning_rate": 9.663666332530541e-06,
      "loss": 0.3222,
      "step": 120
    },
    {
      "epoch": 0.23679060665362034,
      "grad_norm": 4.922354044501764,
      "learning_rate": 9.658102452769712e-06,
      "loss": 0.2828,
      "step": 121
    },
    {
      "epoch": 0.23874755381604695,
      "grad_norm": 4.86433412342521,
      "learning_rate": 9.652494557406666e-06,
      "loss": 0.2771,
      "step": 122
    },
    {
      "epoch": 0.24070450097847357,
      "grad_norm": 5.416844861972858,
      "learning_rate": 9.646842699431842e-06,
      "loss": 0.3725,
      "step": 123
    },
    {
      "epoch": 0.24266144814090018,
      "grad_norm": 5.891369991275679,
      "learning_rate": 9.641146932251088e-06,
      "loss": 0.2779,
      "step": 124
    },
    {
      "epoch": 0.2446183953033268,
      "grad_norm": 4.855710972259783,
      "learning_rate": 9.635407309685171e-06,
      "loss": 0.2033,
      "step": 125
    },
    {
      "epoch": 0.2465753424657534,
      "grad_norm": 5.772237582276315,
      "learning_rate": 9.62962388596925e-06,
      "loss": 0.2775,
      "step": 126
    },
    {
      "epoch": 0.24853228962818003,
      "grad_norm": 4.510173078246663,
      "learning_rate": 9.623796715752378e-06,
      "loss": 0.1748,
      "step": 127
    },
    {
      "epoch": 0.25048923679060664,
      "grad_norm": 5.341101677129826,
      "learning_rate": 9.617925854096975e-06,
      "loss": 0.2786,
      "step": 128
    },
    {
      "epoch": 0.25244618395303325,
      "grad_norm": 5.6482462923379355,
      "learning_rate": 9.61201135647832e-06,
      "loss": 0.2587,
      "step": 129
    },
    {
      "epoch": 0.25440313111545987,
      "grad_norm": 6.042162277944827,
      "learning_rate": 9.606053278784009e-06,
      "loss": 0.3171,
      "step": 130
    },
    {
      "epoch": 0.2563600782778865,
      "grad_norm": 5.691666334883734,
      "learning_rate": 9.600051677313446e-06,
      "loss": 0.2746,
      "step": 131
    },
    {
      "epoch": 0.2583170254403131,
      "grad_norm": 5.630737269908476,
      "learning_rate": 9.5940066087773e-06,
      "loss": 0.2832,
      "step": 132
    },
    {
      "epoch": 0.2602739726027397,
      "grad_norm": 5.277163708538677,
      "learning_rate": 9.587918130296969e-06,
      "loss": 0.232,
      "step": 133
    },
    {
      "epoch": 0.2622309197651663,
      "grad_norm": 6.979775309520188,
      "learning_rate": 9.581786299404046e-06,
      "loss": 0.2367,
      "step": 134
    },
    {
      "epoch": 0.26418786692759294,
      "grad_norm": 4.5526212557467645,
      "learning_rate": 9.57561117403977e-06,
      "loss": 0.2776,
      "step": 135
    },
    {
      "epoch": 0.26614481409001955,
      "grad_norm": 5.06956402393906,
      "learning_rate": 9.56939281255448e-06,
      "loss": 0.2223,
      "step": 136
    },
    {
      "epoch": 0.26810176125244617,
      "grad_norm": 6.846119182348714,
      "learning_rate": 9.563131273707065e-06,
      "loss": 0.299,
      "step": 137
    },
    {
      "epoch": 0.2700587084148728,
      "grad_norm": 5.252278776367356,
      "learning_rate": 9.556826616664408e-06,
      "loss": 0.3141,
      "step": 138
    },
    {
      "epoch": 0.2720156555772994,
      "grad_norm": 5.4862625993155625,
      "learning_rate": 9.550478901000827e-06,
      "loss": 0.253,
      "step": 139
    },
    {
      "epoch": 0.273972602739726,
      "grad_norm": 7.124130613611975,
      "learning_rate": 9.544088186697515e-06,
      "loss": 0.3218,
      "step": 140
    },
    {
      "epoch": 0.2759295499021526,
      "grad_norm": 4.7634020084720605,
      "learning_rate": 9.537654534141964e-06,
      "loss": 0.2266,
      "step": 141
    },
    {
      "epoch": 0.27788649706457924,
      "grad_norm": 5.727440796019865,
      "learning_rate": 9.531178004127404e-06,
      "loss": 0.2892,
      "step": 142
    },
    {
      "epoch": 0.27984344422700586,
      "grad_norm": 5.556704448929323,
      "learning_rate": 9.524658657852223e-06,
      "loss": 0.2595,
      "step": 143
    },
    {
      "epoch": 0.28180039138943247,
      "grad_norm": 4.625534332939895,
      "learning_rate": 9.518096556919396e-06,
      "loss": 0.2785,
      "step": 144
    },
    {
      "epoch": 0.2837573385518591,
      "grad_norm": 5.565184064248509,
      "learning_rate": 9.511491763335893e-06,
      "loss": 0.2725,
      "step": 145
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 5.613977383750768,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.2782,
      "step": 146
    },
    {
      "epoch": 0.2876712328767123,
      "grad_norm": 5.481384294931405,
      "learning_rate": 9.498154348261217e-06,
      "loss": 0.2612,
      "step": 147
    },
    {
      "epoch": 0.2896281800391389,
      "grad_norm": 4.049851143771165,
      "learning_rate": 9.491421852798695e-06,
      "loss": 0.2256,
      "step": 148
    },
    {
      "epoch": 0.29158512720156554,
      "grad_norm": 4.794544342250457,
      "learning_rate": 9.484646916741602e-06,
      "loss": 0.2423,
      "step": 149
    },
    {
      "epoch": 0.29354207436399216,
      "grad_norm": 5.0136505926303245,
      "learning_rate": 9.477829604108044e-06,
      "loss": 0.2347,
      "step": 150
    },
    {
      "epoch": 0.29549902152641877,
      "grad_norm": 6.5855311194671415,
      "learning_rate": 9.47096997931655e-06,
      "loss": 0.296,
      "step": 151
    },
    {
      "epoch": 0.2974559686888454,
      "grad_norm": 4.161372277191444,
      "learning_rate": 9.464068107185476e-06,
      "loss": 0.2194,
      "step": 152
    },
    {
      "epoch": 0.299412915851272,
      "grad_norm": 5.669155044409392,
      "learning_rate": 9.45712405293237e-06,
      "loss": 0.3238,
      "step": 153
    },
    {
      "epoch": 0.3013698630136986,
      "grad_norm": 6.15037175844158,
      "learning_rate": 9.450137882173385e-06,
      "loss": 0.3291,
      "step": 154
    },
    {
      "epoch": 0.30332681017612523,
      "grad_norm": 5.902065678073617,
      "learning_rate": 9.443109660922632e-06,
      "loss": 0.2565,
      "step": 155
    },
    {
      "epoch": 0.30528375733855184,
      "grad_norm": 5.189446349811056,
      "learning_rate": 9.436039455591574e-06,
      "loss": 0.2447,
      "step": 156
    },
    {
      "epoch": 0.30724070450097846,
      "grad_norm": 4.903593592755765,
      "learning_rate": 9.42892733298839e-06,
      "loss": 0.2647,
      "step": 157
    },
    {
      "epoch": 0.30919765166340507,
      "grad_norm": 5.850034454844104,
      "learning_rate": 9.421773360317348e-06,
      "loss": 0.2883,
      "step": 158
    },
    {
      "epoch": 0.3111545988258317,
      "grad_norm": 5.435378749438725,
      "learning_rate": 9.414577605178166e-06,
      "loss": 0.3074,
      "step": 159
    },
    {
      "epoch": 0.3131115459882583,
      "grad_norm": 5.049945823422824,
      "learning_rate": 9.407340135565375e-06,
      "loss": 0.3051,
      "step": 160
    },
    {
      "epoch": 0.3150684931506849,
      "grad_norm": 6.187918833681916,
      "learning_rate": 9.40006101986768e-06,
      "loss": 0.29,
      "step": 161
    },
    {
      "epoch": 0.31702544031311153,
      "grad_norm": 4.781711044784747,
      "learning_rate": 9.392740326867304e-06,
      "loss": 0.2813,
      "step": 162
    },
    {
      "epoch": 0.31898238747553814,
      "grad_norm": 4.728635001273645,
      "learning_rate": 9.385378125739352e-06,
      "loss": 0.284,
      "step": 163
    },
    {
      "epoch": 0.32093933463796476,
      "grad_norm": 4.709309410031883,
      "learning_rate": 9.377974486051149e-06,
      "loss": 0.2726,
      "step": 164
    },
    {
      "epoch": 0.32289628180039137,
      "grad_norm": 5.224313091563094,
      "learning_rate": 9.370529477761574e-06,
      "loss": 0.2353,
      "step": 165
    },
    {
      "epoch": 0.324853228962818,
      "grad_norm": 5.751137934359563,
      "learning_rate": 9.363043171220423e-06,
      "loss": 0.2371,
      "step": 166
    },
    {
      "epoch": 0.3268101761252446,
      "grad_norm": 4.847840299855201,
      "learning_rate": 9.355515637167719e-06,
      "loss": 0.2713,
      "step": 167
    },
    {
      "epoch": 0.3287671232876712,
      "grad_norm": 5.854360333871478,
      "learning_rate": 9.347946946733055e-06,
      "loss": 0.2926,
      "step": 168
    },
    {
      "epoch": 0.33072407045009783,
      "grad_norm": 4.9493020794201,
      "learning_rate": 9.34033717143493e-06,
      "loss": 0.2819,
      "step": 169
    },
    {
      "epoch": 0.33268101761252444,
      "grad_norm": 5.778580342531429,
      "learning_rate": 9.332686383180055e-06,
      "loss": 0.2755,
      "step": 170
    },
    {
      "epoch": 0.33463796477495106,
      "grad_norm": 6.694433155681199,
      "learning_rate": 9.324994654262687e-06,
      "loss": 0.3378,
      "step": 171
    },
    {
      "epoch": 0.33659491193737767,
      "grad_norm": 5.681913374887109,
      "learning_rate": 9.31726205736394e-06,
      "loss": 0.2824,
      "step": 172
    },
    {
      "epoch": 0.3385518590998043,
      "grad_norm": 5.883542702227213,
      "learning_rate": 9.309488665551104e-06,
      "loss": 0.26,
      "step": 173
    },
    {
      "epoch": 0.3405088062622309,
      "grad_norm": 5.2460438970810825,
      "learning_rate": 9.301674552276942e-06,
      "loss": 0.249,
      "step": 174
    },
    {
      "epoch": 0.3424657534246575,
      "grad_norm": 5.564804914389966,
      "learning_rate": 9.293819791379016e-06,
      "loss": 0.2639,
      "step": 175
    },
    {
      "epoch": 0.34442270058708413,
      "grad_norm": 4.421871659920564,
      "learning_rate": 9.28592445707897e-06,
      "loss": 0.2126,
      "step": 176
    },
    {
      "epoch": 0.34637964774951074,
      "grad_norm": 5.492265714431881,
      "learning_rate": 9.277988623981837e-06,
      "loss": 0.2265,
      "step": 177
    },
    {
      "epoch": 0.34833659491193736,
      "grad_norm": 6.263458751760306,
      "learning_rate": 9.270012367075337e-06,
      "loss": 0.2711,
      "step": 178
    },
    {
      "epoch": 0.350293542074364,
      "grad_norm": 6.443254593721858,
      "learning_rate": 9.26199576172916e-06,
      "loss": 0.329,
      "step": 179
    },
    {
      "epoch": 0.3522504892367906,
      "grad_norm": 4.651778294744157,
      "learning_rate": 9.253938883694266e-06,
      "loss": 0.2764,
      "step": 180
    },
    {
      "epoch": 0.3542074363992172,
      "grad_norm": 4.531134647397048,
      "learning_rate": 9.245841809102156e-06,
      "loss": 0.2578,
      "step": 181
    },
    {
      "epoch": 0.3561643835616438,
      "grad_norm": 6.574771938039552,
      "learning_rate": 9.237704614464157e-06,
      "loss": 0.2813,
      "step": 182
    },
    {
      "epoch": 0.35812133072407043,
      "grad_norm": 4.520737006800694,
      "learning_rate": 9.229527376670706e-06,
      "loss": 0.2229,
      "step": 183
    },
    {
      "epoch": 0.36007827788649704,
      "grad_norm": 4.754536314873896,
      "learning_rate": 9.221310172990616e-06,
      "loss": 0.2098,
      "step": 184
    },
    {
      "epoch": 0.36203522504892366,
      "grad_norm": 4.692996483156187,
      "learning_rate": 9.213053081070346e-06,
      "loss": 0.233,
      "step": 185
    },
    {
      "epoch": 0.3639921722113503,
      "grad_norm": 4.691452698767112,
      "learning_rate": 9.204756178933274e-06,
      "loss": 0.2153,
      "step": 186
    },
    {
      "epoch": 0.3659491193737769,
      "grad_norm": 4.593885336342877,
      "learning_rate": 9.19641954497895e-06,
      "loss": 0.2062,
      "step": 187
    },
    {
      "epoch": 0.3679060665362035,
      "grad_norm": 5.271133644958324,
      "learning_rate": 9.18804325798236e-06,
      "loss": 0.3188,
      "step": 188
    },
    {
      "epoch": 0.3698630136986301,
      "grad_norm": 4.509657738199191,
      "learning_rate": 9.179627397093184e-06,
      "loss": 0.2324,
      "step": 189
    },
    {
      "epoch": 0.37181996086105673,
      "grad_norm": 5.567651948773206,
      "learning_rate": 9.171172041835048e-06,
      "loss": 0.229,
      "step": 190
    },
    {
      "epoch": 0.37377690802348335,
      "grad_norm": 4.504381483531835,
      "learning_rate": 9.162677272104767e-06,
      "loss": 0.2551,
      "step": 191
    },
    {
      "epoch": 0.37573385518590996,
      "grad_norm": 5.115664838222219,
      "learning_rate": 9.154143168171594e-06,
      "loss": 0.226,
      "step": 192
    },
    {
      "epoch": 0.3776908023483366,
      "grad_norm": 4.607384131535977,
      "learning_rate": 9.145569810676458e-06,
      "loss": 0.2021,
      "step": 193
    },
    {
      "epoch": 0.3796477495107632,
      "grad_norm": 4.744013992722361,
      "learning_rate": 9.136957280631212e-06,
      "loss": 0.1907,
      "step": 194
    },
    {
      "epoch": 0.3816046966731898,
      "grad_norm": 5.189694479002932,
      "learning_rate": 9.128305659417856e-06,
      "loss": 0.249,
      "step": 195
    },
    {
      "epoch": 0.3835616438356164,
      "grad_norm": 4.348009572465306,
      "learning_rate": 9.119615028787771e-06,
      "loss": 0.2344,
      "step": 196
    },
    {
      "epoch": 0.38551859099804303,
      "grad_norm": 5.320265170382303,
      "learning_rate": 9.110885470860953e-06,
      "loss": 0.2835,
      "step": 197
    },
    {
      "epoch": 0.38747553816046965,
      "grad_norm": 5.318427681518386,
      "learning_rate": 9.102117068125227e-06,
      "loss": 0.2806,
      "step": 198
    },
    {
      "epoch": 0.38943248532289626,
      "grad_norm": 4.50647765168503,
      "learning_rate": 9.093309903435475e-06,
      "loss": 0.2501,
      "step": 199
    },
    {
      "epoch": 0.3913894324853229,
      "grad_norm": 5.636571110464994,
      "learning_rate": 9.084464060012849e-06,
      "loss": 0.3018,
      "step": 200
    },
    {
      "epoch": 0.3933463796477495,
      "grad_norm": 4.904681244771519,
      "learning_rate": 9.075579621443991e-06,
      "loss": 0.2047,
      "step": 201
    },
    {
      "epoch": 0.3953033268101761,
      "grad_norm": 5.710961572299329,
      "learning_rate": 9.066656671680231e-06,
      "loss": 0.2406,
      "step": 202
    },
    {
      "epoch": 0.3972602739726027,
      "grad_norm": 5.65567878880531,
      "learning_rate": 9.057695295036806e-06,
      "loss": 0.2569,
      "step": 203
    },
    {
      "epoch": 0.39921722113502933,
      "grad_norm": 5.981523521470041,
      "learning_rate": 9.048695576192058e-06,
      "loss": 0.2822,
      "step": 204
    },
    {
      "epoch": 0.40117416829745595,
      "grad_norm": 4.7183129513555615,
      "learning_rate": 9.039657600186633e-06,
      "loss": 0.2325,
      "step": 205
    },
    {
      "epoch": 0.40313111545988256,
      "grad_norm": 5.2045773706648175,
      "learning_rate": 9.03058145242268e-06,
      "loss": 0.2568,
      "step": 206
    },
    {
      "epoch": 0.4050880626223092,
      "grad_norm": 4.740030279167598,
      "learning_rate": 9.021467218663042e-06,
      "loss": 0.2431,
      "step": 207
    },
    {
      "epoch": 0.4070450097847358,
      "grad_norm": 5.002550250045506,
      "learning_rate": 9.012314985030445e-06,
      "loss": 0.234,
      "step": 208
    },
    {
      "epoch": 0.4090019569471624,
      "grad_norm": 5.7230181148868535,
      "learning_rate": 9.003124838006689e-06,
      "loss": 0.285,
      "step": 209
    },
    {
      "epoch": 0.410958904109589,
      "grad_norm": 3.9032699082087285,
      "learning_rate": 8.993896864431825e-06,
      "loss": 0.196,
      "step": 210
    },
    {
      "epoch": 0.41291585127201563,
      "grad_norm": 5.812099974849448,
      "learning_rate": 8.984631151503335e-06,
      "loss": 0.2184,
      "step": 211
    },
    {
      "epoch": 0.41487279843444225,
      "grad_norm": 4.914010824108234,
      "learning_rate": 8.975327786775316e-06,
      "loss": 0.2364,
      "step": 212
    },
    {
      "epoch": 0.41682974559686886,
      "grad_norm": 6.410176085952657,
      "learning_rate": 8.965986858157636e-06,
      "loss": 0.2692,
      "step": 213
    },
    {
      "epoch": 0.4187866927592955,
      "grad_norm": 6.118029036856543,
      "learning_rate": 8.956608453915126e-06,
      "loss": 0.2996,
      "step": 214
    },
    {
      "epoch": 0.4207436399217221,
      "grad_norm": 4.936422884416708,
      "learning_rate": 8.947192662666724e-06,
      "loss": 0.2215,
      "step": 215
    },
    {
      "epoch": 0.4227005870841487,
      "grad_norm": 5.893330849083552,
      "learning_rate": 8.937739573384653e-06,
      "loss": 0.2149,
      "step": 216
    },
    {
      "epoch": 0.4246575342465753,
      "grad_norm": 5.159883518384683,
      "learning_rate": 8.928249275393572e-06,
      "loss": 0.3036,
      "step": 217
    },
    {
      "epoch": 0.42661448140900193,
      "grad_norm": 5.810151369391932,
      "learning_rate": 8.918721858369738e-06,
      "loss": 0.1827,
      "step": 218
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 4.895357225298453,
      "learning_rate": 8.90915741234015e-06,
      "loss": 0.2863,
      "step": 219
    },
    {
      "epoch": 0.43052837573385516,
      "grad_norm": 4.9403025942460586,
      "learning_rate": 8.899556027681708e-06,
      "loss": 0.2322,
      "step": 220
    },
    {
      "epoch": 0.4324853228962818,
      "grad_norm": 5.1940958898461025,
      "learning_rate": 8.88991779512035e-06,
      "loss": 0.2523,
      "step": 221
    },
    {
      "epoch": 0.4344422700587084,
      "grad_norm": 4.959484694271567,
      "learning_rate": 8.880242805730208e-06,
      "loss": 0.2388,
      "step": 222
    },
    {
      "epoch": 0.436399217221135,
      "grad_norm": 5.129438202619225,
      "learning_rate": 8.870531150932727e-06,
      "loss": 0.3057,
      "step": 223
    },
    {
      "epoch": 0.4383561643835616,
      "grad_norm": 6.3076613748173225,
      "learning_rate": 8.860782922495821e-06,
      "loss": 0.2142,
      "step": 224
    },
    {
      "epoch": 0.44031311154598823,
      "grad_norm": 4.594840857930758,
      "learning_rate": 8.850998212532998e-06,
      "loss": 0.2789,
      "step": 225
    },
    {
      "epoch": 0.44227005870841485,
      "grad_norm": 4.876067591210402,
      "learning_rate": 8.84117711350248e-06,
      "loss": 0.2484,
      "step": 226
    },
    {
      "epoch": 0.44422700587084146,
      "grad_norm": 4.972465123245647,
      "learning_rate": 8.831319718206353e-06,
      "loss": 0.224,
      "step": 227
    },
    {
      "epoch": 0.4461839530332681,
      "grad_norm": 6.575275531292241,
      "learning_rate": 8.821426119789662e-06,
      "loss": 0.2526,
      "step": 228
    },
    {
      "epoch": 0.4481409001956947,
      "grad_norm": 5.465633429296181,
      "learning_rate": 8.811496411739552e-06,
      "loss": 0.2559,
      "step": 229
    },
    {
      "epoch": 0.4500978473581213,
      "grad_norm": 5.67986380171748,
      "learning_rate": 8.801530687884378e-06,
      "loss": 0.3078,
      "step": 230
    },
    {
      "epoch": 0.4520547945205479,
      "grad_norm": 5.078588674927216,
      "learning_rate": 8.791529042392813e-06,
      "loss": 0.2229,
      "step": 231
    },
    {
      "epoch": 0.45401174168297453,
      "grad_norm": 6.402099200697542,
      "learning_rate": 8.781491569772966e-06,
      "loss": 0.3283,
      "step": 232
    },
    {
      "epoch": 0.45596868884540115,
      "grad_norm": 4.544628312898552,
      "learning_rate": 8.771418364871483e-06,
      "loss": 0.2386,
      "step": 233
    },
    {
      "epoch": 0.45792563600782776,
      "grad_norm": 4.311958716206395,
      "learning_rate": 8.761309522872657e-06,
      "loss": 0.2056,
      "step": 234
    },
    {
      "epoch": 0.4598825831702544,
      "grad_norm": 5.002176886988587,
      "learning_rate": 8.751165139297522e-06,
      "loss": 0.3168,
      "step": 235
    },
    {
      "epoch": 0.461839530332681,
      "grad_norm": 5.235804483837322,
      "learning_rate": 8.740985310002956e-06,
      "loss": 0.2048,
      "step": 236
    },
    {
      "epoch": 0.4637964774951076,
      "grad_norm": 4.3444962298108445,
      "learning_rate": 8.730770131180771e-06,
      "loss": 0.1882,
      "step": 237
    },
    {
      "epoch": 0.4657534246575342,
      "grad_norm": 5.33614972698478,
      "learning_rate": 8.720519699356804e-06,
      "loss": 0.2515,
      "step": 238
    },
    {
      "epoch": 0.46771037181996084,
      "grad_norm": 4.396665715893306,
      "learning_rate": 8.71023411139001e-06,
      "loss": 0.1844,
      "step": 239
    },
    {
      "epoch": 0.46966731898238745,
      "grad_norm": 4.8914502962160835,
      "learning_rate": 8.699913464471543e-06,
      "loss": 0.2352,
      "step": 240
    },
    {
      "epoch": 0.47162426614481406,
      "grad_norm": 4.485944326712265,
      "learning_rate": 8.689557856123838e-06,
      "loss": 0.2345,
      "step": 241
    },
    {
      "epoch": 0.4735812133072407,
      "grad_norm": 4.291815888926778,
      "learning_rate": 8.679167384199686e-06,
      "loss": 0.1953,
      "step": 242
    },
    {
      "epoch": 0.4755381604696673,
      "grad_norm": 5.564308049824509,
      "learning_rate": 8.668742146881316e-06,
      "loss": 0.2431,
      "step": 243
    },
    {
      "epoch": 0.4774951076320939,
      "grad_norm": 5.3700516394519795,
      "learning_rate": 8.658282242679461e-06,
      "loss": 0.2432,
      "step": 244
    },
    {
      "epoch": 0.4794520547945205,
      "grad_norm": 4.40303549855684,
      "learning_rate": 8.647787770432439e-06,
      "loss": 0.1873,
      "step": 245
    },
    {
      "epoch": 0.48140900195694714,
      "grad_norm": 5.897955839198346,
      "learning_rate": 8.6372588293052e-06,
      "loss": 0.2986,
      "step": 246
    },
    {
      "epoch": 0.48336594911937375,
      "grad_norm": 5.224974777320559,
      "learning_rate": 8.626695518788403e-06,
      "loss": 0.2447,
      "step": 247
    },
    {
      "epoch": 0.48532289628180036,
      "grad_norm": 5.855146535022888,
      "learning_rate": 8.616097938697476e-06,
      "loss": 0.2739,
      "step": 248
    },
    {
      "epoch": 0.487279843444227,
      "grad_norm": 4.986122616494966,
      "learning_rate": 8.60546618917166e-06,
      "loss": 0.2653,
      "step": 249
    },
    {
      "epoch": 0.4892367906066536,
      "grad_norm": 6.422196949208445,
      "learning_rate": 8.594800370673083e-06,
      "loss": 0.3408,
      "step": 250
    },
    {
      "epoch": 0.4911937377690802,
      "grad_norm": 5.538809884516984,
      "learning_rate": 8.584100583985791e-06,
      "loss": 0.2853,
      "step": 251
    },
    {
      "epoch": 0.4931506849315068,
      "grad_norm": 4.44743719894818,
      "learning_rate": 8.573366930214807e-06,
      "loss": 0.2328,
      "step": 252
    },
    {
      "epoch": 0.49510763209393344,
      "grad_norm": 5.552388833737808,
      "learning_rate": 8.562599510785171e-06,
      "loss": 0.2705,
      "step": 253
    },
    {
      "epoch": 0.49706457925636005,
      "grad_norm": 4.782572954482146,
      "learning_rate": 8.551798427440985e-06,
      "loss": 0.2652,
      "step": 254
    },
    {
      "epoch": 0.49902152641878667,
      "grad_norm": 4.817005628317594,
      "learning_rate": 8.540963782244455e-06,
      "loss": 0.2206,
      "step": 255
    },
    {
      "epoch": 0.5009784735812133,
      "grad_norm": 6.020012232779313,
      "learning_rate": 8.53009567757491e-06,
      "loss": 0.2779,
      "step": 256
    },
    {
      "epoch": 0.50293542074364,
      "grad_norm": 4.56085390435194,
      "learning_rate": 8.519194216127854e-06,
      "loss": 0.228,
      "step": 257
    },
    {
      "epoch": 0.5048923679060665,
      "grad_norm": 3.6402329265262967,
      "learning_rate": 8.50825950091399e-06,
      "loss": 0.2014,
      "step": 258
    },
    {
      "epoch": 0.5068493150684932,
      "grad_norm": 5.573641615856577,
      "learning_rate": 8.497291635258235e-06,
      "loss": 0.3235,
      "step": 259
    },
    {
      "epoch": 0.5088062622309197,
      "grad_norm": 5.681240423888136,
      "learning_rate": 8.486290722798765e-06,
      "loss": 0.2814,
      "step": 260
    },
    {
      "epoch": 0.5107632093933464,
      "grad_norm": 4.836103032462812,
      "learning_rate": 8.475256867486011e-06,
      "loss": 0.2158,
      "step": 261
    },
    {
      "epoch": 0.512720156555773,
      "grad_norm": 4.411439320367133,
      "learning_rate": 8.464190173581698e-06,
      "loss": 0.2113,
      "step": 262
    },
    {
      "epoch": 0.5146771037181996,
      "grad_norm": 4.231683865481744,
      "learning_rate": 8.45309074565785e-06,
      "loss": 0.1914,
      "step": 263
    },
    {
      "epoch": 0.5166340508806262,
      "grad_norm": 4.822080175059688,
      "learning_rate": 8.441958688595802e-06,
      "loss": 0.2532,
      "step": 264
    },
    {
      "epoch": 0.5185909980430529,
      "grad_norm": 4.335907291256831,
      "learning_rate": 8.430794107585208e-06,
      "loss": 0.2365,
      "step": 265
    },
    {
      "epoch": 0.5205479452054794,
      "grad_norm": 6.232439367741047,
      "learning_rate": 8.419597108123054e-06,
      "loss": 0.3096,
      "step": 266
    },
    {
      "epoch": 0.5225048923679061,
      "grad_norm": 4.765536983931931,
      "learning_rate": 8.408367796012652e-06,
      "loss": 0.246,
      "step": 267
    },
    {
      "epoch": 0.5244618395303327,
      "grad_norm": 4.508149938894216,
      "learning_rate": 8.397106277362647e-06,
      "loss": 0.2356,
      "step": 268
    },
    {
      "epoch": 0.5264187866927593,
      "grad_norm": 6.090049747836594,
      "learning_rate": 8.38581265858601e-06,
      "loss": 0.2319,
      "step": 269
    },
    {
      "epoch": 0.5283757338551859,
      "grad_norm": 5.342498467703633,
      "learning_rate": 8.374487046399035e-06,
      "loss": 0.2691,
      "step": 270
    },
    {
      "epoch": 0.5303326810176126,
      "grad_norm": 5.072303818523408,
      "learning_rate": 8.363129547820333e-06,
      "loss": 0.2636,
      "step": 271
    },
    {
      "epoch": 0.5322896281800391,
      "grad_norm": 5.3881835040068005,
      "learning_rate": 8.35174027016981e-06,
      "loss": 0.2319,
      "step": 272
    },
    {
      "epoch": 0.5342465753424658,
      "grad_norm": 6.14188513953673,
      "learning_rate": 8.340319321067668e-06,
      "loss": 0.3033,
      "step": 273
    },
    {
      "epoch": 0.5362035225048923,
      "grad_norm": 4.92571616923771,
      "learning_rate": 8.328866808433378e-06,
      "loss": 0.2544,
      "step": 274
    },
    {
      "epoch": 0.538160469667319,
      "grad_norm": 4.547265931225427,
      "learning_rate": 8.317382840484663e-06,
      "loss": 0.2602,
      "step": 275
    },
    {
      "epoch": 0.5401174168297456,
      "grad_norm": 5.407139213186438,
      "learning_rate": 8.305867525736475e-06,
      "loss": 0.2618,
      "step": 276
    },
    {
      "epoch": 0.5420743639921722,
      "grad_norm": 4.479304829656101,
      "learning_rate": 8.29432097299997e-06,
      "loss": 0.2237,
      "step": 277
    },
    {
      "epoch": 0.5440313111545988,
      "grad_norm": 4.702989254247259,
      "learning_rate": 8.28274329138148e-06,
      "loss": 0.2748,
      "step": 278
    },
    {
      "epoch": 0.5459882583170255,
      "grad_norm": 5.720048816141749,
      "learning_rate": 8.271134590281482e-06,
      "loss": 0.2559,
      "step": 279
    },
    {
      "epoch": 0.547945205479452,
      "grad_norm": 4.990000362815749,
      "learning_rate": 8.259494979393563e-06,
      "loss": 0.3063,
      "step": 280
    },
    {
      "epoch": 0.5499021526418787,
      "grad_norm": 5.360524995395144,
      "learning_rate": 8.247824568703386e-06,
      "loss": 0.2263,
      "step": 281
    },
    {
      "epoch": 0.5518590998043053,
      "grad_norm": 4.534945903000907,
      "learning_rate": 8.236123468487649e-06,
      "loss": 0.2213,
      "step": 282
    },
    {
      "epoch": 0.5538160469667319,
      "grad_norm": 5.920133059574043,
      "learning_rate": 8.224391789313038e-06,
      "loss": 0.2595,
      "step": 283
    },
    {
      "epoch": 0.5557729941291585,
      "grad_norm": 6.116003788760588,
      "learning_rate": 8.2126296420352e-06,
      "loss": 0.2691,
      "step": 284
    },
    {
      "epoch": 0.5577299412915852,
      "grad_norm": 5.7120077342415705,
      "learning_rate": 8.200837137797665e-06,
      "loss": 0.3438,
      "step": 285
    },
    {
      "epoch": 0.5596868884540117,
      "grad_norm": 4.934956303000625,
      "learning_rate": 8.189014388030834e-06,
      "loss": 0.2691,
      "step": 286
    },
    {
      "epoch": 0.5616438356164384,
      "grad_norm": 4.1221425004180725,
      "learning_rate": 8.177161504450887e-06,
      "loss": 0.2029,
      "step": 287
    },
    {
      "epoch": 0.5636007827788649,
      "grad_norm": 4.642864467173339,
      "learning_rate": 8.16527859905876e-06,
      "loss": 0.2425,
      "step": 288
    },
    {
      "epoch": 0.5655577299412916,
      "grad_norm": 4.157952188139466,
      "learning_rate": 8.153365784139065e-06,
      "loss": 0.2308,
      "step": 289
    },
    {
      "epoch": 0.5675146771037182,
      "grad_norm": 4.15123656221922,
      "learning_rate": 8.141423172259038e-06,
      "loss": 0.1997,
      "step": 290
    },
    {
      "epoch": 0.5694716242661448,
      "grad_norm": 6.56219758739563,
      "learning_rate": 8.129450876267475e-06,
      "loss": 0.2808,
      "step": 291
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 7.278779826358803,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.2651,
      "step": 292
    },
    {
      "epoch": 0.5733855185909981,
      "grad_norm": 4.470449842017094,
      "learning_rate": 8.105417684746327e-06,
      "loss": 0.207,
      "step": 293
    },
    {
      "epoch": 0.5753424657534246,
      "grad_norm": 4.15571279033962,
      "learning_rate": 8.093357016312518e-06,
      "loss": 0.2059,
      "step": 294
    },
    {
      "epoch": 0.5772994129158513,
      "grad_norm": 6.4195682542913834,
      "learning_rate": 8.081267117956581e-06,
      "loss": 0.2091,
      "step": 295
    },
    {
      "epoch": 0.5792563600782779,
      "grad_norm": 4.1273855645560635,
      "learning_rate": 8.069148103919064e-06,
      "loss": 0.1764,
      "step": 296
    },
    {
      "epoch": 0.5812133072407045,
      "grad_norm": 5.6861558256126115,
      "learning_rate": 8.057000088715631e-06,
      "loss": 0.3401,
      "step": 297
    },
    {
      "epoch": 0.5831702544031311,
      "grad_norm": 4.706318658816322,
      "learning_rate": 8.044823187135984e-06,
      "loss": 0.2076,
      "step": 298
    },
    {
      "epoch": 0.5851272015655578,
      "grad_norm": 5.269459779116369,
      "learning_rate": 8.032617514242784e-06,
      "loss": 0.3019,
      "step": 299
    },
    {
      "epoch": 0.5870841487279843,
      "grad_norm": 4.1156829124413,
      "learning_rate": 8.020383185370559e-06,
      "loss": 0.1804,
      "step": 300
    },
    {
      "epoch": 0.589041095890411,
      "grad_norm": 6.650873965565284,
      "learning_rate": 8.008120316124612e-06,
      "loss": 0.2852,
      "step": 301
    },
    {
      "epoch": 0.5909980430528375,
      "grad_norm": 4.485178846860813,
      "learning_rate": 7.995829022379937e-06,
      "loss": 0.235,
      "step": 302
    },
    {
      "epoch": 0.5929549902152642,
      "grad_norm": 4.256808704734016,
      "learning_rate": 7.983509420280107e-06,
      "loss": 0.2321,
      "step": 303
    },
    {
      "epoch": 0.5949119373776908,
      "grad_norm": 4.895440604183134,
      "learning_rate": 7.9711616262362e-06,
      "loss": 0.2519,
      "step": 304
    },
    {
      "epoch": 0.5968688845401174,
      "grad_norm": 5.417537828482808,
      "learning_rate": 7.958785756925684e-06,
      "loss": 0.2698,
      "step": 305
    },
    {
      "epoch": 0.598825831702544,
      "grad_norm": 4.218683147983488,
      "learning_rate": 7.94638192929131e-06,
      "loss": 0.2024,
      "step": 306
    },
    {
      "epoch": 0.6007827788649707,
      "grad_norm": 4.526449090089896,
      "learning_rate": 7.933950260540023e-06,
      "loss": 0.2507,
      "step": 307
    },
    {
      "epoch": 0.6027397260273972,
      "grad_norm": 4.682240761567048,
      "learning_rate": 7.921490868141843e-06,
      "loss": 0.2776,
      "step": 308
    },
    {
      "epoch": 0.6046966731898239,
      "grad_norm": 3.8252646312498415,
      "learning_rate": 7.909003869828757e-06,
      "loss": 0.2226,
      "step": 309
    },
    {
      "epoch": 0.6066536203522505,
      "grad_norm": 4.674189874945427,
      "learning_rate": 7.896489383593606e-06,
      "loss": 0.1775,
      "step": 310
    },
    {
      "epoch": 0.6086105675146771,
      "grad_norm": 4.345377154588714,
      "learning_rate": 7.883947527688976e-06,
      "loss": 0.1987,
      "step": 311
    },
    {
      "epoch": 0.6105675146771037,
      "grad_norm": 4.430135152231841,
      "learning_rate": 7.871378420626072e-06,
      "loss": 0.2591,
      "step": 312
    },
    {
      "epoch": 0.6125244618395304,
      "grad_norm": 4.8511026140799505,
      "learning_rate": 7.858782181173602e-06,
      "loss": 0.2524,
      "step": 313
    },
    {
      "epoch": 0.6144814090019569,
      "grad_norm": 4.204925199655333,
      "learning_rate": 7.84615892835666e-06,
      "loss": 0.2221,
      "step": 314
    },
    {
      "epoch": 0.6164383561643836,
      "grad_norm": 4.655030786087024,
      "learning_rate": 7.83350878145559e-06,
      "loss": 0.2622,
      "step": 315
    },
    {
      "epoch": 0.6183953033268101,
      "grad_norm": 5.360096473338045,
      "learning_rate": 7.820831860004867e-06,
      "loss": 0.1865,
      "step": 316
    },
    {
      "epoch": 0.6203522504892368,
      "grad_norm": 5.16735050993756,
      "learning_rate": 7.808128283791967e-06,
      "loss": 0.2539,
      "step": 317
    },
    {
      "epoch": 0.6223091976516634,
      "grad_norm": 4.224066897069397,
      "learning_rate": 7.795398172856234e-06,
      "loss": 0.1763,
      "step": 318
    },
    {
      "epoch": 0.62426614481409,
      "grad_norm": 4.464099730993358,
      "learning_rate": 7.782641647487741e-06,
      "loss": 0.2293,
      "step": 319
    },
    {
      "epoch": 0.6262230919765166,
      "grad_norm": 5.011832349001161,
      "learning_rate": 7.769858828226165e-06,
      "loss": 0.2245,
      "step": 320
    },
    {
      "epoch": 0.6281800391389433,
      "grad_norm": 3.826115563008368,
      "learning_rate": 7.757049835859635e-06,
      "loss": 0.1769,
      "step": 321
    },
    {
      "epoch": 0.6301369863013698,
      "grad_norm": 5.008784468728881,
      "learning_rate": 7.744214791423597e-06,
      "loss": 0.2733,
      "step": 322
    },
    {
      "epoch": 0.6320939334637965,
      "grad_norm": 5.428145102024377,
      "learning_rate": 7.731353816199672e-06,
      "loss": 0.2088,
      "step": 323
    },
    {
      "epoch": 0.6340508806262231,
      "grad_norm": 6.234452796021084,
      "learning_rate": 7.718467031714506e-06,
      "loss": 0.2625,
      "step": 324
    },
    {
      "epoch": 0.6360078277886497,
      "grad_norm": 4.975002080963604,
      "learning_rate": 7.705554559738623e-06,
      "loss": 0.3036,
      "step": 325
    },
    {
      "epoch": 0.6379647749510763,
      "grad_norm": 4.737259074961045,
      "learning_rate": 7.692616522285278e-06,
      "loss": 0.2219,
      "step": 326
    },
    {
      "epoch": 0.639921722113503,
      "grad_norm": 3.8979165458755305,
      "learning_rate": 7.679653041609296e-06,
      "loss": 0.1622,
      "step": 327
    },
    {
      "epoch": 0.6418786692759295,
      "grad_norm": 5.081862847037666,
      "learning_rate": 7.666664240205922e-06,
      "loss": 0.2566,
      "step": 328
    },
    {
      "epoch": 0.6438356164383562,
      "grad_norm": 4.790600125497883,
      "learning_rate": 7.653650240809667e-06,
      "loss": 0.2359,
      "step": 329
    },
    {
      "epoch": 0.6457925636007827,
      "grad_norm": 4.512712501134018,
      "learning_rate": 7.640611166393142e-06,
      "loss": 0.2226,
      "step": 330
    },
    {
      "epoch": 0.6477495107632094,
      "grad_norm": 4.018884435453013,
      "learning_rate": 7.627547140165899e-06,
      "loss": 0.183,
      "step": 331
    },
    {
      "epoch": 0.649706457925636,
      "grad_norm": 4.698951021240157,
      "learning_rate": 7.614458285573262e-06,
      "loss": 0.2013,
      "step": 332
    },
    {
      "epoch": 0.6516634050880626,
      "grad_norm": 5.653759827756558,
      "learning_rate": 7.60134472629517e-06,
      "loss": 0.2191,
      "step": 333
    },
    {
      "epoch": 0.6536203522504892,
      "grad_norm": 5.342758848365416,
      "learning_rate": 7.588206586245001e-06,
      "loss": 0.2727,
      "step": 334
    },
    {
      "epoch": 0.6555772994129159,
      "grad_norm": 4.82649502705225,
      "learning_rate": 7.575043989568401e-06,
      "loss": 0.2216,
      "step": 335
    },
    {
      "epoch": 0.6575342465753424,
      "grad_norm": 5.33534577723198,
      "learning_rate": 7.56185706064212e-06,
      "loss": 0.2698,
      "step": 336
    },
    {
      "epoch": 0.6594911937377691,
      "grad_norm": 6.877326028435487,
      "learning_rate": 7.548645924072816e-06,
      "loss": 0.2459,
      "step": 337
    },
    {
      "epoch": 0.6614481409001957,
      "grad_norm": 4.985381269763986,
      "learning_rate": 7.535410704695907e-06,
      "loss": 0.2413,
      "step": 338
    },
    {
      "epoch": 0.6634050880626223,
      "grad_norm": 4.814826867238619,
      "learning_rate": 7.522151527574362e-06,
      "loss": 0.3347,
      "step": 339
    },
    {
      "epoch": 0.6653620352250489,
      "grad_norm": 5.399143030215112,
      "learning_rate": 7.508868517997544e-06,
      "loss": 0.2488,
      "step": 340
    },
    {
      "epoch": 0.6673189823874756,
      "grad_norm": 4.588021966625966,
      "learning_rate": 7.495561801480009e-06,
      "loss": 0.2481,
      "step": 341
    },
    {
      "epoch": 0.6692759295499021,
      "grad_norm": 6.088374064261434,
      "learning_rate": 7.4822315037603245e-06,
      "loss": 0.2556,
      "step": 342
    },
    {
      "epoch": 0.6712328767123288,
      "grad_norm": 4.154717298483266,
      "learning_rate": 7.468877750799887e-06,
      "loss": 0.1761,
      "step": 343
    },
    {
      "epoch": 0.6731898238747553,
      "grad_norm": 4.419384276900518,
      "learning_rate": 7.455500668781725e-06,
      "loss": 0.2141,
      "step": 344
    },
    {
      "epoch": 0.675146771037182,
      "grad_norm": 4.978162648752979,
      "learning_rate": 7.442100384109313e-06,
      "loss": 0.2318,
      "step": 345
    },
    {
      "epoch": 0.6771037181996086,
      "grad_norm": 5.3039954365995605,
      "learning_rate": 7.428677023405366e-06,
      "loss": 0.2386,
      "step": 346
    },
    {
      "epoch": 0.6790606653620352,
      "grad_norm": 4.131179533137672,
      "learning_rate": 7.415230713510657e-06,
      "loss": 0.1759,
      "step": 347
    },
    {
      "epoch": 0.6810176125244618,
      "grad_norm": 4.5847118787384105,
      "learning_rate": 7.40176158148281e-06,
      "loss": 0.2022,
      "step": 348
    },
    {
      "epoch": 0.6829745596868885,
      "grad_norm": 5.044102877267168,
      "learning_rate": 7.3882697545951e-06,
      "loss": 0.2366,
      "step": 349
    },
    {
      "epoch": 0.684931506849315,
      "grad_norm": 4.719653227435389,
      "learning_rate": 7.374755360335253e-06,
      "loss": 0.1998,
      "step": 350
    },
    {
      "epoch": 0.6868884540117417,
      "grad_norm": 4.647977019719739,
      "learning_rate": 7.361218526404237e-06,
      "loss": 0.2067,
      "step": 351
    },
    {
      "epoch": 0.6888454011741683,
      "grad_norm": 4.809121566329024,
      "learning_rate": 7.3476593807150606e-06,
      "loss": 0.2474,
      "step": 352
    },
    {
      "epoch": 0.6908023483365949,
      "grad_norm": 6.007951742362696,
      "learning_rate": 7.334078051391563e-06,
      "loss": 0.2276,
      "step": 353
    },
    {
      "epoch": 0.6927592954990215,
      "grad_norm": 4.9392361722642235,
      "learning_rate": 7.320474666767201e-06,
      "loss": 0.1936,
      "step": 354
    },
    {
      "epoch": 0.6947162426614482,
      "grad_norm": 5.4303041629110105,
      "learning_rate": 7.3068493553838386e-06,
      "loss": 0.2201,
      "step": 355
    },
    {
      "epoch": 0.6966731898238747,
      "grad_norm": 5.20150092507419,
      "learning_rate": 7.293202245990526e-06,
      "loss": 0.3111,
      "step": 356
    },
    {
      "epoch": 0.6986301369863014,
      "grad_norm": 4.595699352723916,
      "learning_rate": 7.279533467542295e-06,
      "loss": 0.2273,
      "step": 357
    },
    {
      "epoch": 0.700587084148728,
      "grad_norm": 5.224299377753821,
      "learning_rate": 7.265843149198931e-06,
      "loss": 0.2199,
      "step": 358
    },
    {
      "epoch": 0.7025440313111546,
      "grad_norm": 4.79590132448363,
      "learning_rate": 7.252131420323757e-06,
      "loss": 0.2214,
      "step": 359
    },
    {
      "epoch": 0.7045009784735812,
      "grad_norm": 5.003578747899316,
      "learning_rate": 7.238398410482408e-06,
      "loss": 0.2363,
      "step": 360
    },
    {
      "epoch": 0.7064579256360078,
      "grad_norm": 4.866737354669243,
      "learning_rate": 7.224644249441606e-06,
      "loss": 0.2131,
      "step": 361
    },
    {
      "epoch": 0.7084148727984344,
      "grad_norm": 5.602436444876147,
      "learning_rate": 7.210869067167942e-06,
      "loss": 0.204,
      "step": 362
    },
    {
      "epoch": 0.7103718199608611,
      "grad_norm": 5.129384315926628,
      "learning_rate": 7.197072993826637e-06,
      "loss": 0.2216,
      "step": 363
    },
    {
      "epoch": 0.7123287671232876,
      "grad_norm": 5.118369282242177,
      "learning_rate": 7.183256159780321e-06,
      "loss": 0.3338,
      "step": 364
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 5.290938635919234,
      "learning_rate": 7.169418695587791e-06,
      "loss": 0.2778,
      "step": 365
    },
    {
      "epoch": 0.7162426614481409,
      "grad_norm": 5.4785889854106875,
      "learning_rate": 7.155560732002792e-06,
      "loss": 0.2531,
      "step": 366
    },
    {
      "epoch": 0.7181996086105675,
      "grad_norm": 5.064988017320793,
      "learning_rate": 7.141682399972765e-06,
      "loss": 0.2516,
      "step": 367
    },
    {
      "epoch": 0.7201565557729941,
      "grad_norm": 5.599014295713253,
      "learning_rate": 7.127783830637625e-06,
      "loss": 0.2808,
      "step": 368
    },
    {
      "epoch": 0.7221135029354208,
      "grad_norm": 4.939410714168095,
      "learning_rate": 7.113865155328506e-06,
      "loss": 0.2501,
      "step": 369
    },
    {
      "epoch": 0.7240704500978473,
      "grad_norm": 4.560805456000225,
      "learning_rate": 7.099926505566537e-06,
      "loss": 0.1967,
      "step": 370
    },
    {
      "epoch": 0.726027397260274,
      "grad_norm": 5.777503421061715,
      "learning_rate": 7.085968013061585e-06,
      "loss": 0.2432,
      "step": 371
    },
    {
      "epoch": 0.7279843444227005,
      "grad_norm": 5.119930503618484,
      "learning_rate": 7.071989809711018e-06,
      "loss": 0.2326,
      "step": 372
| }, |
| { |
| "epoch": 0.7299412915851272, |
| "grad_norm": 5.8149307117998035, |
| "learning_rate": 7.057992027598459e-06, |
| "loss": 0.3476, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.7318982387475538, |
| "grad_norm": 3.4819792292427487, |
| "learning_rate": 7.043974798992532e-06, |
| "loss": 0.1639, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.7338551859099804, |
| "grad_norm": 5.44764939139612, |
| "learning_rate": 7.029938256345617e-06, |
| "loss": 0.2212, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.735812133072407, |
| "grad_norm": 5.045062135140015, |
| "learning_rate": 7.015882532292598e-06, |
| "loss": 0.2404, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.7377690802348337, |
| "grad_norm": 4.738599312039875, |
| "learning_rate": 7.001807759649609e-06, |
| "loss": 0.196, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.7397260273972602, |
| "grad_norm": 4.171047126425417, |
| "learning_rate": 6.987714071412781e-06, |
| "loss": 0.1997, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.7416829745596869, |
| "grad_norm": 4.445079606658529, |
| "learning_rate": 6.973601600756978e-06, |
| "loss": 0.219, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.7436399217221135, |
| "grad_norm": 4.507079749995259, |
| "learning_rate": 6.959470481034547e-06, |
| "loss": 0.2359, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.7455968688845401, |
| "grad_norm": 5.734858328648728, |
| "learning_rate": 6.945320845774057e-06, |
| "loss": 0.1925, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.7475538160469667, |
| "grad_norm": 3.958771906119686, |
| "learning_rate": 6.931152828679033e-06, |
| "loss": 0.1682, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.7495107632093934, |
| "grad_norm": 5.006053395315967, |
| "learning_rate": 6.9169665636266925e-06, |
| "loss": 0.2492, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.7514677103718199, |
| "grad_norm": 5.3023650391091826, |
| "learning_rate": 6.902762184666687e-06, |
| "loss": 0.2624, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.7534246575342466, |
| "grad_norm": 4.365600957886763, |
| "learning_rate": 6.888539826019824e-06, |
| "loss": 0.1968, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.7553816046966731, |
| "grad_norm": 5.038212786197152, |
| "learning_rate": 6.874299622076817e-06, |
| "loss": 0.2448, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.7573385518590998, |
| "grad_norm": 4.893164546569124, |
| "learning_rate": 6.860041707396993e-06, |
| "loss": 0.2351, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.7592954990215264, |
| "grad_norm": 4.902127401384436, |
| "learning_rate": 6.845766216707037e-06, |
| "loss": 0.2269, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.761252446183953, |
| "grad_norm": 4.307766411392759, |
| "learning_rate": 6.831473284899715e-06, |
| "loss": 0.2181, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.7632093933463796, |
| "grad_norm": 4.631040367665074, |
| "learning_rate": 6.817163047032598e-06, |
| "loss": 0.2589, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.7651663405088063, |
| "grad_norm": 5.708508269038249, |
| "learning_rate": 6.802835638326784e-06, |
| "loss": 0.2491, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.7671232876712328, |
| "grad_norm": 5.362125375005584, |
| "learning_rate": 6.788491194165629e-06, |
| "loss": 0.2615, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.7690802348336595, |
| "grad_norm": 4.443138716400396, |
| "learning_rate": 6.774129850093451e-06, |
| "loss": 0.1682, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.7710371819960861, |
| "grad_norm": 4.393068720229301, |
| "learning_rate": 6.759751741814271e-06, |
| "loss": 0.2473, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.7729941291585127, |
| "grad_norm": 3.8288699207577275, |
| "learning_rate": 6.74535700519051e-06, |
| "loss": 0.1662, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.7749510763209393, |
| "grad_norm": 3.9311366575684548, |
| "learning_rate": 6.7309457762417214e-06, |
| "loss": 0.1898, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.776908023483366, |
| "grad_norm": 4.668357867730944, |
| "learning_rate": 6.7165181911432955e-06, |
| "loss": 0.2604, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.7788649706457925, |
| "grad_norm": 5.329544699386823, |
| "learning_rate": 6.702074386225175e-06, |
| "loss": 0.2492, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.7808219178082192, |
| "grad_norm": 8.59475667821416, |
| "learning_rate": 6.687614497970567e-06, |
| "loss": 0.2667, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.7827788649706457, |
| "grad_norm": 4.8981873340816255, |
| "learning_rate": 6.67313866301466e-06, |
| "loss": 0.2111, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.7847358121330724, |
| "grad_norm": 6.097029336108919, |
| "learning_rate": 6.658647018143322e-06, |
| "loss": 0.3266, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.786692759295499, |
| "grad_norm": 4.420812930920847, |
| "learning_rate": 6.6441397002918165e-06, |
| "loss": 0.2299, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.7886497064579256, |
| "grad_norm": 4.365341878054583, |
| "learning_rate": 6.629616846543497e-06, |
| "loss": 0.2653, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.7906066536203522, |
| "grad_norm": 4.494281542008356, |
| "learning_rate": 6.615078594128531e-06, |
| "loss": 0.2171, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.7925636007827789, |
| "grad_norm": 4.199122245959778, |
| "learning_rate": 6.600525080422583e-06, |
| "loss": 0.2125, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.7945205479452054, |
| "grad_norm": 4.140837834661595, |
| "learning_rate": 6.585956442945531e-06, |
| "loss": 0.2143, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.7964774951076321, |
| "grad_norm": 5.523128303920444, |
| "learning_rate": 6.571372819360158e-06, |
| "loss": 0.2622, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.7984344422700587, |
| "grad_norm": 5.386723255928109, |
| "learning_rate": 6.5567743474708545e-06, |
| "loss": 0.2321, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.8003913894324853, |
| "grad_norm": 4.888059978124536, |
| "learning_rate": 6.5421611652223185e-06, |
| "loss": 0.2218, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.8023483365949119, |
| "grad_norm": 3.8291622949762996, |
| "learning_rate": 6.52753341069825e-06, |
| "loss": 0.1709, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.8043052837573386, |
| "grad_norm": 4.348252679759753, |
| "learning_rate": 6.512891222120044e-06, |
| "loss": 0.2144, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.8062622309197651, |
| "grad_norm": 4.894889223530912, |
| "learning_rate": 6.498234737845488e-06, |
| "loss": 0.2271, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.8082191780821918, |
| "grad_norm": 4.622072264722465, |
| "learning_rate": 6.483564096367452e-06, |
| "loss": 0.2257, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.8101761252446184, |
| "grad_norm": 5.014953495369264, |
| "learning_rate": 6.468879436312584e-06, |
| "loss": 0.2783, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.812133072407045, |
| "grad_norm": 5.082289788971952, |
| "learning_rate": 6.454180896439993e-06, |
| "loss": 0.2163, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.8140900195694716, |
| "grad_norm": 5.016943498143925, |
| "learning_rate": 6.439468615639947e-06, |
| "loss": 0.2174, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.8160469667318982, |
| "grad_norm": 5.026669140596442, |
| "learning_rate": 6.424742732932547e-06, |
| "loss": 0.2544, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.8180039138943248, |
| "grad_norm": 4.892982117085112, |
| "learning_rate": 6.410003387466433e-06, |
| "loss": 0.2618, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.8199608610567515, |
| "grad_norm": 4.763431757151407, |
| "learning_rate": 6.3952507185174505e-06, |
| "loss": 0.2602, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.821917808219178, |
| "grad_norm": 4.831268541342514, |
| "learning_rate": 6.380484865487346e-06, |
| "loss": 0.1952, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.8238747553816047, |
| "grad_norm": 5.810023982986091, |
| "learning_rate": 6.365705967902439e-06, |
| "loss": 0.231, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.8258317025440313, |
| "grad_norm": 4.518985074687684, |
| "learning_rate": 6.35091416541232e-06, |
| "loss": 0.2838, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.8277886497064579, |
| "grad_norm": 4.49389191062423, |
| "learning_rate": 6.3361095977885125e-06, |
| "loss": 0.2439, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.8297455968688845, |
| "grad_norm": 4.842306119601203, |
| "learning_rate": 6.3212924049231676e-06, |
| "loss": 0.2547, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.8317025440313112, |
| "grad_norm": 4.855578382208363, |
| "learning_rate": 6.306462726827733e-06, |
| "loss": 0.2505, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.8336594911937377, |
| "grad_norm": 4.844384939575004, |
| "learning_rate": 6.29162070363163e-06, |
| "loss": 0.1988, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.8356164383561644, |
| "grad_norm": 4.350696593194919, |
| "learning_rate": 6.276766475580935e-06, |
| "loss": 0.242, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.837573385518591, |
| "grad_norm": 3.7219984815316094, |
| "learning_rate": 6.261900183037053e-06, |
| "loss": 0.2415, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.8395303326810176, |
| "grad_norm": 4.69850501369936, |
| "learning_rate": 6.247021966475385e-06, |
| "loss": 0.2541, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.8414872798434442, |
| "grad_norm": 4.347805294081271, |
| "learning_rate": 6.232131966484007e-06, |
| "loss": 0.1611, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.8434442270058709, |
| "grad_norm": 4.99401775891938, |
| "learning_rate": 6.217230323762338e-06, |
| "loss": 0.2625, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.8454011741682974, |
| "grad_norm": 4.831556787616597, |
| "learning_rate": 6.202317179119817e-06, |
| "loss": 0.2099, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.8473581213307241, |
| "grad_norm": 4.307274115687755, |
| "learning_rate": 6.187392673474562e-06, |
| "loss": 0.2325, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.8493150684931506, |
| "grad_norm": 4.3976773904729605, |
| "learning_rate": 6.1724569478520495e-06, |
| "loss": 0.1803, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.8512720156555773, |
| "grad_norm": 4.807769533726122, |
| "learning_rate": 6.15751014338377e-06, |
| "loss": 0.239, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.8532289628180039, |
| "grad_norm": 4.798338091443673, |
| "learning_rate": 6.142552401305907e-06, |
| "loss": 0.2353, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.8551859099804305, |
| "grad_norm": 5.504867602840627, |
| "learning_rate": 6.1275838629579906e-06, |
| "loss": 0.3269, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.8571428571428571, |
| "grad_norm": 5.587671091691377, |
| "learning_rate": 6.112604669781572e-06, |
| "loss": 0.2645, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.8590998043052838, |
| "grad_norm": 4.587930060890868, |
| "learning_rate": 6.097614963318883e-06, |
| "loss": 0.2621, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.8610567514677103, |
| "grad_norm": 5.140391230524692, |
| "learning_rate": 6.0826148852114945e-06, |
| "loss": 0.2261, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.863013698630137, |
| "grad_norm": 6.016051339249005, |
| "learning_rate": 6.067604577198981e-06, |
| "loss": 0.2978, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.8649706457925636, |
| "grad_norm": 5.434312801462964, |
| "learning_rate": 6.052584181117589e-06, |
| "loss": 0.2387, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.8669275929549902, |
| "grad_norm": 5.409406647702902, |
| "learning_rate": 6.037553838898882e-06, |
| "loss": 0.2553, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.8688845401174168, |
| "grad_norm": 4.765201868429886, |
| "learning_rate": 6.022513692568412e-06, |
| "loss": 0.1978, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.8708414872798435, |
| "grad_norm": 3.9078286370716317, |
| "learning_rate": 6.00746388424437e-06, |
| "loss": 0.229, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.87279843444227, |
| "grad_norm": 4.842194006730347, |
| "learning_rate": 5.992404556136247e-06, |
| "loss": 0.2435, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.8747553816046967, |
| "grad_norm": 6.438505005254891, |
| "learning_rate": 5.977335850543488e-06, |
| "loss": 0.3377, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.8767123287671232, |
| "grad_norm": 4.864623377795162, |
| "learning_rate": 5.96225790985415e-06, |
| "loss": 0.2205, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.8786692759295499, |
| "grad_norm": 5.401377636330468, |
| "learning_rate": 5.947170876543553e-06, |
| "loss": 0.2143, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.8806262230919765, |
| "grad_norm": 5.974728518714648, |
| "learning_rate": 5.932074893172934e-06, |
| "loss": 0.2904, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.8825831702544031, |
| "grad_norm": 4.22665421014589, |
| "learning_rate": 5.916970102388104e-06, |
| "loss": 0.2376, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.8845401174168297, |
| "grad_norm": 4.866260844549325, |
| "learning_rate": 5.9018566469181e-06, |
| "loss": 0.2145, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.8864970645792564, |
| "grad_norm": 5.182337826951706, |
| "learning_rate": 5.886734669573827e-06, |
| "loss": 0.2605, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.8884540117416829, |
| "grad_norm": 6.326241958980734, |
| "learning_rate": 5.87160431324672e-06, |
| "loss": 0.2323, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.8904109589041096, |
| "grad_norm": 5.7567082621231895, |
| "learning_rate": 5.856465720907388e-06, |
| "loss": 0.2774, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.8923679060665362, |
| "grad_norm": 4.4103286544786835, |
| "learning_rate": 5.841319035604267e-06, |
| "loss": 0.228, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.8943248532289628, |
| "grad_norm": 5.372817555912662, |
| "learning_rate": 5.826164400462259e-06, |
| "loss": 0.2554, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.8962818003913894, |
| "grad_norm": 6.1856557232738245, |
| "learning_rate": 5.8110019586813944e-06, |
| "loss": 0.2591, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.898238747553816, |
| "grad_norm": 4.605495241850436, |
| "learning_rate": 5.795831853535461e-06, |
| "loss": 0.2275, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.9001956947162426, |
| "grad_norm": 6.497329283244475, |
| "learning_rate": 5.780654228370669e-06, |
| "loss": 0.327, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.9021526418786693, |
| "grad_norm": 5.435980681967022, |
| "learning_rate": 5.76546922660428e-06, |
| "loss": 0.2197, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.9041095890410958, |
| "grad_norm": 5.067939458477714, |
| "learning_rate": 5.7502769917232635e-06, |
| "loss": 0.229, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.9060665362035225, |
| "grad_norm": 5.791428336133721, |
| "learning_rate": 5.735077667282935e-06, |
| "loss": 0.2367, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.9080234833659491, |
| "grad_norm": 5.927847980859983, |
| "learning_rate": 5.719871396905603e-06, |
| "loss": 0.3134, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.9099804305283757, |
| "grad_norm": 3.8674049049038026, |
| "learning_rate": 5.704658324279203e-06, |
| "loss": 0.2375, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.9119373776908023, |
| "grad_norm": 5.12473194032584, |
| "learning_rate": 5.6894385931559555e-06, |
| "loss": 0.232, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.913894324853229, |
| "grad_norm": 3.8503412758882516, |
| "learning_rate": 5.674212347350996e-06, |
| "loss": 0.1856, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.9158512720156555, |
| "grad_norm": 3.9789555210585297, |
| "learning_rate": 5.658979730741014e-06, |
| "loss": 0.1934, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.9178082191780822, |
| "grad_norm": 4.370892145071313, |
| "learning_rate": 5.643740887262905e-06, |
| "loss": 0.2179, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.9197651663405088, |
| "grad_norm": 3.726018590756779, |
| "learning_rate": 5.6284959609124e-06, |
| "loss": 0.1628, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.9217221135029354, |
| "grad_norm": 4.923370996241003, |
| "learning_rate": 5.613245095742711e-06, |
| "loss": 0.3268, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.923679060665362, |
| "grad_norm": 4.872399733446207, |
| "learning_rate": 5.5979884358631665e-06, |
| "loss": 0.3215, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.9256360078277887, |
| "grad_norm": 4.828455130779493, |
| "learning_rate": 5.582726125437846e-06, |
| "loss": 0.2216, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.9275929549902152, |
| "grad_norm": 5.0291261847471835, |
| "learning_rate": 5.567458308684233e-06, |
| "loss": 0.2361, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.9295499021526419, |
| "grad_norm": 4.983004149770571, |
| "learning_rate": 5.5521851298718295e-06, |
| "loss": 0.2499, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.9315068493150684, |
| "grad_norm": 4.9351846488554, |
| "learning_rate": 5.536906733320816e-06, |
| "loss": 0.2135, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.9334637964774951, |
| "grad_norm": 5.873646971536536, |
| "learning_rate": 5.5216232634006695e-06, |
| "loss": 0.319, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.9354207436399217, |
| "grad_norm": 5.164190875081514, |
| "learning_rate": 5.506334864528808e-06, |
| "loss": 0.2679, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.9373776908023483, |
| "grad_norm": 4.905694762090651, |
| "learning_rate": 5.491041681169226e-06, |
| "loss": 0.2034, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.9393346379647749, |
| "grad_norm": 3.955956210706832, |
| "learning_rate": 5.4757438578311275e-06, |
| "loss": 0.2359, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.9412915851272016, |
| "grad_norm": 5.821836619961792, |
| "learning_rate": 5.460441539067559e-06, |
| "loss": 0.3118, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.9432485322896281, |
| "grad_norm": 5.427216829578816, |
| "learning_rate": 5.4451348694740495e-06, |
| "loss": 0.2286, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.9452054794520548, |
| "grad_norm": 4.468828934550866, |
| "learning_rate": 5.429823993687234e-06, |
| "loss": 0.192, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.9471624266144814, |
| "grad_norm": 3.9413592286838877, |
| "learning_rate": 5.414509056383498e-06, |
| "loss": 0.1738, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.949119373776908, |
| "grad_norm": 4.368677897645313, |
| "learning_rate": 5.399190202277602e-06, |
| "loss": 0.2017, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.9510763209393346, |
| "grad_norm": 5.336286815034962, |
| "learning_rate": 5.383867576121324e-06, |
| "loss": 0.208, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.9530332681017613, |
| "grad_norm": 3.986937394132073, |
| "learning_rate": 5.368541322702074e-06, |
| "loss": 0.1967, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.9549902152641878, |
| "grad_norm": 4.9276219174051485, |
| "learning_rate": 5.353211586841547e-06, |
| "loss": 0.1987, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.9569471624266145, |
| "grad_norm": 4.207339172023414, |
| "learning_rate": 5.337878513394337e-06, |
| "loss": 0.1746, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.958904109589041, |
| "grad_norm": 4.647752828555265, |
| "learning_rate": 5.322542247246583e-06, |
| "loss": 0.2334, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.9608610567514677, |
| "grad_norm": 5.229986464116908, |
| "learning_rate": 5.307202933314586e-06, |
| "loss": 0.1934, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.9628180039138943, |
| "grad_norm": 5.566559389994177, |
| "learning_rate": 5.29186071654345e-06, |
| "loss": 0.2473, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.9647749510763209, |
| "grad_norm": 5.536402912852316, |
| "learning_rate": 5.276515741905708e-06, |
| "loss": 0.2776, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.9667318982387475, |
| "grad_norm": 4.9674366530703775, |
| "learning_rate": 5.261168154399953e-06, |
| "loss": 0.2692, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.9686888454011742, |
| "grad_norm": 5.058566494960501, |
| "learning_rate": 5.245818099049467e-06, |
| "loss": 0.2306, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.9706457925636007, |
| "grad_norm": 4.532603730141293, |
| "learning_rate": 5.23046572090085e-06, |
| "loss": 0.2464, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.9726027397260274, |
| "grad_norm": 4.706339967218909, |
| "learning_rate": 5.215111165022653e-06, |
| "loss": 0.2898, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.974559686888454, |
| "grad_norm": 4.225330968488987, |
| "learning_rate": 5.199754576504006e-06, |
| "loss": 0.2004, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.9765166340508806, |
| "grad_norm": 5.317699947588289, |
| "learning_rate": 5.184396100453242e-06, |
| "loss": 0.2977, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.9784735812133072, |
| "grad_norm": 4.884143843271352, |
| "learning_rate": 5.1690358819965334e-06, |
| "loss": 0.2172, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.9784735812133072, |
| "eval_loss": 0.2364594042301178, |
| "eval_runtime": 1.4344, |
| "eval_samples_per_second": 29.281, |
| "eval_steps_per_second": 7.669, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.9804305283757339, |
| "grad_norm": 4.008782181449354, |
| "learning_rate": 5.153674066276513e-06, |
| "loss": 0.2022, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.9823874755381604, |
| "grad_norm": 5.438634084298234, |
| "learning_rate": 5.138310798450912e-06, |
| "loss": 0.2465, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.9843444227005871, |
| "grad_norm": 5.696655918432809, |
| "learning_rate": 5.122946223691177e-06, |
| "loss": 0.2799, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.9863013698630136, |
| "grad_norm": 5.2489850843768755, |
| "learning_rate": 5.107580487181112e-06, |
| "loss": 0.1748, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.9882583170254403, |
| "grad_norm": 5.070011054589164, |
| "learning_rate": 5.092213734115489e-06, |
| "loss": 0.2122, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.9902152641878669, |
| "grad_norm": 4.424105581921241, |
| "learning_rate": 5.0768461096986935e-06, |
| "loss": 0.214, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.9921722113502935, |
| "grad_norm": 4.824808533087909, |
| "learning_rate": 5.0614777591433375e-06, |
| "loss": 0.2231, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.9941291585127201, |
| "grad_norm": 5.456995230991357, |
| "learning_rate": 5.0461088276689026e-06, |
| "loss": 0.2628, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.9960861056751468, |
| "grad_norm": 3.569092944866674, |
| "learning_rate": 5.0307394605003554e-06, |
| "loss": 0.1996, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.9980430528375733, |
| "grad_norm": 3.8032764648518285, |
| "learning_rate": 5.01536980286678e-06, |
| "loss": 0.1376, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 4.629071021259327, |
| "learning_rate": 5e-06, |
| "loss": 0.2174, |
| "step": 511 |
| }, |
| { |
| "epoch": 1.0019569471624266, |
| "grad_norm": 3.8427551458290394, |
| "learning_rate": 4.984630197133224e-06, |
| "loss": 0.1622, |
| "step": 512 |
| }, |
| { |
| "epoch": 1.0039138943248531, |
| "grad_norm": 3.8952708983859954, |
| "learning_rate": 4.969260539499645e-06, |
| "loss": 0.1397, |
| "step": 513 |
| }, |
| { |
| "epoch": 1.00587084148728, |
| "grad_norm": 4.235353656407347, |
| "learning_rate": 4.953891172331098e-06, |
| "loss": 0.1237, |
| "step": 514 |
| }, |
| { |
| "epoch": 1.0078277886497065, |
| "grad_norm": 3.754192531725625, |
| "learning_rate": 4.938522240856664e-06, |
| "loss": 0.1298, |
| "step": 515 |
| }, |
| { |
| "epoch": 1.009784735812133, |
| "grad_norm": 3.9074736645547112, |
| "learning_rate": 4.92315389030131e-06, |
| "loss": 0.1295, |
| "step": 516 |
| }, |
| { |
| "epoch": 1.0117416829745598, |
| "grad_norm": 3.809107856303902, |
| "learning_rate": 4.907786265884511e-06, |
| "loss": 0.1298, |
| "step": 517 |
| }, |
| { |
| "epoch": 1.0136986301369864, |
| "grad_norm": 3.919580832865041, |
| "learning_rate": 4.89241951281889e-06, |
| "loss": 0.1091, |
| "step": 518 |
| }, |
| { |
| "epoch": 1.015655577299413, |
| "grad_norm": 3.9989289610366394, |
| "learning_rate": 4.8770537763088234e-06, |
| "loss": 0.1203, |
| "step": 519 |
| }, |
| { |
| "epoch": 1.0176125244618395, |
| "grad_norm": 3.0090721406447627, |
| "learning_rate": 4.8616892015490905e-06, |
| "loss": 0.1024, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.019569471624266, |
| "grad_norm": 3.5985984865508556, |
| "learning_rate": 4.846325933723487e-06, |
| "loss": 0.1008, |
| "step": 521 |
| }, |
| { |
| "epoch": 1.0215264187866928, |
| "grad_norm": 3.997732164027248, |
| "learning_rate": 4.830964118003468e-06, |
| "loss": 0.1187, |
| "step": 522 |
| }, |
| { |
| "epoch": 1.0234833659491194, |
| "grad_norm": 3.8644605408248376, |
| "learning_rate": 4.8156038995467606e-06, |
| "loss": 0.1454, |
| "step": 523 |
| }, |
| { |
| "epoch": 1.025440313111546, |
| "grad_norm": 3.4581321651764925, |
| "learning_rate": 4.800245423495997e-06, |
| "loss": 0.1503, |
| "step": 524 |
| }, |
| { |
| "epoch": 1.0273972602739727, |
| "grad_norm": 4.060272665038936, |
| "learning_rate": 4.784888834977347e-06, |
| "loss": 0.135, |
| "step": 525 |
| }, |
| { |
| "epoch": 1.0293542074363993, |
| "grad_norm": 4.023034076518265, |
| "learning_rate": 4.769534279099151e-06, |
| "loss": 0.124, |
| "step": 526 |
| }, |
| { |
| "epoch": 1.0313111545988258, |
| "grad_norm": 4.9129367864975295, |
| "learning_rate": 4.7541819009505354e-06, |
| "loss": 0.0951, |
| "step": 527 |
| }, |
| { |
| "epoch": 1.0332681017612524, |
| "grad_norm": 3.8685354813348947, |
| "learning_rate": 4.73883184560005e-06, |
| "loss": 0.124, |
| "step": 528 |
| }, |
| { |
| "epoch": 1.035225048923679, |
| "grad_norm": 4.273340132070264, |
| "learning_rate": 4.723484258094293e-06, |
| "loss": 0.1028, |
| "step": 529 |
| }, |
| { |
| "epoch": 1.0371819960861057, |
| "grad_norm": 5.782170142709554, |
| "learning_rate": 4.708139283456551e-06, |
| "loss": 0.1263, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.0391389432485323, |
| "grad_norm": 4.617481375835939, |
| "learning_rate": 4.692797066685415e-06, |
| "loss": 0.1029, |
| "step": 531 |
| }, |
| { |
| "epoch": 1.0410958904109588, |
| "grad_norm": 4.179035002304323, |
| "learning_rate": 4.6774577527534195e-06, |
| "loss": 0.0904, |
| "step": 532 |
| }, |
| { |
| "epoch": 1.0430528375733856, |
| "grad_norm": 4.195656460407086, |
| "learning_rate": 4.662121486605663e-06, |
| "loss": 0.0955, |
| "step": 533 |
| }, |
| { |
| "epoch": 1.0450097847358122, |
| "grad_norm": 4.311667759799891, |
| "learning_rate": 4.646788413158455e-06, |
| "loss": 0.0915, |
| "step": 534 |
| }, |
| { |
| "epoch": 1.0469667318982387, |
| "grad_norm": 3.6038954591551833, |
| "learning_rate": 4.631458677297927e-06, |
| "loss": 0.0934, |
| "step": 535 |
| }, |
| { |
| "epoch": 1.0489236790606653, |
| "grad_norm": 4.726061420331676, |
| "learning_rate": 4.616132423878679e-06, |
| "loss": 0.1009, |
| "step": 536 |
| }, |
| { |
| "epoch": 1.0508806262230919, |
| "grad_norm": 3.175390832768537, |
| "learning_rate": 4.600809797722398e-06, |
| "loss": 0.1034, |
| "step": 537 |
| }, |
| { |
| "epoch": 1.0528375733855186, |
| "grad_norm": 5.493196494974265, |
| "learning_rate": 4.585490943616504e-06, |
| "loss": 0.1116, |
| "step": 538 |
| }, |
| { |
| "epoch": 1.0547945205479452, |
| "grad_norm": 5.065925162776278, |
| "learning_rate": 4.570176006312769e-06, |
| "loss": 0.1282, |
| "step": 539 |
| }, |
| { |
| "epoch": 1.0567514677103718, |
| "grad_norm": 6.426432026520753, |
| "learning_rate": 4.554865130525953e-06, |
| "loss": 0.1446, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.0587084148727985, |
| "grad_norm": 5.524735154781314, |
| "learning_rate": 4.539558460932442e-06, |
| "loss": 0.1154, |
| "step": 541 |
| }, |
| { |
| "epoch": 1.060665362035225, |
| "grad_norm": 3.652069427796892, |
| "learning_rate": 4.524256142168874e-06, |
| "loss": 0.1198, |
| "step": 542 |
| }, |
| { |
| "epoch": 1.0626223091976517, |
| "grad_norm": 4.321216963326324, |
| "learning_rate": 4.508958318830776e-06, |
| "loss": 0.1502, |
| "step": 543 |
| }, |
| { |
| "epoch": 1.0645792563600782, |
| "grad_norm": 5.023924243286254, |
| "learning_rate": 4.493665135471194e-06, |
| "loss": 0.1489, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.0665362035225048, |
| "grad_norm": 3.332363361899048, |
| "learning_rate": 4.478376736599331e-06, |
| "loss": 0.0835, |
| "step": 545 |
| }, |
| { |
| "epoch": 1.0684931506849316, |
| "grad_norm": 4.8412947790154774, |
| "learning_rate": 4.463093266679185e-06, |
| "loss": 0.119, |
| "step": 546 |
| }, |
| { |
| "epoch": 1.0704500978473581, |
| "grad_norm": 3.8534284100636182, |
| "learning_rate": 4.447814870128172e-06, |
| "loss": 0.1004, |
| "step": 547 |
| }, |
| { |
| "epoch": 1.0724070450097847, |
| "grad_norm": 3.8880038748065355, |
| "learning_rate": 4.4325416913157706e-06, |
| "loss": 0.105, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.0743639921722115, |
| "grad_norm": 4.167503190376309, |
| "learning_rate": 4.417273874562154e-06, |
| "loss": 0.1016, |
| "step": 549 |
| }, |
| { |
| "epoch": 1.076320939334638, |
| "grad_norm": 4.329372376179357, |
| "learning_rate": 4.402011564136835e-06, |
| "loss": 0.1065, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.0782778864970646, |
| "grad_norm": 3.576770735664366, |
| "learning_rate": 4.386754904257291e-06, |
| "loss": 0.0743, |
| "step": 551 |
| }, |
| { |
| "epoch": 1.0802348336594911, |
| "grad_norm": 5.235538289090008, |
| "learning_rate": 4.371504039087602e-06, |
| "loss": 0.137, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.0821917808219177, |
| "grad_norm": 5.12571022925371, |
| "learning_rate": 4.356259112737096e-06, |
| "loss": 0.1206, |
| "step": 553 |
| }, |
| { |
| "epoch": 1.0841487279843445, |
| "grad_norm": 5.954615588206797, |
| "learning_rate": 4.341020269258987e-06, |
| "loss": 0.0824, |
| "step": 554 |
| }, |
| { |
| "epoch": 1.086105675146771, |
| "grad_norm": 3.7961237834929955, |
| "learning_rate": 4.325787652649006e-06, |
| "loss": 0.0918, |
| "step": 555 |
| }, |
| { |
| "epoch": 1.0880626223091976, |
| "grad_norm": 4.849697763057499, |
| "learning_rate": 4.310561406844045e-06, |
| "loss": 0.1079, |
| "step": 556 |
| }, |
| { |
| "epoch": 1.0900195694716244, |
| "grad_norm": 4.120788106345601, |
| "learning_rate": 4.295341675720797e-06, |
| "loss": 0.1137, |
| "step": 557 |
| }, |
| { |
| "epoch": 1.091976516634051, |
| "grad_norm": 3.7474131048437376, |
| "learning_rate": 4.280128603094399e-06, |
| "loss": 0.1042, |
| "step": 558 |
| }, |
| { |
| "epoch": 1.0939334637964775, |
| "grad_norm": 4.166852600194618, |
| "learning_rate": 4.264922332717066e-06, |
| "loss": 0.1222, |
| "step": 559 |
| }, |
| { |
| "epoch": 1.095890410958904, |
| "grad_norm": 4.383015385996326, |
| "learning_rate": 4.249723008276737e-06, |
| "loss": 0.0839, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.0978473581213306, |
| "grad_norm": 3.939030839446883, |
| "learning_rate": 4.234530773395721e-06, |
| "loss": 0.1009, |
| "step": 561 |
| }, |
| { |
| "epoch": 1.0998043052837574, |
| "grad_norm": 3.654210311865208, |
| "learning_rate": 4.219345771629333e-06, |
| "loss": 0.0917, |
| "step": 562 |
| }, |
| { |
| "epoch": 1.101761252446184, |
| "grad_norm": 4.015777302607018, |
| "learning_rate": 4.20416814646454e-06, |
| "loss": 0.1187, |
| "step": 563 |
| }, |
| { |
| "epoch": 1.1037181996086105, |
| "grad_norm": 3.926543254550016, |
| "learning_rate": 4.188998041318608e-06, |
| "loss": 0.1057, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.1056751467710373, |
| "grad_norm": 3.8357352270982212, |
| "learning_rate": 4.173835599537741e-06, |
| "loss": 0.114, |
| "step": 565 |
| }, |
| { |
| "epoch": 1.1076320939334638, |
| "grad_norm": 4.304377270476764, |
| "learning_rate": 4.158680964395734e-06, |
| "loss": 0.086, |
| "step": 566 |
| }, |
| { |
| "epoch": 1.1095890410958904, |
| "grad_norm": 5.003081380206431, |
| "learning_rate": 4.143534279092613e-06, |
| "loss": 0.1362, |
| "step": 567 |
| }, |
| { |
| "epoch": 1.111545988258317, |
| "grad_norm": 4.148975892758542, |
| "learning_rate": 4.1283956867532825e-06, |
| "loss": 0.1072, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.1135029354207435, |
| "grad_norm": 4.0171672732722845, |
| "learning_rate": 4.113265330426175e-06, |
| "loss": 0.1097, |
| "step": 569 |
| }, |
| { |
| "epoch": 1.1154598825831703, |
| "grad_norm": 4.6475354044425705, |
| "learning_rate": 4.098143353081902e-06, |
| "loss": 0.1085, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.1174168297455969, |
| "grad_norm": 6.357138337723725, |
| "learning_rate": 4.083029897611897e-06, |
| "loss": 0.1533, |
| "step": 571 |
| }, |
| { |
| "epoch": 1.1193737769080234, |
| "grad_norm": 4.4746623255372855, |
| "learning_rate": 4.067925106827068e-06, |
| "loss": 0.0903, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.1213307240704502, |
| "grad_norm": 3.9802360681692863, |
| "learning_rate": 4.052829123456448e-06, |
| "loss": 0.0998, |
| "step": 573 |
| }, |
| { |
| "epoch": 1.1232876712328768, |
| "grad_norm": 4.0328233856127795, |
| "learning_rate": 4.037742090145851e-06, |
| "loss": 0.1057, |
| "step": 574 |
| }, |
| { |
| "epoch": 1.1252446183953033, |
| "grad_norm": 5.841134293998721, |
| "learning_rate": 4.0226641494565125e-06, |
| "loss": 0.1634, |
| "step": 575 |
| }, |
| { |
| "epoch": 1.1272015655577299, |
| "grad_norm": 4.63266227242096, |
| "learning_rate": 4.007595443863755e-06, |
| "loss": 0.122, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.1291585127201564, |
| "grad_norm": 4.094466514633347, |
| "learning_rate": 3.992536115755631e-06, |
| "loss": 0.1225, |
| "step": 577 |
| }, |
| { |
| "epoch": 1.1311154598825832, |
| "grad_norm": 3.1193134142553807, |
| "learning_rate": 3.977486307431589e-06, |
| "loss": 0.0635, |
| "step": 578 |
| }, |
| { |
| "epoch": 1.1330724070450098, |
| "grad_norm": 4.307317777172555, |
| "learning_rate": 3.962446161101119e-06, |
| "loss": 0.1403, |
| "step": 579 |
| }, |
| { |
| "epoch": 1.1350293542074363, |
| "grad_norm": 3.924496871173705, |
| "learning_rate": 3.9474158188824145e-06, |
| "loss": 0.1322, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.1369863013698631, |
| "grad_norm": 3.46372160544185, |
| "learning_rate": 3.93239542280102e-06, |
| "loss": 0.1041, |
| "step": 581 |
| }, |
| { |
| "epoch": 1.1389432485322897, |
| "grad_norm": 4.428848003757606, |
| "learning_rate": 3.917385114788508e-06, |
| "loss": 0.1138, |
| "step": 582 |
| }, |
| { |
| "epoch": 1.1409001956947162, |
| "grad_norm": 4.065494758498614, |
| "learning_rate": 3.902385036681118e-06, |
| "loss": 0.0908, |
| "step": 583 |
| }, |
| { |
| "epoch": 1.1428571428571428, |
| "grad_norm": 3.3952426960192335, |
| "learning_rate": 3.887395330218429e-06, |
| "loss": 0.0779, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.1448140900195694, |
| "grad_norm": 3.8086475210808683, |
| "learning_rate": 3.87241613704201e-06, |
| "loss": 0.0881, |
| "step": 585 |
| }, |
| { |
| "epoch": 1.1467710371819961, |
| "grad_norm": 4.786659120190779, |
| "learning_rate": 3.857447598694095e-06, |
| "loss": 0.0985, |
| "step": 586 |
| }, |
| { |
| "epoch": 1.1487279843444227, |
| "grad_norm": 4.568636407392966, |
| "learning_rate": 3.842489856616231e-06, |
| "loss": 0.1028, |
| "step": 587 |
| }, |
| { |
| "epoch": 1.1506849315068493, |
| "grad_norm": 4.633244153334011, |
| "learning_rate": 3.827543052147952e-06, |
| "loss": 0.0937, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.152641878669276, |
| "grad_norm": 4.35064634519792, |
| "learning_rate": 3.8126073265254377e-06, |
| "loss": 0.0964, |
| "step": 589 |
| }, |
| { |
| "epoch": 1.1545988258317026, |
| "grad_norm": 4.538575708505469, |
| "learning_rate": 3.797682820880184e-06, |
| "loss": 0.1392, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.1565557729941291, |
| "grad_norm": 3.627035748959267, |
| "learning_rate": 3.782769676237663e-06, |
| "loss": 0.0887, |
| "step": 591 |
| }, |
| { |
| "epoch": 1.1585127201565557, |
| "grad_norm": 3.208256326995343, |
| "learning_rate": 3.7678680335159955e-06, |
| "loss": 0.0999, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.1604696673189823, |
| "grad_norm": 4.557727149904393, |
| "learning_rate": 3.7529780335246164e-06, |
| "loss": 0.1583, |
| "step": 593 |
| }, |
| { |
| "epoch": 1.162426614481409, |
| "grad_norm": 4.769441192343556, |
| "learning_rate": 3.7380998169629477e-06, |
| "loss": 0.1376, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.1643835616438356, |
| "grad_norm": 5.041161727616145, |
| "learning_rate": 3.7232335244190656e-06, |
| "loss": 0.1131, |
| "step": 595 |
| }, |
| { |
| "epoch": 1.1663405088062622, |
| "grad_norm": 4.611361808519149, |
| "learning_rate": 3.708379296368372e-06, |
| "loss": 0.1043, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.168297455968689, |
| "grad_norm": 4.586537889442937, |
| "learning_rate": 3.6935372731722686e-06, |
| "loss": 0.1162, |
| "step": 597 |
| }, |
| { |
| "epoch": 1.1702544031311155, |
| "grad_norm": 5.235542548240522, |
| "learning_rate": 3.6787075950768337e-06, |
| "loss": 0.1516, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.172211350293542, |
| "grad_norm": 4.30522601455294, |
| "learning_rate": 3.663890402211489e-06, |
| "loss": 0.1524, |
| "step": 599 |
| }, |
| { |
| "epoch": 1.1741682974559686, |
| "grad_norm": 5.656904183659268, |
| "learning_rate": 3.649085834587683e-06, |
| "loss": 0.1969, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.1761252446183952, |
| "grad_norm": 5.005785948390351, |
| "learning_rate": 3.6342940320975613e-06, |
| "loss": 0.1232, |
| "step": 601 |
| }, |
| { |
| "epoch": 1.178082191780822, |
| "grad_norm": 3.576894929918264, |
| "learning_rate": 3.6195151345126556e-06, |
| "loss": 0.0801, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.1800391389432485, |
| "grad_norm": 5.058805331277829, |
| "learning_rate": 3.6047492814825508e-06, |
| "loss": 0.1156, |
| "step": 603 |
| }, |
| { |
| "epoch": 1.181996086105675, |
| "grad_norm": 4.405281557651745, |
| "learning_rate": 3.5899966125335684e-06, |
| "loss": 0.1159, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.1839530332681019, |
| "grad_norm": 4.176822691472827, |
| "learning_rate": 3.575257267067453e-06, |
| "loss": 0.1016, |
| "step": 605 |
| }, |
| { |
| "epoch": 1.1859099804305284, |
| "grad_norm": 3.8740186490263246, |
| "learning_rate": 3.5605313843600555e-06, |
| "loss": 0.109, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.187866927592955, |
| "grad_norm": 4.037809127908, |
| "learning_rate": 3.5458191035600077e-06, |
| "loss": 0.0969, |
| "step": 607 |
| }, |
| { |
| "epoch": 1.1898238747553815, |
| "grad_norm": 4.829686627315676, |
| "learning_rate": 3.531120563687419e-06, |
| "loss": 0.1366, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.191780821917808, |
| "grad_norm": 4.790235026697958, |
| "learning_rate": 3.5164359036325483e-06, |
| "loss": 0.1474, |
| "step": 609 |
| }, |
| { |
| "epoch": 1.1937377690802349, |
| "grad_norm": 4.635265624898373, |
| "learning_rate": 3.5017652621545133e-06, |
| "loss": 0.0919, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.1956947162426614, |
| "grad_norm": 4.5939255364958, |
| "learning_rate": 3.487108777879957e-06, |
| "loss": 0.0939, |
| "step": 611 |
| }, |
| { |
| "epoch": 1.197651663405088, |
| "grad_norm": 4.0290387280577615, |
| "learning_rate": 3.4724665893017517e-06, |
| "loss": 0.0983, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.1996086105675148, |
| "grad_norm": 3.3952400519321215, |
| "learning_rate": 3.457838834777682e-06, |
| "loss": 0.0896, |
| "step": 613 |
| }, |
| { |
| "epoch": 1.2015655577299413, |
| "grad_norm": 4.251162118780837, |
| "learning_rate": 3.4432256525291468e-06, |
| "loss": 0.0982, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.203522504892368, |
| "grad_norm": 4.533237967009769, |
| "learning_rate": 3.428627180639844e-06, |
| "loss": 0.0855, |
| "step": 615 |
| }, |
| { |
| "epoch": 1.2054794520547945, |
| "grad_norm": 8.970864840003866, |
| "learning_rate": 3.4140435570544708e-06, |
| "loss": 0.1552, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.207436399217221, |
| "grad_norm": 4.601634794879478, |
| "learning_rate": 3.3994749195774173e-06, |
| "loss": 0.094, |
| "step": 617 |
| }, |
| { |
| "epoch": 1.2093933463796478, |
| "grad_norm": 4.4815721110730875, |
| "learning_rate": 3.3849214058714707e-06, |
| "loss": 0.0825, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.2113502935420744, |
| "grad_norm": 4.191696461213302, |
| "learning_rate": 3.370383153456504e-06, |
| "loss": 0.1096, |
| "step": 619 |
| }, |
| { |
| "epoch": 1.213307240704501, |
| "grad_norm": 3.8095305864914244, |
| "learning_rate": 3.355860299708187e-06, |
| "loss": 0.1057, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.2152641878669277, |
| "grad_norm": 3.694604771815809, |
| "learning_rate": 3.341352981856678e-06, |
| "loss": 0.1034, |
| "step": 621 |
| }, |
| { |
| "epoch": 1.2172211350293543, |
| "grad_norm": 4.37978090338329, |
| "learning_rate": 3.326861336985341e-06, |
| "loss": 0.0966, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.2191780821917808, |
| "grad_norm": 3.834581768500533, |
| "learning_rate": 3.3123855020294344e-06, |
| "loss": 0.0821, |
| "step": 623 |
| }, |
| { |
| "epoch": 1.2211350293542074, |
| "grad_norm": 4.051237606831452, |
| "learning_rate": 3.2979256137748283e-06, |
| "loss": 0.1114, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.223091976516634, |
| "grad_norm": 4.127875621459065, |
| "learning_rate": 3.283481808856706e-06, |
| "loss": 0.1032, |
| "step": 625 |
| }, |
| { |
| "epoch": 1.2250489236790607, |
| "grad_norm": 4.085294065740527, |
| "learning_rate": 3.269054223758279e-06, |
| "loss": 0.1135, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.2270058708414873, |
| "grad_norm": 4.0314888578409525, |
| "learning_rate": 3.2546429948094904e-06, |
| "loss": 0.0982, |
| "step": 627 |
| }, |
| { |
| "epoch": 1.2289628180039138, |
| "grad_norm": 4.991909547835494, |
| "learning_rate": 3.240248258185731e-06, |
| "loss": 0.1155, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.2309197651663406, |
| "grad_norm": 4.30060848048835, |
| "learning_rate": 3.225870149906549e-06, |
| "loss": 0.1082, |
| "step": 629 |
| }, |
| { |
| "epoch": 1.2328767123287672, |
| "grad_norm": 4.526632054214213, |
| "learning_rate": 3.2115088058343725e-06, |
| "loss": 0.1215, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.2348336594911937, |
| "grad_norm": 4.1509860255408535, |
| "learning_rate": 3.1971643616732174e-06, |
| "loss": 0.1018, |
| "step": 631 |
| }, |
| { |
| "epoch": 1.2367906066536203, |
| "grad_norm": 4.187803098650335, |
| "learning_rate": 3.182836952967405e-06, |
| "loss": 0.0968, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.2387475538160468, |
| "grad_norm": 4.604219385913459, |
| "learning_rate": 3.1685267151002856e-06, |
| "loss": 0.1068, |
| "step": 633 |
| }, |
| { |
| "epoch": 1.2407045009784736, |
| "grad_norm": 3.3409634710571976, |
| "learning_rate": 3.154233783292964e-06, |
| "loss": 0.1058, |
| "step": 634 |
| }, |
| { |
| "epoch": 1.2426614481409002, |
| "grad_norm": 4.0041008708760915, |
| "learning_rate": 3.139958292603009e-06, |
| "loss": 0.0954, |
| "step": 635 |
| }, |
| { |
| "epoch": 1.2446183953033267, |
| "grad_norm": 4.301504011447051, |
| "learning_rate": 3.125700377923186e-06, |
| "loss": 0.1085, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.2465753424657535, |
| "grad_norm": 4.086094294648085, |
| "learning_rate": 3.111460173980175e-06, |
| "loss": 0.1003, |
| "step": 637 |
| }, |
| { |
| "epoch": 1.24853228962818, |
| "grad_norm": 4.331650876182271, |
| "learning_rate": 3.097237815333315e-06, |
| "loss": 0.0989, |
| "step": 638 |
| }, |
| { |
| "epoch": 1.2504892367906066, |
| "grad_norm": 4.055333365709187, |
| "learning_rate": 3.0830334363733083e-06, |
| "loss": 0.086, |
| "step": 639 |
| }, |
| { |
| "epoch": 1.2524461839530332, |
| "grad_norm": 4.76564832692381, |
| "learning_rate": 3.068847171320969e-06, |
| "loss": 0.09, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.2544031311154598, |
| "grad_norm": 4.222102325010029, |
| "learning_rate": 3.054679154225943e-06, |
| "loss": 0.1191, |
| "step": 641 |
| }, |
| { |
| "epoch": 1.2563600782778865, |
| "grad_norm": 4.985535647898282, |
| "learning_rate": 3.0405295189654537e-06, |
| "loss": 0.0915, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.258317025440313, |
| "grad_norm": 4.349874683978335, |
| "learning_rate": 3.0263983992430245e-06, |
| "loss": 0.1067, |
| "step": 643 |
| }, |
| { |
| "epoch": 1.2602739726027397, |
| "grad_norm": 5.601341250740894, |
| "learning_rate": 3.0122859285872214e-06, |
| "loss": 0.0791, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.2622309197651664, |
| "grad_norm": 4.103488698989504, |
| "learning_rate": 2.9981922403503914e-06, |
| "loss": 0.1444, |
| "step": 645 |
| }, |
| { |
| "epoch": 1.264187866927593, |
| "grad_norm": 3.824444909674875, |
| "learning_rate": 2.9841174677074037e-06, |
| "loss": 0.0987, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.2661448140900196, |
| "grad_norm": 4.636973607686505, |
| "learning_rate": 2.9700617436543854e-06, |
| "loss": 0.1303, |
| "step": 647 |
| }, |
| { |
| "epoch": 1.2681017612524461, |
| "grad_norm": 3.922935975531288, |
| "learning_rate": 2.956025201007471e-06, |
| "loss": 0.1115, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.2700587084148727, |
| "grad_norm": 3.596575493350506, |
| "learning_rate": 2.942007972401542e-06, |
| "loss": 0.0732, |
| "step": 649 |
| }, |
| { |
| "epoch": 1.2720156555772995, |
| "grad_norm": 3.4101569321588268, |
| "learning_rate": 2.9280101902889823e-06, |
| "loss": 0.1194, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.273972602739726, |
| "grad_norm": 4.26636078461191, |
| "learning_rate": 2.914031986938417e-06, |
| "loss": 0.0983, |
| "step": 651 |
| }, |
| { |
| "epoch": 1.2759295499021526, |
| "grad_norm": 4.5637006891458, |
| "learning_rate": 2.900073494433464e-06, |
| "loss": 0.0933, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.2778864970645794, |
| "grad_norm": 4.312397198954907, |
| "learning_rate": 2.8861348446714942e-06, |
| "loss": 0.1012, |
| "step": 653 |
| }, |
| { |
| "epoch": 1.279843444227006, |
| "grad_norm": 4.924347455781268, |
| "learning_rate": 2.8722161693623775e-06, |
| "loss": 0.0856, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.2818003913894325, |
| "grad_norm": 3.8887584087213205, |
| "learning_rate": 2.858317600027235e-06, |
| "loss": 0.0828, |
| "step": 655 |
| }, |
| { |
| "epoch": 1.283757338551859, |
| "grad_norm": 3.9371362930444778, |
| "learning_rate": 2.8444392679972107e-06, |
| "loss": 0.1173, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.2857142857142856, |
| "grad_norm": 4.416463259781775, |
| "learning_rate": 2.83058130441221e-06, |
| "loss": 0.1164, |
| "step": 657 |
| }, |
| { |
| "epoch": 1.2876712328767124, |
| "grad_norm": 4.527433807032585, |
| "learning_rate": 2.816743840219681e-06, |
| "loss": 0.1079, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.289628180039139, |
| "grad_norm": 4.546250560220797, |
| "learning_rate": 2.802927006173365e-06, |
| "loss": 0.1389, |
| "step": 659 |
| }, |
| { |
| "epoch": 1.2915851272015655, |
| "grad_norm": 4.3277137131144885, |
| "learning_rate": 2.789130932832059e-06, |
| "loss": 0.1051, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.2935420743639923, |
| "grad_norm": 3.642567385937408, |
| "learning_rate": 2.7753557505583945e-06, |
| "loss": 0.0962, |
| "step": 661 |
| }, |
| { |
| "epoch": 1.2954990215264188, |
| "grad_norm": 3.8005901340456787, |
| "learning_rate": 2.7616015895175953e-06, |
| "loss": 0.0735, |
| "step": 662 |
| }, |
| { |
| "epoch": 1.2974559686888454, |
| "grad_norm": 4.3017342589682865, |
| "learning_rate": 2.747868579676244e-06, |
| "loss": 0.1163, |
| "step": 663 |
| }, |
| { |
| "epoch": 1.299412915851272, |
| "grad_norm": 4.432359070370526, |
| "learning_rate": 2.734156850801071e-06, |
| "loss": 0.1087, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.3013698630136985, |
| "grad_norm": 3.0833078986955114, |
| "learning_rate": 2.720466532457707e-06, |
| "loss": 0.0803, |
| "step": 665 |
| }, |
| { |
| "epoch": 1.3033268101761253, |
| "grad_norm": 4.2306950779509, |
| "learning_rate": 2.706797754009476e-06, |
| "loss": 0.1002, |
| "step": 666 |
| }, |
| { |
| "epoch": 1.3052837573385518, |
| "grad_norm": 3.9597272758415887, |
| "learning_rate": 2.693150644616165e-06, |
| "loss": 0.1022, |
| "step": 667 |
| }, |
| { |
| "epoch": 1.3072407045009784, |
| "grad_norm": 5.4859974643562355, |
| "learning_rate": 2.6795253332327996e-06, |
| "loss": 0.1213, |
| "step": 668 |
| }, |
| { |
| "epoch": 1.3091976516634052, |
| "grad_norm": 5.029114699056662, |
| "learning_rate": 2.6659219486084366e-06, |
| "loss": 0.1118, |
| "step": 669 |
| }, |
| { |
| "epoch": 1.3111545988258317, |
| "grad_norm": 4.121544655471224, |
| "learning_rate": 2.652340619284939e-06, |
| "loss": 0.1213, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.3131115459882583, |
| "grad_norm": 3.425692929757487, |
| "learning_rate": 2.638781473595765e-06, |
| "loss": 0.0913, |
| "step": 671 |
| }, |
| { |
| "epoch": 1.3150684931506849, |
| "grad_norm": 4.574852463029062, |
| "learning_rate": 2.6252446396647503e-06, |
| "loss": 0.1332, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.3170254403131114, |
| "grad_norm": 5.55055673749442, |
| "learning_rate": 2.6117302454049e-06, |
| "loss": 0.1572, |
| "step": 673 |
| }, |
| { |
| "epoch": 1.3189823874755382, |
| "grad_norm": 4.041368706442845, |
| "learning_rate": 2.5982384185171905e-06, |
| "loss": 0.1381, |
| "step": 674 |
| }, |
| { |
| "epoch": 1.3209393346379648, |
| "grad_norm": 3.727232760905372, |
| "learning_rate": 2.5847692864893423e-06, |
| "loss": 0.0742, |
| "step": 675 |
| }, |
| { |
| "epoch": 1.3228962818003913, |
| "grad_norm": 4.104707188170785, |
| "learning_rate": 2.5713229765946355e-06, |
| "loss": 0.1201, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.324853228962818, |
| "grad_norm": 4.681418699710262, |
| "learning_rate": 2.5578996158906887e-06, |
| "loss": 0.1658, |
| "step": 677 |
| }, |
| { |
| "epoch": 1.3268101761252447, |
| "grad_norm": 5.388307896986021, |
| "learning_rate": 2.544499331218274e-06, |
| "loss": 0.1415, |
| "step": 678 |
| }, |
| { |
| "epoch": 1.3287671232876712, |
| "grad_norm": 3.4725472053622415, |
| "learning_rate": 2.531122249200114e-06, |
| "loss": 0.0638, |
| "step": 679 |
| }, |
| { |
| "epoch": 1.3307240704500978, |
| "grad_norm": 5.667203879769777, |
| "learning_rate": 2.5177684962396764e-06, |
| "loss": 0.1057, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.3326810176125243, |
| "grad_norm": 4.8983379294489096, |
| "learning_rate": 2.504438198519992e-06, |
| "loss": 0.0964, |
| "step": 681 |
| }, |
| { |
| "epoch": 1.3346379647749511, |
| "grad_norm": 4.786030249542537, |
| "learning_rate": 2.4911314820024566e-06, |
| "loss": 0.123, |
| "step": 682 |
| }, |
| { |
| "epoch": 1.3365949119373777, |
| "grad_norm": 3.936306185468257, |
| "learning_rate": 2.477848472425638e-06, |
| "loss": 0.132, |
| "step": 683 |
| }, |
| { |
| "epoch": 1.3385518590998042, |
| "grad_norm": 3.66918039539949, |
| "learning_rate": 2.464589295304096e-06, |
| "loss": 0.0977, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.340508806262231, |
| "grad_norm": 3.6042676901706154, |
| "learning_rate": 2.4513540759271855e-06, |
| "loss": 0.0902, |
| "step": 685 |
| }, |
| { |
| "epoch": 1.3424657534246576, |
| "grad_norm": 4.635772831443448, |
| "learning_rate": 2.438142939357882e-06, |
| "loss": 0.0872, |
| "step": 686 |
| }, |
| { |
| "epoch": 1.3444227005870841, |
| "grad_norm": 4.090952940459669, |
| "learning_rate": 2.4249560104315992e-06, |
| "loss": 0.1198, |
| "step": 687 |
| }, |
| { |
| "epoch": 1.3463796477495107, |
| "grad_norm": 5.525263519303322, |
| "learning_rate": 2.4117934137550003e-06, |
| "loss": 0.097, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.3483365949119372, |
| "grad_norm": 4.643828656126609, |
| "learning_rate": 2.398655273704831e-06, |
| "loss": 0.1459, |
| "step": 689 |
| }, |
| { |
| "epoch": 1.350293542074364, |
| "grad_norm": 5.705186158847531, |
| "learning_rate": 2.38554171442674e-06, |
| "loss": 0.1288, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.3522504892367906, |
| "grad_norm": 5.126077844851997, |
| "learning_rate": 2.3724528598341033e-06, |
| "loss": 0.1562, |
| "step": 691 |
| }, |
| { |
| "epoch": 1.3542074363992171, |
| "grad_norm": 4.056597661156506, |
| "learning_rate": 2.35938883360686e-06, |
| "loss": 0.1181, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.356164383561644, |
| "grad_norm": 3.8782865033879097, |
| "learning_rate": 2.346349759190332e-06, |
| "loss": 0.0763, |
| "step": 693 |
| }, |
| { |
| "epoch": 1.3581213307240705, |
| "grad_norm": 4.092232630772243, |
| "learning_rate": 2.3333357597940794e-06, |
| "loss": 0.1032, |
| "step": 694 |
| }, |
| { |
| "epoch": 1.360078277886497, |
| "grad_norm": 5.933122244109139, |
| "learning_rate": 2.3203469583907074e-06, |
| "loss": 0.1315, |
| "step": 695 |
| }, |
| { |
| "epoch": 1.3620352250489236, |
| "grad_norm": 4.03361305377233, |
| "learning_rate": 2.3073834777147237e-06, |
| "loss": 0.0963, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.3639921722113502, |
| "grad_norm": 4.2204673155858625, |
| "learning_rate": 2.294445440261377e-06, |
| "loss": 0.1301, |
| "step": 697 |
| }, |
| { |
| "epoch": 1.365949119373777, |
| "grad_norm": 3.2576799078387038, |
| "learning_rate": 2.2815329682854944e-06, |
| "loss": 0.0807, |
| "step": 698 |
| }, |
| { |
| "epoch": 1.3679060665362035, |
| "grad_norm": 3.389882588976581, |
| "learning_rate": 2.2686461838003294e-06, |
| "loss": 0.1283, |
| "step": 699 |
| }, |
| { |
| "epoch": 1.36986301369863, |
| "grad_norm": 4.468598994190494, |
| "learning_rate": 2.2557852085764053e-06, |
| "loss": 0.11, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.3718199608610568, |
| "grad_norm": 3.6600884894509234, |
| "learning_rate": 2.2429501641403657e-06, |
| "loss": 0.0818, |
| "step": 701 |
| }, |
| { |
| "epoch": 1.3737769080234834, |
| "grad_norm": 2.9026420320153608, |
| "learning_rate": 2.230141171773836e-06, |
| "loss": 0.0589, |
| "step": 702 |
| }, |
| { |
| "epoch": 1.37573385518591, |
| "grad_norm": 5.025380566630861, |
| "learning_rate": 2.2173583525122595e-06, |
| "loss": 0.0925, |
| "step": 703 |
| }, |
| { |
| "epoch": 1.3776908023483365, |
| "grad_norm": 4.004584187374531, |
| "learning_rate": 2.204601827143769e-06, |
| "loss": 0.1288, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.379647749510763, |
| "grad_norm": 4.19863751533383, |
| "learning_rate": 2.1918717162080343e-06, |
| "loss": 0.1159, |
| "step": 705 |
| }, |
| { |
| "epoch": 1.3816046966731899, |
| "grad_norm": 4.927323619174963, |
| "learning_rate": 2.179168139995134e-06, |
| "loss": 0.1494, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.3835616438356164, |
| "grad_norm": 4.069960300489032, |
| "learning_rate": 2.1664912185444127e-06, |
| "loss": 0.0865, |
| "step": 707 |
| }, |
| { |
| "epoch": 1.385518590998043, |
| "grad_norm": 4.181239564893912, |
| "learning_rate": 2.1538410716433415e-06, |
| "loss": 0.1049, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.3874755381604698, |
| "grad_norm": 3.959792597808117, |
| "learning_rate": 2.1412178188263975e-06, |
| "loss": 0.0907, |
| "step": 709 |
| }, |
| { |
| "epoch": 1.3894324853228963, |
| "grad_norm": 5.082736894531653, |
| "learning_rate": 2.12862157937393e-06, |
| "loss": 0.0782, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.3913894324853229, |
| "grad_norm": 3.799245466684116, |
| "learning_rate": 2.1160524723110253e-06, |
| "loss": 0.1168, |
| "step": 711 |
| }, |
| { |
| "epoch": 1.3933463796477494, |
| "grad_norm": 5.58059240889469, |
| "learning_rate": 2.103510616406396e-06, |
| "loss": 0.153, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.395303326810176, |
| "grad_norm": 3.204411617341165, |
| "learning_rate": 2.090996130171245e-06, |
| "loss": 0.0787, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.3972602739726028, |
| "grad_norm": 4.073472622110459, |
| "learning_rate": 2.0785091318581577e-06, |
| "loss": 0.0856, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.3992172211350293, |
| "grad_norm": 4.670662908762378, |
| "learning_rate": 2.0660497394599786e-06, |
| "loss": 0.136, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.401174168297456, |
| "grad_norm": 3.332675018296197, |
| "learning_rate": 2.053618070708691e-06, |
| "loss": 0.0621, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.4031311154598827, |
| "grad_norm": 4.948104796582292, |
| "learning_rate": 2.0412142430743175e-06, |
| "loss": 0.1022, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.4050880626223092, |
| "grad_norm": 5.048003482955334, |
| "learning_rate": 2.0288383737638007e-06, |
| "loss": 0.1036, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.4070450097847358, |
| "grad_norm": 3.6991582872191735, |
| "learning_rate": 2.0164905797198936e-06, |
| "loss": 0.1002, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.4090019569471623, |
| "grad_norm": 4.19355034898029, |
| "learning_rate": 2.004170977620066e-06, |
| "loss": 0.0842, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.410958904109589, |
| "grad_norm": 4.191950665005964, |
| "learning_rate": 1.991879683875386e-06, |
| "loss": 0.1042, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.4129158512720157, |
| "grad_norm": 4.101426383602823, |
| "learning_rate": 1.9796168146294415e-06, |
| "loss": 0.113, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.4148727984344422, |
| "grad_norm": 3.134397524419852, |
| "learning_rate": 1.9673824857572173e-06, |
| "loss": 0.0689, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.4168297455968688, |
| "grad_norm": 5.310618177534311, |
| "learning_rate": 1.9551768128640173e-06, |
| "loss": 0.0819, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.4187866927592956, |
| "grad_norm": 3.508760041022022, |
| "learning_rate": 1.94299991128437e-06, |
| "loss": 0.102, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.4207436399217221, |
| "grad_norm": 3.8789099081159577, |
| "learning_rate": 1.9308518960809353e-06, |
| "loss": 0.1, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.4227005870841487, |
| "grad_norm": 6.032817549077063, |
| "learning_rate": 1.9187328820434185e-06, |
| "loss": 0.0997, |
| "step": 727 |
| }, |
| { |
| "epoch": 1.4246575342465753, |
| "grad_norm": 4.494274152219219, |
| "learning_rate": 1.9066429836874844e-06, |
| "loss": 0.0945, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.4266144814090018, |
| "grad_norm": 6.889957964725863, |
| "learning_rate": 1.894582315253673e-06, |
| "loss": 0.1534, |
| "step": 729 |
| }, |
| { |
| "epoch": 1.4285714285714286, |
| "grad_norm": 4.202135214122852, |
| "learning_rate": 1.8825509907063328e-06, |
| "loss": 0.0975, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.4305283757338552, |
| "grad_norm": 4.000604280540298, |
| "learning_rate": 1.8705491237325246e-06, |
| "loss": 0.1123, |
| "step": 731 |
| }, |
| { |
| "epoch": 1.4324853228962817, |
| "grad_norm": 4.338135145949269, |
| "learning_rate": 1.8585768277409639e-06, |
| "loss": 0.0988, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.4344422700587085, |
| "grad_norm": 6.619905672563791, |
| "learning_rate": 1.8466342158609367e-06, |
| "loss": 0.1322, |
| "step": 733 |
| }, |
| { |
| "epoch": 1.436399217221135, |
| "grad_norm": 4.106083568425506, |
| "learning_rate": 1.83472140094124e-06, |
| "loss": 0.1022, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.4383561643835616, |
| "grad_norm": 3.657932789120766, |
| "learning_rate": 1.8228384955491136e-06, |
| "loss": 0.0752, |
| "step": 735 |
| }, |
| { |
| "epoch": 1.4403131115459882, |
| "grad_norm": 4.304650745062036, |
| "learning_rate": 1.8109856119691672e-06, |
| "loss": 0.0865, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.4422700587084147, |
| "grad_norm": 4.003359154786925, |
| "learning_rate": 1.7991628622023338e-06, |
| "loss": 0.1294, |
| "step": 737 |
| }, |
| { |
| "epoch": 1.4442270058708415, |
| "grad_norm": 3.4603450520787793, |
| "learning_rate": 1.7873703579648033e-06, |
| "loss": 0.0708, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.446183953033268, |
| "grad_norm": 3.983121159074429, |
| "learning_rate": 1.775608210686962e-06, |
| "loss": 0.0952, |
| "step": 739 |
| }, |
| { |
| "epoch": 1.4481409001956946, |
| "grad_norm": 3.6177033406849564, |
| "learning_rate": 1.763876531512354e-06, |
| "loss": 0.115, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.4500978473581214, |
| "grad_norm": 5.175037019106732, |
| "learning_rate": 1.7521754312966155e-06, |
| "loss": 0.114, |
| "step": 741 |
| }, |
| { |
| "epoch": 1.452054794520548, |
| "grad_norm": 4.089364384320706, |
| "learning_rate": 1.7405050206064372e-06, |
| "loss": 0.1044, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.4540117416829745, |
| "grad_norm": 4.490165834611527, |
| "learning_rate": 1.7288654097185193e-06, |
| "loss": 0.1195, |
| "step": 743 |
| }, |
| { |
| "epoch": 1.455968688845401, |
| "grad_norm": 4.229761822862125, |
| "learning_rate": 1.7172567086185205e-06, |
| "loss": 0.0975, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.4579256360078277, |
| "grad_norm": 3.966347722764435, |
| "learning_rate": 1.7056790270000302e-06, |
| "loss": 0.0972, |
| "step": 745 |
| }, |
| { |
| "epoch": 1.4598825831702544, |
| "grad_norm": 4.00027510429979, |
| "learning_rate": 1.6941324742635262e-06, |
| "loss": 0.0998, |
| "step": 746 |
| }, |
| { |
| "epoch": 1.461839530332681, |
| "grad_norm": 5.12166774551642, |
| "learning_rate": 1.682617159515338e-06, |
| "loss": 0.1481, |
| "step": 747 |
| }, |
| { |
| "epoch": 1.4637964774951076, |
| "grad_norm": 5.637358175809999, |
| "learning_rate": 1.671133191566624e-06, |
| "loss": 0.1753, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.4657534246575343, |
| "grad_norm": 3.762243937537723, |
| "learning_rate": 1.6596806789323317e-06, |
| "loss": 0.0845, |
| "step": 749 |
| }, |
| { |
| "epoch": 1.467710371819961, |
| "grad_norm": 4.124521563774588, |
| "learning_rate": 1.6482597298301917e-06, |
| "loss": 0.106, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.4696673189823874, |
| "grad_norm": 4.471690793857254, |
| "learning_rate": 1.6368704521796702e-06, |
| "loss": 0.1495, |
| "step": 751 |
| }, |
| { |
| "epoch": 1.471624266144814, |
| "grad_norm": 4.599703909847743, |
| "learning_rate": 1.625512953600966e-06, |
| "loss": 0.1083, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.4735812133072406, |
| "grad_norm": 4.054891411347765, |
| "learning_rate": 1.6141873414139914e-06, |
| "loss": 0.0684, |
| "step": 753 |
| }, |
| { |
| "epoch": 1.4755381604696673, |
| "grad_norm": 4.409892152394275, |
| "learning_rate": 1.6028937226373536e-06, |
| "loss": 0.1372, |
| "step": 754 |
| }, |
| { |
| "epoch": 1.477495107632094, |
| "grad_norm": 4.442490501223262, |
| "learning_rate": 1.5916322039873494e-06, |
| "loss": 0.097, |
| "step": 755 |
| }, |
| { |
| "epoch": 1.4794520547945205, |
| "grad_norm": 3.5630364003593176, |
| "learning_rate": 1.5804028918769488e-06, |
| "loss": 0.0711, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.4814090019569472, |
| "grad_norm": 5.587777214599153, |
| "learning_rate": 1.5692058924147924e-06, |
| "loss": 0.1225, |
| "step": 757 |
| }, |
| { |
| "epoch": 1.4833659491193738, |
| "grad_norm": 3.638905958619047, |
| "learning_rate": 1.5580413114042003e-06, |
| "loss": 0.1015, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.4853228962818004, |
| "grad_norm": 4.075146646238057, |
| "learning_rate": 1.546909254342151e-06, |
| "loss": 0.0786, |
| "step": 759 |
| }, |
| { |
| "epoch": 1.487279843444227, |
| "grad_norm": 4.058063242918111, |
| "learning_rate": 1.535809826418304e-06, |
| "loss": 0.0952, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.4892367906066535, |
| "grad_norm": 3.8393281221065347, |
| "learning_rate": 1.524743132513991e-06, |
| "loss": 0.0743, |
| "step": 761 |
| }, |
| { |
| "epoch": 1.4911937377690803, |
| "grad_norm": 3.4820695237464845, |
| "learning_rate": 1.513709277201237e-06, |
| "loss": 0.1466, |
| "step": 762 |
| }, |
| { |
| "epoch": 1.4931506849315068, |
| "grad_norm": 4.216181435618773, |
| "learning_rate": 1.5027083647417657e-06, |
| "loss": 0.1082, |
| "step": 763 |
| }, |
| { |
| "epoch": 1.4951076320939334, |
| "grad_norm": 4.144699973695991, |
| "learning_rate": 1.4917404990860112e-06, |
| "loss": 0.1176, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.4970645792563602, |
| "grad_norm": 3.596951619296797, |
| "learning_rate": 1.4808057838721451e-06, |
| "loss": 0.0984, |
| "step": 765 |
| }, |
| { |
| "epoch": 1.4990215264187867, |
| "grad_norm": 3.6962030668906354, |
| "learning_rate": 1.4699043224250919e-06, |
| "loss": 0.1018, |
| "step": 766 |
| }, |
| { |
| "epoch": 1.5009784735812133, |
| "grad_norm": 3.7680131258772334, |
| "learning_rate": 1.4590362177555467e-06, |
| "loss": 0.0884, |
| "step": 767 |
| }, |
| { |
| "epoch": 1.5029354207436398, |
| "grad_norm": 3.661003862254977, |
| "learning_rate": 1.4482015725590159e-06, |
| "loss": 0.0828, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.5048923679060664, |
| "grad_norm": 3.6196156619411335, |
| "learning_rate": 1.4374004892148312e-06, |
| "loss": 0.0943, |
| "step": 769 |
| }, |
| { |
| "epoch": 1.5068493150684932, |
| "grad_norm": 4.133367368271562, |
| "learning_rate": 1.4266330697851955e-06, |
| "loss": 0.1403, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.5088062622309197, |
| "grad_norm": 5.213092855736633, |
| "learning_rate": 1.4158994160142114e-06, |
| "loss": 0.0978, |
| "step": 771 |
| }, |
| { |
| "epoch": 1.5107632093933465, |
| "grad_norm": 3.944765480507759, |
| "learning_rate": 1.4051996293269182e-06, |
| "loss": 0.0842, |
| "step": 772 |
| }, |
| { |
| "epoch": 1.512720156555773, |
| "grad_norm": 3.239068346820176, |
| "learning_rate": 1.3945338108283402e-06, |
| "loss": 0.0843, |
| "step": 773 |
| }, |
| { |
| "epoch": 1.5146771037181996, |
| "grad_norm": 4.034587607639025, |
| "learning_rate": 1.3839020613025267e-06, |
| "loss": 0.0936, |
| "step": 774 |
| }, |
| { |
| "epoch": 1.5166340508806262, |
| "grad_norm": 4.585009877276245, |
| "learning_rate": 1.373304481211598e-06, |
| "loss": 0.0811, |
| "step": 775 |
| }, |
| { |
| "epoch": 1.5185909980430528, |
| "grad_norm": 5.276366545353715, |
| "learning_rate": 1.3627411706948029e-06, |
| "loss": 0.1167, |
| "step": 776 |
| }, |
| { |
| "epoch": 1.5205479452054793, |
| "grad_norm": 4.735381227056701, |
| "learning_rate": 1.3522122295675616e-06, |
| "loss": 0.1015, |
| "step": 777 |
| }, |
| { |
| "epoch": 1.522504892367906, |
| "grad_norm": 4.772944159360071, |
| "learning_rate": 1.3417177573205399e-06, |
| "loss": 0.1277, |
| "step": 778 |
| }, |
| { |
| "epoch": 1.5244618395303327, |
| "grad_norm": 3.7533395966581207, |
| "learning_rate": 1.3312578531186881e-06, |
| "loss": 0.085, |
| "step": 779 |
| }, |
| { |
| "epoch": 1.5264187866927594, |
| "grad_norm": 3.38401424902449, |
| "learning_rate": 1.3208326158003171e-06, |
| "loss": 0.0607, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.528375733855186, |
| "grad_norm": 3.6739884665631544, |
| "learning_rate": 1.310442143876164e-06, |
| "loss": 0.1113, |
| "step": 781 |
| }, |
| { |
| "epoch": 1.5303326810176126, |
| "grad_norm": 4.158483660891016, |
| "learning_rate": 1.3000865355284565e-06, |
| "loss": 0.1177, |
| "step": 782 |
| }, |
| { |
| "epoch": 1.532289628180039, |
| "grad_norm": 5.102055312682666, |
| "learning_rate": 1.2897658886099906e-06, |
| "loss": 0.0941, |
| "step": 783 |
| }, |
| { |
| "epoch": 1.5342465753424657, |
| "grad_norm": 3.3010993411903558, |
| "learning_rate": 1.2794803006431984e-06, |
| "loss": 0.0656, |
| "step": 784 |
| }, |
| { |
| "epoch": 1.5362035225048922, |
| "grad_norm": 5.227220951700021, |
| "learning_rate": 1.2692298688192301e-06, |
| "loss": 0.1157, |
| "step": 785 |
| }, |
| { |
| "epoch": 1.538160469667319, |
| "grad_norm": 3.858497037240051, |
| "learning_rate": 1.2590146899970446e-06, |
| "loss": 0.1068, |
| "step": 786 |
| }, |
| { |
| "epoch": 1.5401174168297456, |
| "grad_norm": 4.5648938430233095, |
| "learning_rate": 1.2488348607024781e-06, |
| "loss": 0.0897, |
| "step": 787 |
| }, |
| { |
| "epoch": 1.5420743639921723, |
| "grad_norm": 4.016902406262026, |
| "learning_rate": 1.2386904771273444e-06, |
| "loss": 0.1102, |
| "step": 788 |
| }, |
| { |
| "epoch": 1.544031311154599, |
| "grad_norm": 3.9388716251170273, |
| "learning_rate": 1.228581635128518e-06, |
| "loss": 0.1231, |
| "step": 789 |
| }, |
| { |
| "epoch": 1.5459882583170255, |
| "grad_norm": 5.79838038861396, |
| "learning_rate": 1.218508430227035e-06, |
| "loss": 0.1206, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.547945205479452, |
| "grad_norm": 5.640803236021753, |
| "learning_rate": 1.2084709576071885e-06, |
| "loss": 0.1138, |
| "step": 791 |
| }, |
| { |
| "epoch": 1.5499021526418786, |
| "grad_norm": 4.48616466648149, |
| "learning_rate": 1.1984693121156226e-06, |
| "loss": 0.1269, |
| "step": 792 |
| }, |
| { |
| "epoch": 1.5518590998043051, |
| "grad_norm": 5.28421998471667, |
| "learning_rate": 1.1885035882604478e-06, |
| "loss": 0.1022, |
| "step": 793 |
| }, |
| { |
| "epoch": 1.553816046966732, |
| "grad_norm": 4.245951087215516, |
| "learning_rate": 1.1785738802103396e-06, |
| "loss": 0.1062, |
| "step": 794 |
| }, |
| { |
| "epoch": 1.5557729941291585, |
| "grad_norm": 5.537592750305017, |
| "learning_rate": 1.1686802817936488e-06, |
| "loss": 0.1665, |
| "step": 795 |
| }, |
| { |
| "epoch": 1.5577299412915853, |
| "grad_norm": 4.468206727039808, |
| "learning_rate": 1.158822886497521e-06, |
| "loss": 0.0931, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.5596868884540118, |
| "grad_norm": 3.6068876424920644, |
| "learning_rate": 1.1490017874670046e-06, |
| "loss": 0.0867, |
| "step": 797 |
| }, |
| { |
| "epoch": 1.5616438356164384, |
| "grad_norm": 5.373299678033352, |
| "learning_rate": 1.1392170775041788e-06, |
| "loss": 0.1018, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.563600782778865, |
| "grad_norm": 5.961069680812374, |
| "learning_rate": 1.1294688490672744e-06, |
| "loss": 0.0962, |
| "step": 799 |
| }, |
| { |
| "epoch": 1.5655577299412915, |
| "grad_norm": 3.833800229636545, |
| "learning_rate": 1.1197571942697938e-06, |
| "loss": 0.0928, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.567514677103718, |
| "grad_norm": 4.680627587667085, |
| "learning_rate": 1.1100822048796494e-06, |
| "loss": 0.1237, |
| "step": 801 |
| }, |
| { |
| "epoch": 1.5694716242661448, |
| "grad_norm": 3.9270763404939357, |
| "learning_rate": 1.1004439723182942e-06, |
| "loss": 0.0844, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.5714285714285714, |
| "grad_norm": 3.7033113007217544, |
| "learning_rate": 1.0908425876598512e-06, |
| "loss": 0.0843, |
| "step": 803 |
| }, |
| { |
| "epoch": 1.5733855185909982, |
| "grad_norm": 3.7769570716701506, |
| "learning_rate": 1.0812781416302642e-06, |
| "loss": 0.0979, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.5753424657534247, |
| "grad_norm": 4.98510035748949, |
| "learning_rate": 1.0717507246064273e-06, |
| "loss": 0.1265, |
| "step": 805 |
| }, |
| { |
| "epoch": 1.5772994129158513, |
| "grad_norm": 4.479493447103632, |
| "learning_rate": 1.0622604266153485e-06, |
| "loss": 0.0879, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.5792563600782779, |
| "grad_norm": 4.742786631464434, |
| "learning_rate": 1.0528073373332782e-06, |
| "loss": 0.1316, |
| "step": 807 |
| }, |
| { |
| "epoch": 1.5812133072407044, |
| "grad_norm": 4.061545947758483, |
| "learning_rate": 1.0433915460848764e-06, |
| "loss": 0.1188, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.583170254403131, |
| "grad_norm": 4.076719976894027, |
| "learning_rate": 1.0340131418423648e-06, |
| "loss": 0.106, |
| "step": 809 |
| }, |
| { |
| "epoch": 1.5851272015655578, |
| "grad_norm": 3.490821526652022, |
| "learning_rate": 1.0246722132246857e-06, |
| "loss": 0.06, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.5870841487279843, |
| "grad_norm": 4.446865774578547, |
| "learning_rate": 1.015368848496665e-06, |
| "loss": 0.1355, |
| "step": 811 |
| }, |
| { |
| "epoch": 1.589041095890411, |
| "grad_norm": 5.654816419080488, |
| "learning_rate": 1.0061031355681766e-06, |
| "loss": 0.1396, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.5909980430528377, |
| "grad_norm": 4.0856070366401696, |
| "learning_rate": 9.968751619933104e-07, |
| "loss": 0.1225, |
| "step": 813 |
| }, |
| { |
| "epoch": 1.5929549902152642, |
| "grad_norm": 3.768686043330507, |
| "learning_rate": 9.876850149695556e-07, |
| "loss": 0.0813, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.5949119373776908, |
| "grad_norm": 4.337549329697585, |
| "learning_rate": 9.785327813369589e-07, |
| "loss": 0.1429, |
| "step": 815 |
| }, |
| { |
| "epoch": 1.5968688845401173, |
| "grad_norm": 3.856352942446284, |
| "learning_rate": 9.694185475773216e-07, |
| "loss": 0.1076, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.5988258317025439, |
| "grad_norm": 4.266743378582753, |
| "learning_rate": 9.603423998133682e-07, |
| "loss": 0.156, |
| "step": 817 |
| }, |
| { |
| "epoch": 1.6007827788649707, |
| "grad_norm": 6.042302521390855, |
| "learning_rate": 9.513044238079428e-07, |
| "loss": 0.0984, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.6027397260273972, |
| "grad_norm": 4.475198995158474, |
| "learning_rate": 9.423047049631956e-07, |
| "loss": 0.1148, |
| "step": 819 |
| }, |
| { |
| "epoch": 1.604696673189824, |
| "grad_norm": 6.117812098789544, |
| "learning_rate": 9.333433283197702e-07, |
| "loss": 0.1527, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.6066536203522506, |
| "grad_norm": 4.097813928060744, |
| "learning_rate": 9.244203785560096e-07, |
| "loss": 0.106, |
| "step": 821 |
| }, |
| { |
| "epoch": 1.6086105675146771, |
| "grad_norm": 4.604570218042885, |
| "learning_rate": 9.15535939987151e-07, |
| "loss": 0.1065, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.6105675146771037, |
| "grad_norm": 5.111783181647974, |
| "learning_rate": 9.066900965645259e-07, |
| "loss": 0.1625, |
| "step": 823 |
| }, |
| { |
| "epoch": 1.6125244618395302, |
| "grad_norm": 3.6084675312969288, |
| "learning_rate": 8.978829318747745e-07, |
| "loss": 0.0975, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.6144814090019568, |
| "grad_norm": 4.364483990378606, |
| "learning_rate": 8.891145291390479e-07, |
| "loss": 0.0993, |
| "step": 825 |
| }, |
| { |
| "epoch": 1.6164383561643836, |
| "grad_norm": 4.953846209166518, |
| "learning_rate": 8.803849712122292e-07, |
| "loss": 0.1059, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.6183953033268101, |
| "grad_norm": 4.542337976588578, |
| "learning_rate": 8.71694340582146e-07, |
| "loss": 0.117, |
| "step": 827 |
| }, |
| { |
| "epoch": 1.620352250489237, |
| "grad_norm": 3.826586909937013, |
| "learning_rate": 8.63042719368789e-07, |
| "loss": 0.0785, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.6223091976516635, |
| "grad_norm": 4.673280301311597, |
| "learning_rate": 8.544301893235424e-07, |
| "loss": 0.139, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.62426614481409, |
| "grad_norm": 4.189603489319969, |
| "learning_rate": 8.458568318284088e-07, |
| "loss": 0.114, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.6262230919765166, |
| "grad_norm": 3.7995138231635948, |
| "learning_rate": 8.37322727895234e-07, |
| "loss": 0.0814, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.6281800391389432, |
| "grad_norm": 3.9524713903530797, |
| "learning_rate": 8.288279581649528e-07, |
| "loss": 0.0918, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.6301369863013697, |
| "grad_norm": 3.3612114691279666, |
| "learning_rate": 8.203726029068149e-07, |
| "loss": 0.072, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.6320939334637965, |
| "grad_norm": 4.021264247212569, |
| "learning_rate": 8.119567420176411e-07, |
| "loss": 0.1012, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.634050880626223, |
| "grad_norm": 4.365471467810539, |
| "learning_rate": 8.035804550210524e-07, |
| "loss": 0.1206, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.6360078277886498, |
| "grad_norm": 3.66662093579357, |
| "learning_rate": 7.952438210667268e-07, |
| "loss": 0.0986, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.6379647749510764, |
| "grad_norm": 3.9768121559210243, |
| "learning_rate": 7.869469189296536e-07, |
| "loss": 0.0622, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.639921722113503, |
| "grad_norm": 4.547297414460687, |
| "learning_rate": 7.786898270093846e-07, |
| "loss": 0.101, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.6418786692759295, |
| "grad_norm": 3.5194567700541697, |
| "learning_rate": 7.704726233292953e-07, |
| "loss": 0.0879, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.643835616438356, |
| "grad_norm": 4.172024139332337, |
| "learning_rate": 7.622953855358456e-07, |
| "loss": 0.0607, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.6457925636007826, |
| "grad_norm": 3.9888254956835016, |
| "learning_rate": 7.541581908978457e-07, |
| "loss": 0.1137, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.6477495107632094, |
| "grad_norm": 3.951495682236661, |
| "learning_rate": 7.460611163057347e-07, |
| "loss": 0.0738, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.649706457925636, |
| "grad_norm": 3.50640651064002, |
| "learning_rate": 7.380042382708396e-07, |
| "loss": 0.0799, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.6516634050880628, |
| "grad_norm": 4.135671120117707, |
| "learning_rate": 7.299876329246652e-07, |
| "loss": 0.0948, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.6536203522504893, |
| "grad_norm": 4.828656246265704, |
| "learning_rate": 7.220113760181641e-07, |
| "loss": 0.1147, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.6555772994129159, |
| "grad_norm": 3.8142127174985077, |
| "learning_rate": 7.140755429210316e-07, |
| "loss": 0.0874, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.6575342465753424, |
| "grad_norm": 4.56817554038463, |
| "learning_rate": 7.061802086209857e-07, |
| "loss": 0.1103, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.659491193737769, |
| "grad_norm": 3.7020090148188483, |
| "learning_rate": 6.983254477230589e-07, |
| "loss": 0.0899, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.6614481409001955, |
| "grad_norm": 4.120090519250235, |
| "learning_rate": 6.905113344488984e-07, |
| "loss": 0.1487, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.6634050880626223, |
| "grad_norm": 4.373557099523684, |
| "learning_rate": 6.827379426360614e-07, |
| "loss": 0.1039, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.6653620352250489, |
| "grad_norm": 5.702927821737731, |
| "learning_rate": 6.750053457373146e-07, |
| "loss": 0.1539, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.6673189823874757, |
| "grad_norm": 4.606439817873833, |
| "learning_rate": 6.673136168199467e-07, |
| "loss": 0.1191, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.6692759295499022, |
| "grad_norm": 4.907425891654169, |
| "learning_rate": 6.596628285650713e-07, |
| "loss": 0.1212, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.6712328767123288, |
| "grad_norm": 4.891795011959262, |
| "learning_rate": 6.52053053266945e-07, |
| "loss": 0.1018, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.6731898238747553, |
| "grad_norm": 3.9608030650424637, |
| "learning_rate": 6.444843628322845e-07, |
| "loss": 0.1015, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.675146771037182, |
| "grad_norm": 4.299026278880434, |
| "learning_rate": 6.36956828779579e-07, |
| "loss": 0.0958, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.6771037181996085, |
| "grad_norm": 6.428577937588378, |
| "learning_rate": 6.294705222384262e-07, |
| "loss": 0.1422, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.6790606653620352, |
| "grad_norm": 3.8390986620429044, |
| "learning_rate": 6.22025513948854e-07, |
| "loss": 0.0935, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.6810176125244618, |
| "grad_norm": 3.938849101751284, |
| "learning_rate": 6.14621874260648e-07, |
| "loss": 0.085, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.6829745596868886, |
| "grad_norm": 3.9545472129043273, |
| "learning_rate": 6.072596731326974e-07, |
| "loss": 0.0852, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.6849315068493151, |
| "grad_norm": 5.1309890387425625, |
| "learning_rate": 5.999389801323219e-07, |
| "loss": 0.1318, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.6868884540117417, |
| "grad_norm": 4.079304409272266, |
| "learning_rate": 5.92659864434626e-07, |
| "loss": 0.0916, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.6888454011741683, |
| "grad_norm": 3.4907261926675752, |
| "learning_rate": 5.854223948218362e-07, |
| "loss": 0.1095, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.6908023483365948, |
| "grad_norm": 4.022136036337963, |
| "learning_rate": 5.782266396826536e-07, |
| "loss": 0.1044, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.6927592954990214, |
| "grad_norm": 4.08780087506643, |
| "learning_rate": 5.710726670116107e-07, |
| "loss": 0.1102, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.6947162426614482, |
| "grad_norm": 3.844167622182678, |
| "learning_rate": 5.639605444084273e-07, |
| "loss": 0.1049, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.6966731898238747, |
| "grad_norm": 3.672367690944528, |
| "learning_rate": 5.5689033907737e-07, |
| "loss": 0.089, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.6986301369863015, |
| "grad_norm": 4.285307066934989, |
| "learning_rate": 5.498621178266167e-07, |
| "loss": 0.0847, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.700587084148728, |
| "grad_norm": 3.502416895162508, |
| "learning_rate": 5.428759470676304e-07, |
| "loss": 0.0926, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.7025440313111546, |
| "grad_norm": 4.406921576828976, |
| "learning_rate": 5.359318928145262e-07, |
| "loss": 0.083, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.7045009784735812, |
| "grad_norm": 5.142683880923871, |
| "learning_rate": 5.290300206834492e-07, |
| "loss": 0.0909, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.7064579256360077, |
| "grad_norm": 4.220172224500206, |
| "learning_rate": 5.221703958919572e-07, |
| "loss": 0.1018, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.7084148727984343, |
| "grad_norm": 4.480931526252475, |
| "learning_rate": 5.15353083258398e-07, |
| "loss": 0.0932, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.710371819960861, |
| "grad_norm": 4.615711329189151, |
| "learning_rate": 5.085781472013051e-07, |
| "loss": 0.1023, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.7123287671232876, |
| "grad_norm": 3.16461137672761, |
| "learning_rate": 5.018456517387837e-07, |
| "loss": 0.0871, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.7142857142857144, |
| "grad_norm": 5.519915608300547, |
| "learning_rate": 4.951556604879049e-07, |
| "loss": 0.1074, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.716242661448141, |
| "grad_norm": 3.829892721427292, |
| "learning_rate": 4.885082366641081e-07, |
| "loss": 0.0805, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.7181996086105675, |
| "grad_norm": 3.8720716837847413, |
| "learning_rate": 4.819034430806047e-07, |
| "loss": 0.0471, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.720156555772994, |
| "grad_norm": 3.8736817885466355, |
| "learning_rate": 4.7534134214777704e-07, |
| "loss": 0.0991, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.7221135029354206, |
| "grad_norm": 4.344575183219679, |
| "learning_rate": 4.688219958725981e-07, |
| "loss": 0.1152, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.7240704500978472, |
| "grad_norm": 3.3498742588867327, |
| "learning_rate": 4.623454658580373e-07, |
| "loss": 0.0967, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.726027397260274, |
| "grad_norm": 3.5719607829739797, |
| "learning_rate": 4.5591181330248534e-07, |
| "loss": 0.0648, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.7279843444227005, |
| "grad_norm": 6.279688464770935, |
| "learning_rate": 4.495210989991733e-07, |
| "loss": 0.1241, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.7299412915851273, |
| "grad_norm": 4.645876128496678, |
| "learning_rate": 4.4317338333559335e-07, |
| "loss": 0.1057, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.7318982387475539, |
| "grad_norm": 3.4545730232497016, |
| "learning_rate": 4.368687262929372e-07, |
| "loss": 0.0943, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.7338551859099804, |
| "grad_norm": 4.2675741095525614, |
| "learning_rate": 4.306071874455225e-07, |
| "loss": 0.0895, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.735812133072407, |
| "grad_norm": 4.587319507681367, |
| "learning_rate": 4.243888259602319e-07, |
| "loss": 0.0856, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.7377690802348336, |
| "grad_norm": 4.238802480534314, |
| "learning_rate": 4.1821370059595577e-07, |
| "loss": 0.0972, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.7397260273972601, |
| "grad_norm": 4.046861718820185, |
| "learning_rate": 4.1208186970303097e-07, |
| "loss": 0.0962, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.741682974559687, |
| "grad_norm": 4.473000735873614, |
| "learning_rate": 4.0599339122270097e-07, |
| "loss": 0.0933, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.7436399217221135, |
| "grad_norm": 5.749427332341581, |
| "learning_rate": 3.999483226865547e-07, |
| "loss": 0.1079, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.7455968688845402, |
| "grad_norm": 4.6477957819510785, |
| "learning_rate": 3.939467212159925e-07, |
| "loss": 0.0862, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.7475538160469668, |
| "grad_norm": 4.312005931364259, |
| "learning_rate": 3.879886435216823e-07, |
| "loss": 0.1187, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.7495107632093934, |
| "grad_norm": 3.4902421114333575, |
| "learning_rate": 3.8207414590302525e-07, |
| "loss": 0.0764, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.75146771037182, |
| "grad_norm": 4.418877180930197, |
| "learning_rate": 3.762032842476238e-07, |
| "loss": 0.1066, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.7534246575342465, |
| "grad_norm": 5.9043462354643985, |
| "learning_rate": 3.70376114030751e-07, |
| "loss": 0.0978, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.755381604696673, |
| "grad_norm": 3.7942503077993766, |
| "learning_rate": 3.6459269031483e-07, |
| "loss": 0.0713, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.7573385518590998, |
| "grad_norm": 5.206126864628456, |
| "learning_rate": 3.5885306774891214e-07, |
| "loss": 0.14, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.7592954990215264, |
| "grad_norm": 4.563983085908942, |
| "learning_rate": 3.5315730056815933e-07, |
| "loss": 0.0976, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.7612524461839532, |
| "grad_norm": 4.268385187485801, |
| "learning_rate": 3.475054425933344e-07, |
| "loss": 0.1481, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.7632093933463797, |
| "grad_norm": 3.2556169213335036, |
| "learning_rate": 3.4189754723028825e-07, |
| "loss": 0.0877, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.7651663405088063, |
| "grad_norm": 3.955387092840525, |
| "learning_rate": 3.363336674694601e-07, |
| "loss": 0.0946, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.7671232876712328, |
| "grad_norm": 3.768390678050895, |
| "learning_rate": 3.308138558853746e-07, |
| "loss": 0.0807, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.7690802348336594, |
| "grad_norm": 4.555287330204552, |
| "learning_rate": 3.2533816463614253e-07, |
| "loss": 0.1379, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.771037181996086, |
| "grad_norm": 4.266288907182965, |
| "learning_rate": 3.1990664546297287e-07, |
| "loss": 0.0836, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.7729941291585127, |
| "grad_norm": 4.359649675014245, |
| "learning_rate": 3.145193496896809e-07, |
| "loss": 0.0801, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.7749510763209393, |
| "grad_norm": 4.0346239446962775, |
| "learning_rate": 3.0917632822220214e-07, |
| "loss": 0.1155, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.776908023483366, |
| "grad_norm": 5.704535959002592, |
| "learning_rate": 3.0387763154811457e-07, |
| "loss": 0.1143, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.7788649706457926, |
| "grad_norm": 4.495452528797144, |
| "learning_rate": 2.986233097361579e-07, |
| "loss": 0.0933, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.7808219178082192, |
| "grad_norm": 3.88466293599196, |
| "learning_rate": 2.934134124357646e-07, |
| "loss": 0.0798, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.7827788649706457, |
| "grad_norm": 4.090778235057145, |
| "learning_rate": 2.882479888765888e-07, |
| "loss": 0.1317, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.7847358121330723, |
| "grad_norm": 3.7936967524686827, |
| "learning_rate": 2.83127087868037e-07, |
| "loss": 0.1058, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.7866927592954989, |
| "grad_norm": 4.4617099942633285, |
| "learning_rate": 2.780507577988156e-07, |
| "loss": 0.0958, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.7886497064579256, |
| "grad_norm": 5.140838367322725, |
| "learning_rate": 2.730190466364652e-07, |
| "loss": 0.1306, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.7906066536203522, |
| "grad_norm": 4.227564662820708, |
| "learning_rate": 2.6803200192691393e-07, |
| "loss": 0.1408, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.792563600782779, |
| "grad_norm": 4.369213580165944, |
| "learning_rate": 2.630896707940228e-07, |
| "loss": 0.1094, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.7945205479452055, |
| "grad_norm": 3.773973257656429, |
| "learning_rate": 2.5819209993914185e-07, |
| "loss": 0.0941, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.796477495107632, |
| "grad_norm": 4.662153755602524, |
| "learning_rate": 2.533393356406749e-07, |
| "loss": 0.1191, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.7984344422700587, |
| "grad_norm": 4.113945161657945, |
| "learning_rate": 2.485314237536307e-07, |
| "loss": 0.0916, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.8003913894324852, |
| "grad_norm": 4.385564864397683, |
| "learning_rate": 2.4376840970920133e-07, |
| "loss": 0.112, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.8023483365949118, |
| "grad_norm": 4.368580475586065, |
| "learning_rate": 2.3905033851432393e-07, |
| "loss": 0.1054, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.8043052837573386, |
| "grad_norm": 5.170536336422904, |
| "learning_rate": 2.3437725475126126e-07, |
| "loss": 0.1508, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.8062622309197651, |
| "grad_norm": 4.933859800068266, |
| "learning_rate": 2.297492025771786e-07, |
| "loss": 0.1584, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.808219178082192, |
| "grad_norm": 4.248062707167283, |
| "learning_rate": 2.2516622572372416e-07, |
| "loss": 0.0901, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.8101761252446185, |
| "grad_norm": 4.3498888467821315, |
| "learning_rate": 2.206283674966192e-07, |
| "loss": 0.0976, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.812133072407045, |
| "grad_norm": 5.685102997353433, |
| "learning_rate": 2.1613567077524878e-07, |
| "loss": 0.1151, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.8140900195694716, |
| "grad_norm": 4.234345155861104, |
| "learning_rate": 2.1168817801225262e-07, |
| "loss": 0.0922, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.8160469667318981, |
| "grad_norm": 3.5674851827200698, |
| "learning_rate": 2.0728593123312934e-07, |
| "loss": 0.0685, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.8180039138943247, |
| "grad_norm": 4.631468503999614, |
| "learning_rate": 2.0292897203583405e-07, |
| "loss": 0.1224, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.8199608610567515, |
| "grad_norm": 4.6101346182854765, |
| "learning_rate": 1.986173415903897e-07, |
| "loss": 0.0944, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.821917808219178, |
| "grad_norm": 3.729007146367404, |
| "learning_rate": 1.9435108063849684e-07, |
| "loss": 0.1175, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.8238747553816048, |
| "grad_norm": 4.448745913631739, |
| "learning_rate": 1.9013022949314563e-07, |
| "loss": 0.1067, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.8258317025440314, |
| "grad_norm": 4.791699752285871, |
| "learning_rate": 1.8595482803823894e-07, |
| "loss": 0.1586, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.827788649706458, |
| "grad_norm": 4.57746817640965, |
| "learning_rate": 1.8182491572821536e-07, |
| "loss": 0.0852, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.8297455968688845, |
| "grad_norm": 4.748063804555274, |
| "learning_rate": 1.777405315876718e-07, |
| "loss": 0.0932, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.831702544031311, |
| "grad_norm": 4.182479843784433, |
| "learning_rate": 1.737017142109998e-07, |
| "loss": 0.0866, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.8336594911937376, |
| "grad_norm": 4.03998893601018, |
| "learning_rate": 1.6970850176201704e-07, |
| "loss": 0.0999, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.8356164383561644, |
| "grad_norm": 3.618508470058152, |
| "learning_rate": 1.6576093197361253e-07, |
| "loss": 0.0944, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.837573385518591, |
| "grad_norm": 3.6971222410488416, |
| "learning_rate": 1.6185904214738203e-07, |
| "loss": 0.1289, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.8395303326810177, |
| "grad_norm": 3.9583521818815086, |
| "learning_rate": 1.5800286915328034e-07, |
| "loss": 0.0856, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.8414872798434443, |
| "grad_norm": 4.841416189786882, |
| "learning_rate": 1.5419244942927402e-07, |
| "loss": 0.1441, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.8434442270058709, |
| "grad_norm": 3.6128745419595396, |
| "learning_rate": 1.5042781898099433e-07, |
| "loss": 0.0882, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.8454011741682974, |
| "grad_norm": 4.546958407180465, |
| "learning_rate": 1.4670901338139754e-07, |
| "loss": 0.1482, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.847358121330724, |
| "grad_norm": 4.532750160857723, |
| "learning_rate": 1.430360677704301e-07, |
| "loss": 0.1114, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.8493150684931505, |
| "grad_norm": 3.461773937048237, |
| "learning_rate": 1.39409016854693e-07, |
| "loss": 0.086, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.8512720156555773, |
| "grad_norm": 4.701448013488457, |
| "learning_rate": 1.3582789490712179e-07, |
| "loss": 0.0909, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.8532289628180039, |
| "grad_norm": 3.2520032739972207, |
| "learning_rate": 1.3229273576665148e-07, |
| "loss": 0.0793, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.8551859099804306, |
| "grad_norm": 4.219402967554837, |
| "learning_rate": 1.2880357283790778e-07, |
| "loss": 0.1143, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.8571428571428572, |
| "grad_norm": 3.7615691029132097, |
| "learning_rate": 1.253604390908819e-07, |
| "loss": 0.0811, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.8590998043052838, |
| "grad_norm": 3.9628210443757417, |
| "learning_rate": 1.219633670606274e-07, |
| "loss": 0.0781, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.8610567514677103, |
| "grad_norm": 4.630449733178535, |
| "learning_rate": 1.1861238884694759e-07, |
| "loss": 0.1145, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.8630136986301369, |
| "grad_norm": 5.080120507620893, |
| "learning_rate": 1.1530753611409151e-07, |
| "loss": 0.1189, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.8649706457925634, |
| "grad_norm": 3.6837537538648846, |
| "learning_rate": 1.1204884009046013e-07, |
| "loss": 0.0921, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.8669275929549902, |
| "grad_norm": 4.795906146184331, |
| "learning_rate": 1.0883633156830553e-07, |
| "loss": 0.1089, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.8688845401174168, |
| "grad_norm": 4.4042003891494526, |
| "learning_rate": 1.0567004090344224e-07, |
| "loss": 0.1263, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.8708414872798436, |
| "grad_norm": 3.5574123976335286, |
| "learning_rate": 1.0254999801496246e-07, |
| "loss": 0.0817, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.8727984344422701, |
| "grad_norm": 4.38693844489413, |
| "learning_rate": 9.947623238494853e-08, |
| "loss": 0.1112, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.8747553816046967, |
| "grad_norm": 4.047514782436208, |
| "learning_rate": 9.644877305819977e-08, |
| "loss": 0.1004, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.8767123287671232, |
| "grad_norm": 4.497453730043549, |
| "learning_rate": 9.346764864195335e-08, |
| "loss": 0.0854, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.8786692759295498, |
| "grad_norm": 4.33952582715022, |
| "learning_rate": 9.053288730561716e-08, |
| "loss": 0.0629, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.8806262230919764, |
| "grad_norm": 4.727657750246326, |
| "learning_rate": 8.764451678050234e-08, |
| "loss": 0.1474, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.8825831702544031, |
| "grad_norm": 4.966674185268151, |
| "learning_rate": 8.480256435956124e-08, |
| "loss": 0.1215, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.8845401174168297, |
| "grad_norm": 4.022302738761801, |
| "learning_rate": 8.200705689712873e-08, |
| "loss": 0.0839, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.8864970645792565, |
| "grad_norm": 3.9047866743874433, |
| "learning_rate": 7.925802080867129e-08, |
| "loss": 0.0851, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.888454011741683, |
| "grad_norm": 4.459574200823327, |
| "learning_rate": 7.655548207053332e-08, |
| "loss": 0.0963, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.8904109589041096, |
| "grad_norm": 4.656780696609488, |
| "learning_rate": 7.389946621969679e-08, |
| "loss": 0.1294, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.8923679060665362, |
| "grad_norm": 3.1195401065188473, |
| "learning_rate": 7.128999835353423e-08, |
| "loss": 0.0769, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.8943248532289627, |
| "grad_norm": 3.158573389544524, |
| "learning_rate": 6.872710312957498e-08, |
| "loss": 0.0632, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.8962818003913893, |
| "grad_norm": 4.6323955295318795, |
| "learning_rate": 6.621080476527209e-08, |
| "loss": 0.1019, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.898238747553816, |
| "grad_norm": 4.592731714227012, |
| "learning_rate": 6.374112703777302e-08, |
| "loss": 0.0787, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.9001956947162426, |
| "grad_norm": 3.945572468335623, |
| "learning_rate": 6.131809328369542e-08, |
| "loss": 0.1189, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.9021526418786694, |
| "grad_norm": 5.296037162919323, |
| "learning_rate": 5.8941726398903944e-08, |
| "loss": 0.0998, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.904109589041096, |
| "grad_norm": 5.283337422525095, |
| "learning_rate": 5.661204883829763e-08, |
| "loss": 0.135, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.9060665362035225, |
| "grad_norm": 4.351294086275507, |
| "learning_rate": 5.432908261559733e-08, |
| "loss": 0.1111, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.908023483365949, |
| "grad_norm": 4.304542572724281, |
| "learning_rate": 5.209284930313418e-08, |
| "loss": 0.1252, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.9099804305283756, |
| "grad_norm": 3.6505375083000207, |
| "learning_rate": 4.9903370031649246e-08, |
| "loss": 0.1007, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.9119373776908022, |
| "grad_norm": 5.016104246317523, |
| "learning_rate": 4.776066549009195e-08, |
| "loss": 0.163, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.913894324853229, |
| "grad_norm": 4.54774910715563, |
| "learning_rate": 4.5664755925426406e-08, |
| "loss": 0.0883, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.9158512720156555, |
| "grad_norm": 4.378211620643958, |
| "learning_rate": 4.3615661142439316e-08, |
| "loss": 0.0847, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.9178082191780823, |
| "grad_norm": 3.4921510560456372, |
| "learning_rate": 4.1613400503550114e-08, |
| "loss": 0.0765, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.9197651663405089, |
| "grad_norm": 4.436939832367673, |
| "learning_rate": 3.9657992928632813e-08, |
| "loss": 0.1217, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.9217221135029354, |
| "grad_norm": 4.775505116598373, |
| "learning_rate": 3.774945689483445e-08, |
| "loss": 0.1132, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.923679060665362, |
| "grad_norm": 4.051522898490529, |
| "learning_rate": 3.5887810436399106e-08, |
| "loss": 0.0993, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.9256360078277885, |
| "grad_norm": 4.681022368677815, |
| "learning_rate": 3.407307114450253e-08, |
| "loss": 0.1322, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.927592954990215, |
| "grad_norm": 4.052665378852793, |
| "learning_rate": 3.2305256167078916e-08, |
| "loss": 0.0975, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.9295499021526419, |
| "grad_norm": 3.7183380037438143, |
| "learning_rate": 3.058438220866544e-08, |
| "loss": 0.0664, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.9315068493150684, |
| "grad_norm": 6.588953943230916, |
| "learning_rate": 2.8910465530240793e-08, |
| "loss": 0.0963, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.9334637964774952, |
| "grad_norm": 4.562221772602165, |
| "learning_rate": 2.7283521949070802e-08, |
| "loss": 0.1085, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.9354207436399218, |
| "grad_norm": 3.573801356682234, |
| "learning_rate": 2.570356683856301e-08, |
| "loss": 0.0946, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.9373776908023483, |
| "grad_norm": 4.186988643237791, |
| "learning_rate": 2.4170615128117358e-08, |
| "loss": 0.1186, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.939334637964775, |
| "grad_norm": 4.474122048882353, |
| "learning_rate": 2.2684681302986288e-08, |
| "loss": 0.1265, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.9412915851272015, |
| "grad_norm": 4.014302020309316, |
| "learning_rate": 2.12457794041393e-08, |
| "loss": 0.082, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.943248532289628, |
| "grad_norm": 3.468788267129184, |
| "learning_rate": 1.9853923028128607e-08, |
| "loss": 0.0779, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.9452054794520548, |
| "grad_norm": 3.5789155806062274, |
| "learning_rate": 1.850912532696092e-08, |
| "loss": 0.0777, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.9471624266144814, |
| "grad_norm": 3.572080685996561, |
| "learning_rate": 1.7211399007974748e-08, |
| "loss": 0.0992, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.9491193737769081, |
| "grad_norm": 4.461302823432116, |
| "learning_rate": 1.596075633371774e-08, |
| "loss": 0.1231, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.9510763209393347, |
| "grad_norm": 4.545263926377433, |
| "learning_rate": 1.4757209121832317e-08, |
| "loss": 0.0698, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.9530332681017613, |
| "grad_norm": 4.165392685216529, |
| "learning_rate": 1.3600768744944648e-08, |
| "loss": 0.1025, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.9549902152641878, |
| "grad_norm": 3.891690587291322, |
| "learning_rate": 1.2491446130555306e-08, |
| "loss": 0.1069, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.9569471624266144, |
| "grad_norm": 4.312757850115399, |
| "learning_rate": 1.1429251760938232e-08, |
| "loss": 0.0943, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.9569471624266144, |
| "eval_loss": 0.24081729352474213, |
| "eval_runtime": 1.4291, |
| "eval_samples_per_second": 29.389, |
| "eval_steps_per_second": 7.697, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.958904109589041, |
| "grad_norm": 4.583064109683525, |
| "learning_rate": 1.041419567303914e-08, |
| "loss": 0.1183, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.9608610567514677, |
| "grad_norm": 3.661084846514591, |
| "learning_rate": 9.446287458383385e-09, |
| "loss": 0.089, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.9628180039138943, |
| "grad_norm": 3.7578297333477932, |
| "learning_rate": 8.525536262982692e-09, |
| "loss": 0.1373, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.964774951076321, |
| "grad_norm": 5.385765955699253, |
| "learning_rate": 7.651950787251339e-09, |
| "loss": 0.1241, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.9667318982387476, |
| "grad_norm": 4.074273910754245, |
| "learning_rate": 6.825539285922334e-09, |
| "loss": 0.1073, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.9686888454011742, |
| "grad_norm": 5.467518017211509, |
| "learning_rate": 6.046309567968589e-09, |
| "loss": 0.1306, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.9706457925636007, |
| "grad_norm": 4.72312419812635, |
| "learning_rate": 5.3142689965318685e-09, |
| "loss": 0.1575, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.9726027397260273, |
| "grad_norm": 4.000819631530494, |
| "learning_rate": 4.629424488850065e-09, |
| "loss": 0.1064, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.9745596868884538, |
| "grad_norm": 5.406606829213884, |
| "learning_rate": 3.9917825161950305e-09, |
| "loss": 0.1119, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.9765166340508806, |
| "grad_norm": 3.3947943238400997, |
| "learning_rate": 3.4013491038087376e-09, |
| "loss": 0.0944, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.9784735812133072, |
| "grad_norm": 3.4793758554893506, |
| "learning_rate": 2.858129830847767e-09, |
| "loss": 0.0901, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.980430528375734, |
| "grad_norm": 4.573524215735245, |
| "learning_rate": 2.3621298303294626e-09, |
| "loss": 0.107, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.9823874755381605, |
| "grad_norm": 4.475062960257702, |
| "learning_rate": 1.9133537890853036e-09, |
| "loss": 0.0833, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.984344422700587, |
| "grad_norm": 4.388722373868882, |
| "learning_rate": 1.5118059477142732e-09, |
| "loss": 0.1181, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.9863013698630136, |
| "grad_norm": 3.682413873786922, |
| "learning_rate": 1.1574901005456662e-09, |
| "loss": 0.1082, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.9882583170254402, |
| "grad_norm": 4.280116533354375, |
| "learning_rate": 8.504095956002323e-10, |
| "loss": 0.1156, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.9902152641878668, |
| "grad_norm": 4.135437238067301, |
| "learning_rate": 5.905673345601992e-10, |
| "loss": 0.1057, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.9921722113502935, |
| "grad_norm": 5.581298186505506, |
| "learning_rate": 3.7796577274096244e-10, |
| "loss": 0.0886, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.99412915851272, |
| "grad_norm": 4.3925052279434365, |
| "learning_rate": 2.1260691906999087e-10, |
| "loss": 0.0957, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.9960861056751469, |
| "grad_norm": 4.024069405388864, |
| "learning_rate": 9.449233606573238e-11, |
| "loss": 0.0929, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.9980430528375734, |
| "grad_norm": 4.281113804317461, |
| "learning_rate": 2.3623139821515874e-11, |
| "loss": 0.1283, |
| "step": 1021 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 3.262218625090435, |
| "learning_rate": 0.0, |
| "loss": 0.078, |
| "step": 1022 |
| }, |
| { |
| "epoch": 2.0, |
| "step": 1022, |
| "total_flos": 1463157116928.0, |
| "train_loss": 0.1781597776108817, |
| "train_runtime": 613.9051, |
| "train_samples_per_second": 13.305, |
| "train_steps_per_second": 1.665 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 1022, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 50000000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1463157116928.0, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |
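
The dump above is a standard Hugging Face `Trainer` state file. Below is a minimal sketch of how one might load its `log_history` array and plot the loss curve; the file name `trainer_state.json` and the use of matplotlib are assumptions for illustration, not anything recorded in the log itself.

```python
# Minimal sketch: read a Hugging Face trainer_state.json and plot its loss curve.
# Assumptions (not part of the log above): the file is saved as
# "trainer_state.json" in the working directory, and matplotlib is installed.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# log_history mixes per-step training entries (keyed by "loss") with
# evaluation entries (keyed by "eval_loss"); split them before plotting.
train = [e for e in state["log_history"] if "loss" in e]
evals = [e for e in state["log_history"] if "eval_loss" in e]

plt.plot([e["step"] for e in train], [e["loss"] for e in train],
         label="train loss")
if evals:
    plt.scatter([e["step"] for e in evals], [e["eval_loss"] for e in evals],
                color="red", label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.show()
```

With this log, the scatter call would mark the single eval point at step 1000 (eval_loss ≈ 0.2408) against the per-step training losses from steps 1 through 1022.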