{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.1873840445269017,
  "eval_steps": 500,
  "global_step": 160,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 0.01736689789892289,
      "learning_rate": 7.142857142857143e-06,
      "loss": 1.3968,
      "step": 1
    },
    {
      "epoch": 0.01,
      "grad_norm": 0.017027077372532427,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 1.4007,
      "step": 2
    },
    {
      "epoch": 0.02,
      "grad_norm": 0.016201064682232102,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 1.4031,
      "step": 3
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.017052502856808767,
      "learning_rate": 2.857142857142857e-05,
      "loss": 1.4268,
      "step": 4
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.017589796386440718,
      "learning_rate": 3.571428571428572e-05,
      "loss": 1.4357,
      "step": 5
    },
    {
      "epoch": 0.04,
      "grad_norm": 0.018257751591257577,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 1.4047,
      "step": 6
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.018295865167676045,
      "learning_rate": 5e-05,
      "loss": 1.4105,
      "step": 7
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.02117599813759531,
      "learning_rate": 5.714285714285714e-05,
      "loss": 1.3693,
      "step": 8
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.02205957316713292,
      "learning_rate": 6.428571428571429e-05,
      "loss": 1.3524,
      "step": 9
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.023657076230798574,
      "learning_rate": 7.142857142857143e-05,
      "loss": 1.3913,
      "step": 10
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.027315806443124473,
      "learning_rate": 7.857142857142858e-05,
      "loss": 1.4036,
      "step": 11
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.028918483559492007,
      "learning_rate": 8.571428571428571e-05,
      "loss": 1.4478,
      "step": 12
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.03141992210034351,
      "learning_rate": 9.285714285714286e-05,
      "loss": 1.3865,
      "step": 13
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.02716522203126749,
      "learning_rate": 0.0001,
      "loss": 1.3674,
      "step": 14
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.04349952540870549,
      "learning_rate": 9.999617556940085e-05,
      "loss": 1.3423,
      "step": 15
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.02315330665074004,
      "learning_rate": 9.998470286265416e-05,
      "loss": 1.3017,
      "step": 16
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.02981766810064017,
      "learning_rate": 9.996558363482277e-05,
      "loss": 1.3578,
      "step": 17
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.018058552912596896,
      "learning_rate": 9.993882081071306e-05,
      "loss": 1.3604,
      "step": 18
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.01617412339760896,
      "learning_rate": 9.99044184844276e-05,
      "loss": 1.3599,
      "step": 19
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.01571914020346384,
      "learning_rate": 9.986238191873874e-05,
      "loss": 1.3314,
      "step": 20
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.01794066179347792,
      "learning_rate": 9.98127175442836e-05,
      "loss": 1.3244,
      "step": 21
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.020268083274328504,
      "learning_rate": 9.975543295858035e-05,
      "loss": 1.3502,
      "step": 22
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.022181555162566497,
      "learning_rate": 9.969053692486583e-05,
      "loss": 1.3378,
      "step": 23
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.021942253446061308,
      "learning_rate": 9.961803937075516e-05,
      "loss": 1.2991,
      "step": 24
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.022098681290951168,
      "learning_rate": 9.953795138672291e-05,
      "loss": 1.3299,
      "step": 25
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.019480865843562824,
      "learning_rate": 9.945028522440653e-05,
      "loss": 1.2898,
      "step": 26
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.019522365809044647,
      "learning_rate": 9.93550542947322e-05,
      "loss": 1.3358,
      "step": 27
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.01626195754263703,
      "learning_rate": 9.925227316586316e-05,
      "loss": 1.3236,
      "step": 28
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.015208995645064876,
      "learning_rate": 9.91419575609712e-05,
      "loss": 1.3095,
      "step": 29
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.014245114044458424,
      "learning_rate": 9.902412435583128e-05,
      "loss": 1.2887,
      "step": 30
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.018282343094477974,
      "learning_rate": 9.889879157624002e-05,
      "loss": 1.3037,
      "step": 31
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.013227523223555335,
      "learning_rate": 9.876597839525814e-05,
      "loss": 1.291,
      "step": 32
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.01347818214113649,
      "learning_rate": 9.862570513027735e-05,
      "loss": 1.351,
      "step": 33
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.013651640512312836,
      "learning_rate": 9.847799323991234e-05,
      "loss": 1.3239,
      "step": 34
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.013176778064524027,
      "learning_rate": 9.832286532071802e-05,
      "loss": 1.3285,
      "step": 35
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.01339867538916516,
      "learning_rate": 9.816034510373286e-05,
      "loss": 1.3151,
      "step": 36
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.013215819889291318,
      "learning_rate": 9.799045745084847e-05,
      "loss": 1.3401,
      "step": 37
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.013158439789624273,
      "learning_rate": 9.781322835100638e-05,
      "loss": 1.3366,
      "step": 38
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.012039494551754166,
      "learning_rate": 9.76286849162223e-05,
      "loss": 1.3187,
      "step": 39
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.012156687939496454,
      "learning_rate": 9.743685537743856e-05,
      "loss": 1.308,
      "step": 40
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.015724501188273832,
      "learning_rate": 9.72377690802055e-05,
      "loss": 1.2725,
      "step": 41
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.01236228699037047,
      "learning_rate": 9.70314564801922e-05,
      "loss": 1.2936,
      "step": 42
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.011472953053820326,
      "learning_rate": 9.681794913852746e-05,
      "loss": 1.2899,
      "step": 43
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.012026745769038633,
      "learning_rate": 9.659727971697174e-05,
      "loss": 1.2681,
      "step": 44
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.011343760163365182,
      "learning_rate": 9.636948197292052e-05,
      "loss": 1.2824,
      "step": 45
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.011947389090492217,
      "learning_rate": 9.613459075424034e-05,
      "loss": 1.3101,
      "step": 46
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.011797898208483941,
      "learning_rate": 9.589264199393776e-05,
      "loss": 1.3065,
      "step": 47
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.012117930154062621,
      "learning_rate": 9.564367270466247e-05,
      "loss": 1.3168,
      "step": 48
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.012075709345252717,
      "learning_rate": 9.538772097304521e-05,
      "loss": 1.3017,
      "step": 49
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.016464363967779214,
      "learning_rate": 9.512482595387132e-05,
      "loss": 1.3192,
      "step": 50
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.01258114656504482,
      "learning_rate": 9.485502786409107e-05,
      "loss": 1.3028,
      "step": 51
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.015912293882306635,
      "learning_rate": 9.457836797666722e-05,
      "loss": 1.3188,
      "step": 52
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.013021737938041136,
      "learning_rate": 9.429488861426137e-05,
      "loss": 1.3306,
      "step": 53
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.010972354267984178,
      "learning_rate": 9.400463314275943e-05,
      "loss": 1.2738,
      "step": 54
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.015962110281111695,
      "learning_rate": 9.370764596463763e-05,
      "loss": 1.2998,
      "step": 55
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.012606746770630168,
      "learning_rate": 9.340397251217009e-05,
      "loss": 1.301,
      "step": 56
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.01343997346580131,
      "learning_rate": 9.309365924047853e-05,
      "loss": 1.2821,
      "step": 57
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.01999816247440519,
      "learning_rate": 9.277675362042581e-05,
      "loss": 1.2801,
      "step": 58
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.011840965361160609,
      "learning_rate": 9.245330413135395e-05,
      "loss": 1.2646,
      "step": 59
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.012345942063074974,
      "learning_rate": 9.212336025366788e-05,
      "loss": 1.2654,
      "step": 60
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.011357300497689855,
      "learning_rate": 9.178697246126607e-05,
      "loss": 1.2286,
      "step": 61
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.0119333070271641,
      "learning_rate": 9.144419221381919e-05,
      "loss": 1.269,
      "step": 62
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.019987714588407004,
      "learning_rate": 9.109507194889792e-05,
      "loss": 1.3249,
      "step": 63
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.012629696476057141,
      "learning_rate": 9.073966507395122e-05,
      "loss": 1.2814,
      "step": 64
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.01514642919032275,
      "learning_rate": 9.03780259581362e-05,
      "loss": 1.2987,
      "step": 65
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.01217033214667514,
      "learning_rate": 9.001020992400087e-05,
      "loss": 1.3174,
      "step": 66
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.013313442038870889,
      "learning_rate": 8.963627323902104e-05,
      "loss": 1.3361,
      "step": 67
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.013140512018867165,
      "learning_rate": 8.925627310699275e-05,
      "loss": 1.2949,
      "step": 68
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.012629795148567,
      "learning_rate": 8.887026765928129e-05,
      "loss": 1.313,
      "step": 69
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.01267262432517559,
      "learning_rate": 8.84783159459285e-05,
      "loss": 1.2777,
      "step": 70
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.012491865926420912,
      "learning_rate": 8.808047792661939e-05,
      "loss": 1.2867,
      "step": 71
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.07419724215245581,
      "learning_rate": 8.767681446150977e-05,
      "loss": 1.2957,
      "step": 72
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.013685509971746855,
      "learning_rate": 8.726738730191595e-05,
      "loss": 1.3099,
      "step": 73
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.01255329844529139,
      "learning_rate": 8.68522590808682e-05,
      "loss": 1.3064,
      "step": 74
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.013555284699966613,
      "learning_rate": 8.643149330352937e-05,
      "loss": 1.3343,
      "step": 75
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.012428652847928174,
      "learning_rate": 8.600515433748003e-05,
      "loss": 1.2939,
      "step": 76
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.013730881730960102,
      "learning_rate": 8.557330740287166e-05,
      "loss": 1.3471,
      "step": 77
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.012536875741319477,
      "learning_rate": 8.51360185624495e-05,
      "loss": 1.2874,
      "step": 78
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.01315736800211036,
      "learning_rate": 8.469335471144646e-05,
      "loss": 1.3271,
      "step": 79
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.012790573071406599,
      "learning_rate": 8.424538356734957e-05,
      "loss": 1.2776,
      "step": 80
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.012774514721889157,
      "learning_rate": 8.379217365954089e-05,
      "loss": 1.3239,
      "step": 81
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.013206124918576702,
      "learning_rate": 8.333379431881397e-05,
      "loss": 1.3169,
      "step": 82
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.012150938166283229,
      "learning_rate": 8.287031566676791e-05,
      "loss": 1.3474,
      "step": 83
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.01296507972827761,
      "learning_rate": 8.240180860508027e-05,
      "loss": 1.2432,
      "step": 84
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.012431111381985924,
      "learning_rate": 8.192834480466071e-05,
      "loss": 1.2638,
      "step": 85
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.013206165711432155,
      "learning_rate": 8.144999669468714e-05,
      "loss": 1.2272,
      "step": 86
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.013107051117932556,
      "learning_rate": 8.096683745152544e-05,
      "loss": 1.3318,
      "step": 87
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.02417096924036142,
      "learning_rate": 8.047894098753541e-05,
      "loss": 1.3244,
      "step": 88
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.012897246540957289,
      "learning_rate": 7.998638193976366e-05,
      "loss": 1.2507,
      "step": 89
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.013675475740782888,
      "learning_rate": 7.948923565852598e-05,
      "loss": 1.2856,
      "step": 90
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.013091972232698349,
      "learning_rate": 7.898757819588037e-05,
      "loss": 1.286,
      "step": 91
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.013533547626105997,
      "learning_rate": 7.848148629399285e-05,
      "loss": 1.2917,
      "step": 92
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.013985718493269256,
      "learning_rate": 7.797103737339766e-05,
      "loss": 1.2814,
      "step": 93
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.012376190644291445,
      "learning_rate": 7.745630952115363e-05,
      "loss": 1.25,
      "step": 94
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.012580525752519058,
      "learning_rate": 7.693738147889868e-05,
      "loss": 1.2387,
      "step": 95
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.01280307621246117,
      "learning_rate": 7.641433263080418e-05,
      "loss": 1.2729,
      "step": 96
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.01346417935213005,
      "learning_rate": 7.58872429914309e-05,
      "loss": 1.2846,
      "step": 97
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.012689348395905247,
      "learning_rate": 7.535619319348866e-05,
      "loss": 1.3234,
      "step": 98
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.014176549248225716,
      "learning_rate": 7.482126447550132e-05,
      "loss": 1.3098,
      "step": 99
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.012665629411883726,
      "learning_rate": 7.428253866937918e-05,
      "loss": 1.3076,
      "step": 100
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.012301371470685241,
      "learning_rate": 7.374009818790057e-05,
      "loss": 1.2843,
      "step": 101
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.013326801939436429,
      "learning_rate": 7.319402601210448e-05,
      "loss": 1.2821,
      "step": 102
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.012305830268215724,
      "learning_rate": 7.264440567859645e-05,
      "loss": 1.3097,
      "step": 103
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.015048797529506962,
      "learning_rate": 7.209132126676934e-05,
      "loss": 1.2876,
      "step": 104
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.01836099524577789,
      "learning_rate": 7.153485738594112e-05,
      "loss": 1.2855,
      "step": 105
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.01395973727861205,
      "learning_rate": 7.097509916241145e-05,
      "loss": 1.3342,
      "step": 106
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.012705698786448718,
      "learning_rate": 7.041213222643952e-05,
      "loss": 1.2649,
      "step": 107
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.012865924289881111,
      "learning_rate": 6.984604269914436e-05,
      "loss": 1.2805,
      "step": 108
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.013770157843605553,
      "learning_rate": 6.927691717933039e-05,
      "loss": 1.3429,
      "step": 109
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.013491603805534848,
      "learning_rate": 6.870484273023968e-05,
      "loss": 1.3288,
      "step": 110
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.023074917576336243,
      "learning_rate": 6.812990686623335e-05,
      "loss": 1.3031,
      "step": 111
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.01360635130244037,
      "learning_rate": 6.755219753940388e-05,
      "loss": 1.2731,
      "step": 112
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.015779011261624655,
      "learning_rate": 6.697180312612034e-05,
      "loss": 1.3228,
      "step": 113
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.0191668822690555,
      "learning_rate": 6.638881241350884e-05,
      "loss": 1.2764,
      "step": 114
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.01432796902850521,
      "learning_rate": 6.580331458587022e-05,
      "loss": 1.2711,
      "step": 115
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.013611387104348411,
      "learning_rate": 6.52153992110368e-05,
      "loss": 1.3004,
      "step": 116
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.01399516730154587,
      "learning_rate": 6.462515622667056e-05,
      "loss": 1.2873,
      "step": 117
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.012770900961118808,
      "learning_rate": 6.403267592650465e-05,
      "loss": 1.2504,
      "step": 118
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.015945892933358158,
      "learning_rate": 6.343804894653072e-05,
      "loss": 1.2729,
      "step": 119
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.013482605970181992,
      "learning_rate": 6.28413662511334e-05,
      "loss": 1.2511,
      "step": 120
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.01378724352547337,
      "learning_rate": 6.224271911917507e-05,
      "loss": 1.2795,
      "step": 121
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.013380712527366646,
      "learning_rate": 6.164219913003207e-05,
      "loss": 1.2899,
      "step": 122
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.013150310617423817,
      "learning_rate": 6.10398981495853e-05,
      "loss": 1.2691,
      "step": 123
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.013440245678550199,
      "learning_rate": 6.043590831616677e-05,
      "loss": 1.292,
      "step": 124
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.014263259675451731,
      "learning_rate": 5.9830322026464435e-05,
      "loss": 1.3098,
      "step": 125
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.013569190009703554,
      "learning_rate": 5.92232319213878e-05,
      "loss": 1.2721,
      "step": 126
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.013956420143396445,
      "learning_rate": 5.861473087189584e-05,
      "loss": 1.3348,
      "step": 127
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.013219085418619908,
      "learning_rate": 5.800491196478989e-05,
      "loss": 1.2506,
      "step": 128
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.01391932503516543,
      "learning_rate": 5.739386848847346e-05,
      "loss": 1.2505,
      "step": 129
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.013286230114586647,
      "learning_rate": 5.6781693918681275e-05,
      "loss": 1.2736,
      "step": 130
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.014026096614615548,
      "learning_rate": 5.616848190417965e-05,
      "loss": 1.2942,
      "step": 131
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.013600317168386526,
      "learning_rate": 5.5554326252440235e-05,
      "loss": 1.2915,
      "step": 132
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.013507822076023955,
      "learning_rate": 5.4939320915289714e-05,
      "loss": 1.3112,
      "step": 133
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.014000478245483857,
      "learning_rate": 5.432355997453729e-05,
      "loss": 1.2811,
      "step": 134
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.01420975223503783,
      "learning_rate": 5.370713762758232e-05,
      "loss": 1.3203,
      "step": 135
    },
    {
      "epoch": 1.01,
      "grad_norm": 0.013270421327306004,
      "learning_rate": 5.3090148173004216e-05,
      "loss": 1.2095,
      "step": 136
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.013841368832750186,
      "learning_rate": 5.247268599613696e-05,
      "loss": 1.2439,
      "step": 137
    },
    {
      "epoch": 1.02,
      "grad_norm": 0.013599294458212107,
      "learning_rate": 5.185484555463026e-05,
      "loss": 1.2876,
      "step": 138
    },
    {
      "epoch": 1.03,
      "grad_norm": 0.01403933789982391,
      "learning_rate": 5.123672136399975e-05,
      "loss": 1.2487,
      "step": 139
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.013785304842120809,
      "learning_rate": 5.0618407983168146e-05,
      "loss": 1.2908,
      "step": 140
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.02499232225633767,
      "learning_rate": 5e-05,
      "loss": 1.2542,
      "step": 141
    },
    {
      "epoch": 1.05,
      "grad_norm": 0.013748729590013059,
      "learning_rate": 4.938159201683186e-05,
      "loss": 1.3158,
      "step": 142
    },
    {
      "epoch": 1.06,
      "grad_norm": 0.015244834626064248,
      "learning_rate": 4.876327863600025e-05,
      "loss": 1.2813,
      "step": 143
    },
    {
      "epoch": 1.07,
      "grad_norm": 0.013074690579273199,
      "learning_rate": 4.814515444536974e-05,
      "loss": 1.2589,
      "step": 144
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.013640971548809892,
      "learning_rate": 4.752731400386306e-05,
      "loss": 1.3409,
      "step": 145
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.013857785253189963,
      "learning_rate": 4.69098518269958e-05,
      "loss": 1.2884,
      "step": 146
    },
    {
      "epoch": 1.09,
      "grad_norm": 0.01439159312691957,
      "learning_rate": 4.62928623724177e-05,
      "loss": 1.3038,
      "step": 147
    },
    {
      "epoch": 1.1,
      "grad_norm": 0.01369785011946924,
      "learning_rate": 4.567644002546273e-05,
      "loss": 1.297,
      "step": 148
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.016775596557033395,
      "learning_rate": 4.506067908471029e-05,
      "loss": 1.2421,
      "step": 149
    },
    {
      "epoch": 1.11,
      "grad_norm": 0.017413598587797105,
      "learning_rate": 4.4445673747559776e-05,
      "loss": 1.3068,
      "step": 150
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.015992833931677785,
      "learning_rate": 4.383151809582035e-05,
      "loss": 1.2794,
      "step": 151
    },
    {
      "epoch": 1.13,
      "grad_norm": 0.01579232038410006,
      "learning_rate": 4.3218306081318716e-05,
      "loss": 1.3043,
      "step": 152
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.026770388446340592,
      "learning_rate": 4.2606131511526555e-05,
      "loss": 1.2837,
      "step": 153
    },
    {
      "epoch": 1.14,
      "grad_norm": 0.031691067064066485,
      "learning_rate": 4.1995088035210124e-05,
      "loss": 1.2788,
      "step": 154
    },
    {
      "epoch": 1.15,
      "grad_norm": 0.0162069083852305,
      "learning_rate": 4.138526912810418e-05,
      "loss": 1.2895,
      "step": 155
    },
    {
      "epoch": 1.16,
      "grad_norm": 0.015730822077210757,
      "learning_rate": 4.077676807861221e-05,
      "loss": 1.3028,
      "step": 156
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.017380121781761425,
      "learning_rate": 4.016967797353558e-05,
      "loss": 1.2553,
      "step": 157
    },
    {
      "epoch": 1.17,
      "grad_norm": 0.014829204054633923,
      "learning_rate": 3.956409168383325e-05,
      "loss": 1.2804,
      "step": 158
    },
    {
      "epoch": 1.18,
      "grad_norm": 0.014816527490603743,
      "learning_rate": 3.89601018504147e-05,
      "loss": 1.2691,
      "step": 159
    },
    {
      "epoch": 1.19,
      "grad_norm": 0.014742205868113261,
      "learning_rate": 3.835780086996794e-05,
      "loss": 1.3161,
      "step": 160
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 268,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 20,
  "total_flos": 2473123236544512.0,
  "train_batch_size": 3,
  "trial_name": null,
  "trial_params": null
}