{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.980891719745223,
  "eval_steps": 500,
  "global_step": 156,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01910828025477707, "grad_norm": 7.283066272735596, "learning_rate": 6.25e-07, "loss": 1.1498, "step": 1 },
    { "epoch": 0.03821656050955414, "grad_norm": 7.387139797210693, "learning_rate": 1.25e-06, "loss": 1.1469, "step": 2 },
    { "epoch": 0.05732484076433121, "grad_norm": 7.194793224334717, "learning_rate": 1.8750000000000003e-06, "loss": 1.1608, "step": 3 },
    { "epoch": 0.07643312101910828, "grad_norm": 7.103536128997803, "learning_rate": 2.5e-06, "loss": 1.1289, "step": 4 },
    { "epoch": 0.09554140127388536, "grad_norm": 6.497398853302002, "learning_rate": 3.125e-06, "loss": 1.1272, "step": 5 },
    { "epoch": 0.11464968152866242, "grad_norm": 4.629354000091553, "learning_rate": 3.7500000000000005e-06, "loss": 1.0227, "step": 6 },
    { "epoch": 0.1337579617834395, "grad_norm": 2.5543441772460938, "learning_rate": 4.3750000000000005e-06, "loss": 1.0106, "step": 7 },
    { "epoch": 0.15286624203821655, "grad_norm": 2.3123137950897217, "learning_rate": 5e-06, "loss": 0.9752, "step": 8 },
    { "epoch": 0.17197452229299362, "grad_norm": 4.898059368133545, "learning_rate": 5.625e-06, "loss": 0.9848, "step": 9 },
    { "epoch": 0.1910828025477707, "grad_norm": 5.3169264793396, "learning_rate": 6.25e-06, "loss": 0.9968, "step": 10 },
    { "epoch": 0.21019108280254778, "grad_norm": 5.212041854858398, "learning_rate": 6.875e-06, "loss": 0.9595, "step": 11 },
    { "epoch": 0.22929936305732485, "grad_norm": 3.953948974609375, "learning_rate": 7.500000000000001e-06, "loss": 0.9641, "step": 12 },
    { "epoch": 0.2484076433121019, "grad_norm": 3.704425096511841, "learning_rate": 8.125000000000001e-06, "loss": 0.8944, "step": 13 },
    { "epoch": 0.267515923566879, "grad_norm": 3.1059200763702393, "learning_rate": 8.750000000000001e-06, "loss": 0.8578, "step": 14 },
    { "epoch": 0.28662420382165604, "grad_norm": 2.5509462356567383, "learning_rate": 9.375000000000001e-06, "loss": 0.8804, "step": 15 },
    { "epoch": 0.3057324840764331, "grad_norm": 2.304457426071167, "learning_rate": 1e-05, "loss": 0.8432, "step": 16 },
    { "epoch": 0.3248407643312102, "grad_norm": 2.506471633911133, "learning_rate": 9.998741174712534e-06, "loss": 0.8422, "step": 17 },
    { "epoch": 0.34394904458598724, "grad_norm": 2.3640732765197754, "learning_rate": 9.994965332706574e-06, "loss": 0.8418, "step": 18 },
    { "epoch": 0.3630573248407643, "grad_norm": 2.196169853210449, "learning_rate": 9.98867437523228e-06, "loss": 0.8514, "step": 19 },
    { "epoch": 0.3821656050955414, "grad_norm": 1.7853914499282837, "learning_rate": 9.979871469976197e-06, "loss": 0.8462, "step": 20 },
    { "epoch": 0.4012738853503185, "grad_norm": 1.5007359981536865, "learning_rate": 9.968561049466214e-06, "loss": 0.8171, "step": 21 },
    { "epoch": 0.42038216560509556, "grad_norm": 1.2658205032348633, "learning_rate": 9.954748808839675e-06, "loss": 0.8074, "step": 22 },
    { "epoch": 0.4394904458598726, "grad_norm": 1.3059478998184204, "learning_rate": 9.938441702975689e-06, "loss": 0.8377, "step": 23 },
    { "epoch": 0.4585987261146497, "grad_norm": 1.0882668495178223, "learning_rate": 9.91964794299315e-06, "loss": 0.8195, "step": 24 },
    { "epoch": 0.47770700636942676, "grad_norm": 1.0192159414291382, "learning_rate": 9.898376992116179e-06, "loss": 0.8041, "step": 25 },
    { "epoch": 0.4968152866242038, "grad_norm": 0.9797454476356506, "learning_rate": 9.874639560909118e-06, "loss": 0.844, "step": 26 },
    { "epoch": 0.5159235668789809, "grad_norm": 0.9786008596420288, "learning_rate": 9.848447601883436e-06, "loss": 0.7693, "step": 27 },
    { "epoch": 0.535031847133758, "grad_norm": 0.9011598229408264, "learning_rate": 9.819814303479268e-06, "loss": 0.807, "step": 28 },
    { "epoch": 0.554140127388535, "grad_norm": 0.826195240020752, "learning_rate": 9.788754083424654e-06, "loss": 0.7848, "step": 29 },
    { "epoch": 0.5732484076433121, "grad_norm": 0.9426838755607605, "learning_rate": 9.755282581475769e-06, "loss": 0.8061, "step": 30 },
    { "epoch": 0.5923566878980892, "grad_norm": 0.9041593074798584, "learning_rate": 9.719416651541839e-06, "loss": 0.7731, "step": 31 },
    { "epoch": 0.6114649681528662, "grad_norm": 0.9566060304641724, "learning_rate": 9.681174353198687e-06, "loss": 0.7955, "step": 32 },
    { "epoch": 0.6305732484076433, "grad_norm": 0.763576328754425, "learning_rate": 9.640574942595195e-06, "loss": 0.7941, "step": 33 },
    { "epoch": 0.6496815286624203, "grad_norm": 1.0218584537506104, "learning_rate": 9.597638862757255e-06, "loss": 0.7971, "step": 34 },
    { "epoch": 0.6687898089171974, "grad_norm": 1.0102124214172363, "learning_rate": 9.552387733294081e-06, "loss": 0.738, "step": 35 },
    { "epoch": 0.6878980891719745, "grad_norm": 0.7672615051269531, "learning_rate": 9.504844339512096e-06, "loss": 0.7804, "step": 36 },
    { "epoch": 0.7070063694267515, "grad_norm": 1.0719943046569824, "learning_rate": 9.45503262094184e-06, "loss": 0.7807, "step": 37 },
    { "epoch": 0.7261146496815286, "grad_norm": 0.9412849545478821, "learning_rate": 9.40297765928369e-06, "loss": 0.8137, "step": 38 },
    { "epoch": 0.7452229299363057, "grad_norm": 0.8677616715431213, "learning_rate": 9.348705665778479e-06, "loss": 0.7743, "step": 39 },
    { "epoch": 0.7643312101910829, "grad_norm": 0.9233507513999939, "learning_rate": 9.292243968009332e-06, "loss": 0.7574, "step": 40 },
    { "epoch": 0.7834394904458599, "grad_norm": 1.9202914237976074, "learning_rate": 9.233620996141421e-06, "loss": 0.8454, "step": 41 },
    { "epoch": 0.802547770700637, "grad_norm": 0.8707855343818665, "learning_rate": 9.172866268606514e-06, "loss": 0.7502, "step": 42 },
    { "epoch": 0.821656050955414, "grad_norm": 0.8542220592498779, "learning_rate": 9.110010377239552e-06, "loss": 0.8205, "step": 43 },
    { "epoch": 0.8407643312101911, "grad_norm": 0.7179370522499084, "learning_rate": 9.045084971874738e-06, "loss": 0.7447, "step": 44 },
    { "epoch": 0.8598726114649682, "grad_norm": 0.7615282535552979, "learning_rate": 8.978122744408905e-06, "loss": 0.7994, "step": 45 },
    { "epoch": 0.8789808917197452, "grad_norm": 0.6987951397895813, "learning_rate": 8.90915741234015e-06, "loss": 0.7636, "step": 46 },
    { "epoch": 0.8980891719745223, "grad_norm": 0.8840264678001404, "learning_rate": 8.838223701790057e-06, "loss": 0.7604, "step": 47 },
    { "epoch": 0.9171974522292994, "grad_norm": 0.7975447773933411, "learning_rate": 8.765357330018056e-06, "loss": 0.7555, "step": 48 },
    { "epoch": 0.9363057324840764, "grad_norm": 0.834041953086853, "learning_rate": 8.690594987436705e-06, "loss": 0.7581, "step": 49 },
    { "epoch": 0.9554140127388535, "grad_norm": 0.6846311092376709, "learning_rate": 8.613974319136959e-06, "loss": 0.7254, "step": 50 },
    { "epoch": 0.9745222929936306, "grad_norm": 0.7656162977218628, "learning_rate": 8.535533905932739e-06, "loss": 0.7786, "step": 51 },
    { "epoch": 0.9936305732484076, "grad_norm": 0.6645858287811279, "learning_rate": 8.455313244934324e-06, "loss": 0.7752, "step": 52 },
    { "epoch": 1.0127388535031847, "grad_norm": 0.6466763019561768, "learning_rate": 8.373352729660373e-06, "loss": 0.697, "step": 53 },
    { "epoch": 1.0318471337579618, "grad_norm": 0.6180476546287537, "learning_rate": 8.289693629698564e-06, "loss": 0.6652, "step": 54 },
    { "epoch": 1.0509554140127388, "grad_norm": 0.6721435785293579, "learning_rate": 8.204378069925121e-06, "loss": 0.6916, "step": 55 },
    { "epoch": 1.070063694267516, "grad_norm": 0.7600250840187073, "learning_rate": 8.117449009293668e-06, "loss": 0.6793, "step": 56 },
    { "epoch": 1.089171974522293, "grad_norm": 0.6684707403182983, "learning_rate": 8.0289502192041e-06, "loss": 0.6743, "step": 57 },
    { "epoch": 1.10828025477707, "grad_norm": 0.7330480217933655, "learning_rate": 7.938926261462366e-06, "loss": 0.6754, "step": 58 },
    { "epoch": 1.127388535031847, "grad_norm": 0.6713646650314331, "learning_rate": 7.84742246584226e-06, "loss": 0.66, "step": 59 },
    { "epoch": 1.1464968152866242, "grad_norm": 0.6231556534767151, "learning_rate": 7.754484907260513e-06, "loss": 0.6564, "step": 60 },
    { "epoch": 1.1656050955414012, "grad_norm": 0.7284997701644897, "learning_rate": 7.660160382576683e-06, "loss": 0.6834, "step": 61 },
    { "epoch": 1.1847133757961783, "grad_norm": 0.6809297204017639, "learning_rate": 7.564496387029532e-06, "loss": 0.6453, "step": 62 },
    { "epoch": 1.2038216560509554, "grad_norm": 0.6243814826011658, "learning_rate": 7.467541090321735e-06, "loss": 0.6666, "step": 63 },
    { "epoch": 1.2229299363057324, "grad_norm": 0.6623536944389343, "learning_rate": 7.369343312364994e-06, "loss": 0.6858, "step": 64 },
    { "epoch": 1.2420382165605095, "grad_norm": 0.6827297806739807, "learning_rate": 7.269952498697734e-06, "loss": 0.6577, "step": 65 },
    { "epoch": 1.2611464968152866, "grad_norm": 0.6385012269020081, "learning_rate": 7.169418695587791e-06, "loss": 0.6833, "step": 66 },
    { "epoch": 1.2802547770700636, "grad_norm": 0.7074577212333679, "learning_rate": 7.067792524832604e-06, "loss": 0.6732, "step": 67 },
    { "epoch": 1.2993630573248407, "grad_norm": 0.6454325318336487, "learning_rate": 6.965125158269619e-06, "loss": 0.6671, "step": 68 },
    { "epoch": 1.3184713375796178, "grad_norm": 0.6871979832649231, "learning_rate": 6.8614682920097265e-06, "loss": 0.6744, "step": 69 },
    { "epoch": 1.3375796178343948, "grad_norm": 0.6541727185249329, "learning_rate": 6.7568741204067145e-06, "loss": 0.6529, "step": 70 },
    { "epoch": 1.356687898089172, "grad_norm": 0.6993772983551025, "learning_rate": 6.651395309775837e-06, "loss": 0.6548, "step": 71 },
    { "epoch": 1.3757961783439492, "grad_norm": 0.7177384495735168, "learning_rate": 6.545084971874738e-06, "loss": 0.6556, "step": 72 },
    { "epoch": 1.394904458598726, "grad_norm": 0.6087607741355896, "learning_rate": 6.437996637160086e-06, "loss": 0.6261, "step": 73 },
    { "epoch": 1.4140127388535033, "grad_norm": 0.6564837694168091, "learning_rate": 6.330184227833376e-06, "loss": 0.6632, "step": 74 },
    { "epoch": 1.4331210191082802, "grad_norm": 0.6872571110725403, "learning_rate": 6.2217020306894705e-06, "loss": 0.6684, "step": 75 },
    { "epoch": 1.4522292993630574, "grad_norm": 0.6152129173278809, "learning_rate": 6.112604669781572e-06, "loss": 0.6491, "step": 76 },
    { "epoch": 1.4713375796178343, "grad_norm": 0.6309750080108643, "learning_rate": 6.002947078916365e-06, "loss": 0.6533, "step": 77 },
    { "epoch": 1.4904458598726116, "grad_norm": 0.6075384616851807, "learning_rate": 5.892784473993184e-06, "loss": 0.6631, "step": 78 },
    { "epoch": 1.5095541401273884, "grad_norm": 0.6240233778953552, "learning_rate": 5.782172325201155e-06, "loss": 0.6312, "step": 79 },
    { "epoch": 1.5286624203821657, "grad_norm": 0.6493943929672241, "learning_rate": 5.671166329088278e-06, "loss": 0.6583, "step": 80 },
    { "epoch": 1.5477707006369426, "grad_norm": 0.6722797155380249, "learning_rate": 5.559822380516539e-06, "loss": 0.6638, "step": 81 },
    { "epoch": 1.5668789808917198, "grad_norm": 0.6353743076324463, "learning_rate": 5.448196544517168e-06, "loss": 0.6516, "step": 82 },
    { "epoch": 1.5859872611464967, "grad_norm": 0.638333261013031, "learning_rate": 5.336345028060199e-06, "loss": 0.6592, "step": 83 },
    { "epoch": 1.605095541401274, "grad_norm": 0.7258989810943604, "learning_rate": 5.224324151752575e-06, "loss": 0.6816, "step": 84 },
    { "epoch": 1.6242038216560508, "grad_norm": 0.6479246616363525, "learning_rate": 5.112190321479026e-06, "loss": 0.6678, "step": 85 },
    { "epoch": 1.643312101910828, "grad_norm": 0.6187183856964111, "learning_rate": 5e-06, "loss": 0.649, "step": 86 },
    { "epoch": 1.662420382165605, "grad_norm": 0.6216777563095093, "learning_rate": 4.887809678520976e-06, "loss": 0.6587, "step": 87 },
    { "epoch": 1.6815286624203822, "grad_norm": 0.6864318251609802, "learning_rate": 4.775675848247427e-06, "loss": 0.6515, "step": 88 },
    { "epoch": 1.700636942675159, "grad_norm": 0.5659608244895935, "learning_rate": 4.663654971939802e-06, "loss": 0.6576, "step": 89 },
    { "epoch": 1.7197452229299364, "grad_norm": 0.601165235042572, "learning_rate": 4.551803455482833e-06, "loss": 0.6585, "step": 90 },
    { "epoch": 1.7388535031847132, "grad_norm": 0.6609579920768738, "learning_rate": 4.4401776194834615e-06, "loss": 0.6659, "step": 91 },
    { "epoch": 1.7579617834394905, "grad_norm": 0.6505253314971924, "learning_rate": 4.3288336709117246e-06, "loss": 0.6703, "step": 92 },
    { "epoch": 1.7770700636942676, "grad_norm": 0.6021715402603149, "learning_rate": 4.217827674798845e-06, "loss": 0.6813, "step": 93 },
    { "epoch": 1.7961783439490446, "grad_norm": 2.0026886463165283, "learning_rate": 4.107215526006818e-06, "loss": 0.6986, "step": 94 },
    { "epoch": 1.8152866242038217, "grad_norm": 0.583001434803009, "learning_rate": 3.997052921083637e-06, "loss": 0.6531, "step": 95 },
    { "epoch": 1.8343949044585988, "grad_norm": 0.6410379409790039, "learning_rate": 3.887395330218429e-06, "loss": 0.6655, "step": 96 },
    { "epoch": 1.8535031847133758, "grad_norm": 0.6351285576820374, "learning_rate": 3.778297969310529e-06, "loss": 0.6533, "step": 97 },
    { "epoch": 1.872611464968153, "grad_norm": 0.5866528749465942, "learning_rate": 3.669815772166625e-06, "loss": 0.6845, "step": 98 },
    { "epoch": 1.89171974522293, "grad_norm": 0.569778323173523, "learning_rate": 3.562003362839914e-06, "loss": 0.6494, "step": 99 },
    { "epoch": 1.910828025477707, "grad_norm": 0.554115891456604, "learning_rate": 3.4549150281252635e-06, "loss": 0.6423, "step": 100 },
    { "epoch": 1.929936305732484, "grad_norm": 0.5609800815582275, "learning_rate": 3.3486046902241663e-06, "loss": 0.6306, "step": 101 },
    { "epoch": 1.9490445859872612, "grad_norm": 0.5329970717430115, "learning_rate": 3.2431258795932863e-06, "loss": 0.6657, "step": 102 },
    { "epoch": 1.9681528662420382, "grad_norm": 0.5660147666931152, "learning_rate": 3.1385317079902743e-06, "loss": 0.6599, "step": 103 },
    { "epoch": 1.9872611464968153, "grad_norm": 0.5551929473876953, "learning_rate": 3.0348748417303826e-06, "loss": 0.6617, "step": 104 },
    { "epoch": 2.0063694267515926, "grad_norm": 0.5826137661933899, "learning_rate": 2.932207475167398e-06, "loss": 0.6042, "step": 105 },
    { "epoch": 2.0254777070063694, "grad_norm": 0.5620737075805664, "learning_rate": 2.83058130441221e-06, "loss": 0.5824, "step": 106 },
    { "epoch": 2.0445859872611467, "grad_norm": 0.6190159916877747, "learning_rate": 2.7300475013022666e-06, "loss": 0.576, "step": 107 },
    { "epoch": 2.0636942675159236, "grad_norm": 0.5424576997756958, "learning_rate": 2.6306566876350072e-06, "loss": 0.5832, "step": 108 },
    { "epoch": 2.082802547770701, "grad_norm": 0.5414028167724609, "learning_rate": 2.532458909678266e-06, "loss": 0.5767, "step": 109 },
    { "epoch": 2.1019108280254777, "grad_norm": 0.5423881411552429, "learning_rate": 2.43550361297047e-06, "loss": 0.5692, "step": 110 },
    { "epoch": 2.121019108280255, "grad_norm": 0.5424202680587769, "learning_rate": 2.339839617423318e-06, "loss": 0.5891, "step": 111 },
    { "epoch": 2.140127388535032, "grad_norm": 0.6213288307189941, "learning_rate": 2.245515092739488e-06, "loss": 0.5799, "step": 112 },
    { "epoch": 2.159235668789809, "grad_norm": 0.5852116942405701, "learning_rate": 2.1525775341577404e-06, "loss": 0.5949, "step": 113 },
    { "epoch": 2.178343949044586, "grad_norm": 0.5753389596939087, "learning_rate": 2.061073738537635e-06, "loss": 0.566, "step": 114 },
    { "epoch": 2.1974522292993632, "grad_norm": 0.5467677712440491, "learning_rate": 1.971049780795901e-06, "loss": 0.603, "step": 115 },
    { "epoch": 2.21656050955414, "grad_norm": 0.5652862191200256, "learning_rate": 1.8825509907063328e-06, "loss": 0.5849, "step": 116 },
    { "epoch": 2.2356687898089174, "grad_norm": 0.5632086992263794, "learning_rate": 1.7956219300748796e-06, "loss": 0.6131, "step": 117 },
    { "epoch": 2.254777070063694, "grad_norm": 0.5731777548789978, "learning_rate": 1.7103063703014372e-06, "loss": 0.5665, "step": 118 },
    { "epoch": 2.2738853503184715, "grad_norm": 0.5389222502708435, "learning_rate": 1.6266472703396286e-06, "loss": 0.5709, "step": 119 },
    { "epoch": 2.2929936305732483, "grad_norm": 0.5164850950241089, "learning_rate": 1.544686755065677e-06, "loss": 0.5732, "step": 120 },
    { "epoch": 2.3121019108280256, "grad_norm": 0.5392653346061707, "learning_rate": 1.4644660940672628e-06, "loss": 0.5803, "step": 121 },
    { "epoch": 2.3312101910828025, "grad_norm": 0.5912431478500366, "learning_rate": 1.3860256808630429e-06, "loss": 0.5934, "step": 122 },
    { "epoch": 2.3503184713375798, "grad_norm": 0.5611681938171387, "learning_rate": 1.3094050125632973e-06, "loss": 0.5743, "step": 123 },
    { "epoch": 2.3694267515923566, "grad_norm": 0.598409116268158, "learning_rate": 1.234642669981946e-06, "loss": 0.575, "step": 124 },
    { "epoch": 2.388535031847134, "grad_norm": 0.5573514103889465, "learning_rate": 1.1617762982099446e-06, "loss": 0.5681, "step": 125 },
    { "epoch": 2.4076433121019107, "grad_norm": 0.5197586417198181, "learning_rate": 1.0908425876598512e-06, "loss": 0.5417, "step": 126 },
    { "epoch": 2.426751592356688, "grad_norm": 0.5155230760574341, "learning_rate": 1.0218772555910955e-06, "loss": 0.5835, "step": 127 },
    { "epoch": 2.445859872611465, "grad_norm": 0.5502220988273621, "learning_rate": 9.549150281252633e-07, "loss": 0.5685, "step": 128 },
    { "epoch": 2.464968152866242, "grad_norm": 0.5894801020622253, "learning_rate": 8.899896227604509e-07, "loss": 0.5897, "step": 129 },
    { "epoch": 2.484076433121019, "grad_norm": 1.0258440971374512, "learning_rate": 8.271337313934869e-07, "loss": 0.5928, "step": 130 },
    { "epoch": 2.5031847133757963, "grad_norm": 0.5264387130737305, "learning_rate": 7.663790038585794e-07, "loss": 0.5872, "step": 131 },
    { "epoch": 2.522292993630573, "grad_norm": 0.5449411273002625, "learning_rate": 7.077560319906696e-07, "loss": 0.588, "step": 132 },
    { "epoch": 2.5414012738853504, "grad_norm": 0.5089368224143982, "learning_rate": 6.512943342215234e-07, "loss": 0.6063, "step": 133 },
    { "epoch": 2.5605095541401273, "grad_norm": 0.5194270610809326, "learning_rate": 5.9702234071631e-07, "loss": 0.5653, "step": 134 },
    { "epoch": 2.5796178343949046, "grad_norm": 0.46488288044929504, "learning_rate": 5.449673790581611e-07, "loss": 0.6105, "step": 135 },
    { "epoch": 2.5987261146496814, "grad_norm": 0.546769380569458, "learning_rate": 4.951556604879049e-07, "loss": 0.5746, "step": 136 },
    { "epoch": 2.6178343949044587, "grad_norm": 0.5026277899742126, "learning_rate": 4.4761226670592074e-07, "loss": 0.5826, "step": 137 },
    { "epoch": 2.6369426751592355, "grad_norm": 0.509587287902832, "learning_rate": 4.0236113724274716e-07, "loss": 0.5774, "step": 138 },
    { "epoch": 2.656050955414013, "grad_norm": 0.5122659802436829, "learning_rate": 3.5942505740480583e-07, "loss": 0.5886, "step": 139 },
    { "epoch": 2.6751592356687897, "grad_norm": 0.5195062160491943, "learning_rate": 3.18825646801314e-07, "loss": 0.5855, "step": 140 },
    { "epoch": 2.694267515923567, "grad_norm": 0.5074093341827393, "learning_rate": 2.8058334845816214e-07, "loss": 0.5774, "step": 141 },
    { "epoch": 2.713375796178344, "grad_norm": 0.5081133246421814, "learning_rate": 2.447174185242324e-07, "loss": 0.5752, "step": 142 },
    { "epoch": 2.732484076433121, "grad_norm": 0.49944645166397095, "learning_rate": 2.1124591657534776e-07, "loss": 0.6027, "step": 143 },
    { "epoch": 2.7515923566878984, "grad_norm": 0.5384798645973206, "learning_rate": 1.801856965207338e-07, "loss": 0.5705, "step": 144 },
    { "epoch": 2.770700636942675, "grad_norm": 0.4824913442134857, "learning_rate": 1.5155239811656562e-07, "loss": 0.5458, "step": 145 },
    { "epoch": 2.789808917197452, "grad_norm": 0.5386421084403992, "learning_rate": 1.253604390908819e-07, "loss": 0.5973, "step": 146 },
    { "epoch": 2.8089171974522293, "grad_norm": 0.5020357370376587, "learning_rate": 1.0162300788382263e-07, "loss": 0.5524, "step": 147 },
    { "epoch": 2.8280254777070066, "grad_norm": 0.5273739695549011, "learning_rate": 8.035205700685167e-08, "loss": 0.5675, "step": 148 },
    { "epoch": 2.8471337579617835, "grad_norm": 0.5206378102302551, "learning_rate": 6.15582970243117e-08, "loss": 0.5573, "step": 149 },
    { "epoch": 2.8662420382165603, "grad_norm": 0.48928534984588623, "learning_rate": 4.52511911603265e-08, "loss": 0.5716, "step": 150 },
    { "epoch": 2.8853503184713376, "grad_norm": 0.52506422996521, "learning_rate": 3.143895053378698e-08, "loss": 0.5911, "step": 151 },
    { "epoch": 2.904458598726115, "grad_norm": 0.4814032018184662, "learning_rate": 2.012853002380466e-08, "loss": 0.5804, "step": 152 },
    { "epoch": 2.9235668789808917, "grad_norm": 0.4976033866405487, "learning_rate": 1.132562476771959e-08, "loss": 0.5846, "step": 153 },
    { "epoch": 2.9426751592356686, "grad_norm": 0.5002295970916748, "learning_rate": 5.034667293427053e-09, "loss": 0.582, "step": 154 },
    { "epoch": 2.961783439490446, "grad_norm": 0.49213045835494995, "learning_rate": 1.2588252874673469e-09, "loss": 0.5942, "step": 155 },
    { "epoch": 2.980891719745223, "grad_norm": 0.5032421946525574, "learning_rate": 0.0, "loss": 0.5574, "step": 156 },
    { "epoch": 2.980891719745223, "step": 156, "total_flos": 1.4444846521853542e+17, "train_loss": 0.7008312650215931, "train_runtime": 4021.9564, "train_samples_per_second": 3.73, "train_steps_per_second": 0.039 }
  ],
  "logging_steps": 1,
  "max_steps": 156,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4444846521853542e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}