{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 156,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.019230769230769232,
      "grad_norm": 6.966817855834961,
      "learning_rate": 6.25e-07,
      "loss": 1.0486,
      "step": 1
    },
    {
      "epoch": 0.038461538461538464,
      "grad_norm": 7.038541793823242,
      "learning_rate": 1.25e-06,
      "loss": 1.0618,
      "step": 2
    },
    {
      "epoch": 0.057692307692307696,
      "grad_norm": 7.20219612121582,
      "learning_rate": 1.8750000000000003e-06,
      "loss": 1.0296,
      "step": 3
    },
    {
      "epoch": 0.07692307692307693,
      "grad_norm": 6.6908955574035645,
      "learning_rate": 2.5e-06,
      "loss": 1.0376,
      "step": 4
    },
    {
      "epoch": 0.09615384615384616,
      "grad_norm": 6.1468987464904785,
      "learning_rate": 3.125e-06,
      "loss": 1.0199,
      "step": 5
    },
    {
      "epoch": 0.11538461538461539,
      "grad_norm": 4.434096336364746,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.9741,
      "step": 6
    },
    {
      "epoch": 0.1346153846153846,
      "grad_norm": 2.856332540512085,
      "learning_rate": 4.3750000000000005e-06,
      "loss": 0.9578,
      "step": 7
    },
    {
      "epoch": 0.15384615384615385,
      "grad_norm": 2.7081027030944824,
      "learning_rate": 5e-06,
      "loss": 0.9389,
      "step": 8
    },
    {
      "epoch": 0.17307692307692307,
      "grad_norm": 4.221141338348389,
      "learning_rate": 5.625e-06,
      "loss": 0.9251,
      "step": 9
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 4.619787216186523,
      "learning_rate": 6.25e-06,
      "loss": 0.929,
      "step": 10
    },
    {
      "epoch": 0.21153846153846154,
      "grad_norm": 4.410218715667725,
      "learning_rate": 6.875e-06,
      "loss": 0.9117,
      "step": 11
    },
    {
      "epoch": 0.23076923076923078,
      "grad_norm": 3.20845890045166,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.8544,
      "step": 12
    },
    {
      "epoch": 0.25,
      "grad_norm": 3.037214994430542,
      "learning_rate": 8.125000000000001e-06,
      "loss": 0.8393,
      "step": 13
    },
    {
      "epoch": 0.2692307692307692,
      "grad_norm": 2.277648448944092,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.8304,
      "step": 14
    },
    {
      "epoch": 0.28846153846153844,
      "grad_norm": 1.7641620635986328,
      "learning_rate": 9.375000000000001e-06,
      "loss": 0.8086,
      "step": 15
    },
    {
      "epoch": 0.3076923076923077,
      "grad_norm": 1.4881336688995361,
      "learning_rate": 1e-05,
      "loss": 0.7887,
      "step": 16
    },
    {
      "epoch": 0.3269230769230769,
      "grad_norm": 1.780328631401062,
      "learning_rate": 9.998741174712534e-06,
      "loss": 0.7822,
      "step": 17
    },
    {
      "epoch": 0.34615384615384615,
      "grad_norm": 1.6369305849075317,
      "learning_rate": 9.994965332706574e-06,
      "loss": 0.7578,
      "step": 18
    },
    {
      "epoch": 0.36538461538461536,
      "grad_norm": 1.2220097780227661,
      "learning_rate": 9.98867437523228e-06,
      "loss": 0.747,
      "step": 19
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 1.1201010942459106,
      "learning_rate": 9.979871469976197e-06,
      "loss": 0.7333,
      "step": 20
    },
    {
      "epoch": 0.40384615384615385,
      "grad_norm": 1.2044910192489624,
      "learning_rate": 9.968561049466214e-06,
      "loss": 0.729,
      "step": 21
    },
    {
      "epoch": 0.4230769230769231,
      "grad_norm": 0.9985323548316956,
      "learning_rate": 9.954748808839675e-06,
      "loss": 0.7331,
      "step": 22
    },
    {
      "epoch": 0.4423076923076923,
      "grad_norm": 0.7969001531600952,
      "learning_rate": 9.938441702975689e-06,
      "loss": 0.7146,
      "step": 23
    },
    {
      "epoch": 0.46153846153846156,
      "grad_norm": 0.9972831606864929,
      "learning_rate": 9.91964794299315e-06,
      "loss": 0.7211,
      "step": 24
    },
    {
      "epoch": 0.4807692307692308,
      "grad_norm": 0.9880993962287903,
      "learning_rate": 9.898376992116179e-06,
      "loss": 0.703,
      "step": 25
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.9266494512557983,
      "learning_rate": 9.874639560909118e-06,
      "loss": 0.7087,
      "step": 26
    },
    {
      "epoch": 0.5192307692307693,
      "grad_norm": 0.882935643196106,
      "learning_rate": 9.848447601883436e-06,
      "loss": 0.7069,
      "step": 27
    },
    {
      "epoch": 0.5384615384615384,
      "grad_norm": 0.7236116528511047,
      "learning_rate": 9.819814303479268e-06,
      "loss": 0.6714,
      "step": 28
    },
    {
      "epoch": 0.5576923076923077,
      "grad_norm": 0.7752331495285034,
      "learning_rate": 9.788754083424654e-06,
      "loss": 0.6837,
      "step": 29
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 0.9294746518135071,
      "learning_rate": 9.755282581475769e-06,
      "loss": 0.6832,
      "step": 30
    },
    {
      "epoch": 0.5961538461538461,
      "grad_norm": 0.6968167424201965,
      "learning_rate": 9.719416651541839e-06,
      "loss": 0.6919,
      "step": 31
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.8141052722930908,
      "learning_rate": 9.681174353198687e-06,
      "loss": 0.6799,
      "step": 32
    },
    {
      "epoch": 0.6346153846153846,
      "grad_norm": 0.6246001124382019,
      "learning_rate": 9.640574942595195e-06,
      "loss": 0.6812,
      "step": 33
    },
    {
      "epoch": 0.6538461538461539,
      "grad_norm": 0.6437559127807617,
      "learning_rate": 9.597638862757255e-06,
      "loss": 0.6851,
      "step": 34
    },
    {
      "epoch": 0.6730769230769231,
      "grad_norm": 0.6685044765472412,
      "learning_rate": 9.552387733294081e-06,
      "loss": 0.6673,
      "step": 35
    },
    {
      "epoch": 0.6923076923076923,
      "grad_norm": 0.6421700716018677,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.668,
      "step": 36
    },
    {
      "epoch": 0.7115384615384616,
      "grad_norm": 0.7625846266746521,
      "learning_rate": 9.45503262094184e-06,
      "loss": 0.6539,
      "step": 37
    },
    {
      "epoch": 0.7307692307692307,
      "grad_norm": 0.7253762483596802,
      "learning_rate": 9.40297765928369e-06,
      "loss": 0.654,
      "step": 38
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.6435613632202148,
      "learning_rate": 9.348705665778479e-06,
      "loss": 0.6697,
      "step": 39
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.6515916585922241,
      "learning_rate": 9.292243968009332e-06,
      "loss": 0.6597,
      "step": 40
    },
    {
      "epoch": 0.7884615384615384,
      "grad_norm": 0.6834498643875122,
      "learning_rate": 9.233620996141421e-06,
      "loss": 0.6554,
      "step": 41
    },
    {
      "epoch": 0.8076923076923077,
      "grad_norm": 0.5808966755867004,
      "learning_rate": 9.172866268606514e-06,
      "loss": 0.658,
      "step": 42
    },
    {
      "epoch": 0.8269230769230769,
      "grad_norm": 0.6413999795913696,
      "learning_rate": 9.110010377239552e-06,
      "loss": 0.6513,
      "step": 43
    },
    {
      "epoch": 0.8461538461538461,
      "grad_norm": 0.7106669545173645,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.6554,
      "step": 44
    },
    {
      "epoch": 0.8653846153846154,
      "grad_norm": 0.5817821025848389,
      "learning_rate": 8.978122744408905e-06,
      "loss": 0.6638,
      "step": 45
    },
    {
      "epoch": 0.8846153846153846,
      "grad_norm": 0.7833232283592224,
      "learning_rate": 8.90915741234015e-06,
      "loss": 0.6683,
      "step": 46
    },
    {
      "epoch": 0.9038461538461539,
      "grad_norm": 0.6540783643722534,
      "learning_rate": 8.838223701790057e-06,
      "loss": 0.6363,
      "step": 47
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.7414817214012146,
      "learning_rate": 8.765357330018056e-06,
      "loss": 0.6661,
      "step": 48
    },
    {
      "epoch": 0.9423076923076923,
      "grad_norm": 0.7550224661827087,
      "learning_rate": 8.690594987436705e-06,
      "loss": 0.6499,
      "step": 49
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 0.5707510709762573,
      "learning_rate": 8.613974319136959e-06,
      "loss": 0.6338,
      "step": 50
    },
    {
      "epoch": 0.9807692307692307,
      "grad_norm": 0.6949760913848877,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.6367,
      "step": 51
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.752463161945343,
      "learning_rate": 8.455313244934324e-06,
      "loss": 0.6403,
      "step": 52
    },
    {
      "epoch": 1.0192307692307692,
      "grad_norm": 0.5614742040634155,
      "learning_rate": 8.373352729660373e-06,
      "loss": 0.6427,
      "step": 53
    },
    {
      "epoch": 1.0384615384615385,
      "grad_norm": 0.5981480479240417,
      "learning_rate": 8.289693629698564e-06,
      "loss": 0.6213,
      "step": 54
    },
    {
      "epoch": 1.0576923076923077,
      "grad_norm": 0.5645704865455627,
      "learning_rate": 8.204378069925121e-06,
      "loss": 0.6264,
      "step": 55
    },
    {
      "epoch": 1.0769230769230769,
      "grad_norm": 0.6066609025001526,
      "learning_rate": 8.117449009293668e-06,
      "loss": 0.6284,
      "step": 56
    },
    {
      "epoch": 1.0961538461538463,
      "grad_norm": 0.6348351836204529,
      "learning_rate": 8.0289502192041e-06,
      "loss": 0.6041,
      "step": 57
    },
    {
      "epoch": 1.1153846153846154,
      "grad_norm": 0.5784773230552673,
      "learning_rate": 7.938926261462366e-06,
      "loss": 0.6201,
      "step": 58
    },
    {
      "epoch": 1.1346153846153846,
      "grad_norm": 0.5792120695114136,
      "learning_rate": 7.84742246584226e-06,
      "loss": 0.6136,
      "step": 59
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 0.5399219989776611,
      "learning_rate": 7.754484907260513e-06,
      "loss": 0.598,
      "step": 60
    },
    {
      "epoch": 1.1730769230769231,
      "grad_norm": 0.5761382579803467,
      "learning_rate": 7.660160382576683e-06,
      "loss": 0.6083,
      "step": 61
    },
    {
      "epoch": 1.1923076923076923,
      "grad_norm": 0.4965008497238159,
      "learning_rate": 7.564496387029532e-06,
      "loss": 0.6072,
      "step": 62
    },
    {
      "epoch": 1.2115384615384615,
      "grad_norm": 0.5501433610916138,
      "learning_rate": 7.467541090321735e-06,
      "loss": 0.6108,
      "step": 63
    },
    {
      "epoch": 1.2307692307692308,
      "grad_norm": 0.5454249382019043,
      "learning_rate": 7.369343312364994e-06,
      "loss": 0.6218,
      "step": 64
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.5427682399749756,
      "learning_rate": 7.269952498697734e-06,
      "loss": 0.6182,
      "step": 65
    },
    {
      "epoch": 1.2692307692307692,
      "grad_norm": 0.576603889465332,
      "learning_rate": 7.169418695587791e-06,
      "loss": 0.6092,
      "step": 66
    },
    {
      "epoch": 1.2884615384615383,
      "grad_norm": 0.5845782160758972,
      "learning_rate": 7.067792524832604e-06,
      "loss": 0.6038,
      "step": 67
    },
    {
      "epoch": 1.3076923076923077,
      "grad_norm": 0.5027605891227722,
      "learning_rate": 6.965125158269619e-06,
      "loss": 0.6,
      "step": 68
    },
    {
      "epoch": 1.3269230769230769,
      "grad_norm": 0.5731894373893738,
      "learning_rate": 6.8614682920097265e-06,
      "loss": 0.6019,
      "step": 69
    },
    {
      "epoch": 1.3461538461538463,
      "grad_norm": 0.5413023829460144,
      "learning_rate": 6.7568741204067145e-06,
      "loss": 0.5924,
      "step": 70
    },
    {
      "epoch": 1.3653846153846154,
      "grad_norm": 0.5625474452972412,
      "learning_rate": 6.651395309775837e-06,
      "loss": 0.6047,
      "step": 71
    },
    {
      "epoch": 1.3846153846153846,
      "grad_norm": 0.5386149287223816,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.6153,
      "step": 72
    },
    {
      "epoch": 1.4038461538461537,
      "grad_norm": 0.5398693680763245,
      "learning_rate": 6.437996637160086e-06,
      "loss": 0.6136,
      "step": 73
    },
    {
      "epoch": 1.4230769230769231,
      "grad_norm": 0.5411742329597473,
      "learning_rate": 6.330184227833376e-06,
      "loss": 0.5981,
      "step": 74
    },
    {
      "epoch": 1.4423076923076923,
      "grad_norm": 0.5154157280921936,
      "learning_rate": 6.2217020306894705e-06,
      "loss": 0.6205,
      "step": 75
    },
    {
      "epoch": 1.4615384615384617,
      "grad_norm": 0.4937479496002197,
      "learning_rate": 6.112604669781572e-06,
      "loss": 0.6196,
      "step": 76
    },
    {
      "epoch": 1.4807692307692308,
      "grad_norm": 0.4764734208583832,
      "learning_rate": 6.002947078916365e-06,
      "loss": 0.6079,
      "step": 77
    },
    {
      "epoch": 1.5,
      "grad_norm": 0.49523475766181946,
      "learning_rate": 5.892784473993184e-06,
      "loss": 0.5995,
      "step": 78
    },
    {
      "epoch": 1.5192307692307692,
      "grad_norm": 0.4664310812950134,
      "learning_rate": 5.782172325201155e-06,
      "loss": 0.6001,
      "step": 79
    },
    {
      "epoch": 1.5384615384615383,
      "grad_norm": 0.4587920308113098,
      "learning_rate": 5.671166329088278e-06,
      "loss": 0.6051,
      "step": 80
    },
    {
      "epoch": 1.5576923076923077,
      "grad_norm": 0.4844333529472351,
      "learning_rate": 5.559822380516539e-06,
      "loss": 0.6112,
      "step": 81
    },
    {
      "epoch": 1.5769230769230769,
      "grad_norm": 0.4959803819656372,
      "learning_rate": 5.448196544517168e-06,
      "loss": 0.5986,
      "step": 82
    },
    {
      "epoch": 1.5961538461538463,
      "grad_norm": 0.46690163016319275,
      "learning_rate": 5.336345028060199e-06,
      "loss": 0.5865,
      "step": 83
    },
    {
      "epoch": 1.6153846153846154,
      "grad_norm": 0.4700402021408081,
      "learning_rate": 5.224324151752575e-06,
      "loss": 0.592,
      "step": 84
    },
    {
      "epoch": 1.6346153846153846,
      "grad_norm": 0.48742589354515076,
      "learning_rate": 5.112190321479026e-06,
      "loss": 0.5877,
      "step": 85
    },
    {
      "epoch": 1.6538461538461537,
      "grad_norm": 0.5387303233146667,
      "learning_rate": 5e-06,
      "loss": 0.6089,
      "step": 86
    },
    {
      "epoch": 1.6730769230769231,
      "grad_norm": 0.4711591303348541,
      "learning_rate": 4.887809678520976e-06,
      "loss": 0.5971,
      "step": 87
    },
    {
      "epoch": 1.6923076923076923,
      "grad_norm": 0.6208007335662842,
      "learning_rate": 4.775675848247427e-06,
      "loss": 0.6103,
      "step": 88
    },
    {
      "epoch": 1.7115384615384617,
      "grad_norm": 0.51828932762146,
      "learning_rate": 4.663654971939802e-06,
      "loss": 0.5902,
      "step": 89
    },
    {
      "epoch": 1.7307692307692308,
      "grad_norm": 0.547057569026947,
      "learning_rate": 4.551803455482833e-06,
      "loss": 0.5968,
      "step": 90
    },
    {
      "epoch": 1.75,
      "grad_norm": 0.535789966583252,
      "learning_rate": 4.4401776194834615e-06,
      "loss": 0.6068,
      "step": 91
    },
    {
      "epoch": 1.7692307692307692,
      "grad_norm": 0.46714895963668823,
      "learning_rate": 4.3288336709117246e-06,
      "loss": 0.6094,
      "step": 92
    },
    {
      "epoch": 1.7884615384615383,
      "grad_norm": 0.4995303750038147,
      "learning_rate": 4.217827674798845e-06,
      "loss": 0.6126,
      "step": 93
    },
    {
      "epoch": 1.8076923076923077,
      "grad_norm": 0.49513861536979675,
      "learning_rate": 4.107215526006818e-06,
      "loss": 0.6039,
      "step": 94
    },
    {
      "epoch": 1.8269230769230769,
      "grad_norm": 0.47320276498794556,
      "learning_rate": 3.997052921083637e-06,
      "loss": 0.6022,
      "step": 95
    },
    {
      "epoch": 1.8461538461538463,
      "grad_norm": 0.45565494894981384,
      "learning_rate": 3.887395330218429e-06,
      "loss": 0.6101,
      "step": 96
    },
    {
      "epoch": 1.8653846153846154,
      "grad_norm": 0.4798398017883301,
      "learning_rate": 3.778297969310529e-06,
      "loss": 0.589,
      "step": 97
    },
    {
      "epoch": 1.8846153846153846,
      "grad_norm": 0.4274550676345825,
      "learning_rate": 3.669815772166625e-06,
      "loss": 0.5977,
      "step": 98
    },
    {
      "epoch": 1.9038461538461537,
      "grad_norm": 0.3736909031867981,
      "learning_rate": 3.562003362839914e-06,
      "loss": 0.5928,
      "step": 99
    },
    {
      "epoch": 1.9230769230769231,
      "grad_norm": 0.4486611485481262,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.6038,
      "step": 100
    },
    {
      "epoch": 1.9423076923076923,
      "grad_norm": 0.4843849539756775,
      "learning_rate": 3.3486046902241663e-06,
      "loss": 0.6028,
      "step": 101
    },
    {
      "epoch": 1.9615384615384617,
      "grad_norm": 0.48637160658836365,
      "learning_rate": 3.2431258795932863e-06,
      "loss": 0.5972,
      "step": 102
    },
    {
      "epoch": 1.9807692307692308,
      "grad_norm": 0.4148430824279785,
      "learning_rate": 3.1385317079902743e-06,
      "loss": 0.5972,
      "step": 103
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.4409301280975342,
      "learning_rate": 3.0348748417303826e-06,
      "loss": 0.5809,
      "step": 104
    },
    {
      "epoch": 2.019230769230769,
      "grad_norm": 0.4860322177410126,
      "learning_rate": 2.932207475167398e-06,
      "loss": 0.5669,
      "step": 105
    },
    {
      "epoch": 2.0384615384615383,
      "grad_norm": 0.43656036257743835,
      "learning_rate": 2.83058130441221e-06,
      "loss": 0.5696,
      "step": 106
    },
    {
      "epoch": 2.0576923076923075,
      "grad_norm": 0.41414326429367065,
      "learning_rate": 2.7300475013022666e-06,
      "loss": 0.5737,
      "step": 107
    },
    {
      "epoch": 2.076923076923077,
      "grad_norm": 0.4215674102306366,
      "learning_rate": 2.6306566876350072e-06,
      "loss": 0.5706,
      "step": 108
    },
    {
      "epoch": 2.0961538461538463,
      "grad_norm": 0.42652758955955505,
      "learning_rate": 2.532458909678266e-06,
      "loss": 0.5593,
      "step": 109
    },
    {
      "epoch": 2.1153846153846154,
      "grad_norm": 0.42439064383506775,
      "learning_rate": 2.43550361297047e-06,
      "loss": 0.5759,
      "step": 110
    },
    {
      "epoch": 2.1346153846153846,
      "grad_norm": 0.43323108553886414,
      "learning_rate": 2.339839617423318e-06,
      "loss": 0.5707,
      "step": 111
    },
    {
      "epoch": 2.1538461538461537,
      "grad_norm": 0.37839657068252563,
      "learning_rate": 2.245515092739488e-06,
      "loss": 0.5674,
      "step": 112
    },
    {
      "epoch": 2.173076923076923,
      "grad_norm": 0.40047696232795715,
      "learning_rate": 2.1525775341577404e-06,
      "loss": 0.5705,
      "step": 113
    },
    {
      "epoch": 2.1923076923076925,
      "grad_norm": 0.42089319229125977,
      "learning_rate": 2.061073738537635e-06,
      "loss": 0.5764,
      "step": 114
    },
    {
      "epoch": 2.2115384615384617,
      "grad_norm": 0.3860110938549042,
      "learning_rate": 1.971049780795901e-06,
      "loss": 0.5671,
      "step": 115
    },
    {
      "epoch": 2.230769230769231,
      "grad_norm": 0.4362318217754364,
      "learning_rate": 1.8825509907063328e-06,
      "loss": 0.5633,
      "step": 116
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.394808292388916,
      "learning_rate": 1.7956219300748796e-06,
      "loss": 0.5605,
      "step": 117
    },
    {
      "epoch": 2.269230769230769,
      "grad_norm": 0.37052440643310547,
      "learning_rate": 1.7103063703014372e-06,
      "loss": 0.5799,
      "step": 118
    },
    {
      "epoch": 2.2884615384615383,
      "grad_norm": 0.40220147371292114,
      "learning_rate": 1.6266472703396286e-06,
      "loss": 0.558,
      "step": 119
    },
    {
      "epoch": 2.3076923076923075,
      "grad_norm": 0.41691508889198303,
      "learning_rate": 1.544686755065677e-06,
      "loss": 0.5884,
      "step": 120
    },
    {
      "epoch": 2.326923076923077,
      "grad_norm": 0.3663041591644287,
      "learning_rate": 1.4644660940672628e-06,
      "loss": 0.5631,
      "step": 121
    },
    {
      "epoch": 2.3461538461538463,
      "grad_norm": 0.3626846373081207,
      "learning_rate": 1.3860256808630429e-06,
      "loss": 0.5996,
      "step": 122
    },
    {
      "epoch": 2.3653846153846154,
      "grad_norm": 0.3722762167453766,
      "learning_rate": 1.3094050125632973e-06,
      "loss": 0.5666,
      "step": 123
    },
    {
      "epoch": 2.3846153846153846,
      "grad_norm": 0.37946224212646484,
      "learning_rate": 1.234642669981946e-06,
      "loss": 0.5705,
      "step": 124
    },
    {
      "epoch": 2.4038461538461537,
      "grad_norm": 0.38795050978660583,
      "learning_rate": 1.1617762982099446e-06,
      "loss": 0.5476,
      "step": 125
    },
    {
      "epoch": 2.423076923076923,
      "grad_norm": 0.3951879143714905,
      "learning_rate": 1.0908425876598512e-06,
      "loss": 0.5695,
      "step": 126
    },
    {
      "epoch": 2.4423076923076925,
      "grad_norm": 0.3844876289367676,
      "learning_rate": 1.0218772555910955e-06,
      "loss": 0.5759,
      "step": 127
    },
    {
      "epoch": 2.4615384615384617,
      "grad_norm": 0.4004686176776886,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.5874,
      "step": 128
    },
    {
      "epoch": 2.480769230769231,
      "grad_norm": 0.3749890625476837,
      "learning_rate": 8.899896227604509e-07,
      "loss": 0.5743,
      "step": 129
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.3737080991268158,
      "learning_rate": 8.271337313934869e-07,
      "loss": 0.5633,
      "step": 130
    },
    {
      "epoch": 2.519230769230769,
      "grad_norm": 0.3584887981414795,
      "learning_rate": 7.663790038585794e-07,
      "loss": 0.5791,
      "step": 131
    },
    {
      "epoch": 2.5384615384615383,
      "grad_norm": 0.3564335107803345,
      "learning_rate": 7.077560319906696e-07,
      "loss": 0.5614,
      "step": 132
    },
    {
      "epoch": 2.5576923076923075,
      "grad_norm": 0.36480650305747986,
      "learning_rate": 6.512943342215234e-07,
      "loss": 0.5567,
      "step": 133
    },
    {
      "epoch": 2.5769230769230766,
      "grad_norm": 0.37006235122680664,
      "learning_rate": 5.9702234071631e-07,
      "loss": 0.5533,
      "step": 134
    },
    {
      "epoch": 2.5961538461538463,
      "grad_norm": 0.3698856234550476,
      "learning_rate": 5.449673790581611e-07,
      "loss": 0.5568,
      "step": 135
    },
    {
      "epoch": 2.6153846153846154,
      "grad_norm": 0.4254326820373535,
      "learning_rate": 4.951556604879049e-07,
      "loss": 0.5703,
      "step": 136
    },
    {
      "epoch": 2.6346153846153846,
      "grad_norm": 0.3639049232006073,
      "learning_rate": 4.4761226670592074e-07,
      "loss": 0.567,
      "step": 137
    },
    {
      "epoch": 2.6538461538461537,
      "grad_norm": 0.4002777636051178,
      "learning_rate": 4.0236113724274716e-07,
      "loss": 0.568,
      "step": 138
    },
    {
      "epoch": 2.6730769230769234,
      "grad_norm": 0.3550013601779938,
      "learning_rate": 3.5942505740480583e-07,
      "loss": 0.5572,
      "step": 139
    },
    {
      "epoch": 2.6923076923076925,
      "grad_norm": 0.34738948941230774,
      "learning_rate": 3.18825646801314e-07,
      "loss": 0.5727,
      "step": 140
    },
    {
      "epoch": 2.7115384615384617,
      "grad_norm": 0.3737678825855255,
      "learning_rate": 2.8058334845816214e-07,
      "loss": 0.5675,
      "step": 141
    },
    {
      "epoch": 2.730769230769231,
      "grad_norm": 0.35525545477867126,
      "learning_rate": 2.447174185242324e-07,
      "loss": 0.5583,
      "step": 142
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.3212038278579712,
      "learning_rate": 2.1124591657534776e-07,
      "loss": 0.5786,
      "step": 143
    },
    {
      "epoch": 2.769230769230769,
      "grad_norm": 0.3356379270553589,
      "learning_rate": 1.801856965207338e-07,
      "loss": 0.5677,
      "step": 144
    },
    {
      "epoch": 2.7884615384615383,
      "grad_norm": 0.3492369055747986,
      "learning_rate": 1.5155239811656562e-07,
      "loss": 0.5613,
      "step": 145
    },
    {
      "epoch": 2.8076923076923075,
      "grad_norm": 0.3516460955142975,
      "learning_rate": 1.253604390908819e-07,
      "loss": 0.5469,
      "step": 146
    },
    {
      "epoch": 2.8269230769230766,
      "grad_norm": 0.3170640170574188,
      "learning_rate": 1.0162300788382263e-07,
      "loss": 0.5912,
      "step": 147
    },
    {
      "epoch": 2.8461538461538463,
      "grad_norm": 0.3446912169456482,
      "learning_rate": 8.035205700685167e-08,
      "loss": 0.5653,
      "step": 148
    },
    {
      "epoch": 2.8653846153846154,
      "grad_norm": 0.3214910328388214,
      "learning_rate": 6.15582970243117e-08,
      "loss": 0.5686,
      "step": 149
    },
    {
      "epoch": 2.8846153846153846,
      "grad_norm": 0.3370175063610077,
      "learning_rate": 4.52511911603265e-08,
      "loss": 0.5777,
      "step": 150
    },
    {
      "epoch": 2.9038461538461537,
      "grad_norm": 0.3743438422679901,
      "learning_rate": 3.143895053378698e-08,
      "loss": 0.5589,
      "step": 151
    },
    {
      "epoch": 2.9230769230769234,
      "grad_norm": 0.37999406456947327,
      "learning_rate": 2.012853002380466e-08,
      "loss": 0.5611,
      "step": 152
    },
    {
      "epoch": 2.9423076923076925,
      "grad_norm": 0.3216012120246887,
      "learning_rate": 1.132562476771959e-08,
      "loss": 0.5804,
      "step": 153
    },
    {
      "epoch": 2.9615384615384617,
      "grad_norm": 0.32785430550575256,
      "learning_rate": 5.034667293427053e-09,
      "loss": 0.5826,
      "step": 154
    },
    {
      "epoch": 2.980769230769231,
      "grad_norm": 0.3461929261684418,
      "learning_rate": 1.2588252874673469e-09,
      "loss": 0.5681,
      "step": 155
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.34216320514678955,
      "learning_rate": 0.0,
      "loss": 0.5769,
      "step": 156
    },
    {
      "epoch": 3.0,
      "step": 156,
      "total_flos": 2.001762612454359e+17,
      "train_loss": 0.64515655201215,
      "train_runtime": 3841.9317,
      "train_samples_per_second": 3.886,
      "train_steps_per_second": 0.041
    }
  ],
  "logging_steps": 1,
  "max_steps": 156,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.001762612454359e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}