| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 150, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.02, |
| "grad_norm": 0.013391899641337773, |
| "learning_rate": 5.333333333333334e-06, |
| "loss": 0.1771, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.04, |
| "grad_norm": 0.014385218619921848, |
| "learning_rate": 1.0666666666666667e-05, |
| "loss": 0.175, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.06, |
| "grad_norm": 0.01324986731548473, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 0.1724, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.08, |
| "grad_norm": 0.013428609705916352, |
| "learning_rate": 2.1333333333333335e-05, |
| "loss": 0.1842, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.1, |
| "grad_norm": 0.015319938560619022, |
| "learning_rate": 2.6666666666666667e-05, |
| "loss": 0.1716, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.12, |
| "grad_norm": 0.013245404686358316, |
| "learning_rate": 3.2000000000000005e-05, |
| "loss": 0.1683, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.14, |
| "grad_norm": 0.016582496341800892, |
| "learning_rate": 3.733333333333334e-05, |
| "loss": 0.1762, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.16, |
| "grad_norm": 0.018837775298985476, |
| "learning_rate": 4.266666666666667e-05, |
| "loss": 0.172, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.18, |
| "grad_norm": 0.01947593508955509, |
| "learning_rate": 4.8e-05, |
| "loss": 0.1927, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 0.019000357638673485, |
| "learning_rate": 5.333333333333333e-05, |
| "loss": 0.1744, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.22, |
| "grad_norm": 0.0204170056945283, |
| "learning_rate": 5.8666666666666665e-05, |
| "loss": 0.1565, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.24, |
| "grad_norm": 0.01686714651709014, |
| "learning_rate": 6.400000000000001e-05, |
| "loss": 0.1601, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.26, |
| "grad_norm": 0.014978710750220541, |
| "learning_rate": 6.933333333333334e-05, |
| "loss": 0.1623, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.28, |
| "grad_norm": 0.011382478005757193, |
| "learning_rate": 7.466666666666667e-05, |
| "loss": 0.1559, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.3, |
| "grad_norm": 0.017188231844732872, |
| "learning_rate": 8e-05, |
| "loss": 0.1633, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 0.018845889529232454, |
| "learning_rate": 7.998916964717848e-05, |
| "loss": 0.1491, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.34, |
| "grad_norm": 0.02760722006528874, |
| "learning_rate": 7.9956684453541e-05, |
| "loss": 0.1487, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.36, |
| "grad_norm": 0.02292888741072651, |
| "learning_rate": 7.990256201039297e-05, |
| "loss": 0.1483, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.38, |
| "grad_norm": 0.01701711302929458, |
| "learning_rate": 7.982683162599218e-05, |
| "loss": 0.1493, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 0.01293090022797047, |
| "learning_rate": 7.972953430967773e-05, |
| "loss": 0.1467, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.42, |
| "grad_norm": 0.008944741908023426, |
| "learning_rate": 7.961072274966282e-05, |
| "loss": 0.145, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.44, |
| "grad_norm": 0.009323595351143302, |
| "learning_rate": 7.947046128450319e-05, |
| "loss": 0.1273, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.46, |
| "grad_norm": 0.009389930205544681, |
| "learning_rate": 7.930882586825653e-05, |
| "loss": 0.121, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.48, |
| "grad_norm": 0.010784860554936075, |
| "learning_rate": 7.912590402935223e-05, |
| "loss": 0.1255, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.5, |
| "grad_norm": 0.011830840840008667, |
| "learning_rate": 7.892179482319297e-05, |
| "loss": 0.1183, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.52, |
| "grad_norm": 0.01169967439189995, |
| "learning_rate": 7.869660877851456e-05, |
| "loss": 0.1251, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.54, |
| "grad_norm": 0.011661876331376117, |
| "learning_rate": 7.845046783753276e-05, |
| "loss": 0.1176, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.56, |
| "grad_norm": 0.012101443338336989, |
| "learning_rate": 7.818350528990929e-05, |
| "loss": 0.1174, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.58, |
| "grad_norm": 0.009893288346633035, |
| "learning_rate": 7.789586570057317e-05, |
| "loss": 0.1227, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 0.010188343617630637, |
| "learning_rate": 7.758770483143634e-05, |
| "loss": 0.1297, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.62, |
| "grad_norm": 0.010265810841241766, |
| "learning_rate": 7.72591895570457e-05, |
| "loss": 0.1074, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 0.009517235711920324, |
| "learning_rate": 7.69104977742177e-05, |
| "loss": 0.1016, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.66, |
| "grad_norm": 0.010169685680518684, |
| "learning_rate": 7.654181830570404e-05, |
| "loss": 0.1049, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.68, |
| "grad_norm": 0.012222235577185922, |
| "learning_rate": 7.615335079794083e-05, |
| "loss": 0.1069, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.7, |
| "grad_norm": 0.009928105717610674, |
| "learning_rate": 7.57453056129365e-05, |
| "loss": 0.1054, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.72, |
| "grad_norm": 0.009561933975056934, |
| "learning_rate": 7.531790371435709e-05, |
| "loss": 0.0974, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.74, |
| "grad_norm": 0.009683261711418601, |
| "learning_rate": 7.48713765478705e-05, |
| "loss": 0.1102, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.76, |
| "grad_norm": 0.008659388491423553, |
| "learning_rate": 7.440596591581463e-05, |
| "loss": 0.0971, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.78, |
| "grad_norm": 0.00832757787364234, |
| "learning_rate": 7.392192384625704e-05, |
| "loss": 0.0869, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 0.008492912497809898, |
| "learning_rate": 7.341951245651747e-05, |
| "loss": 0.0957, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.82, |
| "grad_norm": 0.008067256140394826, |
| "learning_rate": 7.28990038112265e-05, |
| "loss": 0.0882, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.84, |
| "grad_norm": 0.008013915199609433, |
| "learning_rate": 7.236067977499791e-05, |
| "loss": 0.0792, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.86, |
| "grad_norm": 0.008841624045567351, |
| "learning_rate": 7.180483185979392e-05, |
| "loss": 0.0929, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.88, |
| "grad_norm": 0.00756330243925261, |
| "learning_rate": 7.123176106706638e-05, |
| "loss": 0.0793, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.9, |
| "grad_norm": 0.00964034128381484, |
| "learning_rate": 7.064177772475912e-05, |
| "loss": 0.08, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.92, |
| "grad_norm": 0.008076121340433274, |
| "learning_rate": 7.003520131925997e-05, |
| "loss": 0.1026, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.94, |
| "grad_norm": 0.008019653744343234, |
| "learning_rate": 6.941236032239316e-05, |
| "loss": 0.0823, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.009070534202209784, |
| "learning_rate": 6.877359201354606e-05, |
| "loss": 0.0794, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.98, |
| "grad_norm": 0.008015961544844111, |
| "learning_rate": 6.811924229702648e-05, |
| "loss": 0.0766, |
| "step": 49 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.011928653560934234, |
| "learning_rate": 6.744966551474936e-05, |
| "loss": 0.0891, |
| "step": 50 |
| }, |
| { |
| "epoch": 1.02, |
| "grad_norm": 0.010519199913508366, |
| "learning_rate": 6.676522425435433e-05, |
| "loss": 0.0795, |
| "step": 51 |
| }, |
| { |
| "epoch": 1.04, |
| "grad_norm": 0.009023624801782884, |
| "learning_rate": 6.606628915285822e-05, |
| "loss": 0.0641, |
| "step": 52 |
| }, |
| { |
| "epoch": 1.06, |
| "grad_norm": 0.009248979409998536, |
| "learning_rate": 6.53532386959484e-05, |
| "loss": 0.0809, |
| "step": 53 |
| }, |
| { |
| "epoch": 1.08, |
| "grad_norm": 0.008290583205008384, |
| "learning_rate": 6.462645901302633e-05, |
| "loss": 0.0759, |
| "step": 54 |
| }, |
| { |
| "epoch": 1.1, |
| "grad_norm": 0.009957925625246425, |
| "learning_rate": 6.388634366811146e-05, |
| "loss": 0.0747, |
| "step": 55 |
| }, |
| { |
| "epoch": 1.12, |
| "grad_norm": 0.010705100489026913, |
| "learning_rate": 6.313329344671946e-05, |
| "loss": 0.0682, |
| "step": 56 |
| }, |
| { |
| "epoch": 1.1400000000000001, |
| "grad_norm": 0.007538713589764434, |
| "learning_rate": 6.236771613882987e-05, |
| "loss": 0.0777, |
| "step": 57 |
| }, |
| { |
| "epoch": 1.16, |
| "grad_norm": 0.011061682075860911, |
| "learning_rate": 6.159002631806052e-05, |
| "loss": 0.1128, |
| "step": 58 |
| }, |
| { |
| "epoch": 1.18, |
| "grad_norm": 0.014892481813022763, |
| "learning_rate": 6.0800645117168616e-05, |
| "loss": 0.0653, |
| "step": 59 |
| }, |
| { |
| "epoch": 1.2, |
| "grad_norm": 0.00889627576517376, |
| "learning_rate": 6.000000000000001e-05, |
| "loss": 0.0738, |
| "step": 60 |
| }, |
| { |
| "epoch": 1.22, |
| "grad_norm": 0.011858615800498145, |
| "learning_rate": 5.918852453000986e-05, |
| "loss": 0.0702, |
| "step": 61 |
| }, |
| { |
| "epoch": 1.24, |
| "grad_norm": 0.00894882752613072, |
| "learning_rate": 5.836665813548047e-05, |
| "loss": 0.0759, |
| "step": 62 |
| }, |
| { |
| "epoch": 1.26, |
| "grad_norm": 0.007654287645206094, |
| "learning_rate": 5.75348458715631e-05, |
| "loss": 0.0919, |
| "step": 63 |
| }, |
| { |
| "epoch": 1.28, |
| "grad_norm": 0.00924583864343229, |
| "learning_rate": 5.669353817927272e-05, |
| "loss": 0.0642, |
| "step": 64 |
| }, |
| { |
| "epoch": 1.3, |
| "grad_norm": 0.015185462936117156, |
| "learning_rate": 5.584319064156628e-05, |
| "loss": 0.0661, |
| "step": 65 |
| }, |
| { |
| "epoch": 1.32, |
| "grad_norm": 0.006970215334790489, |
| "learning_rate": 5.4984263736636494e-05, |
| "loss": 0.0725, |
| "step": 66 |
| }, |
| { |
| "epoch": 1.34, |
| "grad_norm": 0.018097416323382704, |
| "learning_rate": 5.4117222588554756e-05, |
| "loss": 0.0714, |
| "step": 67 |
| }, |
| { |
| "epoch": 1.3599999999999999, |
| "grad_norm": 0.008955812230636281, |
| "learning_rate": 5.324253671539833e-05, |
| "loss": 0.0669, |
| "step": 68 |
| }, |
| { |
| "epoch": 1.38, |
| "grad_norm": 0.007941876504979732, |
| "learning_rate": 5.23606797749979e-05, |
| "loss": 0.071, |
| "step": 69 |
| }, |
| { |
| "epoch": 1.4, |
| "grad_norm": 0.010121924786886187, |
| "learning_rate": 5.1472129308443616e-05, |
| "loss": 0.0687, |
| "step": 70 |
| }, |
| { |
| "epoch": 1.42, |
| "grad_norm": 0.013219338674355003, |
| "learning_rate": 5.05773664814881e-05, |
| "loss": 0.0965, |
| "step": 71 |
| }, |
| { |
| "epoch": 1.44, |
| "grad_norm": 0.008164358619583638, |
| "learning_rate": 4.967687582398671e-05, |
| "loss": 0.0735, |
| "step": 72 |
| }, |
| { |
| "epoch": 1.46, |
| "grad_norm": 0.006369056034064783, |
| "learning_rate": 4.877114496751613e-05, |
| "loss": 0.0802, |
| "step": 73 |
| }, |
| { |
| "epoch": 1.48, |
| "grad_norm": 0.01158232961484988, |
| "learning_rate": 4.786066438131321e-05, |
| "loss": 0.0646, |
| "step": 74 |
| }, |
| { |
| "epoch": 1.5, |
| "grad_norm": 0.01638654001464813, |
| "learning_rate": 4.694592710667723e-05, |
| "loss": 0.0716, |
| "step": 75 |
| }, |
| { |
| "epoch": 1.52, |
| "grad_norm": 0.006950169961086381, |
| "learning_rate": 4.602742848997933e-05, |
| "loss": 0.073, |
| "step": 76 |
| }, |
| { |
| "epoch": 1.54, |
| "grad_norm": 0.007061170364033767, |
| "learning_rate": 4.51056659144238e-05, |
| "loss": 0.0686, |
| "step": 77 |
| }, |
| { |
| "epoch": 1.56, |
| "grad_norm": 0.008803318888699108, |
| "learning_rate": 4.418113853070614e-05, |
| "loss": 0.068, |
| "step": 78 |
| }, |
| { |
| "epoch": 1.58, |
| "grad_norm": 0.011621079882191566, |
| "learning_rate": 4.3254346986714334e-05, |
| "loss": 0.0585, |
| "step": 79 |
| }, |
| { |
| "epoch": 1.6, |
| "grad_norm": 0.008734930322594118, |
| "learning_rate": 4.2325793156419035e-05, |
| "loss": 0.0632, |
| "step": 80 |
| }, |
| { |
| "epoch": 1.62, |
| "grad_norm": 0.0065275876667679955, |
| "learning_rate": 4.139597986810005e-05, |
| "loss": 0.0666, |
| "step": 81 |
| }, |
| { |
| "epoch": 1.6400000000000001, |
| "grad_norm": 0.006608508406593912, |
| "learning_rate": 4.046541063205589e-05, |
| "loss": 0.0688, |
| "step": 82 |
| }, |
| { |
| "epoch": 1.6600000000000001, |
| "grad_norm": 0.00662613257539641, |
| "learning_rate": 3.953458936794413e-05, |
| "loss": 0.067, |
| "step": 83 |
| }, |
| { |
| "epoch": 1.6800000000000002, |
| "grad_norm": 0.009214139716661036, |
| "learning_rate": 3.860402013189998e-05, |
| "loss": 0.0648, |
| "step": 84 |
| }, |
| { |
| "epoch": 1.7, |
| "grad_norm": 0.008142789380605992, |
| "learning_rate": 3.767420684358097e-05, |
| "loss": 0.0843, |
| "step": 85 |
| }, |
| { |
| "epoch": 1.72, |
| "grad_norm": 0.006245522025610198, |
| "learning_rate": 3.674565301328568e-05, |
| "loss": 0.0725, |
| "step": 86 |
| }, |
| { |
| "epoch": 1.74, |
| "grad_norm": 0.007072250201997174, |
| "learning_rate": 3.581886146929387e-05, |
| "loss": 0.0694, |
| "step": 87 |
| }, |
| { |
| "epoch": 1.76, |
| "grad_norm": 0.006483099079687594, |
| "learning_rate": 3.4894334085576215e-05, |
| "loss": 0.0577, |
| "step": 88 |
| }, |
| { |
| "epoch": 1.78, |
| "grad_norm": 0.0064462232352788685, |
| "learning_rate": 3.397257151002068e-05, |
| "loss": 0.0635, |
| "step": 89 |
| }, |
| { |
| "epoch": 1.8, |
| "grad_norm": 0.006570930023355807, |
| "learning_rate": 3.305407289332279e-05, |
| "loss": 0.0546, |
| "step": 90 |
| }, |
| { |
| "epoch": 1.8199999999999998, |
| "grad_norm": 0.006670623149067776, |
| "learning_rate": 3.213933561868679e-05, |
| "loss": 0.0648, |
| "step": 91 |
| }, |
| { |
| "epoch": 1.8399999999999999, |
| "grad_norm": 0.006071243161492562, |
| "learning_rate": 3.122885503248386e-05, |
| "loss": 0.0809, |
| "step": 92 |
| }, |
| { |
| "epoch": 1.8599999999999999, |
| "grad_norm": 0.006794849569302196, |
| "learning_rate": 3.0323124176013297e-05, |
| "loss": 0.0582, |
| "step": 93 |
| }, |
| { |
| "epoch": 1.88, |
| "grad_norm": 0.00687191181075092, |
| "learning_rate": 2.9422633518511926e-05, |
| "loss": 0.0618, |
| "step": 94 |
| }, |
| { |
| "epoch": 1.9, |
| "grad_norm": 0.006930115190911128, |
| "learning_rate": 2.8527870691556404e-05, |
| "loss": 0.0661, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.92, |
| "grad_norm": 0.006276405920613077, |
| "learning_rate": 2.7639320225002108e-05, |
| "loss": 0.0809, |
| "step": 96 |
| }, |
| { |
| "epoch": 1.94, |
| "grad_norm": 0.006452330627893446, |
| "learning_rate": 2.6757463284601682e-05, |
| "loss": 0.0773, |
| "step": 97 |
| }, |
| { |
| "epoch": 1.96, |
| "grad_norm": 0.006680490430799073, |
| "learning_rate": 2.5882777411445254e-05, |
| "loss": 0.0695, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.98, |
| "grad_norm": 0.006451655061938755, |
| "learning_rate": 2.501573626336352e-05, |
| "loss": 0.0737, |
| "step": 99 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.006021923613908717, |
| "learning_rate": 2.4156809358433728e-05, |
| "loss": 0.0556, |
| "step": 100 |
| }, |
| { |
| "epoch": 2.02, |
| "grad_norm": 0.0069729084266872765, |
| "learning_rate": 2.330646182072729e-05, |
| "loss": 0.0687, |
| "step": 101 |
| }, |
| { |
| "epoch": 2.04, |
| "grad_norm": 0.006603773544535684, |
| "learning_rate": 2.24651541284369e-05, |
| "loss": 0.0703, |
| "step": 102 |
| }, |
| { |
| "epoch": 2.06, |
| "grad_norm": 0.008179376610615753, |
| "learning_rate": 2.1633341864519526e-05, |
| "loss": 0.1235, |
| "step": 103 |
| }, |
| { |
| "epoch": 2.08, |
| "grad_norm": 0.006440518392511573, |
| "learning_rate": 2.0811475469990167e-05, |
| "loss": 0.06, |
| "step": 104 |
| }, |
| { |
| "epoch": 2.1, |
| "grad_norm": 0.005862947387442514, |
| "learning_rate": 2.0000000000000012e-05, |
| "loss": 0.0616, |
| "step": 105 |
| }, |
| { |
| "epoch": 2.12, |
| "grad_norm": 0.006408702732244933, |
| "learning_rate": 1.9199354882831387e-05, |
| "loss": 0.066, |
| "step": 106 |
| }, |
| { |
| "epoch": 2.14, |
| "grad_norm": 0.007615518291468386, |
| "learning_rate": 1.8409973681939498e-05, |
| "loss": 0.0551, |
| "step": 107 |
| }, |
| { |
| "epoch": 2.16, |
| "grad_norm": 0.00644840004614146, |
| "learning_rate": 1.7632283861170135e-05, |
| "loss": 0.0634, |
| "step": 108 |
| }, |
| { |
| "epoch": 2.18, |
| "grad_norm": 0.007456023531785948, |
| "learning_rate": 1.686670655328054e-05, |
| "loss": 0.0675, |
| "step": 109 |
| }, |
| { |
| "epoch": 2.2, |
| "grad_norm": 0.007048538194164224, |
| "learning_rate": 1.6113656331888563e-05, |
| "loss": 0.0727, |
| "step": 110 |
| }, |
| { |
| "epoch": 2.22, |
| "grad_norm": 0.006508549116604809, |
| "learning_rate": 1.537354098697367e-05, |
| "loss": 0.0706, |
| "step": 111 |
| }, |
| { |
| "epoch": 2.24, |
| "grad_norm": 0.006469971399876978, |
| "learning_rate": 1.4646761304051587e-05, |
| "loss": 0.0659, |
| "step": 112 |
| }, |
| { |
| "epoch": 2.26, |
| "grad_norm": 0.006342191441419636, |
| "learning_rate": 1.3933710847141795e-05, |
| "loss": 0.0658, |
| "step": 113 |
| }, |
| { |
| "epoch": 2.2800000000000002, |
| "grad_norm": 0.006544057245293904, |
| "learning_rate": 1.3234775745645684e-05, |
| "loss": 0.0582, |
| "step": 114 |
| }, |
| { |
| "epoch": 2.3, |
| "grad_norm": 0.006319282414410432, |
| "learning_rate": 1.2550334485250661e-05, |
| "loss": 0.0639, |
| "step": 115 |
| }, |
| { |
| "epoch": 2.32, |
| "grad_norm": 0.006602033999550898, |
| "learning_rate": 1.1880757702973531e-05, |
| "loss": 0.0652, |
| "step": 116 |
| }, |
| { |
| "epoch": 2.34, |
| "grad_norm": 0.006856651781476521, |
| "learning_rate": 1.1226407986453963e-05, |
| "loss": 0.0542, |
| "step": 117 |
| }, |
| { |
| "epoch": 2.36, |
| "grad_norm": 0.006252248165291451, |
| "learning_rate": 1.0587639677606857e-05, |
| "loss": 0.0635, |
| "step": 118 |
| }, |
| { |
| "epoch": 2.38, |
| "grad_norm": 0.006191356382798315, |
| "learning_rate": 9.964798680740033e-06, |
| "loss": 0.0707, |
| "step": 119 |
| }, |
| { |
| "epoch": 2.4, |
| "grad_norm": 0.006642344934896972, |
| "learning_rate": 9.358222275240884e-06, |
| "loss": 0.0719, |
| "step": 120 |
| }, |
| { |
| "epoch": 2.42, |
| "grad_norm": 0.006399146689124093, |
| "learning_rate": 8.768238932933632e-06, |
| "loss": 0.0647, |
| "step": 121 |
| }, |
| { |
| "epoch": 2.44, |
| "grad_norm": 0.006711583222264059, |
| "learning_rate": 8.195168140206084e-06, |
| "loss": 0.0504, |
| "step": 122 |
| }, |
| { |
| "epoch": 2.46, |
| "grad_norm": 0.0062840164348528064, |
| "learning_rate": 7.639320225002106e-06, |
| "loss": 0.0675, |
| "step": 123 |
| }, |
| { |
| "epoch": 2.48, |
| "grad_norm": 0.006352190255441595, |
| "learning_rate": 7.1009961887735075e-06, |
| "loss": 0.0702, |
| "step": 124 |
| }, |
| { |
| "epoch": 2.5, |
| "grad_norm": 0.006795768399362247, |
| "learning_rate": 6.58048754348255e-06, |
| "loss": 0.0651, |
| "step": 125 |
| }, |
| { |
| "epoch": 2.52, |
| "grad_norm": 0.006284321276195934, |
| "learning_rate": 6.078076153742962e-06, |
| "loss": 0.0599, |
| "step": 126 |
| }, |
| { |
| "epoch": 2.54, |
| "grad_norm": 0.006560543075796909, |
| "learning_rate": 5.5940340841853915e-06, |
| "loss": 0.0632, |
| "step": 127 |
| }, |
| { |
| "epoch": 2.56, |
| "grad_norm": 0.007630005366271543, |
| "learning_rate": 5.128623452129508e-06, |
| "loss": 0.0689, |
| "step": 128 |
| }, |
| { |
| "epoch": 2.58, |
| "grad_norm": 0.007301816896376997, |
| "learning_rate": 4.6820962856429205e-06, |
| "loss": 0.0702, |
| "step": 129 |
| }, |
| { |
| "epoch": 2.6, |
| "grad_norm": 0.006534915427749746, |
| "learning_rate": 4.254694387063514e-06, |
| "loss": 0.0605, |
| "step": 130 |
| }, |
| { |
| "epoch": 2.62, |
| "grad_norm": 0.007091145311402699, |
| "learning_rate": 3.846649202059181e-06, |
| "loss": 0.0775, |
| "step": 131 |
| }, |
| { |
| "epoch": 2.64, |
| "grad_norm": 0.0063288636954183675, |
| "learning_rate": 3.458181694295961e-06, |
| "loss": 0.0566, |
| "step": 132 |
| }, |
| { |
| "epoch": 2.66, |
| "grad_norm": 0.006674200647907618, |
| "learning_rate": 3.0895022257823083e-06, |
| "loss": 0.0608, |
| "step": 133 |
| }, |
| { |
| "epoch": 2.68, |
| "grad_norm": 0.0069466303684673335, |
| "learning_rate": 2.7408104429543025e-06, |
| "loss": 0.0577, |
| "step": 134 |
| }, |
| { |
| "epoch": 2.7, |
| "grad_norm": 0.00674158078131976, |
| "learning_rate": 2.4122951685636674e-06, |
| "loss": 0.0602, |
| "step": 135 |
| }, |
| { |
| "epoch": 2.7199999999999998, |
| "grad_norm": 0.006169003275316678, |
| "learning_rate": 2.104134299426832e-06, |
| "loss": 0.0558, |
| "step": 136 |
| }, |
| { |
| "epoch": 2.74, |
| "grad_norm": 0.00664789423160661, |
| "learning_rate": 1.8164947100907238e-06, |
| "loss": 0.0626, |
| "step": 137 |
| }, |
| { |
| "epoch": 2.76, |
| "grad_norm": 0.006446902969313409, |
| "learning_rate": 1.5495321624672443e-06, |
| "loss": 0.0694, |
| "step": 138 |
| }, |
| { |
| "epoch": 2.7800000000000002, |
| "grad_norm": 0.006509881084720479, |
| "learning_rate": 1.3033912214854482e-06, |
| "loss": 0.0628, |
| "step": 139 |
| }, |
| { |
| "epoch": 2.8, |
| "grad_norm": 0.007417385698972413, |
| "learning_rate": 1.0782051768070477e-06, |
| "loss": 0.0849, |
| "step": 140 |
| }, |
| { |
| "epoch": 2.82, |
| "grad_norm": 0.006460685451381708, |
| "learning_rate": 8.740959706477725e-07, |
| "loss": 0.0687, |
| "step": 141 |
| }, |
| { |
| "epoch": 2.84, |
| "grad_norm": 0.00648752564206921, |
| "learning_rate": 6.911741317434706e-07, |
| "loss": 0.0596, |
| "step": 142 |
| }, |
| { |
| "epoch": 2.86, |
| "grad_norm": 0.007697276081985276, |
| "learning_rate": 5.295387154968312e-07, |
| "loss": 0.0937, |
| "step": 143 |
| }, |
| { |
| "epoch": 2.88, |
| "grad_norm": 0.006408241724487181, |
| "learning_rate": 3.8927725033718553e-07, |
| "loss": 0.0661, |
| "step": 144 |
| }, |
| { |
| "epoch": 2.9, |
| "grad_norm": 0.007205527969917677, |
| "learning_rate": 2.704656903222791e-07, |
| "loss": 0.0738, |
| "step": 145 |
| }, |
| { |
| "epoch": 2.92, |
| "grad_norm": 0.006300262375257302, |
| "learning_rate": 1.7316837400782604e-07, |
| "loss": 0.0614, |
| "step": 146 |
| }, |
| { |
| "epoch": 2.94, |
| "grad_norm": 0.006830407668171554, |
| "learning_rate": 9.74379896070321e-08, |
| "loss": 0.0674, |
| "step": 147 |
| }, |
| { |
| "epoch": 2.96, |
| "grad_norm": 0.00694495195567172, |
| "learning_rate": 4.331554645901737e-08, |
| "loss": 0.0674, |
| "step": 148 |
| }, |
| { |
| "epoch": 2.98, |
| "grad_norm": 0.006057813991517264, |
| "learning_rate": 1.0830352821531442e-08, |
| "loss": 0.0666, |
| "step": 149 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.006402340064759971, |
| "learning_rate": 0.0, |
| "loss": 0.0734, |
| "step": 150 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 150, |
| "total_flos": 155212002230272.0, |
| "train_loss": 0.08886419877409935, |
| "train_runtime": 2347.0493, |
| "train_samples_per_second": 0.511, |
| "train_steps_per_second": 0.064 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 150, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": false, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 155212002230272.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |