| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 6.955414012738854, |
| "eval_steps": 500, |
| "global_step": 364, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.01910828025477707, |
| "grad_norm": 14.990140022095986, |
| "learning_rate": 5.405405405405406e-07, |
| "loss": 1.5145, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.03821656050955414, |
| "grad_norm": 14.385565542515142, |
| "learning_rate": 1.0810810810810812e-06, |
| "loss": 1.4886, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.05732484076433121, |
| "grad_norm": 14.479366215173409, |
| "learning_rate": 1.6216216216216219e-06, |
| "loss": 1.4943, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.07643312101910828, |
| "grad_norm": 14.524746880859054, |
| "learning_rate": 2.1621621621621623e-06, |
| "loss": 1.5135, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.09554140127388536, |
| "grad_norm": 12.585870030810783, |
| "learning_rate": 2.702702702702703e-06, |
| "loss": 1.4553, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.11464968152866242, |
| "grad_norm": 10.656848802496679, |
| "learning_rate": 3.2432432432432437e-06, |
| "loss": 1.3436, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.1337579617834395, |
| "grad_norm": 7.960921811444013, |
| "learning_rate": 3.7837837837837844e-06, |
| "loss": 1.2728, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.15286624203821655, |
| "grad_norm": 6.812177612569246, |
| "learning_rate": 4.324324324324325e-06, |
| "loss": 1.2222, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.17197452229299362, |
| "grad_norm": 4.986047582488592, |
| "learning_rate": 4.864864864864866e-06, |
| "loss": 1.1818, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.1910828025477707, |
| "grad_norm": 6.412552449052916, |
| "learning_rate": 5.405405405405406e-06, |
| "loss": 1.1491, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.21019108280254778, |
| "grad_norm": 5.494825671780005, |
| "learning_rate": 5.945945945945947e-06, |
| "loss": 1.0885, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.22929936305732485, |
| "grad_norm": 4.430026646060167, |
| "learning_rate": 6.486486486486487e-06, |
| "loss": 1.0825, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.2484076433121019, |
| "grad_norm": 4.42759103791487, |
| "learning_rate": 7.027027027027028e-06, |
| "loss": 1.0223, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.267515923566879, |
| "grad_norm": 5.062626572152773, |
| "learning_rate": 7.567567567567569e-06, |
| "loss": 1.0311, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.28662420382165604, |
| "grad_norm": 4.648519435630669, |
| "learning_rate": 8.108108108108109e-06, |
| "loss": 0.9974, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.3057324840764331, |
| "grad_norm": 3.704566808626819, |
| "learning_rate": 8.64864864864865e-06, |
| "loss": 0.9995, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.3248407643312102, |
| "grad_norm": 2.7515165681456812, |
| "learning_rate": 9.189189189189191e-06, |
| "loss": 0.944, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.34394904458598724, |
| "grad_norm": 2.8097914129434836, |
| "learning_rate": 9.729729729729732e-06, |
| "loss": 0.9407, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.3630573248407643, |
| "grad_norm": 3.2291111191358435, |
| "learning_rate": 1.027027027027027e-05, |
| "loss": 0.9214, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.3821656050955414, |
| "grad_norm": 2.5625348519696294, |
| "learning_rate": 1.0810810810810812e-05, |
| "loss": 0.9183, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.4012738853503185, |
| "grad_norm": 2.232749262686051, |
| "learning_rate": 1.1351351351351352e-05, |
| "loss": 0.9078, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.42038216560509556, |
| "grad_norm": 1.9156573821747163, |
| "learning_rate": 1.1891891891891894e-05, |
| "loss": 0.878, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.4394904458598726, |
| "grad_norm": 1.941941511919396, |
| "learning_rate": 1.2432432432432433e-05, |
| "loss": 0.8715, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.4585987261146497, |
| "grad_norm": 2.0994478371302074, |
| "learning_rate": 1.2972972972972975e-05, |
| "loss": 0.8773, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.47770700636942676, |
| "grad_norm": 1.6275791350191224, |
| "learning_rate": 1.3513513513513515e-05, |
| "loss": 0.8805, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.4968152866242038, |
| "grad_norm": 1.515599235439942, |
| "learning_rate": 1.4054054054054055e-05, |
| "loss": 0.8635, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.5159235668789809, |
| "grad_norm": 1.6443236891879496, |
| "learning_rate": 1.4594594594594596e-05, |
| "loss": 0.8514, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.535031847133758, |
| "grad_norm": 2.1566568550834795, |
| "learning_rate": 1.5135135135135138e-05, |
| "loss": 0.8666, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.554140127388535, |
| "grad_norm": 1.865335585637504, |
| "learning_rate": 1.5675675675675676e-05, |
| "loss": 0.8535, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.5732484076433121, |
| "grad_norm": 1.513729648970103, |
| "learning_rate": 1.6216216216216218e-05, |
| "loss": 0.8754, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.5923566878980892, |
| "grad_norm": 1.5420010766356778, |
| "learning_rate": 1.6756756756756757e-05, |
| "loss": 0.8624, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.6114649681528662, |
| "grad_norm": 2.0504598465800044, |
| "learning_rate": 1.72972972972973e-05, |
| "loss": 0.8505, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.6305732484076433, |
| "grad_norm": 1.6138987956162945, |
| "learning_rate": 1.783783783783784e-05, |
| "loss": 0.8609, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.6496815286624203, |
| "grad_norm": 1.4520722307746579, |
| "learning_rate": 1.8378378378378383e-05, |
| "loss": 0.8408, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.6687898089171974, |
| "grad_norm": 1.7223251738619296, |
| "learning_rate": 1.891891891891892e-05, |
| "loss": 0.8289, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.6878980891719745, |
| "grad_norm": 1.5293784012050076, |
| "learning_rate": 1.9459459459459463e-05, |
| "loss": 0.8407, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.7070063694267515, |
| "grad_norm": 1.640633324255724, |
| "learning_rate": 2e-05, |
| "loss": 0.8339, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.7261146496815286, |
| "grad_norm": 1.4337126211032725, |
| "learning_rate": 1.9999538500851633e-05, |
| "loss": 0.8484, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.7452229299363057, |
| "grad_norm": 1.3829040809921898, |
| "learning_rate": 1.9998154046002822e-05, |
| "loss": 0.8237, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.7643312101910829, |
| "grad_norm": 1.3338599298109723, |
| "learning_rate": 1.9995846763238514e-05, |
| "loss": 0.7996, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.7834394904458599, |
| "grad_norm": 1.6437827729744903, |
| "learning_rate": 1.9992616865520515e-05, |
| "loss": 0.8503, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.802547770700637, |
| "grad_norm": 1.5035408873313147, |
| "learning_rate": 1.9988464650967834e-05, |
| "loss": 0.8288, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.821656050955414, |
| "grad_norm": 1.9307767442896708, |
| "learning_rate": 1.9983390502829168e-05, |
| "loss": 0.836, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.8407643312101911, |
| "grad_norm": 1.6121481786901808, |
| "learning_rate": 1.9977394889447526e-05, |
| "loss": 0.8268, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.8598726114649682, |
| "grad_norm": 1.737736803264758, |
| "learning_rate": 1.9970478364217e-05, |
| "loss": 0.836, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.8789808917197452, |
| "grad_norm": 1.7605640838470302, |
| "learning_rate": 1.9962641565531694e-05, |
| "loss": 0.8257, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.8980891719745223, |
| "grad_norm": 1.6081290667717165, |
| "learning_rate": 1.9953885216726788e-05, |
| "loss": 0.8443, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.9171974522292994, |
| "grad_norm": 1.7541275129594776, |
| "learning_rate": 1.994421012601179e-05, |
| "loss": 0.8366, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.9363057324840764, |
| "grad_norm": 1.383033283548282, |
| "learning_rate": 1.9933617186395917e-05, |
| "loss": 0.8154, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.9554140127388535, |
| "grad_norm": 1.6777566379598123, |
| "learning_rate": 1.99221073756057e-05, |
| "loss": 0.836, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.9745222929936306, |
| "grad_norm": 1.3270894313186803, |
| "learning_rate": 1.990968175599471e-05, |
| "loss": 0.8229, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.9936305732484076, |
| "grad_norm": 1.311413944058582, |
| "learning_rate": 1.9896341474445526e-05, |
| "loss": 0.7993, |
| "step": 52 |
| }, |
| { |
| "epoch": 1.0127388535031847, |
| "grad_norm": 1.5134518938324237, |
| "learning_rate": 1.9882087762263857e-05, |
| "loss": 0.7211, |
| "step": 53 |
| }, |
| { |
| "epoch": 1.0318471337579618, |
| "grad_norm": 1.4457472761668542, |
| "learning_rate": 1.9866921935064907e-05, |
| "loss": 0.65, |
| "step": 54 |
| }, |
| { |
| "epoch": 1.0509554140127388, |
| "grad_norm": 1.6984985951003597, |
| "learning_rate": 1.985084539265195e-05, |
| "loss": 0.6405, |
| "step": 55 |
| }, |
| { |
| "epoch": 1.070063694267516, |
| "grad_norm": 1.340217284611253, |
| "learning_rate": 1.983385961888711e-05, |
| "loss": 0.6557, |
| "step": 56 |
| }, |
| { |
| "epoch": 1.089171974522293, |
| "grad_norm": 1.4683215371294849, |
| "learning_rate": 1.9815966181554412e-05, |
| "loss": 0.6382, |
| "step": 57 |
| }, |
| { |
| "epoch": 1.10828025477707, |
| "grad_norm": 1.3073685584419914, |
| "learning_rate": 1.9797166732215078e-05, |
| "loss": 0.628, |
| "step": 58 |
| }, |
| { |
| "epoch": 1.127388535031847, |
| "grad_norm": 1.4163134544358502, |
| "learning_rate": 1.977746300605507e-05, |
| "loss": 0.5882, |
| "step": 59 |
| }, |
| { |
| "epoch": 1.1464968152866242, |
| "grad_norm": 1.4044561953506427, |
| "learning_rate": 1.975685682172497e-05, |
| "loss": 0.6272, |
| "step": 60 |
| }, |
| { |
| "epoch": 1.1656050955414012, |
| "grad_norm": 1.353708898359601, |
| "learning_rate": 1.973535008117207e-05, |
| "loss": 0.6121, |
| "step": 61 |
| }, |
| { |
| "epoch": 1.1847133757961783, |
| "grad_norm": 1.256242946923662, |
| "learning_rate": 1.9712944769464864e-05, |
| "loss": 0.6169, |
| "step": 62 |
| }, |
| { |
| "epoch": 1.2038216560509554, |
| "grad_norm": 1.2353934101359754, |
| "learning_rate": 1.9689642954609808e-05, |
| "loss": 0.6123, |
| "step": 63 |
| }, |
| { |
| "epoch": 1.2229299363057324, |
| "grad_norm": 1.242011368111671, |
| "learning_rate": 1.9665446787360444e-05, |
| "loss": 0.6738, |
| "step": 64 |
| }, |
| { |
| "epoch": 1.2420382165605095, |
| "grad_norm": 1.1948083066768818, |
| "learning_rate": 1.9640358501018885e-05, |
| "loss": 0.5984, |
| "step": 65 |
| }, |
| { |
| "epoch": 1.2611464968152866, |
| "grad_norm": 1.227900627562625, |
| "learning_rate": 1.9614380411229693e-05, |
| "loss": 0.6112, |
| "step": 66 |
| }, |
| { |
| "epoch": 1.2802547770700636, |
| "grad_norm": 1.2766612501501673, |
| "learning_rate": 1.9587514915766124e-05, |
| "loss": 0.6423, |
| "step": 67 |
| }, |
| { |
| "epoch": 1.2993630573248407, |
| "grad_norm": 1.3704435778321213, |
| "learning_rate": 1.9559764494308838e-05, |
| "loss": 0.5744, |
| "step": 68 |
| }, |
| { |
| "epoch": 1.3184713375796178, |
| "grad_norm": 1.4139525066526928, |
| "learning_rate": 1.9531131708217005e-05, |
| "loss": 0.5943, |
| "step": 69 |
| }, |
| { |
| "epoch": 1.3375796178343948, |
| "grad_norm": 1.259594103517775, |
| "learning_rate": 1.950161920029191e-05, |
| "loss": 0.63, |
| "step": 70 |
| }, |
| { |
| "epoch": 1.356687898089172, |
| "grad_norm": 1.5341795996533234, |
| "learning_rate": 1.9471229694533003e-05, |
| "loss": 0.6243, |
| "step": 71 |
| }, |
| { |
| "epoch": 1.3757961783439492, |
| "grad_norm": 1.2433858014815762, |
| "learning_rate": 1.943996599588649e-05, |
| "loss": 0.6225, |
| "step": 72 |
| }, |
| { |
| "epoch": 1.394904458598726, |
| "grad_norm": 1.2066678457467106, |
| "learning_rate": 1.940783098998643e-05, |
| "loss": 0.6018, |
| "step": 73 |
| }, |
| { |
| "epoch": 1.4140127388535033, |
| "grad_norm": 1.2748999280410294, |
| "learning_rate": 1.93748276428884e-05, |
| "loss": 0.5936, |
| "step": 74 |
| }, |
| { |
| "epoch": 1.4331210191082802, |
| "grad_norm": 1.2920229857639585, |
| "learning_rate": 1.9340959000795707e-05, |
| "loss": 0.6397, |
| "step": 75 |
| }, |
| { |
| "epoch": 1.4522292993630574, |
| "grad_norm": 1.3957421713319056, |
| "learning_rate": 1.9306228189778255e-05, |
| "loss": 0.6213, |
| "step": 76 |
| }, |
| { |
| "epoch": 1.4713375796178343, |
| "grad_norm": 1.4014972976675841, |
| "learning_rate": 1.927063841548398e-05, |
| "loss": 0.6195, |
| "step": 77 |
| }, |
| { |
| "epoch": 1.4904458598726116, |
| "grad_norm": 1.2887931174342688, |
| "learning_rate": 1.9234192962842996e-05, |
| "loss": 0.5891, |
| "step": 78 |
| }, |
| { |
| "epoch": 1.5095541401273884, |
| "grad_norm": 1.2715731353165856, |
| "learning_rate": 1.9196895195764363e-05, |
| "loss": 0.6315, |
| "step": 79 |
| }, |
| { |
| "epoch": 1.5286624203821657, |
| "grad_norm": 1.229751944932979, |
| "learning_rate": 1.9158748556825637e-05, |
| "loss": 0.6056, |
| "step": 80 |
| }, |
| { |
| "epoch": 1.5477707006369426, |
| "grad_norm": 1.442724258592494, |
| "learning_rate": 1.9119756566955092e-05, |
| "loss": 0.6364, |
| "step": 81 |
| }, |
| { |
| "epoch": 1.5668789808917198, |
| "grad_norm": 1.202348821649108, |
| "learning_rate": 1.907992282510675e-05, |
| "loss": 0.6271, |
| "step": 82 |
| }, |
| { |
| "epoch": 1.5859872611464967, |
| "grad_norm": 1.2836864625828608, |
| "learning_rate": 1.90392510079282e-05, |
| "loss": 0.6128, |
| "step": 83 |
| }, |
| { |
| "epoch": 1.605095541401274, |
| "grad_norm": 1.1800499309121613, |
| "learning_rate": 1.8997744869421248e-05, |
| "loss": 0.6083, |
| "step": 84 |
| }, |
| { |
| "epoch": 1.6242038216560508, |
| "grad_norm": 1.216225762413445, |
| "learning_rate": 1.8955408240595396e-05, |
| "loss": 0.6377, |
| "step": 85 |
| }, |
| { |
| "epoch": 1.643312101910828, |
| "grad_norm": 1.1638626931156888, |
| "learning_rate": 1.891224502911428e-05, |
| "loss": 0.6178, |
| "step": 86 |
| }, |
| { |
| "epoch": 1.662420382165605, |
| "grad_norm": 1.1974901367787416, |
| "learning_rate": 1.886825921893497e-05, |
| "loss": 0.5987, |
| "step": 87 |
| }, |
| { |
| "epoch": 1.6815286624203822, |
| "grad_norm": 1.169886620928099, |
| "learning_rate": 1.8823454869940243e-05, |
| "loss": 0.583, |
| "step": 88 |
| }, |
| { |
| "epoch": 1.700636942675159, |
| "grad_norm": 1.1742563632707246, |
| "learning_rate": 1.8777836117563894e-05, |
| "loss": 0.6144, |
| "step": 89 |
| }, |
| { |
| "epoch": 1.7197452229299364, |
| "grad_norm": 1.165573756913235, |
| "learning_rate": 1.873140717240899e-05, |
| "loss": 0.6324, |
| "step": 90 |
| }, |
| { |
| "epoch": 1.7388535031847132, |
| "grad_norm": 1.3576876293848648, |
| "learning_rate": 1.8684172319859258e-05, |
| "loss": 0.6505, |
| "step": 91 |
| }, |
| { |
| "epoch": 1.7579617834394905, |
| "grad_norm": 1.2097778554735674, |
| "learning_rate": 1.863613591968355e-05, |
| "loss": 0.6176, |
| "step": 92 |
| }, |
| { |
| "epoch": 1.7770700636942676, |
| "grad_norm": 1.3440536330118746, |
| "learning_rate": 1.858730240563342e-05, |
| "loss": 0.5928, |
| "step": 93 |
| }, |
| { |
| "epoch": 1.7961783439490446, |
| "grad_norm": 1.5421403847520048, |
| "learning_rate": 1.8537676285033886e-05, |
| "loss": 0.6094, |
| "step": 94 |
| }, |
| { |
| "epoch": 1.8152866242038217, |
| "grad_norm": 1.298905703769219, |
| "learning_rate": 1.848726213836744e-05, |
| "loss": 0.615, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.8343949044585988, |
| "grad_norm": 1.5410691337183182, |
| "learning_rate": 1.8436064618851225e-05, |
| "loss": 0.5955, |
| "step": 96 |
| }, |
| { |
| "epoch": 1.8535031847133758, |
| "grad_norm": 1.4380886980637535, |
| "learning_rate": 1.838408845200758e-05, |
| "loss": 0.5845, |
| "step": 97 |
| }, |
| { |
| "epoch": 1.872611464968153, |
| "grad_norm": 1.2927175032806673, |
| "learning_rate": 1.8331338435227838e-05, |
| "loss": 0.5969, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.89171974522293, |
| "grad_norm": 1.4661387293954078, |
| "learning_rate": 1.8277819437329577e-05, |
| "loss": 0.6284, |
| "step": 99 |
| }, |
| { |
| "epoch": 1.910828025477707, |
| "grad_norm": 1.4690243089475816, |
| "learning_rate": 1.8223536398107177e-05, |
| "loss": 0.6102, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.929936305732484, |
| "grad_norm": 1.2407333702545924, |
| "learning_rate": 1.8168494327875918e-05, |
| "loss": 0.5917, |
| "step": 101 |
| }, |
| { |
| "epoch": 1.9490445859872612, |
| "grad_norm": 1.2765830269044587, |
| "learning_rate": 1.8112698307009506e-05, |
| "loss": 0.6346, |
| "step": 102 |
| }, |
| { |
| "epoch": 1.9681528662420382, |
| "grad_norm": 1.2005302634214932, |
| "learning_rate": 1.8056153485471167e-05, |
| "loss": 0.6232, |
| "step": 103 |
| }, |
| { |
| "epoch": 1.9872611464968153, |
| "grad_norm": 1.148963792464725, |
| "learning_rate": 1.799886508233829e-05, |
| "loss": 0.5957, |
| "step": 104 |
| }, |
| { |
| "epoch": 2.0063694267515926, |
| "grad_norm": 1.2628345318954612, |
| "learning_rate": 1.7940838385320732e-05, |
| "loss": 0.5155, |
| "step": 105 |
| }, |
| { |
| "epoch": 2.0254777070063694, |
| "grad_norm": 1.3070169831120713, |
| "learning_rate": 1.788207875027274e-05, |
| "loss": 0.4399, |
| "step": 106 |
| }, |
| { |
| "epoch": 2.0445859872611467, |
| "grad_norm": 1.3027394925308748, |
| "learning_rate": 1.7822591600698632e-05, |
| "loss": 0.3959, |
| "step": 107 |
| }, |
| { |
| "epoch": 2.0636942675159236, |
| "grad_norm": 2.0977150823190454, |
| "learning_rate": 1.776238242725217e-05, |
| "loss": 0.4197, |
| "step": 108 |
| }, |
| { |
| "epoch": 2.082802547770701, |
| "grad_norm": 1.5059789233190988, |
| "learning_rate": 1.7701456787229805e-05, |
| "loss": 0.3949, |
| "step": 109 |
| }, |
| { |
| "epoch": 2.1019108280254777, |
| "grad_norm": 3.3191966173358805, |
| "learning_rate": 1.7639820304057745e-05, |
| "loss": 0.3931, |
| "step": 110 |
| }, |
| { |
| "epoch": 2.121019108280255, |
| "grad_norm": 1.3624451723710866, |
| "learning_rate": 1.7577478666772886e-05, |
| "loss": 0.3843, |
| "step": 111 |
| }, |
| { |
| "epoch": 2.140127388535032, |
| "grad_norm": 1.1484719310505338, |
| "learning_rate": 1.751443762949772e-05, |
| "loss": 0.3745, |
| "step": 112 |
| }, |
| { |
| "epoch": 2.159235668789809, |
| "grad_norm": 1.2700892780904618, |
| "learning_rate": 1.7450703010909263e-05, |
| "loss": 0.413, |
| "step": 113 |
| }, |
| { |
| "epoch": 2.178343949044586, |
| "grad_norm": 1.0586189376543789, |
| "learning_rate": 1.738628069370195e-05, |
| "loss": 0.3464, |
| "step": 114 |
| }, |
| { |
| "epoch": 2.1974522292993632, |
| "grad_norm": 1.1783273985129143, |
| "learning_rate": 1.732117662404469e-05, |
| "loss": 0.3512, |
| "step": 115 |
| }, |
| { |
| "epoch": 2.21656050955414, |
| "grad_norm": 1.1532860416115633, |
| "learning_rate": 1.7255396811032014e-05, |
| "loss": 0.3493, |
| "step": 116 |
| }, |
| { |
| "epoch": 2.2356687898089174, |
| "grad_norm": 1.07196230298279, |
| "learning_rate": 1.718894732612947e-05, |
| "loss": 0.3543, |
| "step": 117 |
| }, |
| { |
| "epoch": 2.254777070063694, |
| "grad_norm": 1.09377483988167, |
| "learning_rate": 1.712183430261319e-05, |
| "loss": 0.3378, |
| "step": 118 |
| }, |
| { |
| "epoch": 2.2738853503184715, |
| "grad_norm": 1.1086380438083379, |
| "learning_rate": 1.7054063935003813e-05, |
| "loss": 0.3691, |
| "step": 119 |
| }, |
| { |
| "epoch": 2.2929936305732483, |
| "grad_norm": 1.1110135807811403, |
| "learning_rate": 1.698564247849473e-05, |
| "loss": 0.3703, |
| "step": 120 |
| }, |
| { |
| "epoch": 2.3121019108280256, |
| "grad_norm": 1.0959457554638712, |
| "learning_rate": 1.691657624837472e-05, |
| "loss": 0.3382, |
| "step": 121 |
| }, |
| { |
| "epoch": 2.3312101910828025, |
| "grad_norm": 1.0763216442984707, |
| "learning_rate": 1.684687161944506e-05, |
| "loss": 0.3655, |
| "step": 122 |
| }, |
| { |
| "epoch": 2.3503184713375798, |
| "grad_norm": 1.0644643193109964, |
| "learning_rate": 1.677653502543113e-05, |
| "loss": 0.3713, |
| "step": 123 |
| }, |
| { |
| "epoch": 2.3694267515923566, |
| "grad_norm": 1.0520091051033431, |
| "learning_rate": 1.6705572958388576e-05, |
| "loss": 0.3637, |
| "step": 124 |
| }, |
| { |
| "epoch": 2.388535031847134, |
| "grad_norm": 0.9715610282842169, |
| "learning_rate": 1.6633991968104095e-05, |
| "loss": 0.3342, |
| "step": 125 |
| }, |
| { |
| "epoch": 2.4076433121019107, |
| "grad_norm": 1.0495947222277981, |
| "learning_rate": 1.6561798661490904e-05, |
| "loss": 0.362, |
| "step": 126 |
| }, |
| { |
| "epoch": 2.426751592356688, |
| "grad_norm": 1.027768605633936, |
| "learning_rate": 1.6488999701978905e-05, |
| "loss": 0.3909, |
| "step": 127 |
| }, |
| { |
| "epoch": 2.445859872611465, |
| "grad_norm": 1.0430605883090662, |
| "learning_rate": 1.6415601808899658e-05, |
| "loss": 0.3707, |
| "step": 128 |
| }, |
| { |
| "epoch": 2.464968152866242, |
| "grad_norm": 0.9897344708027874, |
| "learning_rate": 1.63416117568662e-05, |
| "loss": 0.3716, |
| "step": 129 |
| }, |
| { |
| "epoch": 2.484076433121019, |
| "grad_norm": 1.0206268353107175, |
| "learning_rate": 1.6267036375147728e-05, |
| "loss": 0.3791, |
| "step": 130 |
| }, |
| { |
| "epoch": 2.5031847133757963, |
| "grad_norm": 0.9968464604996385, |
| "learning_rate": 1.619188254703927e-05, |
| "loss": 0.3959, |
| "step": 131 |
| }, |
| { |
| "epoch": 2.522292993630573, |
| "grad_norm": 1.0450892578829158, |
| "learning_rate": 1.6116157209226356e-05, |
| "loss": 0.3935, |
| "step": 132 |
| }, |
| { |
| "epoch": 2.5414012738853504, |
| "grad_norm": 1.0420327393892306, |
| "learning_rate": 1.6039867351144778e-05, |
| "loss": 0.3732, |
| "step": 133 |
| }, |
| { |
| "epoch": 2.5605095541401273, |
| "grad_norm": 1.0658140476870972, |
| "learning_rate": 1.5963020014335437e-05, |
| "loss": 0.3679, |
| "step": 134 |
| }, |
| { |
| "epoch": 2.5796178343949046, |
| "grad_norm": 1.015910488975753, |
| "learning_rate": 1.588562229179443e-05, |
| "loss": 0.4047, |
| "step": 135 |
| }, |
| { |
| "epoch": 2.5987261146496814, |
| "grad_norm": 1.0151425054438397, |
| "learning_rate": 1.5807681327318372e-05, |
| "loss": 0.3517, |
| "step": 136 |
| }, |
| { |
| "epoch": 2.6178343949044587, |
| "grad_norm": 1.1559403742194478, |
| "learning_rate": 1.5729204314845002e-05, |
| "loss": 0.3777, |
| "step": 137 |
| }, |
| { |
| "epoch": 2.6369426751592355, |
| "grad_norm": 1.1338157549104528, |
| "learning_rate": 1.56501984977892e-05, |
| "loss": 0.3729, |
| "step": 138 |
| }, |
| { |
| "epoch": 2.656050955414013, |
| "grad_norm": 1.064291963474977, |
| "learning_rate": 1.557067116837444e-05, |
| "loss": 0.3829, |
| "step": 139 |
| }, |
| { |
| "epoch": 2.6751592356687897, |
| "grad_norm": 1.0427156725933961, |
| "learning_rate": 1.5490629666959668e-05, |
| "loss": 0.39, |
| "step": 140 |
| }, |
| { |
| "epoch": 2.694267515923567, |
| "grad_norm": 1.0226624788733183, |
| "learning_rate": 1.541008138136183e-05, |
| "loss": 0.3889, |
| "step": 141 |
| }, |
| { |
| "epoch": 2.713375796178344, |
| "grad_norm": 1.0525328188947, |
| "learning_rate": 1.5329033746173975e-05, |
| "loss": 0.3612, |
| "step": 142 |
| }, |
| { |
| "epoch": 2.732484076433121, |
| "grad_norm": 1.078767271893382, |
| "learning_rate": 1.5247494242079024e-05, |
| "loss": 0.4071, |
| "step": 143 |
| }, |
| { |
| "epoch": 2.7515923566878984, |
| "grad_norm": 0.9854161293601749, |
| "learning_rate": 1.5165470395159314e-05, |
| "loss": 0.3517, |
| "step": 144 |
| }, |
| { |
| "epoch": 2.770700636942675, |
| "grad_norm": 1.0216191388051186, |
| "learning_rate": 1.5082969776201948e-05, |
| "loss": 0.3634, |
| "step": 145 |
| }, |
| { |
| "epoch": 2.789808917197452, |
| "grad_norm": 1.0260644601169646, |
| "learning_rate": 1.5000000000000002e-05, |
| "loss": 0.3458, |
| "step": 146 |
| }, |
| { |
| "epoch": 2.8089171974522293, |
| "grad_norm": 1.0679550777084548, |
| "learning_rate": 1.4916568724649688e-05, |
| "loss": 0.3719, |
| "step": 147 |
| }, |
| { |
| "epoch": 2.8280254777070066, |
| "grad_norm": 0.9833092792439724, |
| "learning_rate": 1.483268365084351e-05, |
| "loss": 0.3577, |
| "step": 148 |
| }, |
| { |
| "epoch": 2.8471337579617835, |
| "grad_norm": 1.0213618618762001, |
| "learning_rate": 1.4748352521159492e-05, |
| "loss": 0.3769, |
| "step": 149 |
| }, |
| { |
| "epoch": 2.8662420382165603, |
| "grad_norm": 0.9881143303734706, |
| "learning_rate": 1.466358311934654e-05, |
| "loss": 0.37, |
| "step": 150 |
| }, |
| { |
| "epoch": 2.8853503184713376, |
| "grad_norm": 1.0426528347787805, |
| "learning_rate": 1.4578383269606004e-05, |
| "loss": 0.3737, |
| "step": 151 |
| }, |
| { |
| "epoch": 2.904458598726115, |
| "grad_norm": 1.011316944159334, |
| "learning_rate": 1.4492760835869504e-05, |
| "loss": 0.3484, |
| "step": 152 |
| }, |
| { |
| "epoch": 2.9235668789808917, |
| "grad_norm": 1.0771107253387162, |
| "learning_rate": 1.4406723721073088e-05, |
| "loss": 0.3458, |
| "step": 153 |
| }, |
| { |
| "epoch": 2.9426751592356686, |
| "grad_norm": 1.0914736531721394, |
| "learning_rate": 1.4320279866427798e-05, |
| "loss": 0.3837, |
| "step": 154 |
| }, |
| { |
| "epoch": 2.961783439490446, |
| "grad_norm": 0.9879292075174337, |
| "learning_rate": 1.4233437250686695e-05, |
| "loss": 0.3379, |
| "step": 155 |
| }, |
| { |
| "epoch": 2.980891719745223, |
| "grad_norm": 1.0534558360040707, |
| "learning_rate": 1.4146203889408418e-05, |
| "loss": 0.3737, |
| "step": 156 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.9546429746260057, |
| "learning_rate": 1.4058587834217356e-05, |
| "loss": 0.3077, |
| "step": 157 |
| }, |
| { |
| "epoch": 3.0191082802547773, |
| "grad_norm": 1.0794314661371742, |
| "learning_rate": 1.3970597172060482e-05, |
| "loss": 0.2445, |
| "step": 158 |
| }, |
| { |
| "epoch": 3.038216560509554, |
| "grad_norm": 0.8725593864518827, |
| "learning_rate": 1.3882240024460928e-05, |
| "loss": 0.251, |
| "step": 159 |
| }, |
| { |
| "epoch": 3.0573248407643314, |
| "grad_norm": 0.9528452182374068, |
| "learning_rate": 1.3793524546768358e-05, |
| "loss": 0.222, |
| "step": 160 |
| }, |
| { |
| "epoch": 3.0764331210191083, |
| "grad_norm": 1.243120588259197, |
| "learning_rate": 1.3704458927406261e-05, |
| "loss": 0.2438, |
| "step": 161 |
| }, |
| { |
| "epoch": 3.0955414012738856, |
| "grad_norm": 0.9927780427766021, |
| "learning_rate": 1.3615051387116131e-05, |
| "loss": 0.2071, |
| "step": 162 |
| }, |
| { |
| "epoch": 3.1146496815286624, |
| "grad_norm": 0.8760051519137424, |
| "learning_rate": 1.3525310178198707e-05, |
| "loss": 0.2072, |
| "step": 163 |
| }, |
| { |
| "epoch": 3.1337579617834397, |
| "grad_norm": 0.993441527748417, |
| "learning_rate": 1.3435243583752294e-05, |
| "loss": 0.1903, |
| "step": 164 |
| }, |
| { |
| "epoch": 3.1528662420382165, |
| "grad_norm": 1.0122823808035941, |
| "learning_rate": 1.3344859916908206e-05, |
| "loss": 0.223, |
| "step": 165 |
| }, |
| { |
| "epoch": 3.171974522292994, |
| "grad_norm": 0.8280127986581172, |
| "learning_rate": 1.325416752006351e-05, |
| "loss": 0.2167, |
| "step": 166 |
| }, |
| { |
| "epoch": 3.1910828025477707, |
| "grad_norm": 0.9207860411765015, |
| "learning_rate": 1.3163174764110985e-05, |
| "loss": 0.2234, |
| "step": 167 |
| }, |
| { |
| "epoch": 3.210191082802548, |
| "grad_norm": 1.0087540713450258, |
| "learning_rate": 1.3071890047666498e-05, |
| "loss": 0.2607, |
| "step": 168 |
| }, |
| { |
| "epoch": 3.229299363057325, |
| "grad_norm": 0.8754252182890954, |
| "learning_rate": 1.2980321796293838e-05, |
| "loss": 0.2321, |
| "step": 169 |
| }, |
| { |
| "epoch": 3.248407643312102, |
| "grad_norm": 0.8428251952961017, |
| "learning_rate": 1.288847846172701e-05, |
| "loss": 0.2115, |
| "step": 170 |
| }, |
| { |
| "epoch": 3.267515923566879, |
| "grad_norm": 0.854200909124553, |
| "learning_rate": 1.2796368521090143e-05, |
| "loss": 0.2184, |
| "step": 171 |
| }, |
| { |
| "epoch": 3.286624203821656, |
| "grad_norm": 0.869443567808356, |
| "learning_rate": 1.2704000476115079e-05, |
| "loss": 0.2382, |
| "step": 172 |
| }, |
| { |
| "epoch": 3.305732484076433, |
| "grad_norm": 0.8459308281223218, |
| "learning_rate": 1.2611382852356632e-05, |
| "loss": 0.2425, |
| "step": 173 |
| }, |
| { |
| "epoch": 3.3248407643312103, |
| "grad_norm": 0.8148693074167046, |
| "learning_rate": 1.2518524198405699e-05, |
| "loss": 0.1893, |
| "step": 174 |
| }, |
| { |
| "epoch": 3.343949044585987, |
| "grad_norm": 0.8713896320210411, |
| "learning_rate": 1.2425433085100224e-05, |
| "loss": 0.2613, |
| "step": 175 |
| }, |
| { |
| "epoch": 3.3630573248407645, |
| "grad_norm": 0.9166069241971261, |
| "learning_rate": 1.233211810473411e-05, |
| "loss": 0.2493, |
| "step": 176 |
| }, |
| { |
| "epoch": 3.3821656050955413, |
| "grad_norm": 0.8690652011432752, |
| "learning_rate": 1.2238587870264152e-05, |
| "loss": 0.2547, |
| "step": 177 |
| }, |
| { |
| "epoch": 3.4012738853503186, |
| "grad_norm": 0.862487386493507, |
| "learning_rate": 1.2144851014515055e-05, |
| "loss": 0.2292, |
| "step": 178 |
| }, |
| { |
| "epoch": 3.4203821656050954, |
| "grad_norm": 0.8815704028252178, |
| "learning_rate": 1.2050916189382646e-05, |
| "loss": 0.2064, |
| "step": 179 |
| }, |
| { |
| "epoch": 3.4394904458598727, |
| "grad_norm": 0.8675773397668213, |
| "learning_rate": 1.1956792065035281e-05, |
| "loss": 0.25, |
| "step": 180 |
| }, |
| { |
| "epoch": 3.4585987261146496, |
| "grad_norm": 0.8933077382821798, |
| "learning_rate": 1.1862487329113606e-05, |
| "loss": 0.2358, |
| "step": 181 |
| }, |
| { |
| "epoch": 3.477707006369427, |
| "grad_norm": 0.8903704341987667, |
| "learning_rate": 1.1768010685928686e-05, |
| "loss": 0.2264, |
| "step": 182 |
| }, |
| { |
| "epoch": 3.4968152866242037, |
| "grad_norm": 0.8383070066020237, |
| "learning_rate": 1.1673370855658592e-05, |
| "loss": 0.2043, |
| "step": 183 |
| }, |
| { |
| "epoch": 3.515923566878981, |
| "grad_norm": 0.8911649033404783, |
| "learning_rate": 1.1578576573543541e-05, |
| "loss": 0.2178, |
| "step": 184 |
| }, |
| { |
| "epoch": 3.535031847133758, |
| "grad_norm": 0.8585210661904665, |
| "learning_rate": 1.1483636589079627e-05, |
| "loss": 0.2156, |
| "step": 185 |
| }, |
| { |
| "epoch": 3.554140127388535, |
| "grad_norm": 0.8162221440030065, |
| "learning_rate": 1.1388559665211241e-05, |
| "loss": 0.2548, |
| "step": 186 |
| }, |
| { |
| "epoch": 3.573248407643312, |
| "grad_norm": 0.7963772067435857, |
| "learning_rate": 1.1293354577522264e-05, |
| "loss": 0.2381, |
| "step": 187 |
| }, |
| { |
| "epoch": 3.5923566878980893, |
| "grad_norm": 0.8677746097750828, |
| "learning_rate": 1.1198030113426074e-05, |
| "loss": 0.2054, |
| "step": 188 |
| }, |
| { |
| "epoch": 3.611464968152866, |
| "grad_norm": 0.895188923289957, |
| "learning_rate": 1.1102595071354471e-05, |
| "loss": 0.2171, |
| "step": 189 |
| }, |
| { |
| "epoch": 3.6305732484076434, |
| "grad_norm": 0.8791187036486455, |
| "learning_rate": 1.1007058259945584e-05, |
| "loss": 0.2477, |
| "step": 190 |
| }, |
| { |
| "epoch": 3.6496815286624202, |
| "grad_norm": 0.8155997120595588, |
| "learning_rate": 1.0911428497230834e-05, |
| "loss": 0.2045, |
| "step": 191 |
| }, |
| { |
| "epoch": 3.6687898089171975, |
| "grad_norm": 0.8770814295828485, |
| "learning_rate": 1.0815714609821027e-05, |
| "loss": 0.2383, |
| "step": 192 |
| }, |
| { |
| "epoch": 3.6878980891719744, |
| "grad_norm": 0.819184381321191, |
| "learning_rate": 1.0719925432091671e-05, |
| "loss": 0.2143, |
| "step": 193 |
| }, |
| { |
| "epoch": 3.7070063694267517, |
| "grad_norm": 0.8250575947986096, |
| "learning_rate": 1.0624069805367558e-05, |
| "loss": 0.1944, |
| "step": 194 |
| }, |
| { |
| "epoch": 3.7261146496815285, |
| "grad_norm": 0.8276542050458014, |
| "learning_rate": 1.0528156577106703e-05, |
| "loss": 0.2404, |
| "step": 195 |
| }, |
| { |
| "epoch": 3.745222929936306, |
| "grad_norm": 0.8410279981631184, |
| "learning_rate": 1.043219460008374e-05, |
| "loss": 0.2131, |
| "step": 196 |
| }, |
| { |
| "epoch": 3.7643312101910826, |
| "grad_norm": 0.8003839667268587, |
| "learning_rate": 1.0336192731572805e-05, |
| "loss": 0.2373, |
| "step": 197 |
| }, |
| { |
| "epoch": 3.78343949044586, |
| "grad_norm": 0.8003447028390943, |
| "learning_rate": 1.0240159832530007e-05, |
| "loss": 0.2228, |
| "step": 198 |
| }, |
| { |
| "epoch": 3.802547770700637, |
| "grad_norm": 0.8107666767644914, |
| "learning_rate": 1.0144104766775574e-05, |
| "loss": 0.2245, |
| "step": 199 |
| }, |
| { |
| "epoch": 3.821656050955414, |
| "grad_norm": 0.8283501745311356, |
| "learning_rate": 1.004803640017571e-05, |
| "loss": 0.1727, |
| "step": 200 |
| }, |
| { |
| "epoch": 3.840764331210191, |
| "grad_norm": 0.8430576593378581, |
| "learning_rate": 9.951963599824294e-06, |
| "loss": 0.241, |
| "step": 201 |
| }, |
| { |
| "epoch": 3.859872611464968, |
| "grad_norm": 0.842864753329655, |
| "learning_rate": 9.855895233224431e-06, |
| "loss": 0.2361, |
| "step": 202 |
| }, |
| { |
| "epoch": 3.8789808917197455, |
| "grad_norm": 0.7894891328436988, |
| "learning_rate": 9.759840167469995e-06, |
| "loss": 0.1798, |
| "step": 203 |
| }, |
| { |
| "epoch": 3.8980891719745223, |
| "grad_norm": 0.8043963124163499, |
| "learning_rate": 9.663807268427197e-06, |
| "loss": 0.2059, |
| "step": 204 |
| }, |
| { |
| "epoch": 3.917197452229299, |
| "grad_norm": 0.8321817031652909, |
| "learning_rate": 9.56780539991626e-06, |
| "loss": 0.212, |
| "step": 205 |
| }, |
| { |
| "epoch": 3.9363057324840764, |
| "grad_norm": 0.8074239055674105, |
| "learning_rate": 9.471843422893299e-06, |
| "loss": 0.2009, |
| "step": 206 |
| }, |
| { |
| "epoch": 3.9554140127388537, |
| "grad_norm": 0.8557990210783927, |
| "learning_rate": 9.375930194632447e-06, |
| "loss": 0.2145, |
| "step": 207 |
| }, |
| { |
| "epoch": 3.9745222929936306, |
| "grad_norm": 0.7812183187749983, |
| "learning_rate": 9.28007456790833e-06, |
| "loss": 0.1866, |
| "step": 208 |
| }, |
| { |
| "epoch": 3.9936305732484074, |
| "grad_norm": 0.7736504835220844, |
| "learning_rate": 9.184285390178978e-06, |
| "loss": 0.2221, |
| "step": 209 |
| }, |
| { |
| "epoch": 4.012738853503185, |
| "grad_norm": 0.6358381739054941, |
| "learning_rate": 9.08857150276917e-06, |
| "loss": 0.1033, |
| "step": 210 |
| }, |
| { |
| "epoch": 4.031847133757962, |
| "grad_norm": 0.6598743747199938, |
| "learning_rate": 8.992941740054418e-06, |
| "loss": 0.148, |
| "step": 211 |
| }, |
| { |
| "epoch": 4.050955414012739, |
| "grad_norm": 0.577662790844215, |
| "learning_rate": 8.897404928645529e-06, |
| "loss": 0.1086, |
| "step": 212 |
| }, |
| { |
| "epoch": 4.070063694267516, |
| "grad_norm": 0.667177834347151, |
| "learning_rate": 8.80196988657393e-06, |
| "loss": 0.1663, |
| "step": 213 |
| }, |
| { |
| "epoch": 4.089171974522293, |
| "grad_norm": 0.7876247378563334, |
| "learning_rate": 8.706645422477739e-06, |
| "loss": 0.1534, |
| "step": 214 |
| }, |
| { |
| "epoch": 4.10828025477707, |
| "grad_norm": 0.7587047405394588, |
| "learning_rate": 8.611440334788762e-06, |
| "loss": 0.1326, |
| "step": 215 |
| }, |
| { |
| "epoch": 4.127388535031847, |
| "grad_norm": 0.646869384984155, |
| "learning_rate": 8.516363410920376e-06, |
| "loss": 0.1116, |
| "step": 216 |
| }, |
| { |
| "epoch": 4.146496815286624, |
| "grad_norm": 0.6538670553340495, |
| "learning_rate": 8.42142342645646e-06, |
| "loss": 0.1243, |
| "step": 217 |
| }, |
| { |
| "epoch": 4.165605095541402, |
| "grad_norm": 0.6104543037349788, |
| "learning_rate": 8.326629144341408e-06, |
| "loss": 0.1251, |
| "step": 218 |
| }, |
| { |
| "epoch": 4.1847133757961785, |
| "grad_norm": 0.6638827104955027, |
| "learning_rate": 8.231989314071318e-06, |
| "loss": 0.1235, |
| "step": 219 |
| }, |
| { |
| "epoch": 4.203821656050955, |
| "grad_norm": 0.6157950485106373, |
| "learning_rate": 8.137512670886397e-06, |
| "loss": 0.1318, |
| "step": 220 |
| }, |
| { |
| "epoch": 4.222929936305732, |
| "grad_norm": 0.5807970143215085, |
| "learning_rate": 8.043207934964722e-06, |
| "loss": 0.1528, |
| "step": 221 |
| }, |
| { |
| "epoch": 4.24203821656051, |
| "grad_norm": 0.6166843267142346, |
| "learning_rate": 7.949083810617358e-06, |
| "loss": 0.1777, |
| "step": 222 |
| }, |
| { |
| "epoch": 4.261146496815287, |
| "grad_norm": 0.6029875516314757, |
| "learning_rate": 7.855148985484946e-06, |
| "loss": 0.1336, |
| "step": 223 |
| }, |
| { |
| "epoch": 4.280254777070064, |
| "grad_norm": 0.6416238599404323, |
| "learning_rate": 7.761412129735853e-06, |
| "loss": 0.1447, |
| "step": 224 |
| }, |
| { |
| "epoch": 4.2993630573248405, |
| "grad_norm": 0.5618037002140864, |
| "learning_rate": 7.667881895265895e-06, |
| "loss": 0.1153, |
| "step": 225 |
| }, |
| { |
| "epoch": 4.318471337579618, |
| "grad_norm": 0.5872125669535371, |
| "learning_rate": 7.574566914899779e-06, |
| "loss": 0.1218, |
| "step": 226 |
| }, |
| { |
| "epoch": 4.337579617834395, |
| "grad_norm": 0.6311165305084404, |
| "learning_rate": 7.481475801594302e-06, |
| "loss": 0.1191, |
| "step": 227 |
| }, |
| { |
| "epoch": 4.356687898089172, |
| "grad_norm": 0.5835850069158302, |
| "learning_rate": 7.388617147643371e-06, |
| "loss": 0.1175, |
| "step": 228 |
| }, |
| { |
| "epoch": 4.375796178343949, |
| "grad_norm": 0.5641329873077757, |
| "learning_rate": 7.295999523884921e-06, |
| "loss": 0.1268, |
| "step": 229 |
| }, |
| { |
| "epoch": 4.3949044585987265, |
| "grad_norm": 0.5710638661565366, |
| "learning_rate": 7.203631478909857e-06, |
| "loss": 0.1196, |
| "step": 230 |
| }, |
| { |
| "epoch": 4.414012738853503, |
| "grad_norm": 0.6304674375983539, |
| "learning_rate": 7.111521538272997e-06, |
| "loss": 0.162, |
| "step": 231 |
| }, |
| { |
| "epoch": 4.43312101910828, |
| "grad_norm": 0.5925648384154195, |
| "learning_rate": 7.019678203706164e-06, |
| "loss": 0.101, |
| "step": 232 |
| }, |
| { |
| "epoch": 4.452229299363057, |
| "grad_norm": 0.5759144673258558, |
| "learning_rate": 6.928109952333506e-06, |
| "loss": 0.1412, |
| "step": 233 |
| }, |
| { |
| "epoch": 4.471337579617835, |
| "grad_norm": 0.5908269020834356, |
| "learning_rate": 6.83682523588902e-06, |
| "loss": 0.1535, |
| "step": 234 |
| }, |
| { |
| "epoch": 4.490445859872612, |
| "grad_norm": 0.5973740508699396, |
| "learning_rate": 6.745832479936492e-06, |
| "loss": 0.1317, |
| "step": 235 |
| }, |
| { |
| "epoch": 4.509554140127388, |
| "grad_norm": 0.587279118111191, |
| "learning_rate": 6.655140083091794e-06, |
| "loss": 0.1214, |
| "step": 236 |
| }, |
| { |
| "epoch": 4.528662420382165, |
| "grad_norm": 0.6209961542756124, |
| "learning_rate": 6.564756416247712e-06, |
| "loss": 0.1518, |
| "step": 237 |
| }, |
| { |
| "epoch": 4.547770700636943, |
| "grad_norm": 0.579382073138381, |
| "learning_rate": 6.474689821801295e-06, |
| "loss": 0.1309, |
| "step": 238 |
| }, |
| { |
| "epoch": 4.56687898089172, |
| "grad_norm": 0.6065440171745169, |
| "learning_rate": 6.384948612883872e-06, |
| "loss": 0.1009, |
| "step": 239 |
| }, |
| { |
| "epoch": 4.585987261146497, |
| "grad_norm": 0.5585615132779307, |
| "learning_rate": 6.2955410725937405e-06, |
| "loss": 0.1217, |
| "step": 240 |
| }, |
| { |
| "epoch": 4.6050955414012735, |
| "grad_norm": 0.5924523782164844, |
| "learning_rate": 6.206475453231644e-06, |
| "loss": 0.1109, |
| "step": 241 |
| }, |
| { |
| "epoch": 4.624203821656051, |
| "grad_norm": 0.5995874401366601, |
| "learning_rate": 6.117759975539075e-06, |
| "loss": 0.1182, |
| "step": 242 |
| }, |
| { |
| "epoch": 4.643312101910828, |
| "grad_norm": 0.5576794993986748, |
| "learning_rate": 6.029402827939519e-06, |
| "loss": 0.1121, |
| "step": 243 |
| }, |
| { |
| "epoch": 4.662420382165605, |
| "grad_norm": 0.5770135050173938, |
| "learning_rate": 5.941412165782645e-06, |
| "loss": 0.1245, |
| "step": 244 |
| }, |
| { |
| "epoch": 4.681528662420382, |
| "grad_norm": 0.5929257692806121, |
| "learning_rate": 5.853796110591583e-06, |
| "loss": 0.1511, |
| "step": 245 |
| }, |
| { |
| "epoch": 4.7006369426751595, |
| "grad_norm": 0.5925990861687294, |
| "learning_rate": 5.766562749313309e-06, |
| "loss": 0.1279, |
| "step": 246 |
| }, |
| { |
| "epoch": 4.719745222929936, |
| "grad_norm": 0.5699434481371307, |
| "learning_rate": 5.6797201335722064e-06, |
| "loss": 0.1501, |
| "step": 247 |
| }, |
| { |
| "epoch": 4.738853503184713, |
| "grad_norm": 0.5785541560481104, |
| "learning_rate": 5.593276278926912e-06, |
| "loss": 0.1234, |
| "step": 248 |
| }, |
| { |
| "epoch": 4.757961783439491, |
| "grad_norm": 0.5944816781846355, |
| "learning_rate": 5.507239164130501e-06, |
| "loss": 0.1131, |
| "step": 249 |
| }, |
| { |
| "epoch": 4.777070063694268, |
| "grad_norm": 0.5951359908540822, |
| "learning_rate": 5.421616730394e-06, |
| "loss": 0.098, |
| "step": 250 |
| }, |
| { |
| "epoch": 4.796178343949045, |
| "grad_norm": 0.582629331373758, |
| "learning_rate": 5.336416880653461e-06, |
| "loss": 0.1258, |
| "step": 251 |
| }, |
| { |
| "epoch": 4.8152866242038215, |
| "grad_norm": 0.5834525291911234, |
| "learning_rate": 5.251647478840511e-06, |
| "loss": 0.1048, |
| "step": 252 |
| }, |
| { |
| "epoch": 4.834394904458598, |
| "grad_norm": 0.5991362378090112, |
| "learning_rate": 5.167316349156495e-06, |
| "loss": 0.1324, |
| "step": 253 |
| }, |
| { |
| "epoch": 4.853503184713376, |
| "grad_norm": 0.5588826595586873, |
| "learning_rate": 5.083431275350312e-06, |
| "loss": 0.0959, |
| "step": 254 |
| }, |
| { |
| "epoch": 4.872611464968153, |
| "grad_norm": 0.6020289968727252, |
| "learning_rate": 5.000000000000003e-06, |
| "loss": 0.1195, |
| "step": 255 |
| }, |
| { |
| "epoch": 4.89171974522293, |
| "grad_norm": 0.5775428372320027, |
| "learning_rate": 4.917030223798057e-06, |
| "loss": 0.1317, |
| "step": 256 |
| }, |
| { |
| "epoch": 4.9108280254777075, |
| "grad_norm": 0.5517699900430338, |
| "learning_rate": 4.834529604840686e-06, |
| "loss": 0.1062, |
| "step": 257 |
| }, |
| { |
| "epoch": 4.929936305732484, |
| "grad_norm": 0.5583818338051684, |
| "learning_rate": 4.7525057579209775e-06, |
| "loss": 0.1264, |
| "step": 258 |
| }, |
| { |
| "epoch": 4.949044585987261, |
| "grad_norm": 0.5703988666724562, |
| "learning_rate": 4.670966253826027e-06, |
| "loss": 0.0929, |
| "step": 259 |
| }, |
| { |
| "epoch": 4.968152866242038, |
| "grad_norm": 0.5587580672285489, |
| "learning_rate": 4.589918618638173e-06, |
| "loss": 0.1487, |
| "step": 260 |
| }, |
| { |
| "epoch": 4.987261146496815, |
| "grad_norm": 0.5754442184055244, |
| "learning_rate": 4.5093703330403385e-06, |
| "loss": 0.1104, |
| "step": 261 |
| }, |
| { |
| "epoch": 5.006369426751593, |
| "grad_norm": 0.47604933067213157, |
| "learning_rate": 4.429328831625565e-06, |
| "loss": 0.1053, |
| "step": 262 |
| }, |
| { |
| "epoch": 5.025477707006369, |
| "grad_norm": 0.44769491277310514, |
| "learning_rate": 4.349801502210801e-06, |
| "loss": 0.0756, |
| "step": 263 |
| }, |
| { |
| "epoch": 5.044585987261146, |
| "grad_norm": 0.43083222800739923, |
| "learning_rate": 4.270795685155001e-06, |
| "loss": 0.0857, |
| "step": 264 |
| }, |
| { |
| "epoch": 5.063694267515924, |
| "grad_norm": 0.4001233947597807, |
| "learning_rate": 4.192318672681631e-06, |
| "loss": 0.056, |
| "step": 265 |
| }, |
| { |
| "epoch": 5.082802547770701, |
| "grad_norm": 0.3941889030720719, |
| "learning_rate": 4.1143777082055715e-06, |
| "loss": 0.0757, |
| "step": 266 |
| }, |
| { |
| "epoch": 5.101910828025478, |
| "grad_norm": 0.4069434130632119, |
| "learning_rate": 4.036979985664566e-06, |
| "loss": 0.0881, |
| "step": 267 |
| }, |
| { |
| "epoch": 5.1210191082802545, |
| "grad_norm": 0.4402669609666307, |
| "learning_rate": 3.960132648855226e-06, |
| "loss": 0.0756, |
| "step": 268 |
| }, |
| { |
| "epoch": 5.140127388535032, |
| "grad_norm": 0.44049414089918293, |
| "learning_rate": 3.883842790773647e-06, |
| "loss": 0.0915, |
| "step": 269 |
| }, |
| { |
| "epoch": 5.159235668789809, |
| "grad_norm": 0.5617927406070873, |
| "learning_rate": 3.8081174529607346e-06, |
| "loss": 0.109, |
| "step": 270 |
| }, |
| { |
| "epoch": 5.178343949044586, |
| "grad_norm": 0.4723379246794441, |
| "learning_rate": 3.732963624852275e-06, |
| "loss": 0.0806, |
| "step": 271 |
| }, |
| { |
| "epoch": 5.197452229299363, |
| "grad_norm": 0.43295612565720193, |
| "learning_rate": 3.6583882431338047e-06, |
| "loss": 0.0627, |
| "step": 272 |
| }, |
| { |
| "epoch": 5.2165605095541405, |
| "grad_norm": 0.45553745226026043, |
| "learning_rate": 3.584398191100341e-06, |
| "loss": 0.1009, |
| "step": 273 |
| }, |
| { |
| "epoch": 5.235668789808917, |
| "grad_norm": 0.40108690481095655, |
| "learning_rate": 3.511000298021098e-06, |
| "loss": 0.0714, |
| "step": 274 |
| }, |
| { |
| "epoch": 5.254777070063694, |
| "grad_norm": 0.40423387388292076, |
| "learning_rate": 3.4382013385090985e-06, |
| "loss": 0.0777, |
| "step": 275 |
| }, |
| { |
| "epoch": 5.273885350318471, |
| "grad_norm": 0.36188868968156396, |
| "learning_rate": 3.3660080318959043e-06, |
| "loss": 0.0823, |
| "step": 276 |
| }, |
| { |
| "epoch": 5.292993630573249, |
| "grad_norm": 0.4276164374502514, |
| "learning_rate": 3.2944270416114256e-06, |
| "loss": 0.0817, |
| "step": 277 |
| }, |
| { |
| "epoch": 5.312101910828026, |
| "grad_norm": 0.4244618276711872, |
| "learning_rate": 3.223464974568874e-06, |
| "loss": 0.1013, |
| "step": 278 |
| }, |
| { |
| "epoch": 5.3312101910828025, |
| "grad_norm": 0.40711935753435863, |
| "learning_rate": 3.153128380554941e-06, |
| "loss": 0.085, |
| "step": 279 |
| }, |
| { |
| "epoch": 5.350318471337579, |
| "grad_norm": 0.4102731718512158, |
| "learning_rate": 3.0834237516252817e-06, |
| "loss": 0.126, |
| "step": 280 |
| }, |
| { |
| "epoch": 5.369426751592357, |
| "grad_norm": 0.3500596427441456, |
| "learning_rate": 3.0143575215052732e-06, |
| "loss": 0.0813, |
| "step": 281 |
| }, |
| { |
| "epoch": 5.388535031847134, |
| "grad_norm": 0.38548735206001494, |
| "learning_rate": 2.94593606499619e-06, |
| "loss": 0.1018, |
| "step": 282 |
| }, |
| { |
| "epoch": 5.407643312101911, |
| "grad_norm": 0.3755708662176069, |
| "learning_rate": 2.878165697386812e-06, |
| "loss": 0.0732, |
| "step": 283 |
| }, |
| { |
| "epoch": 5.426751592356688, |
| "grad_norm": 0.38027052274952294, |
| "learning_rate": 2.8110526738705345e-06, |
| "loss": 0.0651, |
| "step": 284 |
| }, |
| { |
| "epoch": 5.445859872611465, |
| "grad_norm": 0.40256402037943106, |
| "learning_rate": 2.7446031889679893e-06, |
| "loss": 0.0821, |
| "step": 285 |
| }, |
| { |
| "epoch": 5.464968152866242, |
| "grad_norm": 0.4191726432928864, |
| "learning_rate": 2.678823375955314e-06, |
| "loss": 0.0733, |
| "step": 286 |
| }, |
| { |
| "epoch": 5.484076433121019, |
| "grad_norm": 0.40632663104786704, |
| "learning_rate": 2.6137193062980506e-06, |
| "loss": 0.0764, |
| "step": 287 |
| }, |
| { |
| "epoch": 5.503184713375796, |
| "grad_norm": 0.3816589726850424, |
| "learning_rate": 2.5492969890907383e-06, |
| "loss": 0.0792, |
| "step": 288 |
| }, |
| { |
| "epoch": 5.522292993630574, |
| "grad_norm": 0.4064168090616113, |
| "learning_rate": 2.485562370502279e-06, |
| "loss": 0.0855, |
| "step": 289 |
| }, |
| { |
| "epoch": 5.54140127388535, |
| "grad_norm": 0.3955328560796272, |
| "learning_rate": 2.4225213332271203e-06, |
| "loss": 0.0756, |
| "step": 290 |
| }, |
| { |
| "epoch": 5.560509554140127, |
| "grad_norm": 0.38641025333156304, |
| "learning_rate": 2.3601796959422585e-06, |
| "loss": 0.0862, |
| "step": 291 |
| }, |
| { |
| "epoch": 5.579617834394904, |
| "grad_norm": 0.3698307234746336, |
| "learning_rate": 2.2985432127701945e-06, |
| "loss": 0.0476, |
| "step": 292 |
| }, |
| { |
| "epoch": 5.598726114649682, |
| "grad_norm": 0.39951138087208105, |
| "learning_rate": 2.2376175727478346e-06, |
| "loss": 0.0822, |
| "step": 293 |
| }, |
| { |
| "epoch": 5.617834394904459, |
| "grad_norm": 0.39799752448346915, |
| "learning_rate": 2.1774083993013715e-06, |
| "loss": 0.1094, |
| "step": 294 |
| }, |
| { |
| "epoch": 5.6369426751592355, |
| "grad_norm": 0.3997225350107758, |
| "learning_rate": 2.1179212497272582e-06, |
| "loss": 0.0845, |
| "step": 295 |
| }, |
| { |
| "epoch": 5.656050955414012, |
| "grad_norm": 0.3865557218719786, |
| "learning_rate": 2.0591616146792705e-06, |
| "loss": 0.0951, |
| "step": 296 |
| }, |
| { |
| "epoch": 5.67515923566879, |
| "grad_norm": 0.45083097110031933, |
| "learning_rate": 2.0011349176617133e-06, |
| "loss": 0.0891, |
| "step": 297 |
| }, |
| { |
| "epoch": 5.694267515923567, |
| "grad_norm": 0.401699893254878, |
| "learning_rate": 1.9438465145288377e-06, |
| "loss": 0.0694, |
| "step": 298 |
| }, |
| { |
| "epoch": 5.713375796178344, |
| "grad_norm": 0.39056593793324884, |
| "learning_rate": 1.8873016929904942e-06, |
| "loss": 0.107, |
| "step": 299 |
| }, |
| { |
| "epoch": 5.732484076433121, |
| "grad_norm": 0.40090489846349914, |
| "learning_rate": 1.8315056721240831e-06, |
| "loss": 0.0748, |
| "step": 300 |
| }, |
| { |
| "epoch": 5.751592356687898, |
| "grad_norm": 0.35412931082743904, |
| "learning_rate": 1.7764636018928249e-06, |
| "loss": 0.0733, |
| "step": 301 |
| }, |
| { |
| "epoch": 5.770700636942675, |
| "grad_norm": 0.40398464926101857, |
| "learning_rate": 1.722180562670428e-06, |
| "loss": 0.1067, |
| "step": 302 |
| }, |
| { |
| "epoch": 5.789808917197452, |
| "grad_norm": 0.39946500505114957, |
| "learning_rate": 1.6686615647721638e-06, |
| "loss": 0.1, |
| "step": 303 |
| }, |
| { |
| "epoch": 5.80891719745223, |
| "grad_norm": 0.39758527930540954, |
| "learning_rate": 1.6159115479924259e-06, |
| "loss": 0.0833, |
| "step": 304 |
| }, |
| { |
| "epoch": 5.828025477707007, |
| "grad_norm": 0.41708635213572515, |
| "learning_rate": 1.5639353811487744e-06, |
| "loss": 0.0515, |
| "step": 305 |
| }, |
| { |
| "epoch": 5.8471337579617835, |
| "grad_norm": 0.41188326189259594, |
| "learning_rate": 1.5127378616325606e-06, |
| "loss": 0.0773, |
| "step": 306 |
| }, |
| { |
| "epoch": 5.86624203821656, |
| "grad_norm": 0.3745565856400691, |
| "learning_rate": 1.462323714966114e-06, |
| "loss": 0.0602, |
| "step": 307 |
| }, |
| { |
| "epoch": 5.885350318471337, |
| "grad_norm": 0.40852051581658766, |
| "learning_rate": 1.4126975943665844e-06, |
| "loss": 0.0717, |
| "step": 308 |
| }, |
| { |
| "epoch": 5.904458598726115, |
| "grad_norm": 0.414260810777109, |
| "learning_rate": 1.3638640803164516e-06, |
| "loss": 0.1213, |
| "step": 309 |
| }, |
| { |
| "epoch": 5.923566878980892, |
| "grad_norm": 0.3884498532672599, |
| "learning_rate": 1.3158276801407432e-06, |
| "loss": 0.0908, |
| "step": 310 |
| }, |
| { |
| "epoch": 5.942675159235669, |
| "grad_norm": 0.384071233807421, |
| "learning_rate": 1.2685928275910142e-06, |
| "loss": 0.0812, |
| "step": 311 |
| }, |
| { |
| "epoch": 5.961783439490446, |
| "grad_norm": 0.34519516758720215, |
| "learning_rate": 1.222163882436107e-06, |
| "loss": 0.0597, |
| "step": 312 |
| }, |
| { |
| "epoch": 5.980891719745223, |
| "grad_norm": 0.3977373982189989, |
| "learning_rate": 1.1765451300597574e-06, |
| "loss": 0.0913, |
| "step": 313 |
| }, |
| { |
| "epoch": 6.0, |
| "grad_norm": 0.3878546571382882, |
| "learning_rate": 1.1317407810650372e-06, |
| "loss": 0.0884, |
| "step": 314 |
| }, |
| { |
| "epoch": 6.019108280254777, |
| "grad_norm": 0.33779153179080706, |
| "learning_rate": 1.0877549708857228e-06, |
| "loss": 0.0802, |
| "step": 315 |
| }, |
| { |
| "epoch": 6.038216560509555, |
| "grad_norm": 0.3179141578914388, |
| "learning_rate": 1.0445917594046073e-06, |
| "loss": 0.0645, |
| "step": 316 |
| }, |
| { |
| "epoch": 6.057324840764331, |
| "grad_norm": 0.3204502935987288, |
| "learning_rate": 1.0022551305787564e-06, |
| "loss": 0.0641, |
| "step": 317 |
| }, |
| { |
| "epoch": 6.076433121019108, |
| "grad_norm": 0.31828040793224005, |
| "learning_rate": 9.607489920717983e-07, |
| "loss": 0.0663, |
| "step": 318 |
| }, |
| { |
| "epoch": 6.095541401273885, |
| "grad_norm": 0.3009135223782124, |
| "learning_rate": 9.200771748932513e-07, |
| "loss": 0.0811, |
| "step": 319 |
| }, |
| { |
| "epoch": 6.114649681528663, |
| "grad_norm": 0.2578678128906716, |
| "learning_rate": 8.802434330449128e-07, |
| "loss": 0.0517, |
| "step": 320 |
| }, |
| { |
| "epoch": 6.13375796178344, |
| "grad_norm": 0.2790874578821567, |
| "learning_rate": 8.412514431743657e-07, |
| "loss": 0.0605, |
| "step": 321 |
| }, |
| { |
| "epoch": 6.1528662420382165, |
| "grad_norm": 0.3087854313268563, |
| "learning_rate": 8.031048042356393e-07, |
| "loss": 0.082, |
| "step": 322 |
| }, |
| { |
| "epoch": 6.171974522292993, |
| "grad_norm": 0.2950780829352482, |
| "learning_rate": 7.65807037157007e-07, |
| "loss": 0.0686, |
| "step": 323 |
| }, |
| { |
| "epoch": 6.191082802547771, |
| "grad_norm": 0.31169370667435575, |
| "learning_rate": 7.293615845160196e-07, |
| "loss": 0.0868, |
| "step": 324 |
| }, |
| { |
| "epoch": 6.210191082802548, |
| "grad_norm": 0.2857072437843116, |
| "learning_rate": 6.937718102217461e-07, |
| "loss": 0.0545, |
| "step": 325 |
| }, |
| { |
| "epoch": 6.229299363057325, |
| "grad_norm": 0.27631097197420473, |
| "learning_rate": 6.590409992042957e-07, |
| "loss": 0.0557, |
| "step": 326 |
| }, |
| { |
| "epoch": 6.248407643312102, |
| "grad_norm": 0.29321775110156156, |
| "learning_rate": 6.251723571116031e-07, |
| "loss": 0.0757, |
| "step": 327 |
| }, |
| { |
| "epoch": 6.267515923566879, |
| "grad_norm": 0.30475215808851175, |
| "learning_rate": 5.921690100135713e-07, |
| "loss": 0.0709, |
| "step": 328 |
| }, |
| { |
| "epoch": 6.286624203821656, |
| "grad_norm": 0.2966881009482061, |
| "learning_rate": 5.600340041135133e-07, |
| "loss": 0.0615, |
| "step": 329 |
| }, |
| { |
| "epoch": 6.305732484076433, |
| "grad_norm": 0.26393910697205086, |
| "learning_rate": 5.287703054670012e-07, |
| "loss": 0.0443, |
| "step": 330 |
| }, |
| { |
| "epoch": 6.32484076433121, |
| "grad_norm": 0.3597673902205209, |
| "learning_rate": 4.983807997080925e-07, |
| "loss": 0.0909, |
| "step": 331 |
| }, |
| { |
| "epoch": 6.343949044585988, |
| "grad_norm": 0.3013327090348725, |
| "learning_rate": 4.6886829178299676e-07, |
| "loss": 0.0667, |
| "step": 332 |
| }, |
| { |
| "epoch": 6.3630573248407645, |
| "grad_norm": 0.36243106458518387, |
| "learning_rate": 4.402355056911656e-07, |
| "loss": 0.0795, |
| "step": 333 |
| }, |
| { |
| "epoch": 6.382165605095541, |
| "grad_norm": 0.32028430641355277, |
| "learning_rate": 4.124850842338779e-07, |
| "loss": 0.0622, |
| "step": 334 |
| }, |
| { |
| "epoch": 6.401273885350318, |
| "grad_norm": 0.32768566090708884, |
| "learning_rate": 3.8561958877030957e-07, |
| "loss": 0.0587, |
| "step": 335 |
| }, |
| { |
| "epoch": 6.420382165605096, |
| "grad_norm": 0.303307793064129, |
| "learning_rate": 3.5964149898111587e-07, |
| "loss": 0.0921, |
| "step": 336 |
| }, |
| { |
| "epoch": 6.439490445859873, |
| "grad_norm": 0.2997336665485515, |
| "learning_rate": 3.345532126395579e-07, |
| "loss": 0.0621, |
| "step": 337 |
| }, |
| { |
| "epoch": 6.45859872611465, |
| "grad_norm": 0.3298346447543088, |
| "learning_rate": 3.1035704539019384e-07, |
| "loss": 0.0582, |
| "step": 338 |
| }, |
| { |
| "epoch": 6.477707006369426, |
| "grad_norm": 0.3298555361153554, |
| "learning_rate": 2.870552305351382e-07, |
| "loss": 0.0647, |
| "step": 339 |
| }, |
| { |
| "epoch": 6.496815286624204, |
| "grad_norm": 0.2996129728963715, |
| "learning_rate": 2.646499188279328e-07, |
| "loss": 0.0698, |
| "step": 340 |
| }, |
| { |
| "epoch": 6.515923566878981, |
| "grad_norm": 0.33491302387099714, |
| "learning_rate": 2.4314317827503375e-07, |
| "loss": 0.0894, |
| "step": 341 |
| }, |
| { |
| "epoch": 6.535031847133758, |
| "grad_norm": 0.302758634407778, |
| "learning_rate": 2.2253699394493066e-07, |
| "loss": 0.0741, |
| "step": 342 |
| }, |
| { |
| "epoch": 6.554140127388535, |
| "grad_norm": 0.3123601731674272, |
| "learning_rate": 2.028332677849254e-07, |
| "loss": 0.092, |
| "step": 343 |
| }, |
| { |
| "epoch": 6.573248407643312, |
| "grad_norm": 0.3009510561512071, |
| "learning_rate": 1.840338184455881e-07, |
| "loss": 0.0658, |
| "step": 344 |
| }, |
| { |
| "epoch": 6.592356687898089, |
| "grad_norm": 0.2863965586517129, |
| "learning_rate": 1.6614038111289034e-07, |
| "loss": 0.0767, |
| "step": 345 |
| }, |
| { |
| "epoch": 6.611464968152866, |
| "grad_norm": 0.30456439022535475, |
| "learning_rate": 1.49154607348051e-07, |
| "loss": 0.0679, |
| "step": 346 |
| }, |
| { |
| "epoch": 6.630573248407643, |
| "grad_norm": 0.2897959688201585, |
| "learning_rate": 1.330780649350938e-07, |
| "loss": 0.0817, |
| "step": 347 |
| }, |
| { |
| "epoch": 6.649681528662421, |
| "grad_norm": 0.3133503576071545, |
| "learning_rate": 1.1791223773614635e-07, |
| "loss": 0.0904, |
| "step": 348 |
| }, |
| { |
| "epoch": 6.6687898089171975, |
| "grad_norm": 0.30830973853117616, |
| "learning_rate": 1.0365852555447642e-07, |
| "loss": 0.0689, |
| "step": 349 |
| }, |
| { |
| "epoch": 6.687898089171974, |
| "grad_norm": 0.288367823780671, |
| "learning_rate": 9.031824400528854e-08, |
| "loss": 0.0587, |
| "step": 350 |
| }, |
| { |
| "epoch": 6.707006369426751, |
| "grad_norm": 0.3021284104708251, |
| "learning_rate": 7.789262439430012e-08, |
| "loss": 0.0702, |
| "step": 351 |
| }, |
| { |
| "epoch": 6.726114649681529, |
| "grad_norm": 0.2647136363664868, |
| "learning_rate": 6.638281360408339e-08, |
| "loss": 0.0474, |
| "step": 352 |
| }, |
| { |
| "epoch": 6.745222929936306, |
| "grad_norm": 0.2796508908753172, |
| "learning_rate": 5.578987398821345e-08, |
| "loss": 0.0721, |
| "step": 353 |
| }, |
| { |
| "epoch": 6.764331210191083, |
| "grad_norm": 0.2813296893013791, |
| "learning_rate": 4.6114783273213395e-08, |
| "loss": 0.0641, |
| "step": 354 |
| }, |
| { |
| "epoch": 6.7834394904458595, |
| "grad_norm": 0.29441527204389356, |
| "learning_rate": 3.735843446830867e-08, |
| "loss": 0.0818, |
| "step": 355 |
| }, |
| { |
| "epoch": 6.802547770700637, |
| "grad_norm": 0.2948771544793749, |
| "learning_rate": 2.9521635783001932e-08, |
| "loss": 0.0676, |
| "step": 356 |
| }, |
| { |
| "epoch": 6.821656050955414, |
| "grad_norm": 0.2887568298378835, |
| "learning_rate": 2.2605110552477162e-08, |
| "loss": 0.0926, |
| "step": 357 |
| }, |
| { |
| "epoch": 6.840764331210191, |
| "grad_norm": 0.31867546960884374, |
| "learning_rate": 1.6609497170834154e-08, |
| "loss": 0.0594, |
| "step": 358 |
| }, |
| { |
| "epoch": 6.859872611464969, |
| "grad_norm": 0.2774368466710001, |
| "learning_rate": 1.1535349032167908e-08, |
| "loss": 0.0604, |
| "step": 359 |
| }, |
| { |
| "epoch": 6.8789808917197455, |
| "grad_norm": 0.24190844368060577, |
| "learning_rate": 7.3831344794872415e-09, |
| "loss": 0.0539, |
| "step": 360 |
| }, |
| { |
| "epoch": 6.898089171974522, |
| "grad_norm": 0.2839464423499535, |
| "learning_rate": 4.153236761488266e-09, |
| "loss": 0.0409, |
| "step": 361 |
| }, |
| { |
| "epoch": 6.917197452229299, |
| "grad_norm": 0.2738860294303811, |
| "learning_rate": 1.8459539971804608e-09, |
| "loss": 0.0818, |
| "step": 362 |
| }, |
| { |
| "epoch": 6.936305732484076, |
| "grad_norm": 0.2921665833615179, |
| "learning_rate": 4.614991483686826e-10, |
| "loss": 0.0689, |
| "step": 363 |
| }, |
| { |
| "epoch": 6.955414012738854, |
| "grad_norm": 0.30226270013210504, |
| "learning_rate": 0.0, |
| "loss": 0.0803, |
| "step": 364 |
| }, |
| { |
| "epoch": 6.955414012738854, |
| "step": 364, |
| "total_flos": 9.018872245445427e+16, |
| "train_loss": 0.35375573943563543, |
| "train_runtime": 2255.9423, |
| "train_samples_per_second": 15.515, |
| "train_steps_per_second": 0.161 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 364, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 7, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 9.018872245445427e+16, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |