{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9969742813918305,
  "eval_steps": 500,
  "global_step": 660,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0030257186081694403, "grad_norm": 3.2359302706689097, "learning_rate": 3.0303030303030305e-07, "loss": 0.6843, "step": 1},
    {"epoch": 0.006051437216338881, "grad_norm": 3.4195031035554595, "learning_rate": 6.060606060606061e-07, "loss": 0.7301, "step": 2},
    {"epoch": 0.009077155824508321, "grad_norm": 2.9726245318901916, "learning_rate": 9.090909090909091e-07, "loss": 0.6748, "step": 3},
    {"epoch": 0.012102874432677761, "grad_norm": 2.9479318978298306, "learning_rate": 1.2121212121212122e-06, "loss": 0.6892, "step": 4},
    {"epoch": 0.015128593040847202, "grad_norm": 3.1902751673864658, "learning_rate": 1.5151515151515152e-06, "loss": 0.682, "step": 5},
    {"epoch": 0.018154311649016642, "grad_norm": 2.954691904247298, "learning_rate": 1.8181818181818183e-06, "loss": 0.7005, "step": 6},
    {"epoch": 0.02118003025718608, "grad_norm": 2.7786505550781495, "learning_rate": 2.1212121212121216e-06, "loss": 0.6795, "step": 7},
    {"epoch": 0.024205748865355523, "grad_norm": 2.4098658032892013, "learning_rate": 2.4242424242424244e-06, "loss": 0.6616, "step": 8},
    {"epoch": 0.02723146747352496, "grad_norm": 1.7674198517391475, "learning_rate": 2.7272727272727272e-06, "loss": 0.6365, "step": 9},
    {"epoch": 0.030257186081694403, "grad_norm": 1.6495747153820528, "learning_rate": 3.0303030303030305e-06, "loss": 0.6089, "step": 10},
    {"epoch": 0.03328290468986384, "grad_norm": 1.6074523974431405, "learning_rate": 3.3333333333333333e-06, "loss": 0.606, "step": 11},
    {"epoch": 0.036308623298033284, "grad_norm": 1.8227003684672904, "learning_rate": 3.6363636363636366e-06, "loss": 0.6053, "step": 12},
    {"epoch": 0.039334341906202726, "grad_norm": 2.649238264473654, "learning_rate": 3.93939393939394e-06, "loss": 0.6255, "step": 13},
    {"epoch": 0.04236006051437216, "grad_norm": 2.40801256922945, "learning_rate": 4.242424242424243e-06, "loss": 0.5894, "step": 14},
    {"epoch": 0.0453857791225416, "grad_norm": 1.7370125858685355, "learning_rate": 4.5454545454545455e-06, "loss": 0.581, "step": 15},
    {"epoch": 0.048411497730711045, "grad_norm": 1.4036337681648456, "learning_rate": 4.848484848484849e-06, "loss": 0.5733, "step": 16},
    {"epoch": 0.05143721633888049, "grad_norm": 1.2971614185365121, "learning_rate": 5.151515151515152e-06, "loss": 0.5841, "step": 17},
    {"epoch": 0.05446293494704992, "grad_norm": 1.3884028741488217, "learning_rate": 5.4545454545454545e-06, "loss": 0.5687, "step": 18},
    {"epoch": 0.057488653555219364, "grad_norm": 1.3745194354883377, "learning_rate": 5.7575757575757586e-06, "loss": 0.555, "step": 19},
    {"epoch": 0.060514372163388806, "grad_norm": 1.13104178464365, "learning_rate": 6.060606060606061e-06, "loss": 0.5466, "step": 20},
    {"epoch": 0.06354009077155824, "grad_norm": 0.9974929209249663, "learning_rate": 6.363636363636364e-06, "loss": 0.5448, "step": 21},
    {"epoch": 0.06656580937972768, "grad_norm": 1.1602579518689689, "learning_rate": 6.666666666666667e-06, "loss": 0.5483, "step": 22},
    {"epoch": 0.06959152798789713, "grad_norm": 1.0375023595547186, "learning_rate": 6.969696969696971e-06, "loss": 0.5224, "step": 23},
    {"epoch": 0.07261724659606657, "grad_norm": 0.9569933874712262, "learning_rate": 7.272727272727273e-06, "loss": 0.5272, "step": 24},
    {"epoch": 0.07564296520423601, "grad_norm": 1.0098804831143144, "learning_rate": 7.5757575757575764e-06, "loss": 0.5053, "step": 25},
    {"epoch": 0.07866868381240545, "grad_norm": 0.9053178984389543, "learning_rate": 7.87878787878788e-06, "loss": 0.5253, "step": 26},
    {"epoch": 0.08169440242057488, "grad_norm": 0.8585155739383054, "learning_rate": 8.181818181818183e-06, "loss": 0.4973, "step": 27},
    {"epoch": 0.08472012102874432, "grad_norm": 0.9237564487357719, "learning_rate": 8.484848484848486e-06, "loss": 0.5192, "step": 28},
    {"epoch": 0.08774583963691376, "grad_norm": 0.8711216921003899, "learning_rate": 8.787878787878788e-06, "loss": 0.5021, "step": 29},
    {"epoch": 0.0907715582450832, "grad_norm": 0.8410840493343933, "learning_rate": 9.090909090909091e-06, "loss": 0.5246, "step": 30},
    {"epoch": 0.09379727685325265, "grad_norm": 0.9539580202159227, "learning_rate": 9.393939393939396e-06, "loss": 0.4967, "step": 31},
    {"epoch": 0.09682299546142209, "grad_norm": 0.882970466475962, "learning_rate": 9.696969696969698e-06, "loss": 0.486, "step": 32},
    {"epoch": 0.09984871406959153, "grad_norm": 0.8595319705628072, "learning_rate": 1e-05, "loss": 0.4853, "step": 33},
    {"epoch": 0.10287443267776097, "grad_norm": 0.916799176948836, "learning_rate": 1.0303030303030304e-05, "loss": 0.4885, "step": 34},
    {"epoch": 0.1059001512859304, "grad_norm": 0.8341148837668653, "learning_rate": 1.0606060606060606e-05, "loss": 0.4896, "step": 35},
    {"epoch": 0.10892586989409984, "grad_norm": 0.9174069456579252, "learning_rate": 1.0909090909090909e-05, "loss": 0.5194, "step": 36},
    {"epoch": 0.11195158850226929, "grad_norm": 0.9456480670636227, "learning_rate": 1.1212121212121212e-05, "loss": 0.5032, "step": 37},
    {"epoch": 0.11497730711043873, "grad_norm": 0.824068092491293, "learning_rate": 1.1515151515151517e-05, "loss": 0.5008, "step": 38},
    {"epoch": 0.11800302571860817, "grad_norm": 0.8685530978240626, "learning_rate": 1.181818181818182e-05, "loss": 0.5041, "step": 39},
    {"epoch": 0.12102874432677761, "grad_norm": 0.9486893469964974, "learning_rate": 1.2121212121212122e-05, "loss": 0.4896, "step": 40},
    {"epoch": 0.12405446293494705, "grad_norm": 0.837302823312357, "learning_rate": 1.2424242424242425e-05, "loss": 0.4802, "step": 41},
    {"epoch": 0.12708018154311648, "grad_norm": 1.072658544116486, "learning_rate": 1.2727272727272728e-05, "loss": 0.5018, "step": 42},
    {"epoch": 0.13010590015128592, "grad_norm": 0.8779117529934144, "learning_rate": 1.3030303030303032e-05, "loss": 0.5153, "step": 43},
    {"epoch": 0.13313161875945537, "grad_norm": 0.8645121013619218, "learning_rate": 1.3333333333333333e-05, "loss": 0.4858, "step": 44},
    {"epoch": 0.1361573373676248, "grad_norm": 0.9467749083085863, "learning_rate": 1.3636363636363637e-05, "loss": 0.4978, "step": 45},
    {"epoch": 0.13918305597579425, "grad_norm": 0.8215517099240874, "learning_rate": 1.3939393939393942e-05, "loss": 0.4788, "step": 46},
    {"epoch": 0.1422087745839637, "grad_norm": 0.9356558769074285, "learning_rate": 1.4242424242424245e-05, "loss": 0.4808, "step": 47},
    {"epoch": 0.14523449319213314, "grad_norm": 0.8359093466006164, "learning_rate": 1.4545454545454546e-05, "loss": 0.4749, "step": 48},
    {"epoch": 0.14826021180030258, "grad_norm": 0.8405621735429246, "learning_rate": 1.484848484848485e-05, "loss": 0.5015, "step": 49},
    {"epoch": 0.15128593040847202, "grad_norm": 0.9205963257608583, "learning_rate": 1.5151515151515153e-05, "loss": 0.4804, "step": 50},
    {"epoch": 0.15431164901664146, "grad_norm": 0.9796763874203765, "learning_rate": 1.5454545454545454e-05, "loss": 0.4848, "step": 51},
    {"epoch": 0.1573373676248109, "grad_norm": 0.7764646459549941, "learning_rate": 1.575757575757576e-05, "loss": 0.4813, "step": 52},
    {"epoch": 0.16036308623298035, "grad_norm": 0.9794105439724095, "learning_rate": 1.606060606060606e-05, "loss": 0.4708, "step": 53},
    {"epoch": 0.16338880484114976, "grad_norm": 0.847190932870233, "learning_rate": 1.6363636363636366e-05, "loss": 0.4765, "step": 54},
    {"epoch": 0.1664145234493192, "grad_norm": 0.9644308306888218, "learning_rate": 1.6666666666666667e-05, "loss": 0.4735, "step": 55},
    {"epoch": 0.16944024205748864, "grad_norm": 0.832999457745713, "learning_rate": 1.6969696969696972e-05, "loss": 0.4995, "step": 56},
    {"epoch": 0.17246596066565809, "grad_norm": 0.9119095253110416, "learning_rate": 1.7272727272727274e-05, "loss": 0.4889, "step": 57},
    {"epoch": 0.17549167927382753, "grad_norm": 0.8017515106319955, "learning_rate": 1.7575757575757576e-05, "loss": 0.5021, "step": 58},
    {"epoch": 0.17851739788199697, "grad_norm": 0.8982450653826051, "learning_rate": 1.787878787878788e-05, "loss": 0.5013, "step": 59},
    {"epoch": 0.1815431164901664, "grad_norm": 0.8376966770742111, "learning_rate": 1.8181818181818182e-05, "loss": 0.4869, "step": 60},
    {"epoch": 0.18456883509833585, "grad_norm": 0.8894859741285548, "learning_rate": 1.8484848484848487e-05, "loss": 0.4947, "step": 61},
    {"epoch": 0.1875945537065053, "grad_norm": 0.8232940281624105, "learning_rate": 1.8787878787878792e-05, "loss": 0.4948, "step": 62},
    {"epoch": 0.19062027231467474, "grad_norm": 1.031532778176587, "learning_rate": 1.9090909090909094e-05, "loss": 0.4889, "step": 63},
    {"epoch": 0.19364599092284418, "grad_norm": 0.9313652956555597, "learning_rate": 1.9393939393939395e-05, "loss": 0.4847, "step": 64},
    {"epoch": 0.19667170953101362, "grad_norm": 0.8787588693818242, "learning_rate": 1.96969696969697e-05, "loss": 0.4586, "step": 65},
    {"epoch": 0.19969742813918306, "grad_norm": 0.8525763685448345, "learning_rate": 2e-05, "loss": 0.4613, "step": 66},
    {"epoch": 0.2027231467473525, "grad_norm": 0.8558181900943557, "learning_rate": 1.9999860139251737e-05, "loss": 0.4826, "step": 67},
    {"epoch": 0.20574886535552195, "grad_norm": 0.8686077226666616, "learning_rate": 1.9999440560919153e-05, "loss": 0.4945, "step": 68},
    {"epoch": 0.2087745839636914, "grad_norm": 0.8332130141685146, "learning_rate": 1.9998741276738753e-05, "loss": 0.4811, "step": 69},
    {"epoch": 0.2118003025718608, "grad_norm": 3.420627586072789, "learning_rate": 1.999776230627102e-05, "loss": 0.4786, "step": 70},
    {"epoch": 0.21482602118003025, "grad_norm": 11.004543075662326, "learning_rate": 1.9996503676899863e-05, "loss": 0.5118, "step": 71},
    {"epoch": 0.2178517397881997, "grad_norm": 1.325227833252734, "learning_rate": 1.9994965423831853e-05, "loss": 0.464, "step": 72},
    {"epoch": 0.22087745839636913, "grad_norm": 0.919248700197972, "learning_rate": 1.9993147590095232e-05, "loss": 0.4859, "step": 73},
    {"epoch": 0.22390317700453857, "grad_norm": 1.0122232947620105, "learning_rate": 1.999105022653872e-05, "loss": 0.4538, "step": 74},
    {"epoch": 0.22692889561270801, "grad_norm": 0.9038348227239963, "learning_rate": 1.9988673391830082e-05, "loss": 0.4892, "step": 75},
    {"epoch": 0.22995461422087746, "grad_norm": 0.961368877058074, "learning_rate": 1.9986017152454497e-05, "loss": 0.4722, "step": 76},
    {"epoch": 0.2329803328290469, "grad_norm": 0.866563091696727, "learning_rate": 1.9983081582712684e-05, "loss": 0.4785, "step": 77},
    {"epoch": 0.23600605143721634, "grad_norm": 1.030435775973294, "learning_rate": 1.9979866764718846e-05, "loss": 0.4627, "step": 78},
    {"epoch": 0.23903177004538578, "grad_norm": 0.9118510143932478, "learning_rate": 1.997637278839835e-05, "loss": 0.4609, "step": 79},
    {"epoch": 0.24205748865355523, "grad_norm": 0.8415352161016965, "learning_rate": 1.9972599751485225e-05, "loss": 0.4688, "step": 80},
    {"epoch": 0.24508320726172467, "grad_norm": 0.9185905624053753, "learning_rate": 1.9968547759519426e-05, "loss": 0.4811, "step": 81},
    {"epoch": 0.2481089258698941, "grad_norm": 0.795864580443437, "learning_rate": 1.9964216925843876e-05, "loss": 0.4805, "step": 82},
    {"epoch": 0.25113464447806355, "grad_norm": 0.819826870382118, "learning_rate": 1.9959607371601303e-05, "loss": 0.4406, "step": 83},
    {"epoch": 0.25416036308623297, "grad_norm": 0.8614470294854708, "learning_rate": 1.9954719225730847e-05, "loss": 0.4705, "step": 84},
    {"epoch": 0.25718608169440244, "grad_norm": 0.8364986408574673, "learning_rate": 1.994955262496446e-05, "loss": 0.4811, "step": 85},
    {"epoch": 0.26021180030257185, "grad_norm": 0.8832221215429533, "learning_rate": 1.9944107713823068e-05, "loss": 0.4732, "step": 86},
    {"epoch": 0.2632375189107413, "grad_norm": 0.8200519422389976, "learning_rate": 1.9938384644612542e-05, "loss": 0.4698, "step": 87},
    {"epoch": 0.26626323751891073, "grad_norm": 0.8273006752173561, "learning_rate": 1.9932383577419432e-05, "loss": 0.4617, "step": 88},
    {"epoch": 0.2692889561270802, "grad_norm": 0.8162620377067755, "learning_rate": 1.9926104680106484e-05, "loss": 0.4744, "step": 89},
    {"epoch": 0.2723146747352496, "grad_norm": 0.8699948355859637, "learning_rate": 1.9919548128307954e-05, "loss": 0.4755, "step": 90},
    {"epoch": 0.2753403933434191, "grad_norm": 0.794825799289987, "learning_rate": 1.9912714105424694e-05, "loss": 0.4662, "step": 91},
    {"epoch": 0.2783661119515885, "grad_norm": 0.8298348466506988, "learning_rate": 1.990560280261901e-05, "loss": 0.4818, "step": 92},
    {"epoch": 0.2813918305597579, "grad_norm": 0.8003610532496683, "learning_rate": 1.989821441880933e-05, "loss": 0.4519, "step": 93},
    {"epoch": 0.2844175491679274, "grad_norm": 0.7812840081055671, "learning_rate": 1.9890549160664633e-05, "loss": 0.485, "step": 94},
    {"epoch": 0.2874432677760968, "grad_norm": 0.8549697997497775, "learning_rate": 1.9882607242598663e-05, "loss": 0.4479, "step": 95},
    {"epoch": 0.29046898638426627, "grad_norm": 0.7961293707976221, "learning_rate": 1.9874388886763944e-05, "loss": 0.4639, "step": 96},
    {"epoch": 0.2934947049924357, "grad_norm": 0.8157001050368008, "learning_rate": 1.9865894323045558e-05, "loss": 0.4638, "step": 97},
    {"epoch": 0.29652042360060515, "grad_norm": 0.7818887018583174, "learning_rate": 1.9857123789054707e-05, "loss": 0.4896, "step": 98},
    {"epoch": 0.29954614220877457, "grad_norm": 0.8661961641879076, "learning_rate": 1.9848077530122083e-05, "loss": 0.4818, "step": 99},
    {"epoch": 0.30257186081694404, "grad_norm": 0.7846459573637689, "learning_rate": 1.9838755799290993e-05, "loss": 0.4744, "step": 100},
    {"epoch": 0.30559757942511345, "grad_norm": 0.7814632350719724, "learning_rate": 1.9829158857310288e-05, "loss": 0.4493, "step": 101},
    {"epoch": 0.3086232980332829, "grad_norm": 0.8047168139367591, "learning_rate": 1.9819286972627066e-05, "loss": 0.4731, "step": 102},
    {"epoch": 0.31164901664145234, "grad_norm": 0.8068503106951342, "learning_rate": 1.9809140421379168e-05, "loss": 0.5001, "step": 103},
    {"epoch": 0.3146747352496218, "grad_norm": 0.7830988687335374, "learning_rate": 1.979871948738743e-05, "loss": 0.4554, "step": 104},
    {"epoch": 0.3177004538577912, "grad_norm": 0.7803447963259511, "learning_rate": 1.978802446214779e-05, "loss": 0.4562, "step": 105},
    {"epoch": 0.3207261724659607, "grad_norm": 0.8114710208456569, "learning_rate": 1.9777055644823087e-05, "loss": 0.4574, "step": 106},
    {"epoch": 0.3237518910741301, "grad_norm": 0.7592240240242137, "learning_rate": 1.9765813342234726e-05, "loss": 0.465, "step": 107},
    {"epoch": 0.3267776096822995, "grad_norm": 0.8348844871506939, "learning_rate": 1.9754297868854075e-05, "loss": 0.4972, "step": 108},
    {"epoch": 0.329803328290469, "grad_norm": 0.7960334009715435, "learning_rate": 1.9742509546793673e-05, "loss": 0.4363, "step": 109},
    {"epoch": 0.3328290468986384, "grad_norm": 0.7595128633459627, "learning_rate": 1.973044870579824e-05, "loss": 0.4685, "step": 110},
    {"epoch": 0.3358547655068079, "grad_norm": 0.79951611235352, "learning_rate": 1.9718115683235418e-05, "loss": 0.4631, "step": 111},
    {"epoch": 0.3388804841149773, "grad_norm": 0.766673735917911, "learning_rate": 1.970551082408636e-05, "loss": 0.4619, "step": 112},
    {"epoch": 0.34190620272314676, "grad_norm": 0.7444254626500109, "learning_rate": 1.969263448093608e-05, "loss": 0.48, "step": 113},
    {"epoch": 0.34493192133131617, "grad_norm": 0.7505700737777958, "learning_rate": 1.9679487013963566e-05, "loss": 0.462, "step": 114},
    {"epoch": 0.34795763993948564, "grad_norm": 0.7094816279176349, "learning_rate": 1.9666068790931733e-05, "loss": 0.4643, "step": 115},
    {"epoch": 0.35098335854765506, "grad_norm": 0.7138425553224373, "learning_rate": 1.9652380187177128e-05, "loss": 0.4715, "step": 116},
    {"epoch": 0.3540090771558245, "grad_norm": 0.8085998899469745, "learning_rate": 1.9638421585599422e-05, "loss": 0.4901, "step": 117},
    {"epoch": 0.35703479576399394, "grad_norm": 0.746217714954533, "learning_rate": 1.9624193376650708e-05, "loss": 0.4522, "step": 118},
    {"epoch": 0.3600605143721634, "grad_norm": 0.8026448876017173, "learning_rate": 1.960969595832457e-05, "loss": 0.466, "step": 119},
    {"epoch": 0.3630862329803328, "grad_norm": 0.7831436921451729, "learning_rate": 1.9594929736144978e-05, "loss": 0.4906, "step": 120},
    {"epoch": 0.3661119515885023, "grad_norm": 0.7857715076302414, "learning_rate": 1.957989512315489e-05, "loss": 0.4559, "step": 121},
    {"epoch": 0.3691376701966717, "grad_norm": 0.7440061379248684, "learning_rate": 1.956459253990476e-05, "loss": 0.4728, "step": 122},
    {"epoch": 0.3721633888048411, "grad_norm": 0.8115367258166749, "learning_rate": 1.9549022414440738e-05, "loss": 0.4732, "step": 123},
    {"epoch": 0.3751891074130106, "grad_norm": 0.8226117217591736, "learning_rate": 1.9533185182292705e-05, "loss": 0.4836, "step": 124},
    {"epoch": 0.37821482602118, "grad_norm": 0.8074654331583047, "learning_rate": 1.9517081286462082e-05, "loss": 0.4748, "step": 125},
    {"epoch": 0.3812405446293495, "grad_norm": 0.7603752030865338, "learning_rate": 1.9500711177409456e-05, "loss": 0.4815, "step": 126},
    {"epoch": 0.3842662632375189, "grad_norm": 0.8153082185015335, "learning_rate": 1.9484075313041968e-05, "loss": 0.4633, "step": 127},
    {"epoch": 0.38729198184568836, "grad_norm": 0.7412632863219616, "learning_rate": 1.9467174158700507e-05, "loss": 0.457, "step": 128},
    {"epoch": 0.3903177004538578, "grad_norm": 0.7531696510756141, "learning_rate": 1.9450008187146685e-05, "loss": 0.4331, "step": 129},
    {"epoch": 0.39334341906202724, "grad_norm": 0.7075674964887549, "learning_rate": 1.9432577878549635e-05, "loss": 0.4658, "step": 130},
    {"epoch": 0.39636913767019666, "grad_norm": 0.7353791324669057, "learning_rate": 1.9414883720472557e-05, "loss": 0.4621, "step": 131},
    {"epoch": 0.39939485627836613, "grad_norm": 0.7603630959335661, "learning_rate": 1.9396926207859085e-05, "loss": 0.4732, "step": 132},
    {"epoch": 0.40242057488653554, "grad_norm": 0.720030969674077, "learning_rate": 1.937870584301945e-05, "loss": 0.474, "step": 133},
    {"epoch": 0.405446293494705, "grad_norm": 0.7632226378172703, "learning_rate": 1.9360223135616423e-05, "loss": 0.4792, "step": 134},
    {"epoch": 0.4084720121028744, "grad_norm": 0.716415360885003, "learning_rate": 1.9341478602651068e-05, "loss": 0.4769, "step": 135},
    {"epoch": 0.4114977307110439, "grad_norm": 0.7668188242718849, "learning_rate": 1.932247276844826e-05, "loss": 0.4463, "step": 136},
    {"epoch": 0.4145234493192133, "grad_norm": 0.73111000399707, "learning_rate": 1.9303206164642037e-05, "loss": 0.4695, "step": 137},
    {"epoch": 0.4175491679273828, "grad_norm": 0.7678556737455257, "learning_rate": 1.9283679330160726e-05, "loss": 0.464, "step": 138},
    {"epoch": 0.4205748865355522, "grad_norm": 0.8247627990970195, "learning_rate": 1.9263892811211865e-05, "loss": 0.467, "step": 139},
    {"epoch": 0.4236006051437216, "grad_norm": 0.7119044282961678, "learning_rate": 1.9243847161266924e-05, "loss": 0.4701, "step": 140},
    {"epoch": 0.4266263237518911, "grad_norm": 0.7178023095006331, "learning_rate": 1.9223542941045817e-05, "loss": 0.4817, "step": 141},
    {"epoch": 0.4296520423600605, "grad_norm": 0.8030882881498882, "learning_rate": 1.920298071850123e-05, "loss": 0.4834, "step": 142},
    {"epoch": 0.43267776096822996, "grad_norm": 0.7424855637432263, "learning_rate": 1.9182161068802742e-05, "loss": 0.4411, "step": 143},
    {"epoch": 0.4357034795763994, "grad_norm": 0.8182179546345627, "learning_rate": 1.9161084574320696e-05, "loss": 0.4505, "step": 144},
    {"epoch": 0.43872919818456885, "grad_norm": 0.8542096196402557, "learning_rate": 1.913975182460996e-05, "loss": 0.4748, "step": 145},
    {"epoch": 0.44175491679273826, "grad_norm": 0.7740765612229977, "learning_rate": 1.9118163416393392e-05, "loss": 0.4598, "step": 146},
    {"epoch": 0.44478063540090773, "grad_norm": 0.9184981785372388, "learning_rate": 1.9096319953545186e-05, "loss": 0.4583, "step": 147},
    {"epoch": 0.44780635400907715, "grad_norm": 0.7849112744997566, "learning_rate": 1.9074222047073945e-05, "loss": 0.4807, "step": 148},
    {"epoch": 0.4508320726172466, "grad_norm": 0.7743286521384856, "learning_rate": 1.9051870315105626e-05, "loss": 0.4666, "step": 149},
    {"epoch": 0.45385779122541603, "grad_norm": 0.7858524698653325, "learning_rate": 1.9029265382866216e-05, "loss": 0.4749, "step": 150},
    {"epoch": 0.4568835098335855, "grad_norm": 0.7897951841284223, "learning_rate": 1.9006407882664256e-05, "loss": 0.4486, "step": 151},
    {"epoch": 0.4599092284417549, "grad_norm": 0.8871517103169496, "learning_rate": 1.8983298453873172e-05, "loss": 0.4452, "step": 152},
    {"epoch": 0.4629349470499244, "grad_norm": 0.7296489658649773, "learning_rate": 1.895993774291336e-05, "loss": 0.4577, "step": 153},
    {"epoch": 0.4659606656580938, "grad_norm": 0.7821099839165611, "learning_rate": 1.8936326403234125e-05, "loss": 0.4487, "step": 154},
    {"epoch": 0.4689863842662632, "grad_norm": 0.7457598595911572, "learning_rate": 1.891246509529539e-05, "loss": 0.4597, "step": 155},
    {"epoch": 0.4720121028744327, "grad_norm": 0.7634201517521181, "learning_rate": 1.8888354486549238e-05, "loss": 0.4677, "step": 156},
    {"epoch": 0.4750378214826021, "grad_norm": 0.791782233718824, "learning_rate": 1.886399525142122e-05, "loss": 0.4533, "step": 157},
    {"epoch": 0.47806354009077157, "grad_norm": 0.7332280105795193, "learning_rate": 1.8839388071291506e-05, "loss": 0.4511, "step": 158},
    {"epoch": 0.481089258698941, "grad_norm": 0.8474845589138872, "learning_rate": 1.881453363447582e-05, "loss": 0.4674, "step": 159},
    {"epoch": 0.48411497730711045, "grad_norm": 0.7808562951516187, "learning_rate": 1.8789432636206197e-05, "loss": 0.4375, "step": 160},
    {"epoch": 0.48714069591527986, "grad_norm": 0.8208993636780277, "learning_rate": 1.8764085778611507e-05, "loss": 0.4825, "step": 161},
    {"epoch": 0.49016641452344933, "grad_norm": 0.8098942850972914, "learning_rate": 1.873849377069785e-05, "loss": 0.4312, "step": 162},
    {"epoch": 0.49319213313161875, "grad_norm": 0.8144032451899935, "learning_rate": 1.87126573283287e-05, "loss": 0.4521, "step": 163},
    {"epoch": 0.4962178517397882, "grad_norm": 0.7940882401011262, "learning_rate": 1.8686577174204887e-05, "loss": 0.4675, "step": 164},
    {"epoch": 0.49924357034795763, "grad_norm": 0.7844769126279116, "learning_rate": 1.866025403784439e-05, "loss": 0.4675, "step": 165},
    {"epoch": 0.5022692889561271, "grad_norm": 0.835156355502922, "learning_rate": 1.863368865556191e-05, "loss": 0.4412, "step": 166},
    {"epoch": 0.5052950075642966, "grad_norm": 0.7271404198522912, "learning_rate": 1.8606881770448305e-05, "loss": 0.4469, "step": 167},
    {"epoch": 0.5083207261724659, "grad_norm": 0.7694008744736777, "learning_rate": 1.8579834132349773e-05, "loss": 0.4585, "step": 168},
    {"epoch": 0.5113464447806354, "grad_norm": 0.7226976209354771, "learning_rate": 1.8552546497846893e-05, "loss": 0.4637, "step": 169},
    {"epoch": 0.5143721633888049, "grad_norm": 0.762126609191928, "learning_rate": 1.8525019630233463e-05, "loss": 0.4915, "step": 170},
    {"epoch": 0.5173978819969742, "grad_norm": 0.7647569713014367, "learning_rate": 1.8497254299495147e-05, "loss": 0.4417, "step": 171},
    {"epoch": 0.5204236006051437, "grad_norm": 0.9273463790425459, "learning_rate": 1.8469251282287925e-05, "loss": 0.4661, "step": 172},
    {"epoch": 0.5234493192133132, "grad_norm": 0.7148348326781622, "learning_rate": 1.8441011361916387e-05, "loss": 0.4383, "step": 173},
    {"epoch": 0.5264750378214826, "grad_norm": 0.7264461517962761, "learning_rate": 1.8412535328311813e-05, "loss": 0.4587, "step": 174},
    {"epoch": 0.529500756429652, "grad_norm": 0.8444221623756705, "learning_rate": 1.8383823978010077e-05, "loss": 0.466, "step": 175},
    {"epoch": 0.5325264750378215, "grad_norm": 0.8714557331453434, "learning_rate": 1.8354878114129368e-05, "loss": 0.4046, "step": 176},
    {"epoch": 0.5355521936459909, "grad_norm": 0.854138894906376, "learning_rate": 1.8325698546347714e-05, "loss": 0.472, "step": 177},
    {"epoch": 0.5385779122541604, "grad_norm": 0.6978742671387683, "learning_rate": 1.8296286090880362e-05, "loss": 0.4529, "step": 178},
    {"epoch": 0.5416036308623298, "grad_norm": 0.7824273235665554, "learning_rate": 1.8266641570456915e-05, "loss": 0.4416, "step": 179},
    {"epoch": 0.5446293494704992, "grad_norm": 0.7454445979595118, "learning_rate": 1.8236765814298328e-05, "loss": 0.4649, "step": 180},
    {"epoch": 0.5476550680786687, "grad_norm": 0.7220494456681561, "learning_rate": 1.820665965809373e-05, "loss": 0.4492, "step": 181},
    {"epoch": 0.5506807866868382, "grad_norm": 0.8195530307449318, "learning_rate": 1.8176323943977034e-05, "loss": 0.4628, "step": 182},
    {"epoch": 0.5537065052950075, "grad_norm": 0.7597610701571493, "learning_rate": 1.814575952050336e-05, "loss": 0.4543, "step": 183},
    {"epoch": 0.556732223903177, "grad_norm": 0.8272653605135085, "learning_rate": 1.8114967242625342e-05, "loss": 0.433, "step": 184},
    {"epoch": 0.5597579425113465, "grad_norm": 0.7725990134832289, "learning_rate": 1.808394797166919e-05, "loss": 0.4459, "step": 185},
    {"epoch": 0.5627836611195158, "grad_norm": 0.8234284342602471, "learning_rate": 1.8052702575310588e-05, "loss": 0.4076, "step": 186},
    {"epoch": 0.5658093797276853, "grad_norm": 0.8104715090689735, "learning_rate": 1.802123192755044e-05, "loss": 0.4624, "step": 187},
    {"epoch": 0.5688350983358548, "grad_norm": 0.852381910994619, "learning_rate": 1.7989536908690413e-05, "loss": 0.4291, "step": 188},
    {"epoch": 0.5718608169440242, "grad_norm": 0.7925118805588005, "learning_rate": 1.7957618405308323e-05, "loss": 0.4564, "step": 189},
    {"epoch": 0.5748865355521936, "grad_norm": 0.6545283074949928, "learning_rate": 1.792547731023332e-05, "loss": 0.4277, "step": 190},
    {"epoch": 0.5779122541603631, "grad_norm": 0.7801965587085363, "learning_rate": 1.789311452252092e-05, "loss": 0.4328, "step": 191},
    {"epoch": 0.5809379727685325, "grad_norm": 0.7573829892612971, "learning_rate": 1.7860530947427878e-05, "loss": 0.4315, "step": 192},
    {"epoch": 0.583963691376702, "grad_norm": 0.7648505243695523, "learning_rate": 1.782772749638682e-05, "loss": 0.4251, "step": 193},
    {"epoch": 0.5869894099848714, "grad_norm": 0.8976388463825138, "learning_rate": 1.779470508698079e-05, "loss": 0.4427, "step": 194},
    {"epoch": 0.5900151285930408, "grad_norm": 0.694017345816723, "learning_rate": 1.776146464291757e-05, "loss": 0.4525, "step": 195},
    {"epoch": 0.5930408472012103, "grad_norm": 0.8988912977460627, "learning_rate": 1.772800709400383e-05, "loss": 0.4607, "step": 196},
    {"epoch": 0.5960665658093798, "grad_norm": 0.8198938972019454, "learning_rate": 1.7694333376119144e-05, "loss": 0.4433, "step": 197},
    {"epoch": 0.5990922844175491, "grad_norm": 0.706341354857456, "learning_rate": 1.766044443118978e-05, "loss": 0.4395, "step": 198},
    {"epoch": 0.6021180030257186, "grad_norm": 0.8267717035208544, "learning_rate": 1.762634120716238e-05, "loss": 0.4398, "step": 199},
    {"epoch": 0.6051437216338881, "grad_norm": 0.7403996621870499, "learning_rate": 1.7592024657977432e-05, "loss": 0.4593, "step": 200},
    {"epoch": 0.6081694402420574, "grad_norm": 0.8462014986376135, "learning_rate": 1.7557495743542586e-05, "loss": 0.4585, "step": 201},
    {"epoch": 0.6111951588502269, "grad_norm": 0.7450909669892128, "learning_rate": 1.75227554297058e-05, "loss": 0.4382, "step": 202},
    {"epoch": 0.6142208774583964, "grad_norm": 0.7054634534256583, "learning_rate": 1.7487804688228327e-05, "loss": 0.4605, "step": 203},
    {"epoch": 0.6172465960665658, "grad_norm": 0.8276935551562342, "learning_rate": 1.745264449675755e-05, "loss": 0.4458, "step": 204},
    {"epoch": 0.6202723146747352, "grad_norm": 0.7193176425881135, "learning_rate": 1.7417275838799596e-05, "loss": 0.4402, "step": 205},
    {"epoch": 0.6232980332829047, "grad_norm": 0.7560771401654319, "learning_rate": 1.7381699703691866e-05, "loss": 0.4511, "step": 206},
    {"epoch": 0.6263237518910741, "grad_norm": 0.913990010669637, "learning_rate": 1.734591708657533e-05, "loss": 0.4481, "step": 207},
    {"epoch": 0.6293494704992436, "grad_norm": 0.7808793798092557, "learning_rate": 1.730992898836672e-05, "loss": 0.4252, "step": 208},
    {"epoch": 0.632375189107413, "grad_norm": 0.823347495273294, "learning_rate": 1.7273736415730488e-05, "loss": 0.4183, "step": 209},
    {"epoch": 0.6354009077155824, "grad_norm": 0.6896409916767459, "learning_rate": 1.72373403810507e-05, "loss": 0.4627, "step": 210},
    {"epoch": 0.6384266263237519, "grad_norm": 0.7996961958820645, "learning_rate": 1.720074190240269e-05, "loss": 0.4619, "step": 211},
    {"epoch": 0.6414523449319214, "grad_norm": 0.7732699550662897, "learning_rate": 1.7163942003524574e-05, "loss": 0.4635, "step": 212},
    {"epoch": 0.6444780635400907, "grad_norm": 0.7073367973681686, "learning_rate": 1.7126941713788633e-05, "loss": 0.4861, "step": 213},
    {"epoch": 0.6475037821482602, "grad_norm": 0.7887348432026596, "learning_rate": 1.70897420681725e-05, "loss": 0.4415, "step": 214},
    {"epoch": 0.6505295007564297, "grad_norm": 0.7072817236699754, "learning_rate": 1.7052344107230244e-05, "loss": 0.4194, "step": 215},
    {"epoch": 0.653555219364599, "grad_norm": 0.6993237189895348, "learning_rate": 1.7014748877063212e-05, "loss": 0.4359, "step": 216},
    {"epoch": 0.6565809379727685, "grad_norm": 0.7677926109207992, "learning_rate": 1.697695742929082e-05, "loss": 0.4311, "step": 217},
    {"epoch": 0.659606656580938, "grad_norm": 0.7047752929489467, "learning_rate": 1.693897082102109e-05, "loss": 0.4431, "step": 218},
    {"epoch": 0.6626323751891074, "grad_norm": 0.7015516312050496, "learning_rate": 1.6900790114821122e-05, "loss": 0.4345, "step": 219},
    {"epoch": 0.6656580937972768, "grad_norm": 0.7985131743695207, "learning_rate": 1.686241637868734e-05, "loss": 0.4304, "step": 220},
    {"epoch": 0.6686838124054463, "grad_norm": 0.702749908215283, "learning_rate": 1.682385068601563e-05, "loss": 0.4348, "step": 221},
    {"epoch": 0.6717095310136157, "grad_norm": 0.7311329542785553, "learning_rate": 1.6785094115571323e-05, "loss": 0.4368, "step": 222},
    {"epoch": 0.6747352496217852, "grad_norm": 0.6891689251111319, "learning_rate": 1.674614775145901e-05, "loss": 0.4592, "step": 223},
    {"epoch": 0.6777609682299546, "grad_norm": 0.7216711175824501, "learning_rate": 1.670701268309221e-05, "loss": 0.4374, "step": 224},
    {"epoch": 0.680786686838124, "grad_norm": 0.6409152627920011, "learning_rate": 1.666769000516292e-05, "loss": 0.4305, "step": 225},
    {"epoch": 0.6838124054462935, "grad_norm": 0.6777937582650404, "learning_rate": 1.6628180817610963e-05, "loss": 0.4299, "step": 226},
    {"epoch": 0.686838124054463, "grad_norm": 0.6553713354459271, "learning_rate": 1.658848622559325e-05, "loss": 0.4295, "step": 227},
    {"epoch": 0.6898638426626323, "grad_norm": 0.7045267153963359, "learning_rate": 1.6548607339452853e-05, "loss": 0.4361, "step": 228},
    {"epoch": 0.6928895612708018, "grad_norm": 0.6911775930914964, "learning_rate": 1.6508545274687936e-05, "loss": 0.4683, "step": 229},
    {"epoch": 0.6959152798789713, "grad_norm": 0.7305051926567055, "learning_rate": 1.6468301151920576e-05, "loss": 0.4596, "step": 230},
    {"epoch": 0.6989409984871406, "grad_norm": 0.7446064728589221, "learning_rate": 1.6427876096865394e-05, "loss": 0.4537, "step": 231},
    {"epoch": 0.7019667170953101, "grad_norm": 0.7184517732078004, "learning_rate": 1.6387271240298082e-05, "loss": 0.4465, "step": 232},
    {"epoch": 0.7049924357034796, "grad_norm": 0.7133845477561626, "learning_rate": 1.6346487718023762e-05, "loss": 0.4573, "step": 233},
    {"epoch": 0.708018154311649, "grad_norm": 0.7737324013818997, "learning_rate": 1.6305526670845225e-05, "loss": 0.4543, "step": 234},
    {"epoch": 0.7110438729198184, "grad_norm": 0.6802925645597805, "learning_rate": 1.6264389244531015e-05, "loss": 0.4169, "step": 235},
    {"epoch": 0.7140695915279879, "grad_norm": 0.7780128219465188, "learning_rate": 1.6223076589783368e-05, "loss": 0.4457, "step": 236},
    {"epoch": 0.7170953101361573, "grad_norm": 0.7820842077883533, "learning_rate": 1.6181589862206053e-05, "loss": 0.4575, "step": 237},
    {"epoch": 0.7201210287443268, "grad_norm": 0.8470874003101201, "learning_rate": 1.613993022227202e-05, "loss": 0.495, "step": 238},
    {"epoch": 0.7231467473524962, "grad_norm": 0.7114898888624621, "learning_rate": 1.6098098835290955e-05, "loss": 0.4172, "step": 239},
    {"epoch": 0.7261724659606656, "grad_norm": 0.7491363240044612, "learning_rate": 1.6056096871376667e-05, "loss": 0.4172, "step": 240},
    {"epoch": 0.7291981845688351, "grad_norm": 0.662374202975395, "learning_rate": 1.6013925505414386e-05, "loss": 0.4484, "step": 241},
    {"epoch": 0.7322239031770046, "grad_norm": 0.7686615906883829, "learning_rate": 1.5971585917027864e-05, "loss": 0.4473, "step": 242},
    {"epoch": 0.735249621785174, "grad_norm": 0.700155554102243, "learning_rate": 1.5929079290546408e-05, "loss": 0.4351, "step": 243},
    {"epoch": 0.7382753403933434, "grad_norm": 0.656292730905621, "learning_rate": 1.5886406814971728e-05, "loss": 0.4273, "step": 244},
    {"epoch": 0.7413010590015129, "grad_norm": 0.6859581031800799, "learning_rate": 1.584356968394471e-05, "loss": 0.4386, "step": 245},
    {"epoch": 0.7443267776096822, "grad_norm": 0.7545088851397694, "learning_rate": 1.5800569095711983e-05, "loss": 0.4717, "step": 246},
    {"epoch": 0.7473524962178517, "grad_norm": 0.7289622676315515, "learning_rate": 1.575740625309244e-05, "loss": 0.4894, "step": 247},
    {"epoch": 0.7503782148260212, "grad_norm": 0.6556249482499656, "learning_rate": 1.5714082363443576e-05, "loss": 0.4304, "step": 248},
    {"epoch": 0.7534039334341907, "grad_norm": 0.7509413135531051, "learning_rate": 1.5670598638627707e-05, "loss": 0.4367, "step": 249},
    {"epoch": 0.75642965204236, "grad_norm": 0.6721509034335965, "learning_rate": 1.5626956294978103e-05, "loss": 0.4269, "step": 250},
    {"epoch": 0.7594553706505295, "grad_norm": 0.7234193431136241, "learning_rate": 1.5583156553264923e-05, "loss": 0.4294, "step": 251},
    {"epoch": 0.762481089258699, "grad_norm": 0.7590262743068436, "learning_rate": 1.5539200638661106e-05, "loss": 0.4492, "step": 252},
    {"epoch": 0.7655068078668684, "grad_norm": 0.78996662612169, "learning_rate": 1.5495089780708062e-05, "loss": 0.4594, "step": 253},
    {"epoch": 0.7685325264750378, "grad_norm": 0.7844164634803753, "learning_rate": 1.5450825213281317e-05, "loss": 0.4407, "step": 254},
    {"epoch": 0.7715582450832073, "grad_norm": 0.7382035151368229, "learning_rate": 1.5406408174555978e-05, "loss": 0.4416, "step": 255},
    {"epoch": 0.7745839636913767, "grad_norm": 0.8024486917288045, "learning_rate": 1.5361839906972095e-05, "loss": 0.4304, "step": 256},
    {"epoch": 0.7776096822995462, "grad_norm": 0.7222094973191768, "learning_rate": 1.531712165719992e-05, "loss": 0.4284, "step": 257},
    {"epoch": 0.7806354009077155, "grad_norm": 0.8389875483421309, "learning_rate": 1.5272254676105026e-05, "loss": 0.4207, "step": 258},
    {"epoch": 0.783661119515885, "grad_norm": 0.9530333386062166, "learning_rate": 1.5227240218713326e-05, "loss": 0.4242, "step": 259},
    {"epoch": 0.7866868381240545, "grad_norm": 0.687175037032798, "learning_rate": 1.5182079544175957e-05, "loss": 0.449, "step": 260},
    {"epoch": 0.789712556732224, "grad_norm": 0.9660020301745225, "learning_rate": 1.5136773915734067e-05, "loss": 0.452, "step": 261},
    {"epoch": 0.7927382753403933, "grad_norm": 0.6267457591422214, "learning_rate": 1.5091324600683472e-05, "loss": 0.4182, "step": 262},
    {"epoch": 0.7957639939485628, "grad_norm": 0.7528907826090485, "learning_rate": 1.5045732870339213e-05, "loss": 0.4152, "step": 263},
    {"epoch": 0.7987897125567323, "grad_norm": 0.7373412455614387, "learning_rate": 1.5000000000000002e-05, "loss": 0.4328, "step": 264},
    {"epoch": 0.8018154311649016, "grad_norm": 0.6692833349830674, "learning_rate": 1.4954127268912525e-05, "loss": 0.4388, "step": 265},
    {"epoch": 0.8048411497730711, "grad_norm": 0.8793851965542684, "learning_rate": 1.4908115960235683e-05, "loss": 0.4476, "step": 266},
    {"epoch": 0.8078668683812406, "grad_norm": 0.6953831989002933, "learning_rate": 1.4861967361004687e-05, "loss": 0.4355, "step": 267},
    {"epoch": 0.81089258698941, "grad_norm": 0.7018888904828565, "learning_rate": 1.4815682762095065e-05, "loss": 0.423, "step": 268},
    {"epoch": 0.8139183055975794, "grad_norm": 0.721948170370891, "learning_rate": 1.476926345818654e-05, "loss": 0.4315, "step": 269},
    {"epoch": 0.8169440242057489, "grad_norm": 0.7234397736711298, "learning_rate": 1.472271074772683e-05, "loss": 0.4532, "step": 270},
    {"epoch": 0.8199697428139183, "grad_norm": 0.6568968962701406, "learning_rate": 1.4676025932895315e-05, "loss": 0.4184, "step": 271},
    {"epoch": 0.8229954614220878, "grad_norm": 0.6403815599134479, "learning_rate": 1.4629210319566626e-05, "loss": 0.4143, "step": 272},
    {"epoch": 0.8260211800302572, "grad_norm": 0.6988970609033885, "learning_rate": 1.4582265217274105e-05, "loss": 0.4488, "step": 273},
    {"epoch": 0.8290468986384266, "grad_norm": 0.7200687545090472, "learning_rate": 1.4535191939173179e-05, "loss": 0.4303, "step": 274},
    {"epoch": 0.8320726172465961, "grad_norm": 0.6526077635976553, "learning_rate": 1.4487991802004625e-05, "loss": 0.428, "step": 275},
    {"epoch": 0.8350983358547656, "grad_norm": 0.7211620314402787, "learning_rate": 1.4440666126057743e-05, "loss": 0.4364, "step": 276},
    {"epoch": 0.8381240544629349, "grad_norm": 0.6470744438398066, "learning_rate": 1.4393216235133427e-05, "loss": 0.4315, "step": 277},
    {"epoch": 0.8411497730711044, "grad_norm": 0.6418555920796338, "learning_rate": 1.4345643456507126e-05, "loss": 0.4125, "step": 278},
    {"epoch": 0.8441754916792739, "grad_norm": 0.6773097901151605, "learning_rate": 1.4297949120891718e-05, "loss": 0.4255, "step": 279},
    {"epoch": 0.8472012102874432, "grad_norm": 0.658051259082613, "learning_rate": 1.4250134562400301e-05, "loss": 0.4446, "step": 280},
    {"epoch": 0.8502269288956127, "grad_norm": 0.6974728575379905, "learning_rate": 1.4202201118508863e-05, "loss": 0.4293, "step": 281},
    {"epoch": 0.8532526475037822, "grad_norm": 0.6765138917215264, "learning_rate": 1.4154150130018867e-05, "loss": 0.4404, "step": 282},
    {"epoch": 0.8562783661119516, "grad_norm": 0.7038524356639164, "learning_rate": 1.4105982941019751e-05, "loss": 0.4582, "step": 283},
    {"epoch": 0.859304084720121, "grad_norm": 0.7890323327276542, "learning_rate": 1.405770089885134e-05, "loss": 0.4398, "step": 284},
    {"epoch": 0.8623298033282905, "grad_norm": 0.6684023363425661, "learning_rate": 1.4009305354066138e-05, "loss": 0.4478, "step": 285},
    {"epoch": 0.8653555219364599, "grad_norm": 0.6625981737074014, "learning_rate": 1.396079766039157e-05, "loss": 0.4138, "step": 286},
    {"epoch": 0.8683812405446294, "grad_norm": 0.7315776092591135, "learning_rate": 1.39121791746921e-05, "loss": 0.4272, "step": 287},
    {"epoch": 0.8714069591527988, "grad_norm": 0.6182560342303461, "learning_rate": 1.3863451256931286e-05, "loss": 0.4198, "step": 288},
    {"epoch": 0.8744326777609682, "grad_norm": 0.6929137747582454, "learning_rate": 1.381461527013374e-05, "loss": 0.4217, "step": 289},
    {"epoch": 0.8774583963691377, "grad_norm": 0.7066798247312386, "learning_rate": 1.3765672580346986e-05, "loss": 0.4431, "step": 290},
    {"epoch": 0.8804841149773072, "grad_norm": 0.6305145222142792, "learning_rate": 1.3716624556603275e-05, "loss": 0.4357, "step": 291},
    {"epoch": 0.8835098335854765, "grad_norm": 0.7705816597871802, "learning_rate": 1.3667472570881264e-05, "loss": 0.4414, "step": 292},
    {"epoch": 0.886535552193646, "grad_norm": 0.6941206594271374, "learning_rate": 1.361821799806765e-05, "loss": 0.4444, "step": 293},
    {"epoch": 0.8895612708018155, "grad_norm": 0.6619186888204759, "learning_rate": 1.356886221591872e-05, "loss": 0.4114, "step": 294},
    {"epoch": 0.8925869894099848, "grad_norm": 0.7236389775124372, "learning_rate": 1.3519406605021797e-05, "loss": 0.4195, "step": 295},
    {"epoch": 0.8956127080181543, "grad_norm": 0.7103703294559195, "learning_rate": 1.3469852548756626e-05, "loss": 0.4062, "step": 296},
    {"epoch": 0.8986384266263238, "grad_norm": 0.7927048001246919, "learning_rate": 1.342020143325669e-05, "loss": 0.4312, "step": 297},
    {"epoch": 0.9016641452344932, "grad_norm": 0.7447171044021622, "learning_rate": 1.3370454647370418e-05, "loss": 0.4325, "step": 298},
    {"epoch": 0.9046898638426626, "grad_norm": 0.7741094333202405, "learning_rate": 1.3320613582622354e-05, "loss": 0.427, "step": 299},
    {"epoch": 0.9077155824508321, "grad_norm": 0.7403834086404457, "learning_rate": 1.3270679633174219e-05, "loss": 0.4454, "step": 300},
    {"epoch": 0.9107413010590015, "grad_norm": 0.7565200710834815, "learning_rate": 1.3220654195785917e-05, "loss": 0.4466, "step": 301},
    {"epoch": 0.913767019667171, "grad_norm": 0.6649629702265453, "learning_rate": 1.3170538669776469e-05, "loss": 0.4477, "step": 302},
    {"epoch": 0.9167927382753404, "grad_norm": 0.702144955209538, "learning_rate": 1.3120334456984871e-05, "loss": 0.4343, "step": 303},
    {"epoch": 0.9198184568835098, "grad_norm": 0.7100832170265334, "learning_rate": 1.3070042961730878e-05, "loss": 0.4452, "step": 304},
    {"epoch": 0.9228441754916793, "grad_norm": 0.6202809816991117, "learning_rate": 1.3019665590775717e-05, "loss": 0.4, "step": 305},
    {"epoch": 0.9258698940998488, "grad_norm": 0.7081830841444228, "learning_rate": 1.296920375328275e-05, "loss": 0.4486, "step": 306},
    {"epoch": 0.9288956127080181, "grad_norm": 0.6963797441407573, "learning_rate": 1.2918658860778046e-05, "loss": 0.4187, "step": 307},
    {"epoch": 0.9319213313161876, "grad_norm": 0.6349768424614151, "learning_rate": 1.2868032327110904e-05, "loss": 0.4288, "step": 308},
    {"epoch": 0.9349470499243571, "grad_norm": 0.7455991360461471, "learning_rate": 1.2817325568414299e-05, "loss": 0.4473, "step": 309},
    {"epoch": 0.9379727685325264, "grad_norm": 0.6575386615310393, "learning_rate": 1.2766540003065272e-05, "loss": 0.4037, "step": 310},
    {"epoch": 0.9409984871406959, "grad_norm": 0.663503291718573, "learning_rate": 1.2715677051645259e-05, "loss": 0.4305, "step": 311},
    {"epoch": 0.9440242057488654, "grad_norm": 0.6542784559023755, "learning_rate": 1.266473813690035e-05, "loss": 0.4217, "step": 312},
    {"epoch": 0.9470499243570348, "grad_norm": 0.6642429354459604, "learning_rate": 1.2613724683701491e-05, "loss": 0.4276, "step": 313},
    {"epoch": 0.9500756429652042, "grad_norm": 0.6734395130395916, "learning_rate": 1.2562638119004627e-05, "loss": 0.4331, "step": 314},
    {"epoch": 0.9531013615733737, "grad_norm": 0.6836373337816561, "learning_rate": 1.2511479871810792e-05, "loss": 0.4376, "step": 315},
    {"epoch": 0.9561270801815431, "grad_norm": 0.6132637708917181, "learning_rate": 1.2460251373126136e-05, "loss": 0.3959, "step": 316},
    {"epoch": 0.9591527987897126, "grad_norm": 0.6654776679320343, "learning_rate": 1.2408954055921884e-05, "loss": 0.4159, "step": 317},
    {"epoch": 0.962178517397882, "grad_norm": 0.6513779259535468, "learning_rate": 1.2357589355094275e-05, "loss": 0.4286, "step": 318},
    {"epoch": 0.9652042360060514, "grad_norm": 0.6212052016389038, "learning_rate": 1.2306158707424402e-05, "loss": 0.4332, "step": 319},
    {"epoch": 0.9682299546142209, "grad_norm": 0.6429147998928506, "learning_rate": 1.2254663551538047e-05, "loss": 0.4129, "step": 320},
    {"epoch": 0.9712556732223904, "grad_norm": 0.7078698069166152, "learning_rate": 1.2203105327865407e-05, "loss": 0.4431, "step": 321},
    {"epoch": 0.9742813918305597, "grad_norm": 0.702310761153178, "learning_rate": 1.215148547860084e-05, "loss": 0.4341, "step": 322},
    {"epoch": 0.9773071104387292, "grad_norm": 0.6159471760032799, "learning_rate": 1.2099805447662485e-05, "loss": 0.4021, "step": 323},
    {"epoch": 0.9803328290468987, "grad_norm": 0.6545744463588348, "learning_rate": 1.2048066680651908e-05, "loss": 0.4313, "step": 324},
    {"epoch": 0.983358547655068, "grad_norm": 0.6755833839396262, "learning_rate": 1.1996270624813642e-05, "loss": 0.4244, "step": 325},
    {"epoch": 0.9863842662632375, "grad_norm": 0.6502098028483089, "learning_rate": 1.194441872899471e-05, "loss": 0.4227, "step": 326},
    {"epoch": 0.989409984871407, "grad_norm": 0.6374150725794762, "learning_rate": 1.1892512443604103e-05, "loss": 0.4122, "step": 327},
    {"epoch": 0.9924357034795764, "grad_norm": 0.6263117504956414, "learning_rate": 1.1840553220572204e-05, "loss": 0.4272, "step": 328},
    {"epoch": 0.9954614220877458, "grad_norm": 0.7052608308335758, "learning_rate": 1.1788542513310178e-05, "loss": 0.4532, "step": 329},
    {"epoch": 0.9984871406959153, "grad_norm": 0.6274335016707111, "learning_rate": 1.1736481776669307e-05, "loss": 0.4369, "step": 330},
    {"epoch": 1.0015128593040847, "grad_norm": 0.7011043870976733, "learning_rate": 1.1684372466900306e-05, "loss": 0.3542, "step": 331},
    {"epoch": 1.0045385779122542, "grad_norm": 0.7976666456870495, "learning_rate": 1.1632216041612595e-05, "loss": 0.3331, "step": 332},
    {"epoch": 1.0075642965204237, "grad_norm": 0.697853265075721, "learning_rate": 1.15800139597335e-05, "loss": 0.3087, "step": 333},
    {"epoch": 1.0105900151285931, "grad_norm": 0.693780032199813, "learning_rate": 1.1527767681467472e-05, "loss": 0.3433, "step": 334},
    {"epoch": 1.0136157337367624, "grad_norm": 0.7718362959711602, "learning_rate": 1.1475478668255223e-05, "loss": 0.3297, "step": 335},
    {"epoch": 1.0166414523449319, "grad_norm": 0.7799806594919197, "learning_rate": 1.1423148382732854e-05, "loss": 0.3243, "step": 336},
    {"epoch": 1.0196671709531013, "grad_norm": 0.7834342924566685, "learning_rate": 1.1370778288690947e-05, "loss": 0.3161, "step": 337},
    {"epoch": 1.0226928895612708, "grad_norm": 0.7035742309468155, "learning_rate": 1.1318369851033604e-05, "loss": 0.3189, "step": 338},
    {"epoch": 1.0257186081694403, "grad_norm": 0.6823194252299682, "learning_rate": 1.1265924535737494e-05, "loss": 0.3059, "step": 339},
| { | |
| "epoch": 1.0287443267776097, | |
| "grad_norm": 0.7030204344457062, | |
| "learning_rate": 1.121344380981082e-05, | |
| "loss": 0.3351, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 1.0317700453857792, | |
| "grad_norm": 0.7220644752400439, | |
| "learning_rate": 1.1160929141252303e-05, | |
| "loss": 0.317, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.0347957639939485, | |
| "grad_norm": 0.7128248227834646, | |
| "learning_rate": 1.1108381999010111e-05, | |
| "loss": 0.3135, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.037821482602118, | |
| "grad_norm": 0.7286961559243299, | |
| "learning_rate": 1.1055803852940772e-05, | |
| "loss": 0.3165, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.0408472012102874, | |
| "grad_norm": 0.7581624260477089, | |
| "learning_rate": 1.1003196173768051e-05, | |
| "loss": 0.308, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.0438729198184569, | |
| "grad_norm": 0.7444100996587805, | |
| "learning_rate": 1.0950560433041825e-05, | |
| "loss": 0.318, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.0468986384266263, | |
| "grad_norm": 0.7330413738237167, | |
| "learning_rate": 1.0897898103096917e-05, | |
| "loss": 0.3145, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.0499243570347958, | |
| "grad_norm": 0.7826681189756391, | |
| "learning_rate": 1.0845210657011893e-05, | |
| "loss": 0.3163, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.0529500756429653, | |
| "grad_norm": 0.7616265400299066, | |
| "learning_rate": 1.0792499568567885e-05, | |
| "loss": 0.3082, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.0559757942511347, | |
| "grad_norm": 0.7300295615553197, | |
| "learning_rate": 1.0739766312207344e-05, | |
| "loss": 0.2971, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.059001512859304, | |
| "grad_norm": 0.7663398239074543, | |
| "learning_rate": 1.068701236299281e-05, | |
| "loss": 0.3111, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.0620272314674735, | |
| "grad_norm": 0.7446408017976378, | |
| "learning_rate": 1.0634239196565646e-05, | |
| "loss": 0.3185, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.065052950075643, | |
| "grad_norm": 0.7129411844983997, | |
| "learning_rate": 1.0581448289104759e-05, | |
| "loss": 0.3192, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.0680786686838124, | |
| "grad_norm": 0.7053759221710109, | |
| "learning_rate": 1.0528641117285315e-05, | |
| "loss": 0.3197, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.0711043872919819, | |
| "grad_norm": 0.7259860444456674, | |
| "learning_rate": 1.0475819158237426e-05, | |
| "loss": 0.3032, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.0741301059001513, | |
| "grad_norm": 0.7230943975803464, | |
| "learning_rate": 1.0422983889504831e-05, | |
| "loss": 0.326, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.0771558245083208, | |
| "grad_norm": 0.6566791160264871, | |
| "learning_rate": 1.0370136789003582e-05, | |
| "loss": 0.3136, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.08018154311649, | |
| "grad_norm": 0.667875498559168, | |
| "learning_rate": 1.031727933498068e-05, | |
| "loss": 0.294, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.0832072617246595, | |
| "grad_norm": 0.7153109643036423, | |
| "learning_rate": 1.0264413005972736e-05, | |
| "loss": 0.3197, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.086232980332829, | |
| "grad_norm": 0.7175940853635103, | |
| "learning_rate": 1.0211539280764617e-05, | |
| "loss": 0.3234, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.0892586989409985, | |
| "grad_norm": 0.7173024631255541, | |
| "learning_rate": 1.015865963834808e-05, | |
| "loss": 0.307, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.092284417549168, | |
| "grad_norm": 0.7628842808749231, | |
| "learning_rate": 1.0105775557880398e-05, | |
| "loss": 0.3231, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.0953101361573374, | |
| "grad_norm": 0.6967965799805202, | |
| "learning_rate": 1.0052888518642978e-05, | |
| "loss": 0.3303, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.0983358547655069, | |
| "grad_norm": 0.6854043048347506, | |
| "learning_rate": 1e-05, | |
| "loss": 0.3191, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.1013615733736764, | |
| "grad_norm": 0.6743308426891418, | |
| "learning_rate": 9.947111481357023e-06, | |
| "loss": 0.3088, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.1043872919818456, | |
| "grad_norm": 0.709390724397547, | |
| "learning_rate": 9.894224442119606e-06, | |
| "loss": 0.3084, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.107413010590015, | |
| "grad_norm": 0.6635787257546402, | |
| "learning_rate": 9.841340361651921e-06, | |
| "loss": 0.2913, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.1104387291981845, | |
| "grad_norm": 0.7383218187088174, | |
| "learning_rate": 9.788460719235386e-06, | |
| "loss": 0.3044, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.113464447806354, | |
| "grad_norm": 0.7493090351699705, | |
| "learning_rate": 9.735586994027267e-06, | |
| "loss": 0.3111, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.1164901664145235, | |
| "grad_norm": 0.7335164062805661, | |
| "learning_rate": 9.682720665019325e-06, | |
| "loss": 0.3249, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.119515885022693, | |
| "grad_norm": 0.7222360314514856, | |
| "learning_rate": 9.62986321099642e-06, | |
| "loss": 0.2925, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.1225416036308624, | |
| "grad_norm": 0.8119341381104316, | |
| "learning_rate": 9.57701611049517e-06, | |
| "loss": 0.3147, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.1255673222390317, | |
| "grad_norm": 0.7012015051546354, | |
| "learning_rate": 9.524180841762577e-06, | |
| "loss": 0.324, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.1285930408472011, | |
| "grad_norm": 0.7220671484486778, | |
| "learning_rate": 9.471358882714687e-06, | |
| "loss": 0.3073, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.1316187594553706, | |
| "grad_norm": 0.7393010842974813, | |
| "learning_rate": 9.418551710895243e-06, | |
| "loss": 0.3192, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.13464447806354, | |
| "grad_norm": 0.7001081477762102, | |
| "learning_rate": 9.365760803434356e-06, | |
| "loss": 0.3156, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.1376701966717095, | |
| "grad_norm": 0.7490395053984373, | |
| "learning_rate": 9.312987637007191e-06, | |
| "loss": 0.3227, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.140695915279879, | |
| "grad_norm": 0.7613077388799337, | |
| "learning_rate": 9.260233687792657e-06, | |
| "loss": 0.3171, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.1437216338880485, | |
| "grad_norm": 0.745177570098549, | |
| "learning_rate": 9.207500431432115e-06, | |
| "loss": 0.3215, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.146747352496218, | |
| "grad_norm": 0.7362508750117484, | |
| "learning_rate": 9.154789342988108e-06, | |
| "loss": 0.3056, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.1497730711043872, | |
| "grad_norm": 0.783282154083028, | |
| "learning_rate": 9.102101896903084e-06, | |
| "loss": 0.3633, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.1527987897125567, | |
| "grad_norm": 0.6860765102961823, | |
| "learning_rate": 9.049439566958176e-06, | |
| "loss": 0.3181, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.1558245083207261, | |
| "grad_norm": 0.7205552614859169, | |
| "learning_rate": 8.99680382623195e-06, | |
| "loss": 0.3099, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.1588502269288956, | |
| "grad_norm": 0.7291420382089907, | |
| "learning_rate": 8.944196147059233e-06, | |
| "loss": 0.2969, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.161875945537065, | |
| "grad_norm": 0.7080844292818619, | |
| "learning_rate": 8.89161800098989e-06, | |
| "loss": 0.3143, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.1649016641452345, | |
| "grad_norm": 1.0443913986366642, | |
| "learning_rate": 8.839070858747697e-06, | |
| "loss": 0.3251, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.167927382753404, | |
| "grad_norm": 0.6505311606424549, | |
| "learning_rate": 8.786556190189183e-06, | |
| "loss": 0.2983, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.1709531013615733, | |
| "grad_norm": 0.6750493690588029, | |
| "learning_rate": 8.734075464262507e-06, | |
| "loss": 0.2961, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.1739788199697427, | |
| "grad_norm": 0.704845137781413, | |
| "learning_rate": 8.681630148966397e-06, | |
| "loss": 0.3147, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.1770045385779122, | |
| "grad_norm": 0.6581019052799998, | |
| "learning_rate": 8.629221711309056e-06, | |
| "loss": 0.3059, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.1800302571860817, | |
| "grad_norm": 0.7279350517150733, | |
| "learning_rate": 8.576851617267151e-06, | |
| "loss": 0.2999, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.1830559757942511, | |
| "grad_norm": 0.7357482702369584, | |
| "learning_rate": 8.52452133174478e-06, | |
| "loss": 0.2952, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.1860816944024206, | |
| "grad_norm": 0.7052211966630689, | |
| "learning_rate": 8.472232318532531e-06, | |
| "loss": 0.3067, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.18910741301059, | |
| "grad_norm": 0.6971505686502555, | |
| "learning_rate": 8.419986040266502e-06, | |
| "loss": 0.3073, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.1921331316187596, | |
| "grad_norm": 0.7666326969767409, | |
| "learning_rate": 8.367783958387407e-06, | |
| "loss": 0.3148, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.1951588502269288, | |
| "grad_norm": 0.7201499233650221, | |
| "learning_rate": 8.315627533099697e-06, | |
| "loss": 0.3128, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.1981845688350983, | |
| "grad_norm": 0.7184912662084527, | |
| "learning_rate": 8.263518223330698e-06, | |
| "loss": 0.3091, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.2012102874432677, | |
| "grad_norm": 0.7447080958071112, | |
| "learning_rate": 8.211457486689829e-06, | |
| "loss": 0.301, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.2042360060514372, | |
| "grad_norm": 0.6989354135128141, | |
| "learning_rate": 8.159446779427798e-06, | |
| "loss": 0.3146, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.2072617246596067, | |
| "grad_norm": 0.6774803162217761, | |
| "learning_rate": 8.107487556395902e-06, | |
| "loss": 0.2939, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.2102874432677762, | |
| "grad_norm": 0.6882773074593409, | |
| "learning_rate": 8.055581271005292e-06, | |
| "loss": 0.3129, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.2133131618759456, | |
| "grad_norm": 0.6862756949906518, | |
| "learning_rate": 8.00372937518636e-06, | |
| "loss": 0.3172, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.2163388804841149, | |
| "grad_norm": 0.7156894955427757, | |
| "learning_rate": 7.951933319348095e-06, | |
| "loss": 0.3054, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.2193645990922843, | |
| "grad_norm": 0.6902429193640779, | |
| "learning_rate": 7.900194552337516e-06, | |
| "loss": 0.2975, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.2223903177004538, | |
| "grad_norm": 0.7164289120472808, | |
| "learning_rate": 7.848514521399167e-06, | |
| "loss": 0.3281, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.2254160363086233, | |
| "grad_norm": 0.7152978450909332, | |
| "learning_rate": 7.796894672134594e-06, | |
| "loss": 0.3119, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.2284417549167927, | |
| "grad_norm": 0.7166162308829876, | |
| "learning_rate": 7.745336448461958e-06, | |
| "loss": 0.2983, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.2314674735249622, | |
| "grad_norm": 0.6502915128978835, | |
| "learning_rate": 7.6938412925756e-06, | |
| "loss": 0.2895, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.2344931921331317, | |
| "grad_norm": 0.7004528089468532, | |
| "learning_rate": 7.642410644905726e-06, | |
| "loss": 0.3076, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.2375189107413012, | |
| "grad_norm": 0.693769957508055, | |
| "learning_rate": 7.591045944078119e-06, | |
| "loss": 0.2956, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.2405446293494704, | |
| "grad_norm": 0.7818983525619028, | |
| "learning_rate": 7.539748626873866e-06, | |
| "loss": 0.303, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.2435703479576399, | |
| "grad_norm": 0.7216342918305325, | |
| "learning_rate": 7.488520128189209e-06, | |
| "loss": 0.3146, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.2465960665658093, | |
| "grad_norm": 0.690046936406078, | |
| "learning_rate": 7.4373618809953755e-06, | |
| "loss": 0.3053, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.2496217851739788, | |
| "grad_norm": 0.7162826667118798, | |
| "learning_rate": 7.386275316298513e-06, | |
| "loss": 0.2993, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.2526475037821483, | |
| "grad_norm": 0.6817827074457138, | |
| "learning_rate": 7.335261863099652e-06, | |
| "loss": 0.3049, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.2556732223903178, | |
| "grad_norm": 0.7129802654550981, | |
| "learning_rate": 7.2843229483547405e-06, | |
| "loss": 0.3073, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.258698940998487, | |
| "grad_norm": 0.6692756823925066, | |
| "learning_rate": 7.233459996934731e-06, | |
| "loss": 0.3187, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.2617246596066565, | |
| "grad_norm": 0.6962291146882713, | |
| "learning_rate": 7.182674431585703e-06, | |
| "loss": 0.3142, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.264750378214826, | |
| "grad_norm": 0.6847858272365547, | |
| "learning_rate": 7.131967672889101e-06, | |
| "loss": 0.3094, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.2677760968229954, | |
| "grad_norm": 0.7004294371362585, | |
| "learning_rate": 7.081341139221955e-06, | |
| "loss": 0.3164, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.2708018154311649, | |
| "grad_norm": 0.677002398337919, | |
| "learning_rate": 7.0307962467172555e-06, | |
| "loss": 0.3149, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.2738275340393344, | |
| "grad_norm": 0.7399478672278513, | |
| "learning_rate": 6.9803344092242855e-06, | |
| "loss": 0.3128, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.2768532526475038, | |
| "grad_norm": 0.6829067314317432, | |
| "learning_rate": 6.929957038269123e-06, | |
| "loss": 0.3209, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.2798789712556733, | |
| "grad_norm": 0.7129152499156889, | |
| "learning_rate": 6.87966554301513e-06, | |
| "loss": 0.3135, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.2829046898638428, | |
| "grad_norm": 0.6903650256987789, | |
| "learning_rate": 6.8294613302235325e-06, | |
| "loss": 0.2886, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.2859304084720122, | |
| "grad_norm": 0.67649741460048, | |
| "learning_rate": 6.779345804214088e-06, | |
| "loss": 0.304, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.2889561270801815, | |
| "grad_norm": 0.71728725583609, | |
| "learning_rate": 6.729320366825785e-06, | |
| "loss": 0.3154, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.291981845688351, | |
| "grad_norm": 0.6545554973278517, | |
| "learning_rate": 6.679386417377649e-06, | |
| "loss": 0.3052, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.2950075642965204, | |
| "grad_norm": 0.7054045268543858, | |
| "learning_rate": 6.629545352629583e-06, | |
| "loss": 0.3015, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.29803328290469, | |
| "grad_norm": 0.6470229150859114, | |
| "learning_rate": 6.579798566743314e-06, | |
| "loss": 0.2927, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.3010590015128594, | |
| "grad_norm": 0.7172755970930309, | |
| "learning_rate": 6.530147451243377e-06, | |
| "loss": 0.3247, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.3040847201210286, | |
| "grad_norm": 0.7360918433741535, | |
| "learning_rate": 6.480593394978208e-06, | |
| "loss": 0.3328, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.307110438729198, | |
| "grad_norm": 0.7354469704386419, | |
| "learning_rate": 6.431137784081283e-06, | |
| "loss": 0.3167, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.3101361573373675, | |
| "grad_norm": 0.7018936365550571, | |
| "learning_rate": 6.381782001932352e-06, | |
| "loss": 0.3118, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.313161875945537, | |
| "grad_norm": 0.7015265174474148, | |
| "learning_rate": 6.33252742911874e-06, | |
| "loss": 0.315, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.3161875945537065, | |
| "grad_norm": 0.6974168454298726, | |
| "learning_rate": 6.283375443396726e-06, | |
| "loss": 0.3088, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.319213313161876, | |
| "grad_norm": 0.6968548133039912, | |
| "learning_rate": 6.234327419653013e-06, | |
| "loss": 0.3206, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.3222390317700454, | |
| "grad_norm": 0.6970822687825884, | |
| "learning_rate": 6.185384729866264e-06, | |
| "loss": 0.3244, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.325264750378215, | |
| "grad_norm": 0.7173117090351013, | |
| "learning_rate": 6.136548743068713e-06, | |
| "loss": 0.3289, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.3282904689863844, | |
| "grad_norm": 0.6973470014612884, | |
| "learning_rate": 6.087820825307904e-06, | |
| "loss": 0.2981, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.3313161875945538, | |
| "grad_norm": 0.6913173649694041, | |
| "learning_rate": 6.039202339608432e-06, | |
| "loss": 0.303, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.334341906202723, | |
| "grad_norm": 0.6902588001957262, | |
| "learning_rate": 5.990694645933866e-06, | |
| "loss": 0.2961, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.3373676248108926, | |
| "grad_norm": 0.6888335562598928, | |
| "learning_rate": 5.9422991011486635e-06, | |
| "loss": 0.2894, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.340393343419062, | |
| "grad_norm": 0.6481533984532106, | |
| "learning_rate": 5.894017058980249e-06, | |
| "loss": 0.2883, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.3434190620272315, | |
| "grad_norm": 0.72432569607496, | |
| "learning_rate": 5.845849869981137e-06, | |
| "loss": 0.3143, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.346444780635401, | |
| "grad_norm": 0.7140525033763379, | |
| "learning_rate": 5.797798881491138e-06, | |
| "loss": 0.317, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.3494704992435702, | |
| "grad_norm": 0.7178395197937704, | |
| "learning_rate": 5.749865437599703e-06, | |
| "loss": 0.3103, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.3524962178517397, | |
| "grad_norm": 0.6677612480369632, | |
| "learning_rate": 5.702050879108284e-06, | |
| "loss": 0.3001, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.3555219364599091, | |
| "grad_norm": 0.7134033383176448, | |
| "learning_rate": 5.654356543492883e-06, | |
| "loss": 0.3014, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.3585476550680786, | |
| "grad_norm": 0.7069914784145128, | |
| "learning_rate": 5.606783764866576e-06, | |
| "loss": 0.2886, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.361573373676248, | |
| "grad_norm": 0.6724385382734374, | |
| "learning_rate": 5.559333873942259e-06, | |
| "loss": 0.2917, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.3645990922844176, | |
| "grad_norm": 0.6755738354374036, | |
| "learning_rate": 5.512008197995379e-06, | |
| "loss": 0.3138, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.367624810892587, | |
| "grad_norm": 0.7016378510677459, | |
| "learning_rate": 5.464808060826825e-06, | |
| "loss": 0.298, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.3706505295007565, | |
| "grad_norm": 0.6817784227901139, | |
| "learning_rate": 5.417734782725896e-06, | |
| "loss": 0.2939, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.373676248108926, | |
| "grad_norm": 0.677114560152914, | |
| "learning_rate": 5.370789680433376e-06, | |
| "loss": 0.3004, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.3767019667170954, | |
| "grad_norm": 0.7188846644953927, | |
| "learning_rate": 5.323974067104687e-06, | |
| "loss": 0.3059, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.3797276853252647, | |
| "grad_norm": 0.6623497913325952, | |
| "learning_rate": 5.277289252273175e-06, | |
| "loss": 0.3045, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.3827534039334342, | |
| "grad_norm": 0.6843900586819348, | |
| "learning_rate": 5.230736541813463e-06, | |
| "loss": 0.3074, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.3857791225416036, | |
| "grad_norm": 0.7037436103755415, | |
| "learning_rate": 5.184317237904939e-06, | |
| "loss": 0.3041, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.388804841149773, | |
| "grad_norm": 0.6991848886580267, | |
| "learning_rate": 5.138032638995315e-06, | |
| "loss": 0.3143, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.3918305597579426, | |
| "grad_norm": 0.6908578201355167, | |
| "learning_rate": 5.091884039764321e-06, | |
| "loss": 0.291, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.394856278366112, | |
| "grad_norm": 0.6631153665069562, | |
| "learning_rate": 5.045872731087479e-06, | |
| "loss": 0.2937, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.3978819969742813, | |
| "grad_norm": 0.6861762479627003, | |
| "learning_rate": 5.000000000000003e-06, | |
| "loss": 0.3143, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.4009077155824508, | |
| "grad_norm": 0.6735630886585167, | |
| "learning_rate": 4.954267129660789e-06, | |
| "loss": 0.3035, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.4039334341906202, | |
| "grad_norm": 0.7401256235600105, | |
| "learning_rate": 4.908675399316534e-06, | |
| "loss": 0.3322, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.4069591527987897, | |
| "grad_norm": 0.6856509226129854, | |
| "learning_rate": 4.863226084265939e-06, | |
| "loss": 0.3009, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.4099848714069592, | |
| "grad_norm": 0.6704034036306068, | |
| "learning_rate": 4.817920455824045e-06, | |
| "loss": 0.3112, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.4130105900151286, | |
| "grad_norm": 0.6918737085863327, | |
| "learning_rate": 4.772759781286679e-06, | |
| "loss": 0.3095, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.416036308623298, | |
| "grad_norm": 0.6747471444098856, | |
| "learning_rate": 4.727745323894976e-06, | |
| "loss": 0.2995, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.4190620272314676, | |
| "grad_norm": 0.6928846004977196, | |
| "learning_rate": 4.682878342800087e-06, | |
| "loss": 0.2989, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.422087745839637, | |
| "grad_norm": 0.7445199772901875, | |
| "learning_rate": 4.638160093027908e-06, | |
| "loss": 0.3137, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.4251134644478063, | |
| "grad_norm": 0.6741559694720364, | |
| "learning_rate": 4.593591825444028e-06, | |
| "loss": 0.3102, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.4281391830559758, | |
| "grad_norm": 0.6364551280422103, | |
| "learning_rate": 4.549174786718684e-06, | |
| "loss": 0.2912, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.4311649016641452, | |
| "grad_norm": 0.6588404368298354, | |
| "learning_rate": 4.504910219291941e-06, | |
| "loss": 0.2898, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.4341906202723147, | |
| "grad_norm": 0.683736615264254, | |
| "learning_rate": 4.460799361338898e-06, | |
| "loss": 0.3094, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.4372163388804842, | |
| "grad_norm": 0.7223880379811686, | |
| "learning_rate": 4.416843446735077e-06, | |
| "loss": 0.2992, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.4402420574886536, | |
| "grad_norm": 0.6871686228646737, | |
| "learning_rate": 4.373043705021899e-06, | |
| "loss": 0.2978, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.4432677760968229, | |
| "grad_norm": 0.6372261289079035, | |
| "learning_rate": 4.3294013613722944e-06, | |
| "loss": 0.2821, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.4462934947049924, | |
| "grad_norm": 0.7212596525835027, | |
| "learning_rate": 4.2859176365564294e-06, | |
| "loss": 0.3029, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.4493192133131618, | |
| "grad_norm": 0.6792718323771673, | |
| "learning_rate": 4.2425937469075626e-06, | |
| "loss": 0.2987, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.4523449319213313, | |
| "grad_norm": 0.7457637018758471, | |
| "learning_rate": 4.19943090428802e-06, | |
| "loss": 0.318, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.4553706505295008, | |
| "grad_norm": 0.6891893920760775, | |
| "learning_rate": 4.1564303160552935e-06, | |
| "loss": 0.308, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.4583963691376702, | |
| "grad_norm": 0.7193222410111839, | |
| "learning_rate": 4.113593185028273e-06, | |
| "loss": 0.3101, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.4614220877458397, | |
| "grad_norm": 0.677510265957377, | |
| "learning_rate": 4.070920709453597e-06, | |
| "loss": 0.2941, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.4644478063540092, | |
| "grad_norm": 0.6824581576775166, | |
| "learning_rate": 4.028414082972141e-06, | |
| "loss": 0.3063, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.4674735249621786, | |
| "grad_norm": 0.6987573713057391, | |
| "learning_rate": 3.986074494585619e-06, | |
| "loss": 0.2979, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.470499243570348, | |
| "grad_norm": 0.682547909670824, | |
| "learning_rate": 3.943903128623336e-06, | |
| "loss": 0.3026, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.4735249621785174, | |
| "grad_norm": 0.7139142090689975, | |
| "learning_rate": 3.9019011647090465e-06, | |
| "loss": 0.3049, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.4765506807866868, | |
| "grad_norm": 0.6967939917538635, | |
| "learning_rate": 3.860069777727983e-06, | |
| "loss": 0.3017, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.4795763993948563, | |
| "grad_norm": 0.6639628334871605, | |
| "learning_rate": 3.818410137793947e-06, | |
| "loss": 0.2961, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.4826021180030258, | |
| "grad_norm": 0.6954523960038056, | |
| "learning_rate": 3.7769234102166365e-06, | |
| "loss": 0.3071, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.4856278366111952, | |
| "grad_norm": 0.6641255505547464, | |
| "learning_rate": 3.735610755468988e-06, | |
| "loss": 0.3048, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.4886535552193645, | |
| "grad_norm": 0.7396034206538552, | |
| "learning_rate": 3.6944733291547784e-06, | |
| "loss": 0.3112, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.491679273827534, | |
| "grad_norm": 0.6553446481480045, | |
| "learning_rate": 3.653512281976238e-06, | |
| "loss": 0.2829, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 1.4947049924357034, | |
| "grad_norm": 0.6916253040167756, | |
| "learning_rate": 3.612728759701919e-06, | |
| "loss": 0.3057, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 1.497730711043873, | |
| "grad_norm": 0.6684285414455715, | |
| "learning_rate": 3.5721239031346067e-06, | |
| "loss": 0.3118, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 1.5007564296520424, | |
| "grad_norm": 0.6800272990492517, | |
| "learning_rate": 3.5316988480794255e-06, | |
| "loss": 0.2931, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 1.5037821482602118, | |
| "grad_norm": 0.7215340748325425, | |
| "learning_rate": 3.4914547253120655e-06, | |
| "loss": 0.313, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 1.5068078668683813, | |
| "grad_norm": 0.7200813728921697, | |
| "learning_rate": 3.4513926605471504e-06, | |
| "loss": 0.3, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 1.5098335854765508, | |
| "grad_norm": 0.7213465958625503, | |
| "learning_rate": 3.4115137744067516e-06, | |
| "loss": 0.3132, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 1.5128593040847202, | |
| "grad_norm": 0.695193966323551, | |
| "learning_rate": 3.37181918238904e-06, | |
| "loss": 0.2952, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 1.5158850226928897, | |
| "grad_norm": 0.702532493542805, | |
| "learning_rate": 3.3323099948370853e-06, | |
| "loss": 0.3224, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 1.518910741301059, | |
| "grad_norm": 0.71421659775029, | |
| "learning_rate": 3.292987316907792e-06, | |
| "loss": 0.3127, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 1.5219364599092284, | |
| "grad_norm": 0.6541964482488821, | |
| "learning_rate": 3.253852248540994e-06, | |
| "loss": 0.2925, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 1.524962178517398, | |
| "grad_norm": 0.6903678889460569, | |
| "learning_rate": 3.2149058844286796e-06, | |
| "loss": 0.3189, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 1.5279878971255674, | |
| "grad_norm": 0.6531584494309963, | |
| "learning_rate": 3.1761493139843734e-06, | |
| "loss": 0.2938, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 1.5310136157337366, | |
| "grad_norm": 0.6458204157229048, | |
| "learning_rate": 3.1375836213126653e-06, | |
| "loss": 0.2759, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 1.534039334341906, | |
| "grad_norm": 0.6389127701829911, | |
| "learning_rate": 3.099209885178882e-06, | |
| "loss": 0.3003, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 1.5370650529500756, | |
| "grad_norm": 0.6709656996746195, | |
| "learning_rate": 3.0610291789789094e-06, | |
| "loss": 0.3039, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 1.540090771558245, | |
| "grad_norm": 0.6572021762986553, | |
| "learning_rate": 3.023042570709185e-06, | |
| "loss": 0.3002, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 1.5431164901664145, | |
| "grad_norm": 0.6735268770556463, | |
| "learning_rate": 2.9852511229367862e-06, | |
| "loss": 0.2928, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 1.546142208774584, | |
| "grad_norm": 0.7400256066411078, | |
| "learning_rate": 2.9476558927697605e-06, | |
| "loss": 0.3105, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 1.5491679273827534, | |
| "grad_norm": 0.7049286895942135, | |
| "learning_rate": 2.9102579318274994e-06, | |
| "loss": 0.3089, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 1.552193645990923, | |
| "grad_norm": 0.7479600652850535, | |
| "learning_rate": 2.8730582862113743e-06, | |
| "loss": 0.3186, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 1.5552193645990924, | |
| "grad_norm": 0.7191826298419407, | |
| "learning_rate": 2.8360579964754277e-06, | |
| "loss": 0.2904, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 1.5582450832072618, | |
| "grad_norm": 0.7369097296346154, | |
| "learning_rate": 2.7992580975973136e-06, | |
| "loss": 0.3186, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 1.5612708018154313, | |
| "grad_norm": 0.7259344634010142, | |
| "learning_rate": 2.7626596189492983e-06, | |
| "loss": 0.3076, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 1.5642965204236006, | |
| "grad_norm": 0.7877058781693224, | |
| "learning_rate": 2.726263584269513e-06, | |
| "loss": 0.312, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 1.56732223903177, | |
| "grad_norm": 0.7253795801943571, | |
| "learning_rate": 2.690071011633284e-06, | |
| "loss": 0.2784, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 1.5703479576399395, | |
| "grad_norm": 0.716364482329252, | |
| "learning_rate": 2.6540829134246683e-06, | |
| "loss": 0.2949, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 1.573373676248109, | |
| "grad_norm": 0.6948192527833557, | |
| "learning_rate": 2.618300296308135e-06, | |
| "loss": 0.2946, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 1.5763993948562782, | |
| "grad_norm": 0.7559338647419146, | |
| "learning_rate": 2.582724161200405e-06, | |
| "loss": 0.3101, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 1.5794251134644477, | |
| "grad_norm": 0.7002350543846648, | |
| "learning_rate": 2.5473555032424534e-06, | |
| "loss": 0.3069, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 1.5824508320726172, | |
| "grad_norm": 0.7308945636372909, | |
| "learning_rate": 2.5121953117716744e-06, | |
| "loss": 0.3052, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 1.5854765506807866, | |
| "grad_norm": 0.6983827700173706, | |
| "learning_rate": 2.477244570294206e-06, | |
| "loss": 0.2928, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 1.588502269288956, | |
| "grad_norm": 0.6700406495515575, | |
| "learning_rate": 2.4425042564574186e-06, | |
| "loss": 0.3045, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 1.5915279878971256, | |
| "grad_norm": 0.6958652021752028, | |
| "learning_rate": 2.4079753420225694e-06, | |
| "loss": 0.2971, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 1.594553706505295, | |
| "grad_norm": 0.6559413380090855, | |
| "learning_rate": 2.3736587928376197e-06, | |
| "loss": 0.2948, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 1.5975794251134645, | |
| "grad_norm": 0.7075226725122732, | |
| "learning_rate": 2.339555568810221e-06, | |
| "loss": 0.2991, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 1.600605143721634, | |
| "grad_norm": 0.7554216234249966, | |
| "learning_rate": 2.305666623880858e-06, | |
| "loss": 0.3129, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 1.6036308623298035, | |
| "grad_norm": 0.6926891852747049, | |
| "learning_rate": 2.27199290599617e-06, | |
| "loss": 0.3019, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 1.606656580937973, | |
| "grad_norm": 0.7564823899165914, | |
| "learning_rate": 2.2385353570824308e-06, | |
| "loss": 0.3138, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 1.6096822995461422, | |
| "grad_norm": 0.7248932155517664, | |
| "learning_rate": 2.2052949130192136e-06, | |
| "loss": 0.3169, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 1.6127080181543116, | |
| "grad_norm": 0.6907750485175111, | |
| "learning_rate": 2.172272503613183e-06, | |
| "loss": 0.2948, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 1.615733736762481, | |
| "grad_norm": 0.6672547079313764, | |
| "learning_rate": 2.1394690525721275e-06, | |
| "loss": 0.3071, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 1.6187594553706506, | |
| "grad_norm": 0.7607069879856434, | |
| "learning_rate": 2.1068854774790783e-06, | |
| "loss": 0.3066, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 1.6217851739788198, | |
| "grad_norm": 0.7117361022094344, | |
| "learning_rate": 2.0745226897666858e-06, | |
| "loss": 0.3098, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 1.6248108925869893, | |
| "grad_norm": 0.7120266734258044, | |
| "learning_rate": 2.0423815946916783e-06, | |
| "loss": 0.2964, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 1.6278366111951588, | |
| "grad_norm": 0.6577639095052911, | |
| "learning_rate": 2.010463091309587e-06, | |
| "loss": 0.2953, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 1.6308623298033282, | |
| "grad_norm": 0.6791175159063494, | |
| "learning_rate": 1.9787680724495617e-06, | |
| "loss": 0.2858, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 1.6338880484114977, | |
| "grad_norm": 0.6635191695760196, | |
| "learning_rate": 1.947297424689414e-06, | |
| "loss": 0.3012, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 1.6369137670196672, | |
| "grad_norm": 0.6957858551815456, | |
| "learning_rate": 1.9160520283308115e-06, | |
| "loss": 0.3165, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 1.6399394856278366, | |
| "grad_norm": 0.7406963321348281, | |
| "learning_rate": 1.8850327573746584e-06, | |
| "loss": 0.3168, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 1.6429652042360061, | |
| "grad_norm": 0.6780961333879061, | |
| "learning_rate": 1.854240479496643e-06, | |
| "loss": 0.2954, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 1.6459909228441756, | |
| "grad_norm": 0.688033751410978, | |
| "learning_rate": 1.8236760560229715e-06, | |
| "loss": 0.3223, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 1.649016641452345, | |
| "grad_norm": 0.6431185613432867, | |
| "learning_rate": 1.7933403419062689e-06, | |
| "loss": 0.2924, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 1.6520423600605145, | |
| "grad_norm": 0.6947006358248501, | |
| "learning_rate": 1.7632341857016733e-06, | |
| "loss": 0.2965, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 1.6550680786686838, | |
| "grad_norm": 0.7327185002122619, | |
| "learning_rate": 1.7333584295430894e-06, | |
| "loss": 0.3266, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 1.6580937972768532, | |
| "grad_norm": 0.6785106712662585, | |
| "learning_rate": 1.7037139091196396e-06, | |
| "loss": 0.3063, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 1.6611195158850227, | |
| "grad_norm": 0.6951604768662649, | |
| "learning_rate": 1.6743014536522872e-06, | |
| "loss": 0.317, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 1.6641452344931922, | |
| "grad_norm": 0.7033711264998699, | |
| "learning_rate": 1.6451218858706374e-06, | |
| "loss": 0.2996, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 1.6671709531013614, | |
| "grad_norm": 0.719504384246559, | |
| "learning_rate": 1.616176021989926e-06, | |
| "loss": 0.2928, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 1.670196671709531, | |
| "grad_norm": 0.6522064110961668, | |
| "learning_rate": 1.587464671688187e-06, | |
| "loss": 0.2848, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 1.6732223903177004, | |
| "grad_norm": 0.6826806188122564, | |
| "learning_rate": 1.558988638083616e-06, | |
| "loss": 0.2959, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 1.6762481089258698, | |
| "grad_norm": 0.6564015721698926, | |
| "learning_rate": 1.5307487177120773e-06, | |
| "loss": 0.295, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 1.6792738275340393, | |
| "grad_norm": 0.735242106850941, | |
| "learning_rate": 1.5027457005048573e-06, | |
| "loss": 0.3034, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 1.6822995461422088, | |
| "grad_norm": 0.7070621046349863, | |
| "learning_rate": 1.4749803697665366e-06, | |
| "loss": 0.2992, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 1.6853252647503782, | |
| "grad_norm": 0.65097613583344, | |
| "learning_rate": 1.4474535021531099e-06, | |
| "loss": 0.2914, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 1.6883509833585477, | |
| "grad_norm": 0.6380444483611941, | |
| "learning_rate": 1.4201658676502294e-06, | |
| "loss": 0.2885, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 1.6913767019667172, | |
| "grad_norm": 0.6858745161805448, | |
| "learning_rate": 1.3931182295516965e-06, | |
| "loss": 0.2988, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 1.6944024205748867, | |
| "grad_norm": 0.7206905807750172, | |
| "learning_rate": 1.3663113444380905e-06, | |
| "loss": 0.3063, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 1.6974281391830561, | |
| "grad_norm": 0.7181097223657377, | |
| "learning_rate": 1.339745962155613e-06, | |
| "loss": 0.3174, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 1.7004538577912254, | |
| "grad_norm": 0.6783850499170709, | |
| "learning_rate": 1.3134228257951142e-06, | |
| "loss": 0.2916, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 1.7034795763993948, | |
| "grad_norm": 0.6825019140686246, | |
| "learning_rate": 1.2873426716713012e-06, | |
| "loss": 0.2997, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 1.7065052950075643, | |
| "grad_norm": 0.6711527637855523, | |
| "learning_rate": 1.2615062293021508e-06, | |
| "loss": 0.2926, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 1.7095310136157338, | |
| "grad_norm": 0.6421331930689803, | |
| "learning_rate": 1.2359142213884933e-06, | |
| "loss": 0.2918, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 1.712556732223903, | |
| "grad_norm": 0.6425987218978977, | |
| "learning_rate": 1.2105673637938054e-06, | |
| "loss": 0.2838, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 1.7155824508320725, | |
| "grad_norm": 0.6741361785330381, | |
| "learning_rate": 1.1854663655241804e-06, | |
| "loss": 0.3067, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 1.718608169440242, | |
| "grad_norm": 0.7029485943850053, | |
| "learning_rate": 1.1606119287084982e-06, | |
| "loss": 0.315, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 1.7216338880484114, | |
| "grad_norm": 0.6898349548316522, | |
| "learning_rate": 1.136004748578785e-06, | |
| "loss": 0.3022, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 1.724659606656581, | |
| "grad_norm": 0.6633566836549986, | |
| "learning_rate": 1.1116455134507665e-06, | |
| "loss": 0.2924, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 1.7276853252647504, | |
| "grad_norm": 0.6613453118550724, | |
| "learning_rate": 1.0875349047046113e-06, | |
| "loss": 0.2794, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 1.7307110438729199, | |
| "grad_norm": 0.7189572265288271, | |
| "learning_rate": 1.0636735967658785e-06, | |
| "loss": 0.2947, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 1.7337367624810893, | |
| "grad_norm": 0.6827175453991511, | |
| "learning_rate": 1.0400622570866426e-06, | |
| "loss": 0.2825, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 1.7367624810892588, | |
| "grad_norm": 0.6778385152009138, | |
| "learning_rate": 1.0167015461268303e-06, | |
| "loss": 0.2976, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 1.7397881996974283, | |
| "grad_norm": 0.6375010767656994, | |
| "learning_rate": 9.935921173357444e-07, | |
| "loss": 0.2888, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 1.7428139183055977, | |
| "grad_norm": 0.6720767388779083, | |
| "learning_rate": 9.707346171337895e-07, | |
| "loss": 0.2975, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 1.745839636913767, | |
| "grad_norm": 0.6785099300101036, | |
| "learning_rate": 9.481296848943744e-07, | |
| "loss": 0.2891, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 1.7488653555219364, | |
| "grad_norm": 0.6732243311673907, | |
| "learning_rate": 9.257779529260558e-07, | |
| "loss": 0.2928, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 1.751891074130106, | |
| "grad_norm": 0.6497428042061094, | |
| "learning_rate": 9.036800464548157e-07, | |
| "loss": 0.2851, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 1.7549167927382754, | |
| "grad_norm": 0.6701638192592306, | |
| "learning_rate": 8.818365836066101e-07, | |
| "loss": 0.281, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 1.7579425113464446, | |
| "grad_norm": 0.6806192027878634, | |
| "learning_rate": 8.602481753900427e-07, | |
| "loss": 0.3007, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 1.760968229954614, | |
| "grad_norm": 0.666149829100226, | |
| "learning_rate": 8.389154256793042e-07, | |
| "loss": 0.2828, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 1.7639939485627836, | |
| "grad_norm": 0.7197459908979876, | |
| "learning_rate": 8.178389311972612e-07, | |
| "loss": 0.3405, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 1.767019667170953, | |
| "grad_norm": 0.6788395078668634, | |
| "learning_rate": 7.970192814987676e-07, | |
| "loss": 0.2918, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 1.7700453857791225, | |
| "grad_norm": 0.6688901810727487, | |
| "learning_rate": 7.764570589541876e-07, | |
| "loss": 0.2865, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 1.773071104387292, | |
| "grad_norm": 0.6223809430011187, | |
| "learning_rate": 7.561528387330797e-07, | |
| "loss": 0.2776, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 1.7760968229954615, | |
| "grad_norm": 0.6651142677248985, | |
| "learning_rate": 7.361071887881376e-07, | |
| "loss": 0.2983, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 1.779122541603631, | |
| "grad_norm": 0.6961947144996967, | |
| "learning_rate": 7.163206698392744e-07, | |
| "loss": 0.2865, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 1.7821482602118004, | |
| "grad_norm": 0.6844482023083259, | |
| "learning_rate": 6.96793835357964e-07, | |
| "loss": 0.2974, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 1.7851739788199699, | |
| "grad_norm": 0.652153894008865, | |
| "learning_rate": 6.775272315517423e-07, | |
| "loss": 0.2798, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 1.7881996974281393, | |
| "grad_norm": 0.7066560972438733, | |
| "learning_rate": 6.585213973489335e-07, | |
| "loss": 0.305, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 1.7912254160363086, | |
| "grad_norm": 0.6843110985602638, | |
| "learning_rate": 6.397768643835755e-07, | |
| "loss": 0.3054, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 1.794251134644478, | |
| "grad_norm": 0.7185651313087387, | |
| "learning_rate": 6.212941569805508e-07, | |
| "loss": 0.2998, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 1.7972768532526475, | |
| "grad_norm": 0.703509497476997, | |
| "learning_rate": 6.030737921409169e-07, | |
| "loss": 0.3142, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 1.800302571860817, | |
| "grad_norm": 0.6966873221841585, | |
| "learning_rate": 5.851162795274445e-07, | |
| "loss": 0.2878, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 1.8033282904689862, | |
| "grad_norm": 0.6619482756715921, | |
| "learning_rate": 5.674221214503639e-07, | |
| "loss": 0.2902, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 1.8063540090771557, | |
| "grad_norm": 0.6690129691854629, | |
| "learning_rate": 5.499918128533155e-07, | |
| "loss": 0.2789, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 1.8093797276853252, | |
| "grad_norm": 0.7176452628298193, | |
| "learning_rate": 5.328258412994958e-07, | |
| "loss": 0.3037, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 1.8124054462934946, | |
| "grad_norm": 0.6887067764100213, | |
| "learning_rate": 5.159246869580348e-07, | |
| "loss": 0.293, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 1.8154311649016641, | |
| "grad_norm": 0.71479203999461, | |
| "learning_rate": 4.992888225905467e-07, | |
| "loss": 0.3089, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 1.8184568835098336, | |
| "grad_norm": 0.693892555635147, | |
| "learning_rate": 4.829187135379221e-07, | |
| "loss": 0.3009, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 1.821482602118003, | |
| "grad_norm": 0.7112518970862823, | |
| "learning_rate": 4.6681481770729844e-07, | |
| "loss": 0.2922, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 1.8245083207261725, | |
| "grad_norm": 0.7183401661455543, | |
| "learning_rate": 4.509775855592613e-07, | |
| "loss": 0.3017, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 1.827534039334342, | |
| "grad_norm": 0.6914024870492536, | |
| "learning_rate": 4.354074600952407e-07, | |
| "loss": 0.2957, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 1.8305597579425115, | |
| "grad_norm": 0.6999167088087518, | |
| "learning_rate": 4.2010487684511105e-07, | |
| "loss": 0.3006, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 1.833585476550681, | |
| "grad_norm": 0.6630131885563284, | |
| "learning_rate": 4.0507026385502747e-07, | |
| "loss": 0.2753, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 1.8366111951588502, | |
| "grad_norm": 0.6607165437970709, | |
| "learning_rate": 3.9030404167542777e-07, | |
| "loss": 0.2874, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 1.8396369137670197, | |
| "grad_norm": 0.6800934959705965, | |
| "learning_rate": 3.7580662334929517e-07, | |
| "loss": 0.3039, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 1.8426626323751891, | |
| "grad_norm": 0.726351963057802, | |
| "learning_rate": 3.615784144005796e-07, | |
| "loss": 0.3098, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 1.8456883509833586, | |
| "grad_norm": 0.657127560344899, | |
| "learning_rate": 3.476198128228736e-07, | |
| "loss": 0.3039, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 1.8487140695915278, | |
| "grad_norm": 0.6924922829254053, | |
| "learning_rate": 3.339312090682689e-07, | |
| "loss": 0.2905, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 1.8517397881996973, | |
| "grad_norm": 0.6732789190119731, | |
| "learning_rate": 3.2051298603643754e-07, | |
| "loss": 0.3003, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 1.8547655068078668, | |
| "grad_norm": 0.7176648876584121, | |
| "learning_rate": 3.0736551906392354e-07, | |
| "loss": 0.3136, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 1.8577912254160363, | |
| "grad_norm": 0.714625687486915, | |
| "learning_rate": 2.9448917591363923e-07, | |
| "loss": 0.296, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 1.8608169440242057, | |
| "grad_norm": 0.6903446508480382, | |
| "learning_rate": 2.818843167645835e-07, | |
| "loss": 0.2964, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 1.8638426626323752, | |
| "grad_norm": 0.6579913415731065, | |
| "learning_rate": 2.6955129420176193e-07, | |
| "loss": 0.2969, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 1.8668683812405447, | |
| "grad_norm": 0.6526562049044484, | |
| "learning_rate": 2.5749045320632824e-07, | |
| "loss": 0.2828, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 1.8698940998487141, | |
| "grad_norm": 0.6603549737274746, | |
| "learning_rate": 2.4570213114592957e-07, | |
| "loss": 0.2854, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 1.8729198184568836, | |
| "grad_norm": 0.6828670462972486, | |
| "learning_rate": 2.3418665776527738e-07, | |
| "loss": 0.2968, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 1.875945537065053, | |
| "grad_norm": 0.7245376651427105, | |
| "learning_rate": 2.2294435517691504e-07, | |
| "loss": 0.2994, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 1.8789712556732225, | |
| "grad_norm": 0.6341115848609051, | |
| "learning_rate": 2.119755378522137e-07, | |
| "loss": 0.2786, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 1.8819969742813918, | |
| "grad_norm": 0.6434969987007261, | |
| "learning_rate": 2.0128051261257165e-07, | |
| "loss": 0.2874, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 1.8850226928895613, | |
| "grad_norm": 0.6281435960500501, | |
| "learning_rate": 1.908595786208367e-07, | |
| "loss": 0.292, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 1.8880484114977307, | |
| "grad_norm": 0.6606112187910763, | |
| "learning_rate": 1.8071302737293294e-07, | |
| "loss": 0.2945, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 1.8910741301059002, | |
| "grad_norm": 0.7200343599129199, | |
| "learning_rate": 1.7084114268971275e-07, | |
| "loss": 0.3079, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 1.8940998487140694, | |
| "grad_norm": 0.6513952824440472, | |
| "learning_rate": 1.612442007090076e-07, | |
| "loss": 0.2746, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 1.897125567322239, | |
| "grad_norm": 0.6804668642243991, | |
| "learning_rate": 1.519224698779198e-07, | |
| "loss": 0.2945, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 1.9001512859304084, | |
| "grad_norm": 0.6405722502336237, | |
| "learning_rate": 1.4287621094529524e-07, | |
| "loss": 0.2875, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 1.9031770045385779, | |
| "grad_norm": 0.7182097922727458, | |
| "learning_rate": 1.3410567695444576e-07, | |
| "loss": 0.3346, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 1.9062027231467473, | |
| "grad_norm": 0.6481210922041217, | |
| "learning_rate": 1.2561111323605714e-07, | |
| "loss": 0.2884, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 1.9092284417549168, | |
| "grad_norm": 0.6817197083063868, | |
| "learning_rate": 1.1739275740134004e-07, | |
| "loss": 0.2961, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 1.9122541603630863, | |
| "grad_norm": 0.70073687726962, | |
| "learning_rate": 1.0945083933537104e-07, | |
| "loss": 0.321, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 1.9152798789712557, | |
| "grad_norm": 0.68636139197188, | |
| "learning_rate": 1.0178558119067316e-07, | |
| "loss": 0.2736, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 1.9183055975794252, | |
| "grad_norm": 0.6779214936276305, | |
| "learning_rate": 9.439719738099318e-08, | |
| "loss": 0.2906, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 1.9213313161875947, | |
| "grad_norm": 0.6888324588447359, | |
| "learning_rate": 8.728589457530857e-08, | |
| "loss": 0.2809, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 1.9243570347957641, | |
| "grad_norm": 0.6782053800761134, | |
| "learning_rate": 8.04518716920466e-08, | |
| "loss": 0.2976, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 1.9273827534039334, | |
| "grad_norm": 0.645909077838239, | |
| "learning_rate": 7.389531989351773e-08, | |
| "loss": 0.2837, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 1.9304084720121029, | |
| "grad_norm": 0.6724331326984303, | |
| "learning_rate": 6.761642258056977e-08, | |
| "loss": 0.2929, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 1.9334341906202723, | |
| "grad_norm": 0.6562749823675094, | |
| "learning_rate": 6.161535538745877e-08, | |
| "loss": 0.3006, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 1.9364599092284418, | |
| "grad_norm": 0.684610175189948, | |
| "learning_rate": 5.5892286176932875e-08, | |
| "loss": 0.2988, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 1.939485627836611, | |
| "grad_norm": 0.6622851733514898, | |
| "learning_rate": 5.044737503554165e-08, | |
| "loss": 0.295, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 1.9425113464447805, | |
| "grad_norm": 0.7015618507612497, | |
| "learning_rate": 4.528077426915412e-08, | |
| "loss": 0.3012, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 1.94553706505295, | |
| "grad_norm": 0.6973060473623738, | |
| "learning_rate": 4.0392628398699954e-08, | |
| "loss": 0.3101, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 1.9485627836611195, | |
| "grad_norm": 0.6354644134852665, | |
| "learning_rate": 3.578307415612714e-08, | |
| "loss": 0.2813, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 1.951588502269289, | |
| "grad_norm": 0.6855840723767516, | |
| "learning_rate": 3.1452240480577265e-08, | |
| "loss": 0.2901, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 1.9546142208774584, | |
| "grad_norm": 0.6761688620323423, | |
| "learning_rate": 2.7400248514776184e-08, | |
| "loss": 0.3004, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 1.9576399394856279, | |
| "grad_norm": 0.7102689672654615, | |
| "learning_rate": 2.3627211601651157e-08, | |
| "loss": 0.2984, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 1.9606656580937973, | |
| "grad_norm": 0.7280467684124374, | |
| "learning_rate": 2.013323528115674e-08, | |
| "loss": 0.3107, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.9636913767019668, | |
| "grad_norm": 0.7070334829378867, | |
| "learning_rate": 1.6918417287318245e-08, | |
| "loss": 0.2846, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.9667170953101363, | |
| "grad_norm": 0.6971514067656974, | |
| "learning_rate": 1.3982847545507271e-08, | |
| "loss": 0.3008, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.9697428139183057, | |
| "grad_norm": 0.6326379232153385, | |
| "learning_rate": 1.1326608169920373e-08, | |
| "loss": 0.2828, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.972768532526475, | |
| "grad_norm": 0.6841439914526215, | |
| "learning_rate": 8.949773461282008e-09, | |
| "loss": 0.294, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.9757942511346445, | |
| "grad_norm": 0.6643615458448754, | |
| "learning_rate": 6.8524099047695415e-09, | |
| "loss": 0.2998, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.978819969742814, | |
| "grad_norm": 0.6718318424158599, | |
| "learning_rate": 5.034576168149175e-09, | |
| "loss": 0.2891, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.9818456883509834, | |
| "grad_norm": 0.657787901330382, | |
| "learning_rate": 3.4963231001383657e-09, | |
| "loss": 0.293, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.9848714069591527, | |
| "grad_norm": 0.6423382609769236, | |
| "learning_rate": 2.237693728981416e-09, | |
| "loss": 0.288, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.9878971255673221, | |
| "grad_norm": 0.6845802455823244, | |
| "learning_rate": 1.2587232612493172e-09, | |
| "loss": 0.2941, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.9909228441754916, | |
| "grad_norm": 0.674889769018907, | |
| "learning_rate": 5.594390808494332e-10, | |
| "loss": 0.3009, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.993948562783661, | |
| "grad_norm": 0.646210836761292, | |
| "learning_rate": 1.3986074826388697e-10, | |
| "loss": 0.2795, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.9969742813918305, | |
| "grad_norm": 0.6815241240361029, | |
| "learning_rate": 0.0, | |
| "loss": 0.3057, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.9969742813918305, | |
| "step": 660, | |
| "total_flos": 1.3739740488951398e+17, | |
| "train_loss": 0.385365203248732, | |
| "train_runtime": 1808.6656, | |
| "train_samples_per_second": 46.746, | |
| "train_steps_per_second": 0.365 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 660, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.3739740488951398e+17, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
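
The table above is the tail of a Hugging Face `transformers` `trainer_state.json`: the learning rate decays to `0.0` at the final step (660), after which the `Trainer` appends one aggregate entry (`train_loss`, `train_runtime`, throughput) to `log_history` alongside the per-step entries. As a minimal sketch of how such a file can be consumed, assuming it is saved locally as `trainer_state.json` (the path is an assumption, not stated above), the stdlib-only snippet below separates the per-step entries from the aggregate entry and prints a summary:

```python
import json

# Hypothetical path: adjust to wherever this trainer_state.json lives.
STATE_PATH = "trainer_state.json"

with open(STATE_PATH) as f:
    state = json.load(f)

# Per-step entries carry a "loss" key; the final aggregate entry does not.
steps = [e for e in state["log_history"] if "loss" in e]
final = steps[-1]
summary = state["log_history"][-1]  # aggregate entry appended at end of training

print(f"steps logged:    {len(steps)}")
print(f"final step loss: {final['loss']:.4f} (step {final['step']})")
print(f"mean train loss: {summary['train_loss']:.4f}")
print(f"runtime:         {summary['train_runtime']:.1f}s "
      f"({summary['train_samples_per_second']:.1f} samples/s)")
```

Run against the state shown here, this would report 660 logged steps, a final step loss of 0.3057, a mean train loss of 0.3854, and a runtime of about 1808.7 s at roughly 46.7 samples/s.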