{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9972968675465098,
  "eval_steps": 500,
  "global_step": 196,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005088249324216886,
      "grad_norm": 6.376819304774717,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.2212,
      "step": 1
    },
    {
      "epoch": 0.010176498648433773,
      "grad_norm": 9.156958532034324,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.2331,
      "step": 2
    },
    {
      "epoch": 0.01526474797265066,
      "grad_norm": 8.548654865637552,
      "learning_rate": 5e-06,
      "loss": 1.2903,
      "step": 3
    },
    {
      "epoch": 0.020352997296867546,
      "grad_norm": 7.6472800849066225,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.0864,
      "step": 4
    },
    {
      "epoch": 0.025441246621084435,
      "grad_norm": 14.279902584657638,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.0768,
      "step": 5
    },
    {
      "epoch": 0.03052949594530132,
      "grad_norm": 3.7335646723367324,
      "learning_rate": 1e-05,
      "loss": 0.9323,
      "step": 6
    },
    {
      "epoch": 0.035617745269518206,
      "grad_norm": 0.8627027447832171,
      "learning_rate": 9.999316524962347e-06,
      "loss": 0.4469,
      "step": 7
    },
    {
      "epoch": 0.04070599459373509,
      "grad_norm": 0.8221611562946466,
      "learning_rate": 9.99726628670463e-06,
      "loss": 0.4761,
      "step": 8
    },
    {
      "epoch": 0.04579424391795198,
      "grad_norm": 2.733350766966074,
      "learning_rate": 9.993849845741525e-06,
      "loss": 0.8348,
      "step": 9
    },
    {
      "epoch": 0.05088249324216887,
      "grad_norm": 2.723165619967254,
      "learning_rate": 9.989068136093873e-06,
      "loss": 0.8526,
      "step": 10
    },
    {
      "epoch": 0.055970742566385755,
      "grad_norm": 0.9401228979509004,
      "learning_rate": 9.98292246503335e-06,
      "loss": 0.4237,
      "step": 11
    },
    {
      "epoch": 0.06105899189060264,
      "grad_norm": 2.4866400038081946,
      "learning_rate": 9.975414512725058e-06,
      "loss": 0.7775,
      "step": 12
    },
    {
      "epoch": 0.06614724121481953,
      "grad_norm": 2.2343217891723306,
      "learning_rate": 9.966546331768192e-06,
      "loss": 0.8362,
      "step": 13
    },
    {
      "epoch": 0.07123549053903641,
      "grad_norm": 2.1476340113996155,
      "learning_rate": 9.956320346634877e-06,
      "loss": 0.6924,
      "step": 14
    },
    {
      "epoch": 0.0763237398632533,
      "grad_norm": 2.2660253147542924,
      "learning_rate": 9.944739353007344e-06,
      "loss": 0.8006,
      "step": 15
    },
    {
      "epoch": 0.08141198918747018,
      "grad_norm": 1.962741499851516,
      "learning_rate": 9.931806517013612e-06,
      "loss": 0.7972,
      "step": 16
    },
    {
      "epoch": 0.08650023851168707,
      "grad_norm": 2.3043159657707557,
      "learning_rate": 9.917525374361913e-06,
      "loss": 0.8331,
      "step": 17
    },
    {
      "epoch": 0.09158848783590395,
      "grad_norm": 2.50787182166959,
      "learning_rate": 9.901899829374048e-06,
      "loss": 0.6467,
      "step": 18
    },
    {
      "epoch": 0.09667673716012085,
      "grad_norm": 2.067143884273235,
      "learning_rate": 9.884934153917998e-06,
      "loss": 0.5831,
      "step": 19
    },
    {
      "epoch": 0.10176498648433774,
      "grad_norm": 1.9355150939047303,
      "learning_rate": 9.86663298624003e-06,
      "loss": 0.6237,
      "step": 20
    },
    {
      "epoch": 0.10685323580855462,
      "grad_norm": 2.4286709059735885,
      "learning_rate": 9.847001329696653e-06,
      "loss": 0.7907,
      "step": 21
    },
    {
      "epoch": 0.11194148513277151,
      "grad_norm": 2.1638545004928136,
      "learning_rate": 9.826044551386743e-06,
      "loss": 0.6921,
      "step": 22
    },
    {
      "epoch": 0.11702973445698839,
      "grad_norm": 1.9157802742965189,
      "learning_rate": 9.803768380684242e-06,
      "loss": 0.6975,
      "step": 23
    },
    {
      "epoch": 0.12211798378120528,
      "grad_norm": 1.9485581275135593,
      "learning_rate": 9.780178907671788e-06,
      "loss": 0.6949,
      "step": 24
    },
    {
      "epoch": 0.12720623310542217,
      "grad_norm": 2.048988282393849,
      "learning_rate": 9.755282581475769e-06,
      "loss": 0.6428,
      "step": 25
    },
    {
      "epoch": 0.13229448242963907,
      "grad_norm": 2.058756336489088,
      "learning_rate": 9.729086208503174e-06,
      "loss": 0.5476,
      "step": 26
    },
    {
      "epoch": 0.13738273175385593,
      "grad_norm": 1.8921946476223952,
      "learning_rate": 9.701596950580807e-06,
      "loss": 0.5733,
      "step": 27
    },
    {
      "epoch": 0.14247098107807282,
      "grad_norm": 2.200558594868892,
      "learning_rate": 9.672822322997305e-06,
      "loss": 0.6763,
      "step": 28
    },
    {
      "epoch": 0.14755923040228971,
      "grad_norm": 2.2478552460928927,
      "learning_rate": 9.642770192448537e-06,
      "loss": 0.6203,
      "step": 29
    },
    {
      "epoch": 0.1526474797265066,
      "grad_norm": 1.174257433300181,
      "learning_rate": 9.611448774886925e-06,
      "loss": 0.4586,
      "step": 30
    },
    {
      "epoch": 0.15773572905072347,
      "grad_norm": 1.0023424088545339,
      "learning_rate": 9.578866633275289e-06,
      "loss": 0.451,
      "step": 31
    },
    {
      "epoch": 0.16282397837494036,
      "grad_norm": 2.0005021535427128,
      "learning_rate": 9.545032675245814e-06,
      "loss": 0.7157,
      "step": 32
    },
    {
      "epoch": 0.16791222769915726,
      "grad_norm": 1.9642144233569843,
      "learning_rate": 9.509956150664796e-06,
      "loss": 0.689,
      "step": 33
    },
    {
      "epoch": 0.17300047702337415,
      "grad_norm": 2.3153254847421914,
      "learning_rate": 9.473646649103819e-06,
      "loss": 0.6978,
      "step": 34
    },
    {
      "epoch": 0.17808872634759104,
      "grad_norm": 2.0920285074327105,
      "learning_rate": 9.43611409721806e-06,
      "loss": 0.6146,
      "step": 35
    },
    {
      "epoch": 0.1831769756718079,
      "grad_norm": 1.9609871683991977,
      "learning_rate": 9.397368756032445e-06,
      "loss": 0.6792,
      "step": 36
    },
    {
      "epoch": 0.1882652249960248,
      "grad_norm": 2.139412662348695,
      "learning_rate": 9.357421218136387e-06,
      "loss": 0.5454,
      "step": 37
    },
    {
      "epoch": 0.1933534743202417,
      "grad_norm": 1.0220339211752483,
      "learning_rate": 9.31628240478787e-06,
      "loss": 0.4778,
      "step": 38
    },
    {
      "epoch": 0.19844172364445858,
      "grad_norm": 2.0404826104161025,
      "learning_rate": 9.273963562927695e-06,
      "loss": 0.6245,
      "step": 39
    },
    {
      "epoch": 0.20352997296867548,
      "grad_norm": 2.281403761746208,
      "learning_rate": 9.230476262104678e-06,
      "loss": 0.6804,
      "step": 40
    },
    {
      "epoch": 0.20861822229289234,
      "grad_norm": 3.6084177219171947,
      "learning_rate": 9.185832391312644e-06,
      "loss": 0.6272,
      "step": 41
    },
    {
      "epoch": 0.21370647161710923,
      "grad_norm": 1.8239489766079142,
      "learning_rate": 9.140044155740102e-06,
      "loss": 0.711,
      "step": 42
    },
    {
      "epoch": 0.21879472094132613,
      "grad_norm": 2.070200136807796,
      "learning_rate": 9.093124073433464e-06,
      "loss": 0.6276,
      "step": 43
    },
    {
      "epoch": 0.22388297026554302,
      "grad_norm": 1.8227868131794736,
      "learning_rate": 9.045084971874738e-06,
      "loss": 0.568,
      "step": 44
    },
    {
      "epoch": 0.2289712195897599,
      "grad_norm": 2.0312409937908273,
      "learning_rate": 8.995939984474624e-06,
      "loss": 0.5819,
      "step": 45
    },
    {
      "epoch": 0.23405946891397678,
      "grad_norm": 2.0591484505591984,
      "learning_rate": 8.94570254698197e-06,
      "loss": 0.6189,
      "step": 46
    },
    {
      "epoch": 0.23914771823819367,
      "grad_norm": 1.8413511377406058,
      "learning_rate": 8.894386393810563e-06,
      "loss": 0.6415,
      "step": 47
    },
    {
      "epoch": 0.24423596756241056,
      "grad_norm": 1.871308715661416,
      "learning_rate": 8.842005554284296e-06,
      "loss": 0.7269,
      "step": 48
    },
    {
      "epoch": 0.24932421688662745,
      "grad_norm": 0.9010820982007217,
      "learning_rate": 8.788574348801676e-06,
      "loss": 0.4621,
      "step": 49
    },
    {
      "epoch": 0.25441246621084435,
      "grad_norm": 1.9355973891126907,
      "learning_rate": 8.734107384920771e-06,
      "loss": 0.4703,
      "step": 50
    },
    {
      "epoch": 0.2595007155350612,
      "grad_norm": 1.6598644446012476,
      "learning_rate": 8.67861955336566e-06,
      "loss": 0.4163,
      "step": 51
    },
    {
      "epoch": 0.26458896485927813,
      "grad_norm": 0.8262469830378751,
      "learning_rate": 8.622126023955446e-06,
      "loss": 0.466,
      "step": 52
    },
    {
      "epoch": 0.269677214183495,
      "grad_norm": 1.9301500703636114,
      "learning_rate": 8.564642241456986e-06,
      "loss": 0.5155,
      "step": 53
    },
    {
      "epoch": 0.27476546350771186,
      "grad_norm": 2.0045597308796457,
      "learning_rate": 8.506183921362443e-06,
      "loss": 0.5895,
      "step": 54
    },
    {
      "epoch": 0.2798537128319288,
      "grad_norm": 1.8768604434761909,
      "learning_rate": 8.446767045592829e-06,
      "loss": 0.5402,
      "step": 55
    },
    {
      "epoch": 0.28494196215614565,
      "grad_norm": 2.48459660000141,
      "learning_rate": 8.386407858128707e-06,
      "loss": 0.4908,
      "step": 56
    },
    {
      "epoch": 0.29003021148036257,
      "grad_norm": 2.018246694569729,
      "learning_rate": 8.325122860569241e-06,
      "loss": 0.6405,
      "step": 57
    },
    {
      "epoch": 0.29511846080457943,
      "grad_norm": 2.3388459880506542,
      "learning_rate": 8.262928807620843e-06,
      "loss": 0.6017,
      "step": 58
    },
    {
      "epoch": 0.3002067101287963,
      "grad_norm": 2.3665994679506808,
      "learning_rate": 8.199842702516584e-06,
      "loss": 0.5229,
      "step": 59
    },
    {
      "epoch": 0.3052949594530132,
      "grad_norm": 2.0184106112937252,
      "learning_rate": 8.135881792367686e-06,
      "loss": 0.6064,
      "step": 60
    },
    {
      "epoch": 0.3103832087772301,
      "grad_norm": 1.8643271820146765,
      "learning_rate": 8.071063563448341e-06,
      "loss": 0.5915,
      "step": 61
    },
    {
      "epoch": 0.31547145810144694,
      "grad_norm": 1.722265638914434,
      "learning_rate": 8.005405736415127e-06,
      "loss": 0.4657,
      "step": 62
    },
    {
      "epoch": 0.32055970742566386,
      "grad_norm": 1.706733120977353,
      "learning_rate": 7.938926261462366e-06,
      "loss": 0.499,
      "step": 63
    },
    {
      "epoch": 0.32564795674988073,
      "grad_norm": 2.240312731365644,
      "learning_rate": 7.871643313414718e-06,
      "loss": 0.5416,
      "step": 64
    },
    {
      "epoch": 0.33073620607409765,
      "grad_norm": 1.7568581955331595,
      "learning_rate": 7.803575286758365e-06,
      "loss": 0.4993,
      "step": 65
    },
    {
      "epoch": 0.3358244553983145,
      "grad_norm": 1.7451061943054298,
      "learning_rate": 7.734740790612137e-06,
      "loss": 0.5667,
      "step": 66
    },
    {
      "epoch": 0.3409127047225314,
      "grad_norm": 2.193186411728282,
      "learning_rate": 7.66515864363997e-06,
      "loss": 0.5462,
      "step": 67
    },
    {
      "epoch": 0.3460009540467483,
      "grad_norm": 1.9643747559281786,
      "learning_rate": 7.594847868906076e-06,
      "loss": 0.6262,
      "step": 68
    },
    {
      "epoch": 0.35108920337096516,
      "grad_norm": 1.95786323892343,
      "learning_rate": 7.52382768867422e-06,
      "loss": 0.6642,
      "step": 69
    },
    {
      "epoch": 0.3561774526951821,
      "grad_norm": 1.0006199818159889,
      "learning_rate": 7.452117519152542e-06,
      "loss": 0.507,
      "step": 70
    },
    {
      "epoch": 0.36126570201939895,
      "grad_norm": 1.9223672992099927,
      "learning_rate": 7.379736965185369e-06,
      "loss": 0.7529,
      "step": 71
    },
    {
      "epoch": 0.3663539513436158,
      "grad_norm": 2.0674107718199104,
      "learning_rate": 7.30670581489344e-06,
      "loss": 0.4531,
      "step": 72
    },
    {
      "epoch": 0.37144220066783273,
      "grad_norm": 1.9129373169630381,
      "learning_rate": 7.233044034264034e-06,
      "loss": 0.5099,
      "step": 73
    },
    {
      "epoch": 0.3765304499920496,
      "grad_norm": 1.7438996600779544,
      "learning_rate": 7.158771761692464e-06,
      "loss": 0.5289,
      "step": 74
    },
    {
      "epoch": 0.3816186993162665,
      "grad_norm": 1.8111556114494765,
      "learning_rate": 7.083909302476453e-06,
      "loss": 0.5465,
      "step": 75
    },
    {
      "epoch": 0.3867069486404834,
      "grad_norm": 1.9016815642934104,
      "learning_rate": 7.008477123264849e-06,
      "loss": 0.5224,
      "step": 76
    },
    {
      "epoch": 0.39179519796470025,
      "grad_norm": 1.773824146906631,
      "learning_rate": 6.932495846462262e-06,
      "loss": 0.618,
      "step": 77
    },
    {
      "epoch": 0.39688344728891717,
      "grad_norm": 1.6675568834230474,
      "learning_rate": 6.855986244591104e-06,
      "loss": 0.4824,
      "step": 78
    },
    {
      "epoch": 0.40197169661313403,
      "grad_norm": 1.710579271480641,
      "learning_rate": 6.778969234612583e-06,
      "loss": 0.5667,
      "step": 79
    },
    {
      "epoch": 0.40705994593735095,
      "grad_norm": 1.6868384794732627,
      "learning_rate": 6.701465872208216e-06,
      "loss": 0.5518,
      "step": 80
    },
    {
      "epoch": 0.4121481952615678,
      "grad_norm": 1.6861752383360624,
      "learning_rate": 6.6234973460234184e-06,
      "loss": 0.4939,
      "step": 81
    },
    {
      "epoch": 0.4172364445857847,
      "grad_norm": 0.8972052976655005,
      "learning_rate": 6.545084971874738e-06,
      "loss": 0.4659,
      "step": 82
    },
    {
      "epoch": 0.4223246939100016,
      "grad_norm": 0.8249541082696409,
      "learning_rate": 6.466250186922325e-06,
      "loss": 0.4719,
      "step": 83
    },
    {
      "epoch": 0.42741294323421847,
      "grad_norm": 1.726258626642056,
      "learning_rate": 6.387014543809224e-06,
      "loss": 0.4876,
      "step": 84
    },
    {
      "epoch": 0.4325011925584354,
      "grad_norm": 1.8496356628808968,
      "learning_rate": 6.3073997047691e-06,
      "loss": 0.5125,
      "step": 85
    },
    {
      "epoch": 0.43758944188265225,
      "grad_norm": 1.889689508642681,
      "learning_rate": 6.227427435703997e-06,
      "loss": 0.5656,
      "step": 86
    },
    {
      "epoch": 0.4426776912068691,
      "grad_norm": 2.214394784133929,
      "learning_rate": 6.147119600233758e-06,
      "loss": 0.5406,
      "step": 87
    },
    {
      "epoch": 0.44776594053108604,
      "grad_norm": 0.8114218652395482,
      "learning_rate": 6.066498153718735e-06,
      "loss": 0.4521,
      "step": 88
    },
    {
      "epoch": 0.4528541898553029,
      "grad_norm": 0.7973979099809638,
      "learning_rate": 5.985585137257401e-06,
      "loss": 0.4518,
      "step": 89
    },
    {
      "epoch": 0.4579424391795198,
      "grad_norm": 1.8967354755477805,
      "learning_rate": 5.904402671660551e-06,
      "loss": 0.5441,
      "step": 90
    },
    {
      "epoch": 0.4630306885037367,
      "grad_norm": 1.716070744396094,
      "learning_rate": 5.82297295140367e-06,
      "loss": 0.6422,
      "step": 91
    },
    {
      "epoch": 0.46811893782795355,
      "grad_norm": 0.8154244178432215,
      "learning_rate": 5.74131823855921e-06,
      "loss": 0.4769,
      "step": 92
    },
    {
      "epoch": 0.47320718715217047,
      "grad_norm": 1.7620206363802955,
      "learning_rate": 5.659460856710346e-06,
      "loss": 0.4876,
      "step": 93
    },
    {
      "epoch": 0.47829543647638734,
      "grad_norm": 1.8793277177212415,
      "learning_rate": 5.577423184847932e-06,
      "loss": 0.5587,
      "step": 94
    },
    {
      "epoch": 0.48338368580060426,
      "grad_norm": 2.4056618280214623,
      "learning_rate": 5.495227651252315e-06,
      "loss": 0.5282,
      "step": 95
    },
    {
      "epoch": 0.4884719351248211,
      "grad_norm": 1.731399212916782,
      "learning_rate": 5.412896727361663e-06,
      "loss": 0.4832,
      "step": 96
    },
    {
      "epoch": 0.493560184449038,
      "grad_norm": 2.171469635883203,
      "learning_rate": 5.3304529216284974e-06,
      "loss": 0.5406,
      "step": 97
    },
    {
      "epoch": 0.4986484337732549,
      "grad_norm": 0.7396654418690498,
      "learning_rate": 5.247918773366112e-06,
      "loss": 0.4398,
      "step": 98
    },
    {
      "epoch": 0.5037366830974718,
      "grad_norm": 1.9873438409847994,
      "learning_rate": 5.165316846586541e-06,
      "loss": 0.5442,
      "step": 99
    },
    {
      "epoch": 0.5088249324216887,
      "grad_norm": 0.7557871402911565,
      "learning_rate": 5.082669723831793e-06,
      "loss": 0.4461,
      "step": 100
    },
    {
      "epoch": 0.5139131817459055,
      "grad_norm": 1.9632398561160358,
      "learning_rate": 5e-06,
      "loss": 0.657,
      "step": 101
    },
    {
      "epoch": 0.5190014310701224,
      "grad_norm": 2.7445952318033537,
      "learning_rate": 4.917330276168208e-06,
      "loss": 0.5372,
      "step": 102
    },
    {
      "epoch": 0.5240896803943393,
      "grad_norm": 1.9945819420280821,
      "learning_rate": 4.8346831534134595e-06,
      "loss": 0.5031,
      "step": 103
    },
    {
      "epoch": 0.5291779297185563,
      "grad_norm": 1.8362856740868725,
      "learning_rate": 4.752081226633888e-06,
      "loss": 0.5084,
      "step": 104
    },
    {
      "epoch": 0.5342661790427731,
      "grad_norm": 2.203070609994655,
      "learning_rate": 4.669547078371503e-06,
      "loss": 0.465,
      "step": 105
    },
    {
      "epoch": 0.53935442836699,
      "grad_norm": 1.892770767378631,
      "learning_rate": 4.587103272638339e-06,
      "loss": 0.5403,
      "step": 106
    },
    {
      "epoch": 0.5444426776912069,
      "grad_norm": 1.9456031139611378,
      "learning_rate": 4.504772348747687e-06,
      "loss": 0.49,
      "step": 107
    },
    {
      "epoch": 0.5495309270154237,
      "grad_norm": 2.0182884689849514,
      "learning_rate": 4.42257681515207e-06,
      "loss": 0.5333,
      "step": 108
    },
    {
      "epoch": 0.5546191763396406,
      "grad_norm": 1.6146872630281819,
      "learning_rate": 4.340539143289655e-06,
      "loss": 0.5149,
      "step": 109
    },
    {
      "epoch": 0.5597074256638576,
      "grad_norm": 1.6476137113275473,
      "learning_rate": 4.25868176144079e-06,
      "loss": 0.6769,
      "step": 110
    },
    {
      "epoch": 0.5647956749880744,
      "grad_norm": 2.0658942705086423,
      "learning_rate": 4.17702704859633e-06,
      "loss": 0.6019,
      "step": 111
    },
    {
      "epoch": 0.5698839243122913,
      "grad_norm": 1.7053065530036995,
      "learning_rate": 4.0955973283394525e-06,
      "loss": 0.502,
      "step": 112
    },
    {
      "epoch": 0.5749721736365082,
      "grad_norm": 1.7794992744029052,
      "learning_rate": 4.0144148627426e-06,
      "loss": 0.6971,
      "step": 113
    },
    {
      "epoch": 0.5800604229607251,
      "grad_norm": 2.0022976564849118,
      "learning_rate": 3.9335018462812664e-06,
      "loss": 0.61,
      "step": 114
    },
    {
      "epoch": 0.5851486722849419,
      "grad_norm": 2.2216074964170307,
      "learning_rate": 3.852880399766243e-06,
      "loss": 0.6448,
      "step": 115
    },
    {
      "epoch": 0.5902369216091589,
      "grad_norm": 2.0218425771246347,
      "learning_rate": 3.7725725642960047e-06,
      "loss": 0.57,
      "step": 116
    },
    {
      "epoch": 0.5953251709333758,
      "grad_norm": 1.8270891917131469,
      "learning_rate": 3.6926002952309015e-06,
      "loss": 0.6744,
      "step": 117
    },
    {
      "epoch": 0.6004134202575926,
      "grad_norm": 1.9064779086360353,
      "learning_rate": 3.6129854561907786e-06,
      "loss": 0.5952,
      "step": 118
    },
    {
      "epoch": 0.6055016695818095,
      "grad_norm": 2.0828115564813103,
      "learning_rate": 3.533749813077677e-06,
      "loss": 0.5799,
      "step": 119
    },
    {
      "epoch": 0.6105899189060264,
      "grad_norm": 2.355735946808142,
      "learning_rate": 3.4549150281252635e-06,
      "loss": 0.5214,
      "step": 120
    },
    {
      "epoch": 0.6156781682302432,
      "grad_norm": 2.342756754310459,
      "learning_rate": 3.3765026539765832e-06,
      "loss": 0.5773,
      "step": 121
    },
    {
      "epoch": 0.6207664175544602,
      "grad_norm": 1.9448686299055988,
      "learning_rate": 3.298534127791785e-06,
      "loss": 0.4619,
      "step": 122
    },
    {
      "epoch": 0.6258546668786771,
      "grad_norm": 1.6976170484807502,
      "learning_rate": 3.2210307653874175e-06,
      "loss": 0.5038,
      "step": 123
    },
    {
      "epoch": 0.6309429162028939,
      "grad_norm": 1.5907623542619864,
      "learning_rate": 3.1440137554088957e-06,
      "loss": 0.5228,
      "step": 124
    },
    {
      "epoch": 0.6360311655271108,
      "grad_norm": 2.1506068679578614,
      "learning_rate": 3.06750415353774e-06,
      "loss": 0.4815,
      "step": 125
    },
    {
      "epoch": 0.6411194148513277,
      "grad_norm": 1.7867004118397054,
      "learning_rate": 2.991522876735154e-06,
      "loss": 0.4804,
      "step": 126
    },
    {
      "epoch": 0.6462076641755446,
      "grad_norm": 1.7461855814609817,
      "learning_rate": 2.9160906975235493e-06,
      "loss": 0.5897,
      "step": 127
    },
    {
      "epoch": 0.6512959134997615,
      "grad_norm": 1.8529499964190723,
      "learning_rate": 2.8412282383075362e-06,
      "loss": 0.5804,
      "step": 128
    },
    {
      "epoch": 0.6563841628239784,
      "grad_norm": 1.7011820416779206,
      "learning_rate": 2.766955965735968e-06,
      "loss": 0.3779,
      "step": 129
    },
    {
      "epoch": 0.6614724121481953,
      "grad_norm": 2.1878021538228594,
      "learning_rate": 2.693294185106562e-06,
      "loss": 0.5192,
      "step": 130
    },
    {
      "epoch": 0.6665606614724121,
      "grad_norm": 1.89071352186738,
      "learning_rate": 2.6202630348146323e-06,
      "loss": 0.4808,
      "step": 131
    },
    {
      "epoch": 0.671648910796629,
      "grad_norm": 1.712076689870655,
      "learning_rate": 2.5478824808474613e-06,
      "loss": 0.4778,
      "step": 132
    },
    {
      "epoch": 0.676737160120846,
      "grad_norm": 2.2979388948141497,
      "learning_rate": 2.476172311325783e-06,
      "loss": 0.5166,
      "step": 133
    },
    {
      "epoch": 0.6818254094450628,
      "grad_norm": 1.8536884054739906,
      "learning_rate": 2.4051521310939258e-06,
      "loss": 0.4676,
      "step": 134
    },
    {
      "epoch": 0.6869136587692797,
      "grad_norm": 0.8646900812662967,
      "learning_rate": 2.3348413563600324e-06,
      "loss": 0.4461,
      "step": 135
    },
    {
      "epoch": 0.6920019080934966,
      "grad_norm": 2.01928532753317,
      "learning_rate": 2.265259209387867e-06,
      "loss": 0.5068,
      "step": 136
    },
    {
      "epoch": 0.6970901574177135,
      "grad_norm": 1.8168602175973523,
      "learning_rate": 2.1964247132416373e-06,
      "loss": 0.4525,
      "step": 137
    },
    {
      "epoch": 0.7021784067419303,
      "grad_norm": 1.7126759434503427,
      "learning_rate": 2.1283566865852824e-06,
      "loss": 0.4317,
      "step": 138
    },
    {
      "epoch": 0.7072666560661472,
      "grad_norm": 1.8291580014900488,
      "learning_rate": 2.061073738537635e-06,
      "loss": 0.4603,
      "step": 139
    },
    {
      "epoch": 0.7123549053903642,
      "grad_norm": 1.724099417960198,
      "learning_rate": 1.9945942635848745e-06,
      "loss": 0.5586,
      "step": 140
    },
    {
      "epoch": 0.717443154714581,
      "grad_norm": 2.1180511181848334,
      "learning_rate": 1.928936436551661e-06,
      "loss": 0.6016,
      "step": 141
    },
    {
      "epoch": 0.7225314040387979,
      "grad_norm": 3.880955775488794,
      "learning_rate": 1.864118207632315e-06,
      "loss": 0.4455,
      "step": 142
    },
    {
      "epoch": 0.7276196533630148,
      "grad_norm": 3.455742706901813,
      "learning_rate": 1.8001572974834169e-06,
      "loss": 0.5335,
      "step": 143
    },
    {
      "epoch": 0.7327079026872316,
      "grad_norm": 2.0932962158103767,
      "learning_rate": 1.7370711923791567e-06,
      "loss": 0.5403,
      "step": 144
    },
    {
      "epoch": 0.7377961520114485,
      "grad_norm": 1.9169862335782764,
      "learning_rate": 1.6748771394307584e-06,
      "loss": 0.6151,
      "step": 145
    },
    {
      "epoch": 0.7428844013356655,
      "grad_norm": 1.6739274256984877,
      "learning_rate": 1.6135921418712959e-06,
      "loss": 0.528,
      "step": 146
    },
    {
      "epoch": 0.7479726506598824,
      "grad_norm": 1.9022344474756825,
      "learning_rate": 1.5532329544071712e-06,
      "loss": 0.5573,
      "step": 147
    },
    {
      "epoch": 0.7530608999840992,
      "grad_norm": 1.9206290312751073,
      "learning_rate": 1.4938160786375571e-06,
      "loss": 0.5426,
      "step": 148
    },
    {
      "epoch": 0.7581491493083161,
      "grad_norm": 0.8151237000504089,
      "learning_rate": 1.4353577585430152e-06,
      "loss": 0.476,
      "step": 149
    },
    {
      "epoch": 0.763237398632533,
      "grad_norm": 1.8174998311715647,
      "learning_rate": 1.3778739760445552e-06,
      "loss": 0.6476,
      "step": 150
    },
    {
      "epoch": 0.7683256479567498,
      "grad_norm": 1.8511365294091442,
      "learning_rate": 1.321380446634342e-06,
      "loss": 0.6219,
      "step": 151
    },
    {
      "epoch": 0.7734138972809668,
      "grad_norm": 1.9281617529336113,
      "learning_rate": 1.2658926150792321e-06,
      "loss": 0.5665,
      "step": 152
    },
    {
      "epoch": 0.7785021466051837,
      "grad_norm": 1.975480725256036,
      "learning_rate": 1.2114256511983274e-06,
      "loss": 0.4917,
      "step": 153
    },
    {
      "epoch": 0.7835903959294005,
      "grad_norm": 1.881712184210223,
      "learning_rate": 1.157994445715706e-06,
      "loss": 0.5117,
      "step": 154
    },
    {
      "epoch": 0.7886786452536174,
      "grad_norm": 1.6096859946428606,
      "learning_rate": 1.1056136061894386e-06,
      "loss": 0.4197,
      "step": 155
    },
    {
      "epoch": 0.7937668945778343,
      "grad_norm": 1.5926249355515094,
      "learning_rate": 1.0542974530180327e-06,
      "loss": 0.5571,
      "step": 156
    },
    {
      "epoch": 0.7988551439020511,
      "grad_norm": 1.8331014781023134,
      "learning_rate": 1.0040600155253766e-06,
      "loss": 0.4837,
      "step": 157
    },
    {
      "epoch": 0.8039433932262681,
      "grad_norm": 1.7661780666068934,
      "learning_rate": 9.549150281252633e-07,
      "loss": 0.6176,
      "step": 158
    },
    {
      "epoch": 0.809031642550485,
      "grad_norm": 1.6128238891391746,
      "learning_rate": 9.068759265665384e-07,
      "loss": 0.4833,
      "step": 159
    },
    {
      "epoch": 0.8141198918747019,
      "grad_norm": 1.9433571216817562,
      "learning_rate": 8.599558442598998e-07,
      "loss": 0.5556,
      "step": 160
    },
    {
      "epoch": 0.8192081411989187,
      "grad_norm": 0.7908943487803456,
      "learning_rate": 8.141676086873574e-07,
      "loss": 0.441,
      "step": 161
    },
    {
      "epoch": 0.8242963905231356,
      "grad_norm": 1.6159487866825593,
      "learning_rate": 7.695237378953224e-07,
      "loss": 0.4325,
      "step": 162
    },
    {
      "epoch": 0.8293846398473526,
      "grad_norm": 1.7926977872061616,
      "learning_rate": 7.260364370723044e-07,
      "loss": 0.4728,
      "step": 163
    },
    {
      "epoch": 0.8344728891715694,
      "grad_norm": 0.79042068129705,
      "learning_rate": 6.837175952121305e-07,
      "loss": 0.4435,
      "step": 164
    },
    {
      "epoch": 0.8395611384957863,
      "grad_norm": 1.814089246270096,
      "learning_rate": 6.425787818636131e-07,
      "loss": 0.5072,
      "step": 165
    },
    {
      "epoch": 0.8446493878200032,
      "grad_norm": 1.5747711545454517,
      "learning_rate": 6.026312439675553e-07,
      "loss": 0.55,
      "step": 166
    },
    {
      "epoch": 0.84973763714422,
      "grad_norm": 1.8242377149400466,
      "learning_rate": 5.63885902781941e-07,
      "loss": 0.421,
      "step": 167
    },
    {
      "epoch": 0.8548258864684369,
      "grad_norm": 1.953214479706857,
      "learning_rate": 5.263533508961827e-07,
      "loss": 0.5901,
      "step": 168
    },
    {
      "epoch": 0.8599141357926539,
      "grad_norm": 2.2955242802982565,
      "learning_rate": 4.900438493352056e-07,
      "loss": 0.531,
      "step": 169
    },
    {
      "epoch": 0.8650023851168708,
      "grad_norm": 1.790553869422144,
      "learning_rate": 4.549673247541875e-07,
      "loss": 0.4578,
      "step": 170
    },
    {
      "epoch": 0.8700906344410876,
      "grad_norm": 1.9079751423849147,
      "learning_rate": 4.211333667247125e-07,
      "loss": 0.6592,
      "step": 171
    },
    {
      "epoch": 0.8751788837653045,
      "grad_norm": 1.5586622664999814,
      "learning_rate": 3.885512251130763e-07,
      "loss": 0.4316,
      "step": 172
    },
    {
      "epoch": 0.8802671330895214,
      "grad_norm": 2.0693911883749134,
      "learning_rate": 3.572298075514652e-07,
      "loss": 0.5547,
      "step": 173
    },
    {
      "epoch": 0.8853553824137382,
      "grad_norm": 2.3827153776142027,
      "learning_rate": 3.271776770026963e-07,
      "loss": 0.5233,
      "step": 174
    },
    {
      "epoch": 0.8904436317379552,
      "grad_norm": 1.726968337714764,
      "learning_rate": 2.984030494191942e-07,
      "loss": 0.5607,
      "step": 175
    },
    {
      "epoch": 0.8955318810621721,
      "grad_norm": 1.678457800822472,
      "learning_rate": 2.7091379149682683e-07,
      "loss": 0.5635,
      "step": 176
    },
    {
      "epoch": 0.9006201303863889,
      "grad_norm": 1.6304062948996085,
      "learning_rate": 2.447174185242324e-07,
      "loss": 0.5258,
      "step": 177
    },
    {
      "epoch": 0.9057083797106058,
      "grad_norm": 1.7913849441660894,
      "learning_rate": 2.198210923282118e-07,
      "loss": 0.4812,
      "step": 178
    },
    {
      "epoch": 0.9107966290348227,
      "grad_norm": 1.7493396501788248,
      "learning_rate": 1.962316193157593e-07,
      "loss": 0.5507,
      "step": 179
    },
    {
      "epoch": 0.9158848783590396,
      "grad_norm": 1.7290444706260146,
      "learning_rate": 1.7395544861325718e-07,
      "loss": 0.4838,
      "step": 180
    },
    {
      "epoch": 0.9209731276832565,
      "grad_norm": 1.8780586855355685,
      "learning_rate": 1.5299867030334815e-07,
      "loss": 0.4751,
      "step": 181
    },
    {
      "epoch": 0.9260613770074734,
      "grad_norm": 1.9905075820968223,
      "learning_rate": 1.333670137599713e-07,
      "loss": 0.5576,
      "step": 182
    },
    {
      "epoch": 0.9311496263316903,
      "grad_norm": 1.4999219259699108,
      "learning_rate": 1.1506584608200366e-07,
      "loss": 0.4519,
      "step": 183
    },
    {
      "epoch": 0.9362378756559071,
      "grad_norm": 2.1199407248983437,
      "learning_rate": 9.810017062595322e-08,
      "loss": 0.5988,
      "step": 184
    },
    {
      "epoch": 0.941326124980124,
      "grad_norm": 1.8067808204066615,
      "learning_rate": 8.247462563808816e-08,
      "loss": 0.4948,
      "step": 185
    },
    {
      "epoch": 0.9464143743043409,
      "grad_norm": 1.763043188246363,
      "learning_rate": 6.819348298638839e-08,
      "loss": 0.4795,
      "step": 186
    },
    {
      "epoch": 0.9515026236285578,
      "grad_norm": 2.057258550991737,
      "learning_rate": 5.526064699265754e-08,
      "loss": 0.5685,
      "step": 187
    },
    {
      "epoch": 0.9565908729527747,
      "grad_norm": 2.4018383293989274,
      "learning_rate": 4.367965336512403e-08,
      "loss": 0.4988,
      "step": 188
    },
    {
      "epoch": 0.9616791222769916,
      "grad_norm": 1.7096952541229,
      "learning_rate": 3.345366823180929e-08,
      "loss": 0.4607,
      "step": 189
    },
    {
      "epoch": 0.9667673716012085,
      "grad_norm": 3.899388264590507,
      "learning_rate": 2.4585487274942922e-08,
      "loss": 0.4811,
      "step": 190
    },
    {
      "epoch": 0.9718556209254253,
      "grad_norm": 2.22640179961524,
      "learning_rate": 1.7077534966650767e-08,
      "loss": 0.4044,
      "step": 191
    },
    {
      "epoch": 0.9769438702496422,
      "grad_norm": 1.5616184108006692,
      "learning_rate": 1.0931863906127327e-08,
      "loss": 0.4917,
      "step": 192
    },
    {
      "epoch": 0.9820321195738592,
      "grad_norm": 1.7256336283483458,
      "learning_rate": 6.150154258476315e-09,
      "loss": 0.5599,
      "step": 193
    },
    {
      "epoch": 0.987120368898076,
      "grad_norm": 2.5414845965873263,
      "learning_rate": 2.7337132953697555e-09,
      "loss": 0.4509,
      "step": 194
    },
    {
      "epoch": 0.9922086182222929,
      "grad_norm": 2.24086616108618,
      "learning_rate": 6.834750376549793e-10,
      "loss": 0.5172,
      "step": 195
    },
    {
      "epoch": 0.9972968675465098,
      "grad_norm": 0.8175454134359621,
      "learning_rate": 0.0,
      "loss": 0.4358,
      "step": 196
    },
    {
      "epoch": 0.9972968675465098,
      "step": 196,
      "total_flos": 46920266465280.0,
      "train_loss": 0.5706830052088718,
      "train_runtime": 37228.8328,
      "train_samples_per_second": 0.676,
      "train_steps_per_second": 0.005
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 196,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 5000.0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 46920266465280.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}