{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 680,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0014716703458425313,
      "grad_norm": 4.058454990386963,
      "learning_rate": 0.0,
      "loss": 1.0378,
      "step": 1
    },
    {
      "epoch": 0.0029433406916850625,
      "grad_norm": 4.680859565734863,
      "learning_rate": 2.9411764705882356e-07,
      "loss": 0.9258,
      "step": 2
    },
    {
      "epoch": 0.004415011037527594,
      "grad_norm": 3.905587673187256,
      "learning_rate": 5.882352941176471e-07,
      "loss": 0.872,
      "step": 3
    },
    {
      "epoch": 0.005886681383370125,
      "grad_norm": 4.3216776847839355,
      "learning_rate": 8.823529411764707e-07,
      "loss": 0.934,
      "step": 4
    },
    {
      "epoch": 0.007358351729212656,
      "grad_norm": 4.033775329589844,
      "learning_rate": 1.1764705882352942e-06,
      "loss": 0.8904,
      "step": 5
    },
    {
      "epoch": 0.008830022075055188,
      "grad_norm": 3.678755283355713,
      "learning_rate": 1.4705882352941177e-06,
      "loss": 0.9328,
      "step": 6
    },
    {
      "epoch": 0.010301692420897719,
      "grad_norm": 3.711585521697998,
      "learning_rate": 1.7647058823529414e-06,
      "loss": 0.8471,
      "step": 7
    },
    {
      "epoch": 0.01177336276674025,
      "grad_norm": 4.169286251068115,
      "learning_rate": 2.058823529411765e-06,
      "loss": 1.0045,
      "step": 8
    },
    {
      "epoch": 0.013245033112582781,
      "grad_norm": 3.5065314769744873,
      "learning_rate": 2.3529411764705885e-06,
      "loss": 0.8635,
      "step": 9
    },
    {
      "epoch": 0.014716703458425313,
      "grad_norm": 3.752119302749634,
      "learning_rate": 2.647058823529412e-06,
      "loss": 0.9662,
      "step": 10
    },
    {
      "epoch": 0.016188373804267846,
      "grad_norm": 3.2777113914489746,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 0.7967,
      "step": 11
    },
    {
      "epoch": 0.017660044150110375,
      "grad_norm": 3.3518145084381104,
      "learning_rate": 3.2352941176470594e-06,
      "loss": 0.8465,
      "step": 12
    },
    {
      "epoch": 0.01913171449595291,
      "grad_norm": 3.0732693672180176,
      "learning_rate": 3.529411764705883e-06,
      "loss": 0.8289,
      "step": 13
    },
    {
      "epoch": 0.020603384841795438,
      "grad_norm": 2.9257853031158447,
      "learning_rate": 3.8235294117647055e-06,
      "loss": 0.8942,
      "step": 14
    },
    {
      "epoch": 0.02207505518763797,
      "grad_norm": 2.98813533782959,
      "learning_rate": 4.11764705882353e-06,
      "loss": 0.9784,
      "step": 15
    },
    {
      "epoch": 0.0235467255334805,
      "grad_norm": 3.698316812515259,
      "learning_rate": 4.411764705882353e-06,
      "loss": 1.0652,
      "step": 16
    },
    {
      "epoch": 0.025018395879323033,
      "grad_norm": 3.1177361011505127,
      "learning_rate": 4.705882352941177e-06,
      "loss": 0.8983,
      "step": 17
    },
    {
      "epoch": 0.026490066225165563,
      "grad_norm": 2.7175192832946777,
      "learning_rate": 5e-06,
      "loss": 0.8029,
      "step": 18
    },
    {
      "epoch": 0.027961736571008096,
      "grad_norm": 2.897536039352417,
      "learning_rate": 5.294117647058824e-06,
      "loss": 0.8776,
      "step": 19
    },
    {
      "epoch": 0.029433406916850625,
      "grad_norm": 2.9727461338043213,
      "learning_rate": 5.588235294117647e-06,
      "loss": 0.7791,
      "step": 20
    },
    {
      "epoch": 0.03090507726269316,
      "grad_norm": 2.6092498302459717,
      "learning_rate": 5.882352941176471e-06,
      "loss": 0.7451,
      "step": 21
    },
    {
      "epoch": 0.03237674760853569,
      "grad_norm": 2.913469076156616,
      "learning_rate": 6.176470588235295e-06,
      "loss": 0.9554,
      "step": 22
    },
    {
      "epoch": 0.03384841795437822,
      "grad_norm": 2.8593599796295166,
      "learning_rate": 6.470588235294119e-06,
      "loss": 0.8882,
      "step": 23
    },
    {
      "epoch": 0.03532008830022075,
      "grad_norm": 2.838365077972412,
      "learning_rate": 6.764705882352942e-06,
      "loss": 0.8554,
      "step": 24
    },
    {
      "epoch": 0.03679175864606328,
      "grad_norm": 2.7695982456207275,
      "learning_rate": 7.058823529411766e-06,
      "loss": 0.8275,
      "step": 25
    },
    {
      "epoch": 0.03826342899190582,
      "grad_norm": 2.9007530212402344,
      "learning_rate": 7.352941176470589e-06,
      "loss": 0.908,
      "step": 26
    },
    {
      "epoch": 0.039735099337748346,
      "grad_norm": 2.676220178604126,
      "learning_rate": 7.647058823529411e-06,
      "loss": 0.9996,
      "step": 27
    },
    {
      "epoch": 0.041206769683590876,
      "grad_norm": 2.886916160583496,
      "learning_rate": 7.941176470588236e-06,
      "loss": 0.8241,
      "step": 28
    },
    {
      "epoch": 0.042678440029433405,
      "grad_norm": 2.9885916709899902,
      "learning_rate": 8.23529411764706e-06,
      "loss": 0.8173,
      "step": 29
    },
    {
      "epoch": 0.04415011037527594,
      "grad_norm": 2.797389030456543,
      "learning_rate": 8.529411764705883e-06,
      "loss": 0.7265,
      "step": 30
    },
    {
      "epoch": 0.04562178072111847,
      "grad_norm": 3.0593364238739014,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.8292,
      "step": 31
    },
    {
      "epoch": 0.047093451066961,
      "grad_norm": 2.8509674072265625,
      "learning_rate": 9.11764705882353e-06,
      "loss": 0.7311,
      "step": 32
    },
    {
      "epoch": 0.04856512141280353,
      "grad_norm": 3.2090744972229004,
      "learning_rate": 9.411764705882354e-06,
      "loss": 0.8574,
      "step": 33
    },
    {
      "epoch": 0.05003679175864607,
      "grad_norm": 2.5766093730926514,
      "learning_rate": 9.705882352941177e-06,
      "loss": 0.8513,
      "step": 34
    },
    {
      "epoch": 0.051508462104488596,
      "grad_norm": 3.0090904235839844,
      "learning_rate": 1e-05,
      "loss": 0.8444,
      "step": 35
    },
    {
      "epoch": 0.052980132450331126,
      "grad_norm": 2.665348529815674,
      "learning_rate": 9.999940874631278e-06,
      "loss": 0.6617,
      "step": 36
    },
    {
      "epoch": 0.054451802796173655,
      "grad_norm": 2.9271090030670166,
      "learning_rate": 9.999763499923432e-06,
      "loss": 0.808,
      "step": 37
    },
    {
      "epoch": 0.05592347314201619,
      "grad_norm": 3.011321544647217,
      "learning_rate": 9.999467880071402e-06,
      "loss": 0.725,
      "step": 38
    },
    {
      "epoch": 0.05739514348785872,
      "grad_norm": 3.2543551921844482,
      "learning_rate": 9.999054022066643e-06,
      "loss": 0.9299,
      "step": 39
    },
    {
      "epoch": 0.05886681383370125,
      "grad_norm": 2.519449234008789,
      "learning_rate": 9.998521935696953e-06,
      "loss": 0.7564,
      "step": 40
    },
    {
      "epoch": 0.06033848417954378,
      "grad_norm": 2.8531711101531982,
      "learning_rate": 9.997871633546257e-06,
      "loss": 0.87,
      "step": 41
    },
    {
      "epoch": 0.06181015452538632,
      "grad_norm": 3.165675640106201,
      "learning_rate": 9.997103130994295e-06,
      "loss": 0.9265,
      "step": 42
    },
    {
      "epoch": 0.06328182487122884,
      "grad_norm": 3.373549222946167,
      "learning_rate": 9.996216446216267e-06,
      "loss": 0.9618,
      "step": 43
    },
    {
      "epoch": 0.06475349521707138,
      "grad_norm": 2.9908242225646973,
      "learning_rate": 9.995211600182397e-06,
      "loss": 0.8371,
      "step": 44
    },
    {
      "epoch": 0.06622516556291391,
      "grad_norm": 2.482671022415161,
      "learning_rate": 9.994088616657445e-06,
      "loss": 0.7564,
      "step": 45
    },
    {
      "epoch": 0.06769683590875644,
      "grad_norm": 3.005289316177368,
      "learning_rate": 9.992847522200132e-06,
      "loss": 0.9316,
      "step": 46
    },
    {
      "epoch": 0.06916850625459897,
      "grad_norm": 2.875523805618286,
      "learning_rate": 9.99148834616253e-06,
      "loss": 0.7165,
      "step": 47
    },
    {
      "epoch": 0.0706401766004415,
      "grad_norm": 2.9502875804901123,
      "learning_rate": 9.990011120689352e-06,
      "loss": 1.0304,
      "step": 48
    },
    {
      "epoch": 0.07211184694628403,
      "grad_norm": 2.928637981414795,
      "learning_rate": 9.988415880717195e-06,
      "loss": 0.7055,
      "step": 49
    },
    {
      "epoch": 0.07358351729212656,
      "grad_norm": 2.8092260360717773,
      "learning_rate": 9.986702663973722e-06,
      "loss": 0.7569,
      "step": 50
    },
    {
      "epoch": 0.07505518763796909,
      "grad_norm": 2.9287312030792236,
      "learning_rate": 9.98487151097676e-06,
      "loss": 0.7917,
      "step": 51
    },
    {
      "epoch": 0.07652685798381163,
      "grad_norm": 3.0313379764556885,
      "learning_rate": 9.98292246503335e-06,
      "loss": 0.88,
      "step": 52
    },
    {
      "epoch": 0.07799852832965416,
      "grad_norm": 3.0787267684936523,
      "learning_rate": 9.980855572238715e-06,
      "loss": 0.7935,
      "step": 53
    },
    {
      "epoch": 0.07947019867549669,
      "grad_norm": 2.687150716781616,
      "learning_rate": 9.978670881475173e-06,
      "loss": 0.7144,
      "step": 54
    },
    {
      "epoch": 0.08094186902133922,
      "grad_norm": 2.613748073577881,
      "learning_rate": 9.976368444410985e-06,
      "loss": 0.7784,
      "step": 55
    },
    {
      "epoch": 0.08241353936718175,
      "grad_norm": 2.7247557640075684,
      "learning_rate": 9.973948315499126e-06,
      "loss": 0.878,
      "step": 56
    },
    {
      "epoch": 0.08388520971302428,
      "grad_norm": 2.8546860218048096,
      "learning_rate": 9.971410551976001e-06,
      "loss": 1.0515,
      "step": 57
    },
    {
      "epoch": 0.08535688005886681,
      "grad_norm": 3.1416189670562744,
      "learning_rate": 9.968755213860094e-06,
      "loss": 0.913,
      "step": 58
    },
    {
      "epoch": 0.08682855040470934,
      "grad_norm": 3.013488292694092,
      "learning_rate": 9.96598236395054e-06,
      "loss": 0.9151,
      "step": 59
    },
    {
      "epoch": 0.08830022075055188,
      "grad_norm": 2.837449073791504,
      "learning_rate": 9.963092067825651e-06,
      "loss": 0.8935,
      "step": 60
    },
    {
      "epoch": 0.08977189109639441,
      "grad_norm": 2.8535547256469727,
      "learning_rate": 9.960084393841355e-06,
      "loss": 0.8951,
      "step": 61
    },
    {
      "epoch": 0.09124356144223694,
      "grad_norm": 2.8073694705963135,
      "learning_rate": 9.956959413129586e-06,
      "loss": 0.9974,
      "step": 62
    },
    {
      "epoch": 0.09271523178807947,
      "grad_norm": 2.9127821922302246,
      "learning_rate": 9.953717199596598e-06,
      "loss": 0.9056,
      "step": 63
    },
    {
      "epoch": 0.094186902133922,
      "grad_norm": 2.8197975158691406,
      "learning_rate": 9.95035782992122e-06,
      "loss": 0.7322,
      "step": 64
    },
    {
      "epoch": 0.09565857247976453,
      "grad_norm": 2.5801289081573486,
      "learning_rate": 9.94688138355304e-06,
      "loss": 0.7477,
      "step": 65
    },
    {
      "epoch": 0.09713024282560706,
      "grad_norm": 2.6256613731384277,
      "learning_rate": 9.943287942710527e-06,
      "loss": 0.7042,
      "step": 66
    },
    {
      "epoch": 0.09860191317144959,
      "grad_norm": 3.3652524948120117,
      "learning_rate": 9.939577592379088e-06,
      "loss": 0.888,
      "step": 67
    },
    {
      "epoch": 0.10007358351729213,
      "grad_norm": 3.101560354232788,
      "learning_rate": 9.935750420309055e-06,
      "loss": 0.8977,
      "step": 68
    },
    {
      "epoch": 0.10154525386313466,
      "grad_norm": 2.5174732208251953,
      "learning_rate": 9.931806517013612e-06,
      "loss": 0.6729,
      "step": 69
    },
    {
      "epoch": 0.10301692420897719,
      "grad_norm": 2.921755313873291,
      "learning_rate": 9.927745975766654e-06,
      "loss": 0.7846,
      "step": 70
    },
    {
      "epoch": 0.10448859455481972,
      "grad_norm": 2.7532265186309814,
      "learning_rate": 9.923568892600579e-06,
      "loss": 0.9492,
      "step": 71
    },
    {
      "epoch": 0.10596026490066225,
      "grad_norm": 2.8316383361816406,
      "learning_rate": 9.919275366304021e-06,
      "loss": 0.689,
      "step": 72
    },
    {
      "epoch": 0.10743193524650478,
      "grad_norm": 2.870181083679199,
      "learning_rate": 9.91486549841951e-06,
      "loss": 0.6709,
      "step": 73
    },
    {
      "epoch": 0.10890360559234731,
      "grad_norm": 2.7353837490081787,
      "learning_rate": 9.91033939324107e-06,
      "loss": 0.7755,
      "step": 74
    },
    {
      "epoch": 0.11037527593818984,
      "grad_norm": 3.06223726272583,
      "learning_rate": 9.905697157811761e-06,
      "loss": 0.8336,
      "step": 75
    },
    {
      "epoch": 0.11184694628403238,
      "grad_norm": 3.273441791534424,
      "learning_rate": 9.90093890192113e-06,
      "loss": 0.9732,
      "step": 76
    },
    {
      "epoch": 0.11331861662987491,
      "grad_norm": 3.0267012119293213,
      "learning_rate": 9.896064738102635e-06,
      "loss": 0.8882,
      "step": 77
    },
    {
      "epoch": 0.11479028697571744,
      "grad_norm": 2.8442041873931885,
      "learning_rate": 9.891074781630967e-06,
      "loss": 0.6856,
      "step": 78
    },
    {
      "epoch": 0.11626195732155997,
      "grad_norm": 2.679326057434082,
      "learning_rate": 9.885969150519332e-06,
      "loss": 0.8473,
      "step": 79
    },
    {
      "epoch": 0.1177336276674025,
      "grad_norm": 3.2508740425109863,
      "learning_rate": 9.88074796551666e-06,
      "loss": 0.8845,
      "step": 80
    },
    {
      "epoch": 0.11920529801324503,
      "grad_norm": 3.125309944152832,
      "learning_rate": 9.875411350104745e-06,
      "loss": 0.8611,
      "step": 81
    },
    {
      "epoch": 0.12067696835908756,
      "grad_norm": 2.9621620178222656,
      "learning_rate": 9.869959430495329e-06,
      "loss": 0.8959,
      "step": 82
    },
    {
      "epoch": 0.12214863870493009,
      "grad_norm": 2.8529794216156006,
      "learning_rate": 9.864392335627118e-06,
      "loss": 0.7757,
      "step": 83
    },
    {
      "epoch": 0.12362030905077263,
      "grad_norm": 2.8269686698913574,
      "learning_rate": 9.858710197162722e-06,
      "loss": 0.9969,
      "step": 84
    },
    {
      "epoch": 0.12509197939661515,
      "grad_norm": 2.6117818355560303,
      "learning_rate": 9.852913149485556e-06,
      "loss": 0.7997,
      "step": 85
    },
    {
      "epoch": 0.12656364974245768,
      "grad_norm": 2.948822259902954,
      "learning_rate": 9.847001329696653e-06,
      "loss": 0.9769,
      "step": 86
    },
    {
      "epoch": 0.1280353200883002,
      "grad_norm": 3.2538185119628906,
      "learning_rate": 9.840974877611423e-06,
      "loss": 0.8283,
      "step": 87
    },
    {
      "epoch": 0.12950699043414277,
      "grad_norm": 2.8442130088806152,
      "learning_rate": 9.834833935756345e-06,
      "loss": 0.8773,
      "step": 88
    },
    {
      "epoch": 0.1309786607799853,
      "grad_norm": 2.388075828552246,
      "learning_rate": 9.8285786493656e-06,
      "loss": 0.7164,
      "step": 89
    },
    {
      "epoch": 0.13245033112582782,
      "grad_norm": 2.6131460666656494,
      "learning_rate": 9.822209166377635e-06,
      "loss": 0.7686,
      "step": 90
    },
    {
      "epoch": 0.13392200147167035,
      "grad_norm": 2.897113800048828,
      "learning_rate": 9.815725637431663e-06,
      "loss": 0.8232,
      "step": 91
    },
    {
      "epoch": 0.13539367181751288,
      "grad_norm": 2.9289119243621826,
      "learning_rate": 9.809128215864096e-06,
      "loss": 0.8337,
      "step": 92
    },
    {
      "epoch": 0.1368653421633554,
      "grad_norm": 2.462143898010254,
      "learning_rate": 9.80241705770493e-06,
      "loss": 0.7856,
      "step": 93
    },
    {
      "epoch": 0.13833701250919794,
      "grad_norm": 2.8170995712280273,
      "learning_rate": 9.795592321674046e-06,
      "loss": 0.8723,
      "step": 94
    },
    {
      "epoch": 0.13980868285504047,
      "grad_norm": 2.745225667953491,
      "learning_rate": 9.788654169177454e-06,
      "loss": 0.8504,
      "step": 95
    },
    {
      "epoch": 0.141280353200883,
      "grad_norm": 2.677945613861084,
      "learning_rate": 9.781602764303488e-06,
      "loss": 0.7281,
      "step": 96
    },
    {
      "epoch": 0.14275202354672553,
      "grad_norm": 2.890650987625122,
      "learning_rate": 9.77443827381891e-06,
      "loss": 0.7535,
      "step": 97
    },
    {
      "epoch": 0.14422369389256806,
      "grad_norm": 3.1169748306274414,
      "learning_rate": 9.76716086716498e-06,
      "loss": 0.8581,
      "step": 98
    },
    {
      "epoch": 0.1456953642384106,
      "grad_norm": 2.939659357070923,
      "learning_rate": 9.759770716453436e-06,
      "loss": 0.7432,
      "step": 99
    },
    {
      "epoch": 0.14716703458425312,
      "grad_norm": 2.7667946815490723,
      "learning_rate": 9.752267996462435e-06,
      "loss": 1.0398,
      "step": 100
    },
    {
      "epoch": 0.14863870493009565,
      "grad_norm": 2.3578202724456787,
      "learning_rate": 9.744652884632406e-06,
      "loss": 0.7488,
      "step": 101
    },
    {
      "epoch": 0.15011037527593818,
      "grad_norm": 2.921765089035034,
      "learning_rate": 9.736925561061871e-06,
      "loss": 0.8559,
      "step": 102
    },
    {
      "epoch": 0.1515820456217807,
      "grad_norm": 3.2434988021850586,
      "learning_rate": 9.729086208503174e-06,
      "loss": 0.8243,
      "step": 103
    },
    {
      "epoch": 0.15305371596762327,
      "grad_norm": 2.7646985054016113,
      "learning_rate": 9.721135012358156e-06,
      "loss": 0.7898,
      "step": 104
    },
    {
      "epoch": 0.1545253863134658,
      "grad_norm": 2.821303606033325,
      "learning_rate": 9.713072160673778e-06,
      "loss": 0.7296,
      "step": 105
    },
    {
      "epoch": 0.15599705665930833,
      "grad_norm": 2.873602867126465,
      "learning_rate": 9.704897844137674e-06,
      "loss": 1.0217,
      "step": 106
    },
    {
      "epoch": 0.15746872700515085,
      "grad_norm": 2.6251001358032227,
      "learning_rate": 9.696612256073634e-06,
      "loss": 0.7348,
      "step": 107
    },
    {
      "epoch": 0.15894039735099338,
      "grad_norm": 3.296396255493164,
      "learning_rate": 9.68821559243704e-06,
      "loss": 0.9856,
      "step": 108
    },
    {
      "epoch": 0.1604120676968359,
      "grad_norm": 2.97357439994812,
      "learning_rate": 9.679708051810222e-06,
      "loss": 0.8755,
      "step": 109
    },
    {
      "epoch": 0.16188373804267844,
      "grad_norm": 2.6917476654052734,
      "learning_rate": 9.671089835397772e-06,
      "loss": 0.8255,
      "step": 110
    },
    {
      "epoch": 0.16335540838852097,
      "grad_norm": 2.70802903175354,
      "learning_rate": 9.66236114702178e-06,
      "loss": 0.7903,
      "step": 111
    },
    {
      "epoch": 0.1648270787343635,
      "grad_norm": 3.122664451599121,
      "learning_rate": 9.653522193117014e-06,
      "loss": 1.193,
      "step": 112
    },
    {
      "epoch": 0.16629874908020603,
      "grad_norm": 2.777764081954956,
      "learning_rate": 9.644573182726035e-06,
      "loss": 0.8138,
      "step": 113
    },
    {
      "epoch": 0.16777041942604856,
      "grad_norm": 2.6206493377685547,
      "learning_rate": 9.63551432749426e-06,
      "loss": 0.7676,
      "step": 114
    },
    {
      "epoch": 0.1692420897718911,
      "grad_norm": 2.8496499061584473,
      "learning_rate": 9.626345841664953e-06,
      "loss": 0.7495,
      "step": 115
    },
    {
      "epoch": 0.17071376011773362,
      "grad_norm": 2.744779586791992,
      "learning_rate": 9.617067942074155e-06,
      "loss": 0.8713,
      "step": 116
    },
    {
      "epoch": 0.17218543046357615,
      "grad_norm": 2.4520657062530518,
      "learning_rate": 9.607680848145557e-06,
      "loss": 0.6581,
      "step": 117
    },
    {
      "epoch": 0.17365710080941868,
      "grad_norm": 3.1236605644226074,
      "learning_rate": 9.59818478188532e-06,
      "loss": 0.8834,
      "step": 118
    },
    {
      "epoch": 0.1751287711552612,
      "grad_norm": 3.0055110454559326,
      "learning_rate": 9.588579967876806e-06,
      "loss": 0.8016,
      "step": 119
    },
    {
      "epoch": 0.17660044150110377,
      "grad_norm": 2.935249090194702,
      "learning_rate": 9.578866633275289e-06,
      "loss": 0.8539,
      "step": 120
    },
    {
      "epoch": 0.1780721118469463,
      "grad_norm": 2.7148215770721436,
      "learning_rate": 9.569045007802558e-06,
      "loss": 0.7939,
      "step": 121
    },
    {
      "epoch": 0.17954378219278883,
      "grad_norm": 3.0759003162384033,
      "learning_rate": 9.55911532374151e-06,
      "loss": 0.734,
      "step": 122
    },
    {
      "epoch": 0.18101545253863136,
      "grad_norm": 2.664663553237915,
      "learning_rate": 9.549077815930636e-06,
      "loss": 0.9403,
      "step": 123
    },
    {
      "epoch": 0.18248712288447388,
      "grad_norm": 2.898247241973877,
      "learning_rate": 9.538932721758474e-06,
      "loss": 0.7705,
      "step": 124
    },
    {
      "epoch": 0.18395879323031641,
      "grad_norm": 2.6949071884155273,
      "learning_rate": 9.528680281157999e-06,
      "loss": 0.8279,
      "step": 125
    },
    {
      "epoch": 0.18543046357615894,
      "grad_norm": 2.5309793949127197,
      "learning_rate": 9.518320736600943e-06,
      "loss": 0.7256,
      "step": 126
    },
    {
      "epoch": 0.18690213392200147,
      "grad_norm": 2.75760817527771,
      "learning_rate": 9.507854333092064e-06,
      "loss": 0.7586,
      "step": 127
    },
    {
      "epoch": 0.188373804267844,
      "grad_norm": 2.6482796669006348,
      "learning_rate": 9.497281318163347e-06,
      "loss": 0.7408,
      "step": 128
    },
    {
      "epoch": 0.18984547461368653,
      "grad_norm": 3.0589354038238525,
      "learning_rate": 9.486601941868155e-06,
      "loss": 0.9713,
      "step": 129
    },
    {
      "epoch": 0.19131714495952906,
      "grad_norm": 2.661060333251953,
      "learning_rate": 9.475816456775313e-06,
      "loss": 0.9774,
      "step": 130
    },
    {
      "epoch": 0.1927888153053716,
      "grad_norm": 2.9241509437561035,
      "learning_rate": 9.464925117963133e-06,
      "loss": 0.9132,
      "step": 131
    },
    {
      "epoch": 0.19426048565121412,
      "grad_norm": 2.5484488010406494,
      "learning_rate": 9.453928183013385e-06,
      "loss": 0.7035,
      "step": 132
    },
    {
      "epoch": 0.19573215599705665,
      "grad_norm": 2.797853946685791,
      "learning_rate": 9.442825912005203e-06,
      "loss": 0.8403,
      "step": 133
    },
    {
      "epoch": 0.19720382634289918,
      "grad_norm": 2.6880745887756348,
      "learning_rate": 9.431618567508933e-06,
      "loss": 0.7808,
      "step": 134
    },
    {
      "epoch": 0.1986754966887417,
      "grad_norm": 3.096302032470703,
      "learning_rate": 9.420306414579925e-06,
      "loss": 0.7849,
      "step": 135
    },
    {
      "epoch": 0.20014716703458427,
      "grad_norm": 2.483643054962158,
      "learning_rate": 9.408889720752265e-06,
      "loss": 0.6982,
      "step": 136
    },
    {
      "epoch": 0.2016188373804268,
      "grad_norm": 2.664942741394043,
      "learning_rate": 9.397368756032445e-06,
      "loss": 0.8164,
      "step": 137
    },
    {
      "epoch": 0.20309050772626933,
      "grad_norm": 2.9081645011901855,
      "learning_rate": 9.385743792892983e-06,
      "loss": 0.8065,
      "step": 138
    },
    {
      "epoch": 0.20456217807211186,
      "grad_norm": 2.6566150188446045,
      "learning_rate": 9.374015106265968e-06,
      "loss": 0.6634,
      "step": 139
    },
    {
      "epoch": 0.20603384841795438,
      "grad_norm": 2.695697069168091,
      "learning_rate": 9.362182973536568e-06,
      "loss": 0.7934,
      "step": 140
    },
    {
      "epoch": 0.20750551876379691,
      "grad_norm": 2.575732469558716,
      "learning_rate": 9.35024767453647e-06,
      "loss": 0.8003,
      "step": 141
    },
    {
      "epoch": 0.20897718910963944,
      "grad_norm": 2.4942550659179688,
      "learning_rate": 9.338209491537257e-06,
      "loss": 0.7969,
      "step": 142
    },
    {
      "epoch": 0.21044885945548197,
      "grad_norm": 2.8870980739593506,
      "learning_rate": 9.326068709243727e-06,
      "loss": 1.0334,
      "step": 143
    },
    {
      "epoch": 0.2119205298013245,
      "grad_norm": 2.619992971420288,
      "learning_rate": 9.313825614787178e-06,
      "loss": 0.7597,
      "step": 144
    },
    {
      "epoch": 0.21339220014716703,
      "grad_norm": 2.5091888904571533,
      "learning_rate": 9.301480497718594e-06,
      "loss": 0.6728,
      "step": 145
    },
    {
      "epoch": 0.21486387049300956,
      "grad_norm": 2.789379358291626,
      "learning_rate": 9.289033650001817e-06,
      "loss": 0.8862,
      "step": 146
    },
    {
      "epoch": 0.2163355408388521,
      "grad_norm": 2.4175891876220703,
      "learning_rate": 9.276485366006634e-06,
      "loss": 0.7874,
      "step": 147
    },
    {
      "epoch": 0.21780721118469462,
      "grad_norm": 2.8280205726623535,
      "learning_rate": 9.263835942501807e-06,
      "loss": 0.7887,
      "step": 148
    },
    {
      "epoch": 0.21927888153053715,
      "grad_norm": 3.125808000564575,
      "learning_rate": 9.251085678648072e-06,
      "loss": 0.9522,
      "step": 149
    },
    {
      "epoch": 0.22075055187637968,
      "grad_norm": 2.627514362335205,
      "learning_rate": 9.238234875991048e-06,
      "loss": 0.9322,
      "step": 150
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 2.8124518394470215,
      "learning_rate": 9.225283838454111e-06,
      "loss": 0.7869,
      "step": 151
    },
    {
      "epoch": 0.22369389256806477,
      "grad_norm": 3.086568593978882,
      "learning_rate": 9.21223287233121e-06,
      "loss": 0.9827,
      "step": 152
    },
    {
      "epoch": 0.2251655629139073,
      "grad_norm": 2.854715347290039,
      "learning_rate": 9.199082286279622e-06,
      "loss": 0.7919,
      "step": 153
    },
    {
      "epoch": 0.22663723325974983,
      "grad_norm": 2.5665128231048584,
      "learning_rate": 9.185832391312644e-06,
      "loss": 0.7764,
      "step": 154
    },
    {
      "epoch": 0.22810890360559236,
      "grad_norm": 2.915379285812378,
      "learning_rate": 9.172483500792246e-06,
      "loss": 0.8804,
      "step": 155
    },
    {
      "epoch": 0.22958057395143489,
      "grad_norm": 2.723245620727539,
      "learning_rate": 9.159035930421658e-06,
      "loss": 0.769,
      "step": 156
    },
    {
      "epoch": 0.23105224429727741,
      "grad_norm": 3.021594285964966,
      "learning_rate": 9.145489998237902e-06,
      "loss": 0.8767,
      "step": 157
    },
    {
      "epoch": 0.23252391464311994,
      "grad_norm": 2.540314197540283,
      "learning_rate": 9.131846024604275e-06,
      "loss": 0.7199,
      "step": 158
    },
    {
      "epoch": 0.23399558498896247,
      "grad_norm": 2.591949462890625,
      "learning_rate": 9.11810433220276e-06,
      "loss": 0.8694,
      "step": 159
    },
    {
      "epoch": 0.235467255334805,
      "grad_norm": 2.764286518096924,
      "learning_rate": 9.104265246026414e-06,
      "loss": 0.9038,
      "step": 160
    },
    {
      "epoch": 0.23693892568064753,
      "grad_norm": 2.5182137489318848,
      "learning_rate": 9.090329093371667e-06,
      "loss": 0.7015,
      "step": 161
    },
    {
      "epoch": 0.23841059602649006,
      "grad_norm": 2.872434616088867,
      "learning_rate": 9.07629620383058e-06,
      "loss": 0.8608,
      "step": 162
    },
    {
      "epoch": 0.2398822663723326,
      "grad_norm": 2.6351943016052246,
      "learning_rate": 9.062166909283062e-06,
      "loss": 0.8356,
      "step": 163
    },
    {
      "epoch": 0.24135393671817512,
      "grad_norm": 2.9412364959716797,
      "learning_rate": 9.047941543889014e-06,
      "loss": 0.9061,
      "step": 164
    },
    {
      "epoch": 0.24282560706401765,
      "grad_norm": 2.7398324012756348,
      "learning_rate": 9.033620444080427e-06,
      "loss": 0.8046,
      "step": 165
    },
    {
      "epoch": 0.24429727740986018,
      "grad_norm": 2.4704911708831787,
      "learning_rate": 9.019203948553422e-06,
      "loss": 0.6613,
      "step": 166
    },
    {
      "epoch": 0.2457689477557027,
      "grad_norm": 2.9570794105529785,
      "learning_rate": 9.004692398260243e-06,
      "loss": 0.9917,
      "step": 167
    },
    {
      "epoch": 0.24724061810154527,
      "grad_norm": 2.62516713142395,
      "learning_rate": 8.990086136401199e-06,
      "loss": 0.8395,
      "step": 168
    },
    {
      "epoch": 0.2487122884473878,
      "grad_norm": 2.8056349754333496,
      "learning_rate": 8.975385508416532e-06,
      "loss": 0.7698,
      "step": 169
    },
    {
      "epoch": 0.2501839587932303,
      "grad_norm": 2.83166241645813,
      "learning_rate": 8.960590861978265e-06,
      "loss": 0.6347,
      "step": 170
    },
    {
      "epoch": 0.25165562913907286,
      "grad_norm": 2.616415500640869,
      "learning_rate": 8.94570254698197e-06,
      "loss": 0.8078,
      "step": 171
    },
    {
      "epoch": 0.25312729948491536,
      "grad_norm": 2.677455186843872,
      "learning_rate": 8.930720915538487e-06,
      "loss": 0.8911,
      "step": 172
    },
    {
      "epoch": 0.2545989698307579,
      "grad_norm": 3.1193928718566895,
      "learning_rate": 8.915646321965615e-06,
      "loss": 0.977,
      "step": 173
    },
    {
      "epoch": 0.2560706401766004,
      "grad_norm": 5.6962971687316895,
      "learning_rate": 8.900479122779712e-06,
      "loss": 0.7694,
      "step": 174
    },
    {
      "epoch": 0.257542310522443,
      "grad_norm": 3.3536412715911865,
      "learning_rate": 8.885219676687277e-06,
      "loss": 1.0121,
      "step": 175
    },
    {
      "epoch": 0.25901398086828553,
      "grad_norm": 2.662569999694824,
      "learning_rate": 8.86986834457646e-06,
      "loss": 0.8054,
      "step": 176
    },
    {
      "epoch": 0.26048565121412803,
      "grad_norm": 2.8296239376068115,
      "learning_rate": 8.85442548950853e-06,
      "loss": 0.832,
      "step": 177
    },
    {
      "epoch": 0.2619573215599706,
      "grad_norm": 2.915475606918335,
      "learning_rate": 8.838891476709289e-06,
      "loss": 0.7431,
      "step": 178
    },
    {
      "epoch": 0.2634289919058131,
      "grad_norm": 3.094879150390625,
      "learning_rate": 8.823266673560426e-06,
      "loss": 0.7247,
      "step": 179
    },
    {
      "epoch": 0.26490066225165565,
      "grad_norm": 2.888617753982544,
      "learning_rate": 8.807551449590846e-06,
      "loss": 0.932,
      "step": 180
    },
    {
      "epoch": 0.26637233259749815,
      "grad_norm": 2.9324824810028076,
      "learning_rate": 8.791746176467908e-06,
      "loss": 0.8315,
      "step": 181
    },
    {
      "epoch": 0.2678440029433407,
      "grad_norm": 2.9685041904449463,
      "learning_rate": 8.775851227988655e-06,
      "loss": 0.84,
      "step": 182
    },
    {
      "epoch": 0.2693156732891832,
      "grad_norm": 2.951185703277588,
      "learning_rate": 8.759866980070963e-06,
      "loss": 0.7088,
      "step": 183
    },
    {
      "epoch": 0.27078734363502577,
      "grad_norm": 2.66342830657959,
      "learning_rate": 8.743793810744655e-06,
      "loss": 0.8356,
      "step": 184
    },
    {
      "epoch": 0.27225901398086827,
      "grad_norm": 2.9835357666015625,
      "learning_rate": 8.72763210014255e-06,
      "loss": 0.8564,
      "step": 185
    },
    {
      "epoch": 0.2737306843267108,
      "grad_norm": 2.647819757461548,
      "learning_rate": 8.711382230491494e-06,
      "loss": 0.7725,
      "step": 186
    },
    {
      "epoch": 0.27520235467255333,
      "grad_norm": 2.6995792388916016,
      "learning_rate": 8.695044586103297e-06,
      "loss": 0.6293,
      "step": 187
    },
    {
      "epoch": 0.2766740250183959,
      "grad_norm": 3.1014583110809326,
      "learning_rate": 8.67861955336566e-06,
      "loss": 0.805,
      "step": 188
    },
    {
      "epoch": 0.2781456953642384,
      "grad_norm": 3.188678741455078,
      "learning_rate": 8.662107520733027e-06,
      "loss": 0.7317,
      "step": 189
    },
    {
      "epoch": 0.27961736571008095,
      "grad_norm": 2.689643621444702,
      "learning_rate": 8.64550887871741e-06,
      "loss": 0.6776,
      "step": 190
    },
    {
      "epoch": 0.28108903605592345,
      "grad_norm": 2.912175416946411,
      "learning_rate": 8.628824019879137e-06,
      "loss": 0.8686,
      "step": 191
    },
    {
      "epoch": 0.282560706401766,
      "grad_norm": 2.829907178878784,
      "learning_rate": 8.612053338817582e-06,
      "loss": 0.7567,
      "step": 192
    },
    {
      "epoch": 0.28403237674760856,
      "grad_norm": 3.0386757850646973,
      "learning_rate": 8.595197232161824e-06,
      "loss": 0.8411,
      "step": 193
    },
    {
      "epoch": 0.28550404709345106,
      "grad_norm": 2.5806057453155518,
      "learning_rate": 8.578256098561276e-06,
      "loss": 0.7135,
      "step": 194
    },
    {
      "epoch": 0.2869757174392936,
      "grad_norm": 2.757744789123535,
      "learning_rate": 8.56123033867624e-06,
      "loss": 0.7537,
      "step": 195
    },
    {
      "epoch": 0.2884473877851361,
      "grad_norm": 2.6642186641693115,
      "learning_rate": 8.544120355168451e-06,
      "loss": 1.0471,
      "step": 196
    },
    {
      "epoch": 0.2899190581309787,
      "grad_norm": 2.6267518997192383,
      "learning_rate": 8.526926552691545e-06,
      "loss": 0.777,
      "step": 197
    },
    {
      "epoch": 0.2913907284768212,
      "grad_norm": 2.747035264968872,
      "learning_rate": 8.509649337881483e-06,
      "loss": 0.727,
      "step": 198
    },
    {
      "epoch": 0.29286239882266374,
      "grad_norm": 2.958754539489746,
      "learning_rate": 8.492289119346944e-06,
      "loss": 0.7843,
      "step": 199
    },
    {
      "epoch": 0.29433406916850624,
      "grad_norm": 2.807094097137451,
      "learning_rate": 8.47484630765966e-06,
      "loss": 0.9576,
      "step": 200
    },
    {
      "epoch": 0.2958057395143488,
      "grad_norm": 2.713832139968872,
      "learning_rate": 8.457321315344695e-06,
      "loss": 0.9781,
      "step": 201
    },
    {
      "epoch": 0.2972774098601913,
      "grad_norm": 3.011497974395752,
      "learning_rate": 8.439714556870705e-06,
      "loss": 0.8523,
      "step": 202
    },
    {
      "epoch": 0.29874908020603386,
      "grad_norm": 3.302286386489868,
      "learning_rate": 8.422026448640124e-06,
      "loss": 0.8937,
      "step": 203
    },
    {
      "epoch": 0.30022075055187636,
      "grad_norm": 2.5852441787719727,
      "learning_rate": 8.404257408979322e-06,
      "loss": 0.7258,
      "step": 204
    },
    {
      "epoch": 0.3016924208977189,
      "grad_norm": 2.5731096267700195,
      "learning_rate": 8.386407858128707e-06,
      "loss": 0.7649,
      "step": 205
    },
    {
      "epoch": 0.3031640912435614,
      "grad_norm": 2.4777395725250244,
      "learning_rate": 8.368478218232787e-06,
      "loss": 0.7454,
      "step": 206
    },
    {
      "epoch": 0.304635761589404,
      "grad_norm": 2.754692554473877,
      "learning_rate": 8.350468913330192e-06,
      "loss": 0.8684,
      "step": 207
    },
    {
      "epoch": 0.30610743193524653,
      "grad_norm": 2.4948291778564453,
      "learning_rate": 8.33238036934364e-06,
      "loss": 0.7159,
      "step": 208
    },
    {
      "epoch": 0.30757910228108903,
      "grad_norm": 2.582134962081909,
      "learning_rate": 8.31421301406986e-06,
      "loss": 0.6708,
      "step": 209
    },
    {
      "epoch": 0.3090507726269316,
      "grad_norm": 3.2686588764190674,
      "learning_rate": 8.29596727716949e-06,
      "loss": 0.9558,
      "step": 210
    },
    {
      "epoch": 0.3105224429727741,
      "grad_norm": 2.770517110824585,
      "learning_rate": 8.277643590156893e-06,
      "loss": 0.8734,
      "step": 211
    },
    {
      "epoch": 0.31199411331861665,
      "grad_norm": 2.6695854663848877,
      "learning_rate": 8.259242386389975e-06,
      "loss": 0.8677,
      "step": 212
    },
    {
      "epoch": 0.31346578366445915,
      "grad_norm": 2.725999116897583,
      "learning_rate": 8.240764101059913e-06,
      "loss": 0.6776,
      "step": 213
    },
    {
      "epoch": 0.3149374540103017,
      "grad_norm": 2.592602014541626,
      "learning_rate": 8.222209171180883e-06,
      "loss": 0.826,
      "step": 214
    },
    {
      "epoch": 0.3164091243561442,
      "grad_norm": 2.528812885284424,
      "learning_rate": 8.203578035579716e-06,
      "loss": 0.7403,
      "step": 215
    },
    {
      "epoch": 0.31788079470198677,
      "grad_norm": 2.584472894668579,
      "learning_rate": 8.184871134885512e-06,
      "loss": 0.6792,
      "step": 216
    },
    {
      "epoch": 0.31935246504782927,
      "grad_norm": 2.6827452182769775,
      "learning_rate": 8.166088911519236e-06,
      "loss": 0.8447,
      "step": 217
    },
    {
      "epoch": 0.3208241353936718,
      "grad_norm": 2.8367629051208496,
      "learning_rate": 8.147231809683236e-06,
      "loss": 0.7953,
      "step": 218
    },
    {
      "epoch": 0.32229580573951433,
      "grad_norm": 2.950148820877075,
      "learning_rate": 8.128300275350756e-06,
      "loss": 0.8487,
      "step": 219
    },
    {
      "epoch": 0.3237674760853569,
      "grad_norm": 2.701542854309082,
      "learning_rate": 8.109294756255375e-06,
      "loss": 0.7345,
      "step": 220
    },
    {
      "epoch": 0.3252391464311994,
      "grad_norm": 2.7395551204681396,
      "learning_rate": 8.090215701880418e-06,
      "loss": 0.864,
      "step": 221
    },
    {
      "epoch": 0.32671081677704195,
      "grad_norm": 2.761073350906372,
      "learning_rate": 8.071063563448341e-06,
      "loss": 0.7823,
      "step": 222
    },
    {
      "epoch": 0.32818248712288445,
      "grad_norm": 2.613734006881714,
      "learning_rate": 8.051838793910038e-06,
      "loss": 0.6439,
      "step": 223
    },
    {
      "epoch": 0.329654157468727,
      "grad_norm": 2.8382253646850586,
      "learning_rate": 8.032541847934145e-06,
      "loss": 0.7498,
      "step": 224
    },
    {
      "epoch": 0.33112582781456956,
      "grad_norm": 2.5751657485961914,
      "learning_rate": 8.013173181896283e-06,
      "loss": 0.7132,
      "step": 225
    },
    {
      "epoch": 0.33259749816041206,
      "grad_norm": 2.6825568675994873,
      "learning_rate": 7.993733253868256e-06,
      "loss": 0.8806,
      "step": 226
    },
    {
      "epoch": 0.3340691685062546,
      "grad_norm": 2.8396565914154053,
      "learning_rate": 7.974222523607236e-06,
      "loss": 0.7735,
      "step": 227
    },
    {
      "epoch": 0.3355408388520971,
      "grad_norm": 2.496772289276123,
      "learning_rate": 7.954641452544864e-06,
      "loss": 0.6571,
      "step": 228
    },
    {
      "epoch": 0.3370125091979397,
      "grad_norm": 2.668145179748535,
      "learning_rate": 7.934990503776363e-06,
      "loss": 0.8631,
      "step": 229
    },
    {
      "epoch": 0.3384841795437822,
      "grad_norm": 2.763549327850342,
      "learning_rate": 7.915270142049566e-06,
      "loss": 0.7821,
      "step": 230
    },
    {
      "epoch": 0.33995584988962474,
      "grad_norm": 2.9257652759552,
      "learning_rate": 7.895480833753942e-06,
      "loss": 0.8285,
      "step": 231
    },
    {
      "epoch": 0.34142752023546724,
      "grad_norm": 2.4478650093078613,
      "learning_rate": 7.875623046909544e-06,
      "loss": 0.7354,
      "step": 232
    },
    {
      "epoch": 0.3428991905813098,
      "grad_norm": 2.6642887592315674,
      "learning_rate": 7.855697251155967e-06,
      "loss": 0.644,
      "step": 233
    },
    {
      "epoch": 0.3443708609271523,
      "grad_norm": 2.843369960784912,
      "learning_rate": 7.835703917741213e-06,
      "loss": 0.9386,
      "step": 234
    },
    {
      "epoch": 0.34584253127299486,
      "grad_norm": 3.1602954864501953,
      "learning_rate": 7.815643519510571e-06,
      "loss": 0.8077,
      "step": 235
    },
    {
      "epoch": 0.34731420161883736,
      "grad_norm": 2.789182186126709,
      "learning_rate": 7.795516530895414e-06,
      "loss": 0.8464,
      "step": 236
    },
    {
      "epoch": 0.3487858719646799,
      "grad_norm": 2.463135004043579,
      "learning_rate": 7.775323427901993e-06,
      "loss": 0.7189,
      "step": 237
    },
    {
      "epoch": 0.3502575423105224,
      "grad_norm": 2.626633882522583,
      "learning_rate": 7.75506468810017e-06,
      "loss": 0.7559,
      "step": 238
    },
    {
      "epoch": 0.351729212656365,
      "grad_norm": 2.4405670166015625,
      "learning_rate": 7.734740790612137e-06,
      "loss": 0.8049,
      "step": 239
    },
    {
      "epoch": 0.35320088300220753,
      "grad_norm": 2.6157946586608887,
      "learning_rate": 7.714352216101055e-06,
      "loss": 0.7195,
      "step": 240
    },
    {
      "epoch": 0.35467255334805003,
      "grad_norm": 2.6621780395507812,
      "learning_rate": 7.693899446759727e-06,
      "loss": 0.8505,
      "step": 241
    },
    {
      "epoch": 0.3561442236938926,
      "grad_norm": 3.248483657836914,
      "learning_rate": 7.673382966299163e-06,
      "loss": 0.8385,
      "step": 242
    },
    {
      "epoch": 0.3576158940397351,
      "grad_norm": 2.657336950302124,
      "learning_rate": 7.65280325993715e-06,
      "loss": 0.7362,
      "step": 243
    },
    {
      "epoch": 0.35908756438557765,
      "grad_norm": 2.793397903442383,
      "learning_rate": 7.63216081438678e-06,
      "loss": 0.8892,
      "step": 244
    },
    {
      "epoch": 0.36055923473142015,
      "grad_norm": 2.8806533813476562,
      "learning_rate": 7.611456117844934e-06,
      "loss": 0.7511,
      "step": 245
    },
    {
      "epoch": 0.3620309050772627,
      "grad_norm": 2.9854753017425537,
      "learning_rate": 7.59068965998074e-06,
      "loss": 0.8891,
      "step": 246
    },
    {
      "epoch": 0.3635025754231052,
      "grad_norm": 3.109421491622925,
      "learning_rate": 7.569861931923989e-06,
      "loss": 0.8743,
      "step": 247
    },
    {
      "epoch": 0.36497424576894777,
      "grad_norm": 2.67236065864563,
      "learning_rate": 7.548973426253521e-06,
      "loss": 0.6766,
      "step": 248
    },
    {
      "epoch": 0.36644591611479027,
      "grad_norm": 2.873359441757202,
      "learning_rate": 7.528024636985575e-06,
      "loss": 0.9396,
      "step": 249
    },
    {
      "epoch": 0.36791758646063283,
      "grad_norm": 2.40339994430542,
      "learning_rate": 7.507016059562107e-06,
      "loss": 0.7551,
      "step": 250
    },
    {
      "epoch": 0.36938925680647533,
      "grad_norm": 2.6877894401550293,
      "learning_rate": 7.485948190839076e-06,
      "loss": 0.7781,
      "step": 251
    },
    {
      "epoch": 0.3708609271523179,
      "grad_norm": 2.939539670944214,
      "learning_rate": 7.464821529074678e-06,
      "loss": 0.7794,
      "step": 252
    },
    {
      "epoch": 0.3723325974981604,
      "grad_norm": 2.9500153064727783,
      "learning_rate": 7.443636573917585e-06,
      "loss": 0.8368,
      "step": 253
    },
    {
      "epoch": 0.37380426784400295,
      "grad_norm": 2.593169689178467,
      "learning_rate": 7.4223938263951075e-06,
      "loss": 0.806,
      "step": 254
    },
    {
      "epoch": 0.37527593818984545,
      "grad_norm": 2.776139736175537,
      "learning_rate": 7.40109378890136e-06,
      "loss": 0.9303,
      "step": 255
    },
    {
      "epoch": 0.376747608535688,
      "grad_norm": 2.4220099449157715,
      "learning_rate": 7.379736965185369e-06,
      "loss": 0.6452,
      "step": 256
    },
    {
      "epoch": 0.37821927888153056,
      "grad_norm": 2.896829605102539,
      "learning_rate": 7.358323860339165e-06,
      "loss": 0.9025,
      "step": 257
    },
    {
      "epoch": 0.37969094922737306,
      "grad_norm": 2.9651646614074707,
      "learning_rate": 7.336854980785839e-06,
      "loss": 0.7699,
      "step": 258
    },
    {
      "epoch": 0.3811626195732156,
      "grad_norm": 2.693281650543213,
      "learning_rate": 7.315330834267553e-06,
      "loss": 0.7204,
      "step": 259
    },
    {
      "epoch": 0.3826342899190581,
      "grad_norm": 2.8250482082366943,
      "learning_rate": 7.293751929833553e-06,
      "loss": 0.8387,
      "step": 260
    },
    {
      "epoch": 0.3841059602649007,
      "grad_norm": 2.729510545730591,
      "learning_rate": 7.272118777828109e-06,
      "loss": 0.789,
      "step": 261
    },
    {
      "epoch": 0.3855776306107432,
      "grad_norm": 3.190199613571167,
      "learning_rate": 7.250431889878455e-06,
      "loss": 0.735,
      "step": 262
    },
    {
      "epoch": 0.38704930095658574,
      "grad_norm": 2.878964900970459,
      "learning_rate": 7.2286917788826926e-06,
      "loss": 0.6908,
      "step": 263
    },
    {
      "epoch": 0.38852097130242824,
      "grad_norm": 2.516129970550537,
      "learning_rate": 7.20689895899765e-06,
      "loss": 0.6908,
      "step": 264
    },
    {
      "epoch": 0.3899926416482708,
      "grad_norm": 2.970674991607666,
      "learning_rate": 7.185053945626734e-06,
      "loss": 0.8865,
      "step": 265
    },
    {
      "epoch": 0.3914643119941133,
      "grad_norm": 2.7611653804779053,
      "learning_rate": 7.163157255407732e-06,
      "loss": 0.6888,
      "step": 266
    },
    {
      "epoch": 0.39293598233995586,
      "grad_norm": 2.6418163776397705,
      "learning_rate": 7.1412094062005985e-06,
      "loss": 0.7193,
      "step": 267
    },
    {
      "epoch": 0.39440765268579836,
      "grad_norm": 2.6448776721954346,
      "learning_rate": 7.119210917075201e-06,
      "loss": 0.6769,
      "step": 268
    },
    {
      "epoch": 0.3958793230316409,
      "grad_norm": 3.3905608654022217,
      "learning_rate": 7.097162308299055e-06,
      "loss": 0.922,
      "step": 269
    },
    {
      "epoch": 0.3973509933774834,
      "grad_norm": 2.664294481277466,
      "learning_rate": 7.07506410132501e-06,
      "loss": 0.7169,
      "step": 270
    },
    {
      "epoch": 0.398822663723326,
      "grad_norm": 3.0975229740142822,
      "learning_rate": 7.052916818778918e-06,
      "loss": 0.8299,
      "step": 271
    },
    {
      "epoch": 0.40029433406916853,
      "grad_norm": 2.8812222480773926,
      "learning_rate": 7.030720984447279e-06,
      "loss": 0.9557,
      "step": 272
    },
    {
      "epoch": 0.40176600441501104,
      "grad_norm": 2.807002544403076,
      "learning_rate": 7.008477123264849e-06,
      "loss": 0.7065,
      "step": 273
    },
    {
      "epoch": 0.4032376747608536,
      "grad_norm": 3.06130313873291,
      "learning_rate": 6.986185761302224e-06,
      "loss": 0.8652,
      "step": 274
    },
    {
      "epoch": 0.4047093451066961,
      "grad_norm": 3.1718475818634033,
      "learning_rate": 6.9638474257534025e-06,
      "loss": 0.8435,
      "step": 275
    },
    {
      "epoch": 0.40618101545253865,
      "grad_norm": 2.6698691844940186,
      "learning_rate": 6.941462644923318e-06,
      "loss": 0.7995,
      "step": 276
    },
    {
      "epoch": 0.40765268579838115,
      "grad_norm": 2.624782085418701,
      "learning_rate": 6.919031948215335e-06,
      "loss": 0.7063,
      "step": 277
    },
    {
      "epoch": 0.4091243561442237,
      "grad_norm": 3.034499406814575,
      "learning_rate": 6.89655586611874e-06,
      "loss": 0.8636,
      "step": 278
    },
    {
      "epoch": 0.4105960264900662,
      "grad_norm": 2.70481538772583,
      "learning_rate": 6.874034930196191e-06,
      "loss": 0.8662,
      "step": 279
    },
    {
      "epoch": 0.41206769683590877,
      "grad_norm": 2.350039005279541,
      "learning_rate": 6.851469673071143e-06,
      "loss": 0.6598,
      "step": 280
    },
    {
      "epoch": 0.41353936718175127,
      "grad_norm": 2.4506044387817383,
      "learning_rate": 6.8288606284152535e-06,
      "loss": 0.6876,
      "step": 281
    },
    {
      "epoch": 0.41501103752759383,
      "grad_norm": 2.7478151321411133,
      "learning_rate": 6.806208330935766e-06,
      "loss": 0.6849,
      "step": 282
    },
    {
      "epoch": 0.41648270787343633,
      "grad_norm": 2.9902756214141846,
      "learning_rate": 6.783513316362855e-06,
      "loss": 0.8701,
      "step": 283
    },
    {
      "epoch": 0.4179543782192789,
      "grad_norm": 3.0882210731506348,
      "learning_rate": 6.760776121436963e-06,
      "loss": 0.9867,
      "step": 284
    },
    {
      "epoch": 0.4194260485651214,
      "grad_norm": 2.815107822418213,
      "learning_rate": 6.737997283896104e-06,
      "loss": 0.8299,
      "step": 285
    },
    {
      "epoch": 0.42089771891096395,
      "grad_norm": 2.409449815750122,
      "learning_rate": 6.715177342463145e-06,
      "loss": 0.6086,
      "step": 286
    },
    {
      "epoch": 0.42236938925680645,
      "grad_norm": 3.0505027770996094,
      "learning_rate": 6.692316836833066e-06,
      "loss": 0.8025,
      "step": 287
    },
    {
      "epoch": 0.423841059602649,
      "grad_norm": 2.561631679534912,
      "learning_rate": 6.6694163076601995e-06,
      "loss": 0.7739,
      "step": 288
    },
    {
      "epoch": 0.42531272994849156,
      "grad_norm": 2.783630847930908,
      "learning_rate": 6.646476296545434e-06,
      "loss": 0.73,
      "step": 289
    },
    {
      "epoch": 0.42678440029433407,
      "grad_norm": 2.918511390686035,
      "learning_rate": 6.6234973460234184e-06,
      "loss": 0.8336,
      "step": 290
    },
    {
      "epoch": 0.4282560706401766,
      "grad_norm": 2.899632692337036,
      "learning_rate": 6.600479999549721e-06,
      "loss": 0.847,
      "step": 291
    },
    {
      "epoch": 0.4297277409860191,
      "grad_norm": 2.849313974380493,
      "learning_rate": 6.57742480148798e-06,
      "loss": 0.675,
      "step": 292
    },
    {
      "epoch": 0.4311994113318617,
      "grad_norm": 2.822368860244751,
      "learning_rate": 6.554332297097032e-06,
      "loss": 0.8094,
      "step": 293
    },
    {
      "epoch": 0.4326710816777042,
      "grad_norm": 2.5822582244873047,
      "learning_rate": 6.53120303251801e-06,
      "loss": 0.7853,
      "step": 294
    },
    {
      "epoch": 0.43414275202354674,
      "grad_norm": 2.8273942470550537,
      "learning_rate": 6.5080375547614325e-06,
      "loss": 1.074,
      "step": 295
    },
    {
      "epoch": 0.43561442236938924,
      "grad_norm": 2.904768705368042,
      "learning_rate": 6.4848364116942665e-06,
      "loss": 0.7764,
      "step": 296
    },
    {
      "epoch": 0.4370860927152318,
      "grad_norm": 2.6361513137817383,
      "learning_rate": 6.461600152026966e-06,
      "loss": 0.7447,
      "step": 297
    },
    {
      "epoch": 0.4385577630610743,
      "grad_norm": 2.717353105545044,
      "learning_rate": 6.4383293253004996e-06,
      "loss": 0.6936,
      "step": 298
    },
    {
      "epoch": 0.44002943340691686,
      "grad_norm": 2.722168207168579,
      "learning_rate": 6.415024481873352e-06,
      "loss": 0.7681,
      "step": 299
    },
    {
      "epoch": 0.44150110375275936,
      "grad_norm": 2.6191341876983643,
      "learning_rate": 6.391686172908507e-06,
      "loss": 0.7627,
      "step": 300
    },
    {
      "epoch": 0.4429727740986019,
      "grad_norm": 2.391493082046509,
      "learning_rate": 6.368314950360416e-06,
      "loss": 0.6849,
      "step": 301
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 2.654991865158081,
      "learning_rate": 6.344911366961935e-06,
      "loss": 0.8075,
      "step": 302
    },
    {
      "epoch": 0.445916114790287,
      "grad_norm": 2.5275535583496094,
      "learning_rate": 6.321475976211267e-06,
      "loss": 0.7684,
      "step": 303
    },
    {
      "epoch": 0.44738778513612953,
      "grad_norm": 2.607572078704834,
      "learning_rate": 6.298009332358857e-06,
      "loss": 0.7934,
      "step": 304
    },
    {
      "epoch": 0.44885945548197204,
      "grad_norm": 2.6352803707122803,
      "learning_rate": 6.274511990394294e-06,
      "loss": 0.8401,
      "step": 305
    },
    {
      "epoch": 0.4503311258278146,
      "grad_norm": 2.862342357635498,
      "learning_rate": 6.250984506033183e-06,
      "loss": 0.8266,
      "step": 306
    },
    {
      "epoch": 0.4518027961736571,
      "grad_norm": 2.898751735687256,
      "learning_rate": 6.227427435703997e-06,
      "loss": 0.78,
      "step": 307
    },
    {
      "epoch": 0.45327446651949965,
      "grad_norm": 3.0323238372802734,
      "learning_rate": 6.203841336534924e-06,
      "loss": 0.7944,
      "step": 308
    },
    {
      "epoch": 0.45474613686534215,
      "grad_norm": 2.3359568119049072,
      "learning_rate": 6.180226766340688e-06,
      "loss": 0.7331,
      "step": 309
    },
    {
      "epoch": 0.4562178072111847,
      "grad_norm": 2.904798746109009,
      "learning_rate": 6.156584283609359e-06,
      "loss": 0.7173,
      "step": 310
    },
    {
      "epoch": 0.4576894775570272,
      "grad_norm": 3.093244791030884,
      "learning_rate": 6.132914447489137e-06,
      "loss": 0.8657,
      "step": 311
    },
    {
      "epoch": 0.45916114790286977,
      "grad_norm": 3.100961685180664,
      "learning_rate": 6.109217817775139e-06,
      "loss": 1.0079,
      "step": 312
    },
    {
      "epoch": 0.4606328182487123,
      "grad_norm": 2.5757603645324707,
      "learning_rate": 6.085494954896156e-06,
      "loss": 0.9887,
      "step": 313
    },
    {
      "epoch": 0.46210448859455483,
      "grad_norm": 2.6319212913513184,
      "learning_rate": 6.061746419901389e-06,
      "loss": 0.5864,
      "step": 314
    },
    {
      "epoch": 0.46357615894039733,
      "grad_norm": 3.017703056335449,
      "learning_rate": 6.037972774447194e-06,
      "loss": 0.828,
      "step": 315
    },
    {
      "epoch": 0.4650478292862399,
      "grad_norm": 2.824852466583252,
      "learning_rate": 6.014174580783794e-06,
      "loss": 0.9,
      "step": 316
    },
    {
      "epoch": 0.4665194996320824,
      "grad_norm": 2.583486795425415,
      "learning_rate": 5.990352401741981e-06,
      "loss": 0.8048,
      "step": 317
    },
    {
      "epoch": 0.46799116997792495,
      "grad_norm": 3.027487277984619,
      "learning_rate": 5.966506800719798e-06,
      "loss": 0.938,
      "step": 318
    },
    {
      "epoch": 0.46946284032376745,
      "grad_norm": 2.3798038959503174,
      "learning_rate": 5.94263834166923e-06,
      "loss": 0.7072,
      "step": 319
    },
    {
      "epoch": 0.47093451066961,
      "grad_norm": 3.0898847579956055,
      "learning_rate": 5.918747589082853e-06,
      "loss": 0.8603,
      "step": 320
    },
    {
      "epoch": 0.47240618101545256,
      "grad_norm": 3.080111026763916,
      "learning_rate": 5.8948351079804875e-06,
      "loss": 0.7156,
      "step": 321
    },
    {
      "epoch": 0.47387785136129507,
      "grad_norm": 3.2610669136047363,
      "learning_rate": 5.8709014638958406e-06,
      "loss": 1.0839,
      "step": 322
    },
    {
      "epoch": 0.4753495217071376,
      "grad_norm": 2.785217046737671,
      "learning_rate": 5.846947222863123e-06,
      "loss": 0.7786,
      "step": 323
    },
    {
      "epoch": 0.4768211920529801,
      "grad_norm": 2.5710747241973877,
      "learning_rate": 5.82297295140367e-06,
      "loss": 0.7557,
      "step": 324
    },
    {
      "epoch": 0.4782928623988227,
      "grad_norm": 2.669019937515259,
      "learning_rate": 5.798979216512536e-06,
      "loss": 0.7343,
      "step": 325
    },
    {
      "epoch": 0.4797645327446652,
      "grad_norm": 2.868227005004883,
      "learning_rate": 5.774966585645092e-06,
      "loss": 0.7073,
      "step": 326
    },
    {
      "epoch": 0.48123620309050774,
      "grad_norm": 2.7177627086639404,
      "learning_rate": 5.750935626703598e-06,
      "loss": 0.671,
      "step": 327
    },
    {
      "epoch": 0.48270787343635024,
      "grad_norm": 2.884152412414551,
      "learning_rate": 5.726886908023777e-06,
      "loss": 0.7695,
      "step": 328
    },
    {
      "epoch": 0.4841795437821928,
      "grad_norm": 2.8466315269470215,
      "learning_rate": 5.702820998361374e-06,
      "loss": 0.7373,
      "step": 329
    },
    {
      "epoch": 0.4856512141280353,
      "grad_norm": 2.944166660308838,
      "learning_rate": 5.678738466878699e-06,
      "loss": 0.701,
      "step": 330
    },
    {
      "epoch": 0.48712288447387786,
      "grad_norm": 2.806544303894043,
      "learning_rate": 5.6546398831311774e-06,
      "loss": 0.9049,
      "step": 331
    },
    {
      "epoch": 0.48859455481972036,
      "grad_norm": 2.7961156368255615,
      "learning_rate": 5.6305258170538676e-06,
      "loss": 0.6243,
      "step": 332
    },
    {
      "epoch": 0.4900662251655629,
      "grad_norm": 2.8264875411987305,
      "learning_rate": 5.606396838947988e-06,
      "loss": 0.8022,
      "step": 333
    },
    {
      "epoch": 0.4915378955114054,
      "grad_norm": 2.3879430294036865,
      "learning_rate": 5.582253519467432e-06,
      "loss": 0.7052,
      "step": 334
    },
    {
      "epoch": 0.493009565857248,
      "grad_norm": 2.72257137298584,
      "learning_rate": 5.558096429605263e-06,
      "loss": 0.7944,
      "step": 335
    },
    {
      "epoch": 0.49448123620309054,
      "grad_norm": 2.5829052925109863,
      "learning_rate": 5.533926140680222e-06,
      "loss": 0.6631,
      "step": 336
    },
    {
      "epoch": 0.49595290654893304,
      "grad_norm": 2.6868889331817627,
      "learning_rate": 5.509743224323203e-06,
      "loss": 0.7841,
      "step": 337
    },
    {
      "epoch": 0.4974245768947756,
      "grad_norm": 2.834749698638916,
      "learning_rate": 5.485548252463749e-06,
      "loss": 0.7468,
      "step": 338
    },
    {
      "epoch": 0.4988962472406181,
      "grad_norm": 2.4310719966888428,
      "learning_rate": 5.46134179731651e-06,
      "loss": 0.6794,
      "step": 339
    },
    {
      "epoch": 0.5003679175864606,
      "grad_norm": 2.9393675327301025,
      "learning_rate": 5.437124431367723e-06,
      "loss": 0.7932,
      "step": 340
    },
    {
      "epoch": 0.5018395879323032,
      "grad_norm": 2.730503797531128,
      "learning_rate": 5.412896727361663e-06,
      "loss": 0.8225,
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.5033112582781457, | |
| "grad_norm": 3.03702449798584, | |
| "learning_rate": 5.388659258287103e-06, | |
| "loss": 0.7743, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.5047829286239882, | |
| "grad_norm": 2.5906665325164795, | |
| "learning_rate": 5.36441259736376e-06, | |
| "loss": 0.6833, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.5062545989698307, | |
| "grad_norm": 2.9820640087127686, | |
| "learning_rate": 5.340157318028743e-06, | |
| "loss": 0.9421, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.5077262693156733, | |
| "grad_norm": 2.8592886924743652, | |
| "learning_rate": 5.3158939939229855e-06, | |
| "loss": 0.7296, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.5091979396615158, | |
| "grad_norm": 2.825484275817871, | |
| "learning_rate": 5.29162319887768e-06, | |
| "loss": 0.8671, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.5106696100073583, | |
| "grad_norm": 2.661414384841919, | |
| "learning_rate": 5.26734550690071e-06, | |
| "loss": 0.8022, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.5121412803532008, | |
| "grad_norm": 2.814080238342285, | |
| "learning_rate": 5.243061492163073e-06, | |
| "loss": 0.8754, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.5136129506990434, | |
| "grad_norm": 2.7929255962371826, | |
| "learning_rate": 5.218771728985296e-06, | |
| "loss": 0.6977, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.515084621044886, | |
| "grad_norm": 3.3023571968078613, | |
| "learning_rate": 5.1944767918238625e-06, | |
| "loss": 0.9073, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.5165562913907285, | |
| "grad_norm": 2.8338370323181152, | |
| "learning_rate": 5.170177255257618e-06, | |
| "loss": 0.8119, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.5180279617365711, | |
| "grad_norm": 2.5478556156158447, | |
| "learning_rate": 5.145873693974188e-06, | |
| "loss": 0.7282, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.5194996320824136, | |
| "grad_norm": 2.494419813156128, | |
| "learning_rate": 5.12156668275638e-06, | |
| "loss": 0.7521, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.5209713024282561, | |
| "grad_norm": 2.797395706176758, | |
| "learning_rate": 5.097256796468598e-06, | |
| "loss": 0.7097, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.5224429727740986, | |
| "grad_norm": 2.939812421798706, | |
| "learning_rate": 5.0729446100432326e-06, | |
| "loss": 0.7828, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.5239146431199412, | |
| "grad_norm": 2.9113619327545166, | |
| "learning_rate": 5.048630698467081e-06, | |
| "loss": 0.844, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.5253863134657837, | |
| "grad_norm": 2.550365924835205, | |
| "learning_rate": 5.024315636767738e-06, | |
| "loss": 0.7853, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.5268579838116262, | |
| "grad_norm": 2.595287799835205, | |
| "learning_rate": 5e-06, | |
| "loss": 0.6457, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.5283296541574687, | |
| "grad_norm": 2.7226860523223877, | |
| "learning_rate": 4.975684363232263e-06, | |
| "loss": 0.734, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.5298013245033113, | |
| "grad_norm": 2.8578407764434814, | |
| "learning_rate": 4.951369301532919e-06, | |
| "loss": 0.8679, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.5312729948491538, | |
| "grad_norm": 2.848055124282837, | |
| "learning_rate": 4.927055389956768e-06, | |
| "loss": 0.7845, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.5327446651949963, | |
| "grad_norm": 2.7717485427856445, | |
| "learning_rate": 4.9027432035314045e-06, | |
| "loss": 0.8777, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.5342163355408388, | |
| "grad_norm": 2.854522466659546, | |
| "learning_rate": 4.878433317243621e-06, | |
| "loss": 0.8327, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.5356880058866814, | |
| "grad_norm": 3.19992733001709, | |
| "learning_rate": 4.854126306025813e-06, | |
| "loss": 0.7984, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.5371596762325239, | |
| "grad_norm": 2.4490439891815186, | |
| "learning_rate": 4.829822744742383e-06, | |
| "loss": 0.7675, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.5386313465783664, | |
| "grad_norm": 2.8721706867218018, | |
| "learning_rate": 4.805523208176139e-06, | |
| "loss": 0.7735, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.5401030169242089, | |
| "grad_norm": 2.759693145751953, | |
| "learning_rate": 4.781228271014704e-06, | |
| "loss": 0.8394, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.5415746872700515, | |
| "grad_norm": 2.8939993381500244, | |
| "learning_rate": 4.756938507836929e-06, | |
| "loss": 0.7914, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.543046357615894, | |
| "grad_norm": 2.637204647064209, | |
| "learning_rate": 4.7326544930992905e-06, | |
| "loss": 0.7225, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.5445180279617365, | |
| "grad_norm": 2.537356376647949, | |
| "learning_rate": 4.708376801122321e-06, | |
| "loss": 0.7063, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.5459896983075792, | |
| "grad_norm": 2.9717514514923096, | |
| "learning_rate": 4.684106006077015e-06, | |
| "loss": 1.0508, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.5474613686534217, | |
| "grad_norm": 2.7512850761413574, | |
| "learning_rate": 4.659842681971258e-06, | |
| "loss": 0.9937, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.5489330389992642, | |
| "grad_norm": 2.5516979694366455, | |
| "learning_rate": 4.635587402636241e-06, | |
| "loss": 0.7076, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.5504047093451067, | |
| "grad_norm": 2.5767934322357178, | |
| "learning_rate": 4.611340741712901e-06, | |
| "loss": 0.6927, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.5518763796909493, | |
| "grad_norm": 2.8047547340393066, | |
| "learning_rate": 4.587103272638339e-06, | |
| "loss": 0.7365, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.5533480500367918, | |
| "grad_norm": 2.8013343811035156, | |
| "learning_rate": 4.562875568632278e-06, | |
| "loss": 0.8549, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.5548197203826343, | |
| "grad_norm": 2.835747718811035, | |
| "learning_rate": 4.53865820268349e-06, | |
| "loss": 0.8297, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.5562913907284768, | |
| "grad_norm": 2.6400527954101562, | |
| "learning_rate": 4.514451747536252e-06, | |
| "loss": 0.8164, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.5577630610743194, | |
| "grad_norm": 2.340898036956787, | |
| "learning_rate": 4.4902567756767976e-06, | |
| "loss": 0.7111, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.5592347314201619, | |
| "grad_norm": 2.1810595989227295, | |
| "learning_rate": 4.466073859319781e-06, | |
| "loss": 0.6738, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.5607064017660044, | |
| "grad_norm": 2.5656542778015137, | |
| "learning_rate": 4.441903570394739e-06, | |
| "loss": 0.8892, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.5621780721118469, | |
| "grad_norm": 2.4357686042785645, | |
| "learning_rate": 4.4177464805325695e-06, | |
| "loss": 0.6686, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.5636497424576895, | |
| "grad_norm": 2.548100471496582, | |
| "learning_rate": 4.3936031610520126e-06, | |
| "loss": 0.6647, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.565121412803532, | |
| "grad_norm": 2.8117403984069824, | |
| "learning_rate": 4.369474182946133e-06, | |
| "loss": 0.8539, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.5665930831493745, | |
| "grad_norm": 2.70920467376709, | |
| "learning_rate": 4.3453601168688225e-06, | |
| "loss": 0.7258, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.5680647534952171, | |
| "grad_norm": 2.609267234802246, | |
| "learning_rate": 4.321261533121303e-06, | |
| "loss": 0.9332, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.5695364238410596, | |
| "grad_norm": 2.978053331375122, | |
| "learning_rate": 4.297179001638629e-06, | |
| "loss": 0.8062, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.5710080941869021, | |
| "grad_norm": 2.7160444259643555, | |
| "learning_rate": 4.273113091976226e-06, | |
| "loss": 0.7262, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.5724797645327446, | |
| "grad_norm": 2.662433624267578, | |
| "learning_rate": 4.249064373296403e-06, | |
| "loss": 1.0035, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.5739514348785872, | |
| "grad_norm": 2.56451416015625, | |
| "learning_rate": 4.225033414354909e-06, | |
| "loss": 0.7963, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.5754231052244297, | |
| "grad_norm": 2.2902348041534424, | |
| "learning_rate": 4.201020783487465e-06, | |
| "loss": 0.7863, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.5768947755702722, | |
| "grad_norm": 2.604950428009033, | |
| "learning_rate": 4.17702704859633e-06, | |
| "loss": 0.7413, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.5783664459161147, | |
| "grad_norm": 2.8261375427246094, | |
| "learning_rate": 4.153052777136879e-06, | |
| "loss": 0.8013, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.5798381162619574, | |
| "grad_norm": 2.6362226009368896, | |
| "learning_rate": 4.129098536104161e-06, | |
| "loss": 0.8815, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.5813097866077999, | |
| "grad_norm": 2.562701463699341, | |
| "learning_rate": 4.105164892019514e-06, | |
| "loss": 0.7645, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.5827814569536424, | |
| "grad_norm": 2.8253800868988037, | |
| "learning_rate": 4.081252410917148e-06, | |
| "loss": 0.8393, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.5842531272994849, | |
| "grad_norm": 3.0047738552093506, | |
| "learning_rate": 4.0573616583307705e-06, | |
| "loss": 0.8258, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.5857247976453275, | |
| "grad_norm": 2.6175127029418945, | |
| "learning_rate": 4.033493199280203e-06, | |
| "loss": 0.8083, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.58719646799117, | |
| "grad_norm": 2.4778695106506348, | |
| "learning_rate": 4.009647598258022e-06, | |
| "loss": 0.7379, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.5886681383370125, | |
| "grad_norm": 2.7437469959259033, | |
| "learning_rate": 3.985825419216207e-06, | |
| "loss": 0.8363, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.5901398086828551, | |
| "grad_norm": 2.574824571609497, | |
| "learning_rate": 3.962027225552807e-06, | |
| "loss": 0.6493, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.5916114790286976, | |
| "grad_norm": 2.9498939514160156, | |
| "learning_rate": 3.938253580098613e-06, | |
| "loss": 0.8471, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.5930831493745401, | |
| "grad_norm": 2.7222161293029785, | |
| "learning_rate": 3.914505045103845e-06, | |
| "loss": 0.8087, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.5945548197203826, | |
| "grad_norm": 2.566035747528076, | |
| "learning_rate": 3.89078218222486e-06, | |
| "loss": 0.585, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.5960264900662252, | |
| "grad_norm": 2.4814834594726562, | |
| "learning_rate": 3.867085552510865e-06, | |
| "loss": 0.7181, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.5974981604120677, | |
| "grad_norm": 2.8687238693237305, | |
| "learning_rate": 3.843415716390644e-06, | |
| "loss": 0.6498, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.5989698307579102, | |
| "grad_norm": 2.9591500759124756, | |
| "learning_rate": 3.819773233659314e-06, | |
| "loss": 0.8105, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.6004415011037527, | |
| "grad_norm": 2.970616579055786, | |
| "learning_rate": 3.7961586634650773e-06, | |
| "loss": 0.8648, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.6019131714495953, | |
| "grad_norm": 2.613100290298462, | |
| "learning_rate": 3.7725725642960047e-06, | |
| "loss": 0.6968, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.6033848417954378, | |
| "grad_norm": 2.374007225036621, | |
| "learning_rate": 3.7490154939668176e-06, | |
| "loss": 0.6306, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.6048565121412803, | |
| "grad_norm": 2.7874433994293213, | |
| "learning_rate": 3.725488009605708e-06, | |
| "loss": 0.7445, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.6063281824871228, | |
| "grad_norm": 3.110804319381714, | |
| "learning_rate": 3.701990667641145e-06, | |
| "loss": 0.8443, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.6077998528329654, | |
| "grad_norm": 2.5718374252319336, | |
| "learning_rate": 3.6785240237887355e-06, | |
| "loss": 0.6842, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.609271523178808, | |
| "grad_norm": 3.108994722366333, | |
| "learning_rate": 3.655088633038067e-06, | |
| "loss": 0.6848, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.6107431935246505, | |
| "grad_norm": 2.580518960952759, | |
| "learning_rate": 3.6316850496395863e-06, | |
| "loss": 0.7368, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.6122148638704931, | |
| "grad_norm": 2.642230749130249, | |
| "learning_rate": 3.6083138270914932e-06, | |
| "loss": 0.7295, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.6136865342163356, | |
| "grad_norm": 3.21877121925354, | |
| "learning_rate": 3.584975518126648e-06, | |
| "loss": 0.8949, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.6151582045621781, | |
| "grad_norm": 2.6950385570526123, | |
| "learning_rate": 3.561670674699503e-06, | |
| "loss": 0.942, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.6166298749080206, | |
| "grad_norm": 3.3686113357543945, | |
| "learning_rate": 3.5383998479730357e-06, | |
| "loss": 0.9905, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.6181015452538632, | |
| "grad_norm": 2.3857452869415283, | |
| "learning_rate": 3.5151635883057356e-06, | |
| "loss": 0.7793, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.6195732155997057, | |
| "grad_norm": 2.945770740509033, | |
| "learning_rate": 3.491962445238569e-06, | |
| "loss": 0.8128, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.6210448859455482, | |
| "grad_norm": 2.611788511276245, | |
| "learning_rate": 3.4687969674819912e-06, | |
| "loss": 0.7271, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.6225165562913907, | |
| "grad_norm": 2.4041404724121094, | |
| "learning_rate": 3.4456677029029687e-06, | |
| "loss": 0.7543, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.6239882266372333, | |
| "grad_norm": 2.8908700942993164, | |
| "learning_rate": 3.4225751985120213e-06, | |
| "loss": 0.7535, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.6254598969830758, | |
| "grad_norm": 2.6423447132110596, | |
| "learning_rate": 3.3995200004502814e-06, | |
| "loss": 0.7395, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.6269315673289183, | |
| "grad_norm": 2.6107118129730225, | |
| "learning_rate": 3.3765026539765832e-06, | |
| "loss": 0.9585, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.6284032376747608, | |
| "grad_norm": 2.6921772956848145, | |
| "learning_rate": 3.3535237034545677e-06, | |
| "loss": 0.867, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.6298749080206034, | |
| "grad_norm": 2.6669552326202393, | |
| "learning_rate": 3.3305836923398026e-06, | |
| "loss": 0.785, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.6313465783664459, | |
| "grad_norm": 2.6771135330200195, | |
| "learning_rate": 3.307683163166934e-06, | |
| "loss": 0.7238, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.6328182487122884, | |
| "grad_norm": 2.9073171615600586, | |
| "learning_rate": 3.2848226575368565e-06, | |
| "loss": 0.8712, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.6342899190581309, | |
| "grad_norm": 2.675462484359741, | |
| "learning_rate": 3.2620027161038975e-06, | |
| "loss": 0.8208, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.6357615894039735, | |
| "grad_norm": 2.6564865112304688, | |
| "learning_rate": 3.2392238785630387e-06, | |
| "loss": 0.8071, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.637233259749816, | |
| "grad_norm": 3.0897388458251953, | |
| "learning_rate": 3.216486683637146e-06, | |
| "loss": 0.9577, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.6387049300956585, | |
| "grad_norm": 2.678072452545166, | |
| "learning_rate": 3.1937916690642356e-06, | |
| "loss": 0.7587, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.6401766004415012, | |
| "grad_norm": 2.514816999435425, | |
| "learning_rate": 3.1711393715847477e-06, | |
| "loss": 0.7648, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.6416482707873437, | |
| "grad_norm": 2.598339319229126, | |
| "learning_rate": 3.1485303269288603e-06, | |
| "loss": 0.9055, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.6431199411331862, | |
| "grad_norm": 2.5613248348236084, | |
| "learning_rate": 3.1259650698038106e-06, | |
| "loss": 0.8907, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.6445916114790287, | |
| "grad_norm": 2.8210527896881104, | |
| "learning_rate": 3.103444133881261e-06, | |
| "loss": 1.0472, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.6460632818248713, | |
| "grad_norm": 2.8982555866241455, | |
| "learning_rate": 3.0809680517846664e-06, | |
| "loss": 0.7641, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.6475349521707138, | |
| "grad_norm": 2.70125150680542, | |
| "learning_rate": 3.058537355076683e-06, | |
| "loss": 0.7527, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.6490066225165563, | |
| "grad_norm": 2.6196935176849365, | |
| "learning_rate": 3.0361525742465975e-06, | |
| "loss": 0.7548, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.6504782928623988, | |
| "grad_norm": 2.6116650104522705, | |
| "learning_rate": 3.0138142386977786e-06, | |
| "loss": 0.866, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.6519499632082414, | |
| "grad_norm": 2.4863133430480957, | |
| "learning_rate": 2.991522876735154e-06, | |
| "loss": 0.8648, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.6534216335540839, | |
| "grad_norm": 2.4774370193481445, | |
| "learning_rate": 2.9692790155527228e-06, | |
| "loss": 0.7423, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.6548933038999264, | |
| "grad_norm": 2.4120702743530273, | |
| "learning_rate": 2.9470831812210836e-06, | |
| "loss": 0.6506, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.6563649742457689, | |
| "grad_norm": 2.8675732612609863, | |
| "learning_rate": 2.9249358986749922e-06, | |
| "loss": 0.6814, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.6578366445916115, | |
| "grad_norm": 2.7161166667938232, | |
| "learning_rate": 2.9028376917009448e-06, | |
| "loss": 0.7128, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.659308314937454, | |
| "grad_norm": 2.4354002475738525, | |
| "learning_rate": 2.880789082924798e-06, | |
| "loss": 0.7436, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.6607799852832965, | |
| "grad_norm": 2.698469400405884, | |
| "learning_rate": 2.858790593799405e-06, | |
| "loss": 0.8305, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.6622516556291391, | |
| "grad_norm": 2.746195077896118, | |
| "learning_rate": 2.8368427445922697e-06, | |
| "loss": 0.8079, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.6637233259749816, | |
| "grad_norm": 2.750650644302368, | |
| "learning_rate": 2.8149460543732666e-06, | |
| "loss": 0.7356, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.6651949963208241, | |
| "grad_norm": 2.8698570728302, | |
| "learning_rate": 2.7931010410023516e-06, | |
| "loss": 0.7823, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.6666666666666666, | |
| "grad_norm": 2.5994505882263184, | |
| "learning_rate": 2.771308221117309e-06, | |
| "loss": 0.8341, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.6681383370125092, | |
| "grad_norm": 2.789361000061035, | |
| "learning_rate": 2.749568110121545e-06, | |
| "loss": 0.7507, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.6696100073583517, | |
| "grad_norm": 2.711209774017334, | |
| "learning_rate": 2.7278812221718927e-06, | |
| "loss": 0.7451, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.6710816777041942, | |
| "grad_norm": 2.647339105606079, | |
| "learning_rate": 2.706248070166449e-06, | |
| "loss": 0.6768, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.6725533480500367, | |
| "grad_norm": 2.946976661682129, | |
| "learning_rate": 2.6846691657324473e-06, | |
| "loss": 0.8795, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.6740250183958794, | |
| "grad_norm": 2.607889175415039, | |
| "learning_rate": 2.663145019214163e-06, | |
| "loss": 0.7953, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.6754966887417219, | |
| "grad_norm": 2.4289133548736572, | |
| "learning_rate": 2.6416761396608365e-06, | |
| "loss": 0.7475, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.6769683590875644, | |
| "grad_norm": 2.715745449066162, | |
| "learning_rate": 2.6202630348146323e-06, | |
| "loss": 0.6455, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.6784400294334069, | |
| "grad_norm": 2.5252177715301514, | |
| "learning_rate": 2.598906211098643e-06, | |
| "loss": 0.7237, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.6799116997792495, | |
| "grad_norm": 2.5386343002319336, | |
| "learning_rate": 2.577606173604894e-06, | |
| "loss": 0.793, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.681383370125092, | |
| "grad_norm": 2.5121896266937256, | |
| "learning_rate": 2.556363426082418e-06, | |
| "loss": 0.7519, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.6828550404709345, | |
| "grad_norm": 2.628500461578369, | |
| "learning_rate": 2.535178470925323e-06, | |
| "loss": 0.8999, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.6843267108167771, | |
| "grad_norm": 2.4954497814178467, | |
| "learning_rate": 2.5140518091609254e-06, | |
| "loss": 0.7667, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.6857983811626196, | |
| "grad_norm": 2.9571664333343506, | |
| "learning_rate": 2.4929839404378935e-06, | |
| "loss": 0.9206, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.6872700515084621, | |
| "grad_norm": 2.7304599285125732, | |
| "learning_rate": 2.4719753630144283e-06, | |
| "loss": 0.8066, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.6887417218543046, | |
| "grad_norm": 2.7657480239868164, | |
| "learning_rate": 2.451026573746482e-06, | |
| "loss": 0.7235, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.6902133922001472, | |
| "grad_norm": 2.851828098297119, | |
| "learning_rate": 2.430138068076013e-06, | |
| "loss": 0.7381, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.6916850625459897, | |
| "grad_norm": 2.8263611793518066, | |
| "learning_rate": 2.4093103400192626e-06, | |
| "loss": 0.9067, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.6931567328918322, | |
| "grad_norm": 2.27524995803833, | |
| "learning_rate": 2.388543882155067e-06, | |
| "loss": 0.5643, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.6946284032376747, | |
| "grad_norm": 2.7731189727783203, | |
| "learning_rate": 2.3678391856132203e-06, | |
| "loss": 0.7023, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.6961000735835173, | |
| "grad_norm": 2.6988525390625, | |
| "learning_rate": 2.3471967400628513e-06, | |
| "loss": 0.7669, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.6975717439293598, | |
| "grad_norm": 2.60081148147583, | |
| "learning_rate": 2.32661703370084e-06, | |
| "loss": 0.8822, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.6990434142752023, | |
| "grad_norm": 2.901594638824463, | |
| "learning_rate": 2.306100553240274e-06, | |
| "loss": 0.7002, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.7005150846210448, | |
| "grad_norm": 2.74725079536438, | |
| "learning_rate": 2.2856477838989455e-06, | |
| "loss": 0.7208, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.7019867549668874, | |
| "grad_norm": 2.5290257930755615, | |
| "learning_rate": 2.265259209387867e-06, | |
| "loss": 0.9018, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.70345842531273, | |
| "grad_norm": 3.207190752029419, | |
| "learning_rate": 2.244935311899829e-06, | |
| "loss": 0.9887, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.7049300956585725, | |
| "grad_norm": 3.089996099472046, | |
| "learning_rate": 2.2246765720980074e-06, | |
| "loss": 0.8796, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.7064017660044151, | |
| "grad_norm": 2.8044493198394775, | |
| "learning_rate": 2.2044834691045873e-06, | |
| "loss": 0.8091, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.7078734363502576, | |
| "grad_norm": 2.7987630367279053, | |
| "learning_rate": 2.1843564804894316e-06, | |
| "loss": 0.792, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.7093451066961001, | |
| "grad_norm": 3.069977045059204, | |
| "learning_rate": 2.1642960822587878e-06, | |
| "loss": 0.7951, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.7108167770419426, | |
| "grad_norm": 3.010993003845215, | |
| "learning_rate": 2.1443027488440338e-06, | |
| "loss": 0.8252, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.7122884473877852, | |
| "grad_norm": 2.7961068153381348, | |
| "learning_rate": 2.1243769530904564e-06, | |
| "loss": 0.89, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.7137601177336277, | |
| "grad_norm": 2.8578946590423584, | |
| "learning_rate": 2.104519166246059e-06, | |
| "loss": 0.8419, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.7152317880794702, | |
| "grad_norm": 2.810152769088745, | |
| "learning_rate": 2.0847298579504344e-06, | |
| "loss": 0.7479, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.7167034584253127, | |
| "grad_norm": 2.567078113555908, | |
| "learning_rate": 2.065009496223638e-06, | |
| "loss": 0.664, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.7181751287711553, | |
| "grad_norm": 2.8592417240142822, | |
| "learning_rate": 2.045358547455138e-06, | |
| "loss": 0.8539, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.7196467991169978, | |
| "grad_norm": 2.859276533126831, | |
| "learning_rate": 2.0257774763927656e-06, | |
| "loss": 0.8118, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.7211184694628403, | |
| "grad_norm": 2.916217088699341, | |
| "learning_rate": 2.0062667461317425e-06, | |
| "loss": 0.7133, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.7225901398086828, | |
| "grad_norm": 2.890087127685547, | |
| "learning_rate": 1.9868268181037186e-06, | |
| "loss": 0.8926, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.7240618101545254, | |
| "grad_norm": 2.791355848312378, | |
| "learning_rate": 1.967458152065857e-06, | |
| "loss": 0.9062, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.7255334805003679, | |
| "grad_norm": 2.536635398864746, | |
| "learning_rate": 1.9481612060899646e-06, | |
| "loss": 0.7114, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.7270051508462104, | |
| "grad_norm": 2.708343505859375, | |
| "learning_rate": 1.928936436551661e-06, | |
| "loss": 0.8861, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.7284768211920529, | |
| "grad_norm": 2.842942953109741, | |
| "learning_rate": 1.9097842981195836e-06, | |
| "loss": 0.7926, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.7299484915378955, | |
| "grad_norm": 2.975794792175293, | |
| "learning_rate": 1.8907052437446271e-06, | |
| "loss": 0.801, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.731420161883738, | |
| "grad_norm": 2.9826395511627197, | |
| "learning_rate": 1.871699724649244e-06, | |
| "loss": 0.6895, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.7328918322295805, | |
| "grad_norm": 2.5878913402557373, | |
| "learning_rate": 1.8527681903167644e-06, | |
| "loss": 0.6327, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.7343635025754232, | |
| "grad_norm": 2.8014075756073, | |
| "learning_rate": 1.8339110884807671e-06, | |
| "loss": 0.7839, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.7358351729212657, | |
| "grad_norm": 2.62280535697937, | |
| "learning_rate": 1.8151288651144894e-06, | |
| "loss": 0.6315, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.7373068432671082, | |
| "grad_norm": 2.3114869594573975, | |
| "learning_rate": 1.7964219644202852e-06, | |
| "loss": 0.8394, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.7387785136129507, | |
| "grad_norm": 2.415231466293335, | |
| "learning_rate": 1.7777908288191176e-06, | |
| "loss": 0.724, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.7402501839587933, | |
| "grad_norm": 2.5043222904205322, | |
| "learning_rate": 1.7592358989400882e-06, | |
| "loss": 0.8248, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.7417218543046358, | |
| "grad_norm": 2.547647476196289, | |
| "learning_rate": 1.7407576136100278e-06, | |
| "loss": 0.7288, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.7431935246504783, | |
| "grad_norm": 2.986689329147339, | |
| "learning_rate": 1.7223564098431067e-06, | |
| "loss": 0.8131, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.7446651949963208, | |
| "grad_norm": 2.9111227989196777, | |
| "learning_rate": 1.704032722830512e-06, | |
| "loss": 0.7931, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.7461368653421634, | |
| "grad_norm": 2.6043741703033447, | |
| "learning_rate": 1.6857869859301401e-06, | |
| "loss": 0.68, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.7476085356880059, | |
| "grad_norm": 2.713120460510254, | |
| "learning_rate": 1.6676196306563614e-06, | |
| "loss": 0.7447, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.7490802060338484, | |
| "grad_norm": 2.875211000442505, | |
| "learning_rate": 1.6495310866698095e-06, | |
| "loss": 0.8778, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.7505518763796909, | |
| "grad_norm": 2.747056722640991, | |
| "learning_rate": 1.6315217817672142e-06, | |
| "loss": 0.8072, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.7520235467255335, | |
| "grad_norm": 2.6293632984161377, | |
| "learning_rate": 1.6135921418712959e-06, | |
| "loss": 0.8272, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.753495217071376, | |
| "grad_norm": 2.7001330852508545, | |
| "learning_rate": 1.5957425910206787e-06, | |
| "loss": 0.9759, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.7549668874172185, | |
| "grad_norm": 2.5239274501800537, | |
| "learning_rate": 1.577973551359877e-06, | |
| "loss": 0.717, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.7564385577630611, | |
| "grad_norm": 2.681542158126831, | |
| "learning_rate": 1.5602854431292963e-06, | |
| "loss": 0.6227, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.7579102281089036, | |
| "grad_norm": 2.667667865753174, | |
| "learning_rate": 1.542678684655306e-06, | |
| "loss": 0.8233, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.7593818984547461, | |
| "grad_norm": 2.79180908203125, | |
| "learning_rate": 1.5251536923403427e-06, | |
| "loss": 0.7726, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.7608535688005886, | |
| "grad_norm": 3.0569746494293213, | |
| "learning_rate": 1.5077108806530582e-06, | |
| "loss": 0.7847, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.7623252391464312, | |
| "grad_norm": 2.5163207054138184, | |
| "learning_rate": 1.4903506621185193e-06, | |
| "loss": 0.7946, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.7637969094922737, | |
| "grad_norm": 2.5696611404418945, | |
| "learning_rate": 1.4730734473084568e-06, | |
| "loss": 0.7233, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.7652685798381162, | |
| "grad_norm": 2.863888740539551, | |
| "learning_rate": 1.4558796448315504e-06, | |
| "loss": 0.7795, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.7667402501839587, | |
| "grad_norm": 2.5073134899139404, | |
| "learning_rate": 1.4387696613237612e-06, | |
| "loss": 0.782, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.7682119205298014, | |
| "grad_norm": 3.094791889190674, | |
| "learning_rate": 1.4217439014387251e-06, | |
| "loss": 0.936, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.7696835908756439, | |
| "grad_norm": 2.4089252948760986, | |
| "learning_rate": 1.404802767838176e-06, | |
| "loss": 0.7706, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.7711552612214864, | |
| "grad_norm": 2.9336740970611572, | |
| "learning_rate": 1.38794666118242e-06, | |
| "loss": 0.8929, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.7726269315673289, | |
| "grad_norm": 2.5258219242095947, | |
| "learning_rate": 1.371175980120864e-06, | |
| "loss": 0.763, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.7740986019131715, | |
| "grad_norm": 2.525669574737549, | |
| "learning_rate": 1.3544911212825907e-06, | |
| "loss": 0.8683, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.775570272259014, | |
| "grad_norm": 2.4933722019195557, | |
| "learning_rate": 1.337892479266974e-06, | |
| "loss": 0.7019, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.7770419426048565, | |
| "grad_norm": 2.8783955574035645, | |
| "learning_rate": 1.321380446634342e-06, | |
| "loss": 0.914, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.7785136129506991, | |
| "grad_norm": 3.0792112350463867, | |
| "learning_rate": 1.3049554138967052e-06, | |
| "loss": 0.768, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.7799852832965416, | |
| "grad_norm": 2.811859130859375, | |
| "learning_rate": 1.2886177695085078e-06, | |
| "loss": 0.782, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.7814569536423841, | |
| "grad_norm": 2.7325949668884277, | |
| "learning_rate": 1.2723678998574512e-06, | |
| "loss": 0.8395, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.7829286239882266, | |
| "grad_norm": 2.7767133712768555, | |
| "learning_rate": 1.2562061892553472e-06, | |
| "loss": 0.766, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.7844002943340692, | |
| "grad_norm": 2.8727259635925293, | |
| "learning_rate": 1.2401330199290368e-06, | |
| "loss": 0.7734, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.7858719646799117, | |
| "grad_norm": 2.7274365425109863, | |
| "learning_rate": 1.224148772011346e-06, | |
| "loss": 0.8786, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.7873436350257542, | |
| "grad_norm": 2.7339842319488525, | |
| "learning_rate": 1.2082538235320928e-06, | |
| "loss": 0.7648, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.7888153053715967, | |
| "grad_norm": 2.509180784225464, | |
| "learning_rate": 1.1924485504091565e-06, | |
| "loss": 0.7314, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.7902869757174393, | |
| "grad_norm": 3.0106115341186523, | |
| "learning_rate": 1.1767333264395735e-06, | |
| "loss": 0.7766, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.7917586460632818, | |
| "grad_norm": 2.565518617630005, | |
| "learning_rate": 1.1611085232907132e-06, | |
| "loss": 0.683, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.7932303164091243, | |
| "grad_norm": 2.6307907104492188, | |
| "learning_rate": 1.14557451049147e-06, | |
| "loss": 0.833, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.7947019867549668, | |
| "grad_norm": 2.797319173812866, | |
| "learning_rate": 1.1301316554235397e-06, | |
| "loss": 0.7519, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.7961736571008095, | |
| "grad_norm": 2.6741998195648193, | |
| "learning_rate": 1.1147803233127241e-06, | |
| "loss": 0.7264, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.797645327446652, | |
| "grad_norm": 2.9562933444976807, | |
| "learning_rate": 1.0995208772202898e-06, | |
| "loss": 0.9802, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.7991169977924945, | |
| "grad_norm": 2.957308053970337, | |
| "learning_rate": 1.0843536780343866e-06, | |
| "loss": 0.904, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.8005886681383371, | |
| "grad_norm": 3.1149260997772217, | |
| "learning_rate": 1.0692790844615131e-06, | |
| "loss": 0.8182, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.8020603384841796, | |
| "grad_norm": 3.102614641189575, | |
| "learning_rate": 1.0542974530180327e-06, | |
| "loss": 0.7731, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.8035320088300221, | |
| "grad_norm": 2.8476767539978027, | |
| "learning_rate": 1.0394091380217354e-06, | |
| "loss": 0.7407, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.8050036791758646, | |
| "grad_norm": 2.722993850708008, | |
| "learning_rate": 1.0246144915834683e-06, | |
| "loss": 0.6995, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.8064753495217072, | |
| "grad_norm": 2.6003148555755615, | |
| "learning_rate": 1.0099138635988026e-06, | |
| "loss": 0.7062, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.8079470198675497, | |
| "grad_norm": 2.6799726486206055, | |
| "learning_rate": 9.953076017397579e-07, | |
| "loss": 0.6479, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.8094186902133922, | |
| "grad_norm": 2.7827186584472656, | |
| "learning_rate": 9.807960514465792e-07, | |
| "loss": 0.7404, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.8108903605592347, | |
| "grad_norm": 2.6582939624786377, | |
| "learning_rate": 9.663795559195733e-07, | |
| "loss": 0.7495, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.8123620309050773, | |
| "grad_norm": 2.6799185276031494, | |
| "learning_rate": 9.520584561109863e-07, | |
| "loss": 0.8731, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.8138337012509198, | |
| "grad_norm": 2.924217462539673, | |
| "learning_rate": 9.378330907169387e-07, | |
| "loss": 0.6908, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.8153053715967623, | |
| "grad_norm": 2.823230028152466, | |
| "learning_rate": 9.237037961694223e-07, | |
| "loss": 0.7609, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.8167770419426048, | |
| "grad_norm": 2.8368513584136963, | |
| "learning_rate": 9.096709066283355e-07, | |
| "loss": 0.8756, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.8182487122884474, | |
| "grad_norm": 2.3523733615875244, | |
| "learning_rate": 8.957347539735872e-07, | |
| "loss": 0.6146, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.8197203826342899, | |
| "grad_norm": 2.600253105163574, | |
| "learning_rate": 8.818956677972407e-07, | |
| "loss": 0.6998, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.8211920529801324, | |
| "grad_norm": 2.78033447265625, | |
| "learning_rate": 8.681539753957268e-07, | |
| "loss": 0.8612, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.8226637233259749, | |
| "grad_norm": 2.650313138961792, | |
| "learning_rate": 8.545100017620988e-07, | |
| "loss": 0.7443, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.8241353936718175, | |
| "grad_norm": 2.694070816040039, | |
| "learning_rate": 8.409640695783444e-07, | |
| "loss": 0.7601, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.82560706401766, | |
| "grad_norm": 3.236161708831787, | |
| "learning_rate": 8.275164992077555e-07, | |
| "loss": 0.8981, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.8270787343635025, | |
| "grad_norm": 2.4480512142181396, | |
| "learning_rate": 8.141676086873574e-07, | |
| "loss": 0.6256, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.8285504047093452, | |
| "grad_norm": 2.66650128364563, | |
| "learning_rate": 8.009177137203794e-07, | |
| "loss": 0.6718, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.8300220750551877, | |
| "grad_norm": 2.679651975631714, | |
| "learning_rate": 7.877671276687899e-07, | |
| "loss": 0.67, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.8314937454010302, | |
| "grad_norm": 2.603134870529175, | |
| "learning_rate": 7.747161615458903e-07, | |
| "loss": 0.6928, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.8329654157468727, | |
| "grad_norm": 2.7582461833953857, | |
| "learning_rate": 7.617651240089546e-07, | |
| "loss": 0.8008, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.8344370860927153, | |
| "grad_norm": 2.8830525875091553, | |
| "learning_rate": 7.489143213519301e-07, | |
| "loss": 0.8107, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.8359087564385578, | |
| "grad_norm": 2.789246082305908, | |
| "learning_rate": 7.361640574981938e-07, | |
| "loss": 0.7996, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.8373804267844003, | |
| "grad_norm": 3.0257482528686523, | |
| "learning_rate": 7.235146339933674e-07, | |
| "loss": 0.7135, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.8388520971302428, | |
| "grad_norm": 2.429319381713867, | |
| "learning_rate": 7.109663499981834e-07, | |
| "loss": 0.8229, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.8403237674760854, | |
| "grad_norm": 2.646169424057007, | |
| "learning_rate": 6.985195022814068e-07, | |
| "loss": 0.7295, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.8417954378219279, | |
| "grad_norm": 2.7810020446777344, | |
| "learning_rate": 6.861743852128233e-07, | |
| "loss": 0.8911, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.8432671081677704, | |
| "grad_norm": 2.963247537612915, | |
| "learning_rate": 6.739312907562734e-07, | |
| "loss": 0.9285, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.8447387785136129, | |
| "grad_norm": 2.7100367546081543, | |
| "learning_rate": 6.617905084627452e-07, | |
| "loss": 0.7045, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.8462104488594555, | |
| "grad_norm": 2.6360723972320557, | |
| "learning_rate": 6.497523254635296e-07, | |
| "loss": 0.923, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.847682119205298, | |
| "grad_norm": 2.7827036380767822, | |
| "learning_rate": 6.37817026463432e-07, | |
| "loss": 0.7223, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.8491537895511405, | |
| "grad_norm": 3.0738601684570312, | |
| "learning_rate": 6.25984893734034e-07, | |
| "loss": 0.952, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.8506254598969831, | |
| "grad_norm": 2.955704689025879, | |
| "learning_rate": 6.142562071070179e-07, | |
| "loss": 0.9231, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.8520971302428256, | |
| "grad_norm": 2.607506036758423, | |
| "learning_rate": 6.026312439675553e-07, | |
| "loss": 0.8037, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.8535688005886681, | |
| "grad_norm": 2.722355842590332, | |
| "learning_rate": 5.911102792477358e-07, | |
| "loss": 0.8769, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.8550404709345106, | |
| "grad_norm": 2.6345765590667725, | |
| "learning_rate": 5.796935854200764e-07, | |
| "loss": 0.6607, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.8565121412803532, | |
| "grad_norm": 2.4528110027313232, | |
| "learning_rate": 5.683814324910685e-07, | |
| "loss": 0.7011, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.8579838116261957, | |
| "grad_norm": 2.753122329711914, | |
| "learning_rate": 5.571740879947979e-07, | |
| "loss": 0.7738, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.8594554819720382, | |
| "grad_norm": 2.6649703979492188, | |
| "learning_rate": 5.460718169866163e-07, | |
| "loss": 0.7858, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.8609271523178808, | |
| "grad_norm": 2.739748477935791, | |
| "learning_rate": 5.350748820368689e-07, | |
| "loss": 0.7256, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.8623988226637234, | |
| "grad_norm": 2.549894094467163, | |
| "learning_rate": 5.241835432246888e-07, | |
| "loss": 0.6335, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.8638704930095659, | |
| "grad_norm": 2.778587579727173, | |
| "learning_rate": 5.133980581318459e-07, | |
| "loss": 0.8065, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.8653421633554084, | |
| "grad_norm": 2.5618858337402344, | |
| "learning_rate": 5.027186818366542e-07, | |
| "loss": 0.7347, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.8668138337012509, | |
| "grad_norm": 2.583333730697632, | |
| "learning_rate": 4.921456669079366e-07, | |
| "loss": 0.8103, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.8682855040470935, | |
| "grad_norm": 2.6635944843292236, | |
| "learning_rate": 4.81679263399057e-07, | |
| "loss": 0.8243, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.869757174392936, | |
| "grad_norm": 3.3781163692474365, | |
| "learning_rate": 4.713197188420027e-07, | |
| "loss": 0.8512, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.8712288447387785, | |
| "grad_norm": 2.3862738609313965, | |
| "learning_rate": 4.6106727824152764e-07, | |
| "loss": 0.6945, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.8727005150846211, | |
| "grad_norm": 2.710103750228882, | |
| "learning_rate": 4.509221840693656e-07, | |
| "loss": 0.7842, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.8741721854304636, | |
| "grad_norm": 3.1467630863189697, | |
| "learning_rate": 4.408846762584901e-07, | |
| "loss": 0.8328, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.8756438557763061, | |
| "grad_norm": 2.763000965118408, | |
| "learning_rate": 4.309549921974421e-07, | |
| "loss": 0.8947, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.8771155261221486, | |
| "grad_norm": 3.0412211418151855, | |
| "learning_rate": 4.211333667247125e-07, | |
| "loss": 0.8716, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.8785871964679912, | |
| "grad_norm": 2.719144344329834, | |
| "learning_rate": 4.114200321231937e-07, | |
| "loss": 0.7778, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.8800588668138337, | |
| "grad_norm": 2.2873966693878174, | |
| "learning_rate": 4.018152181146823e-07, | |
| "loss": 0.6861, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.8815305371596762, | |
| "grad_norm": 2.654785633087158, | |
| "learning_rate": 3.9231915185444337e-07, | |
| "loss": 0.8151, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.8830022075055187, | |
| "grad_norm": 2.84719181060791, | |
| "learning_rate": 3.8293205792584666e-07, | |
| "loss": 0.9433, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.8844738778513613, | |
| "grad_norm": 2.5994880199432373, | |
| "learning_rate": 3.736541583350473e-07, | |
| "loss": 0.9252, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.8859455481972038, | |
| "grad_norm": 2.488598108291626, | |
| "learning_rate": 3.6448567250574053e-07, | |
| "loss": 0.6793, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.8874172185430463, | |
| "grad_norm": 2.968190908432007, | |
| "learning_rate": 3.5542681727396613e-07, | |
| "loss": 0.908, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.8888888888888888, | |
| "grad_norm": 2.297053813934326, | |
| "learning_rate": 3.464778068829883e-07, | |
| "loss": 0.6731, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.8903605592347315, | |
| "grad_norm": 2.4779140949249268, | |
| "learning_rate": 3.3763885297822153e-07, | |
| "loss": 0.6418, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.891832229580574, | |
| "grad_norm": 2.8664562702178955, | |
| "learning_rate": 3.289101646022297e-07, | |
| "loss": 0.7856, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.8933038999264165, | |
| "grad_norm": 2.8044114112854004, | |
| "learning_rate": 3.2029194818977984e-07, | |
| "loss": 0.9089, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.8947755702722591, | |
| "grad_norm": 2.656986951828003, | |
| "learning_rate": 3.117844075629617e-07, | |
| "loss": 0.8366, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.8962472406181016, | |
| "grad_norm": 2.7499563694000244, | |
| "learning_rate": 3.033877439263666e-07, | |
| "loss": 0.7709, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.8977189109639441, | |
| "grad_norm": 2.770153760910034, | |
| "learning_rate": 2.9510215586232737e-07, | |
| "loss": 0.8263, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.8991905813097866, | |
| "grad_norm": 2.6473796367645264, | |
| "learning_rate": 2.869278393262226e-07, | |
| "loss": 0.7757, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.9006622516556292, | |
| "grad_norm": 2.7503674030303955, | |
| "learning_rate": 2.7886498764184587e-07, | |
| "loss": 0.7354, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.9021339220014717, | |
| "grad_norm": 2.681307792663574, | |
| "learning_rate": 2.7091379149682683e-07, | |
| "loss": 0.8607, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.9036055923473142, | |
| "grad_norm": 2.6204802989959717, | |
| "learning_rate": 2.6307443893812847e-07, | |
| "loss": 0.8717, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.9050772626931567, | |
| "grad_norm": 2.9451098442077637, | |
| "learning_rate": 2.55347115367594e-07, | |
| "loss": 0.895, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.9065489330389993, | |
| "grad_norm": 2.728437662124634, | |
| "learning_rate": 2.47732003537568e-07, | |
| "loss": 0.8632, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.9080206033848418, | |
| "grad_norm": 2.671208381652832, | |
| "learning_rate": 2.402292835465647e-07, | |
| "loss": 0.7702, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.9094922737306843, | |
| "grad_norm": 2.5721795558929443, | |
| "learning_rate": 2.3283913283502047e-07, | |
| "loss": 0.8497, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.9109639440765268, | |
| "grad_norm": 2.4093384742736816, | |
| "learning_rate": 2.2556172618108996e-07, | |
| "loss": 0.8805, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.9124356144223694, | |
| "grad_norm": 3.0314748287200928, | |
| "learning_rate": 2.183972356965125e-07, | |
| "loss": 0.8581, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.9139072847682119, | |
| "grad_norm": 2.5950942039489746, | |
| "learning_rate": 2.113458308225458e-07, | |
| "loss": 0.6963, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.9153789551140544, | |
| "grad_norm": 2.590554714202881, | |
| "learning_rate": 2.0440767832595576e-07, | |
| "loss": 0.6748, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.9168506254598969, | |
| "grad_norm": 2.457000970840454, | |
| "learning_rate": 1.9758294229507092e-07, | |
| "loss": 0.761, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.9183222958057395, | |
| "grad_norm": 2.3966705799102783, | |
| "learning_rate": 1.908717841359048e-07, | |
| "loss": 0.7255, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.919793966151582, | |
| "grad_norm": 2.606142520904541, | |
| "learning_rate": 1.8427436256833853e-07, | |
| "loss": 0.7811, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.9212656364974245, | |
| "grad_norm": 2.725306749343872, | |
| "learning_rate": 1.777908336223655e-07, | |
| "loss": 0.7978, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.9227373068432672, | |
| "grad_norm": 2.595306634902954, | |
| "learning_rate": 1.7142135063440034e-07, | |
| "loss": 0.7847, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.9242089771891097, | |
| "grad_norm": 2.832662582397461, | |
| "learning_rate": 1.6516606424365644e-07, | |
| "loss": 0.6639, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.9256806475349522, | |
| "grad_norm": 2.824404239654541, | |
| "learning_rate": 1.590251223885786e-07, | |
| "loss": 0.6387, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.9271523178807947, | |
| "grad_norm": 2.551408052444458, | |
| "learning_rate": 1.5299867030334815e-07, | |
| "loss": 0.7776, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.9286239882266373, | |
| "grad_norm": 2.958336591720581, | |
| "learning_rate": 1.4708685051444515e-07, | |
| "loss": 0.9973, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.9300956585724798, | |
| "grad_norm": 2.609131336212158, | |
| "learning_rate": 1.4128980283727946e-07, | |
| "loss": 0.9308, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.9315673289183223, | |
| "grad_norm": 2.8172359466552734, | |
| "learning_rate": 1.3560766437288432e-07, | |
| "loss": 0.7915, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.9330389992641648, | |
| "grad_norm": 2.5778167247772217, | |
| "learning_rate": 1.3004056950467135e-07, | |
| "loss": 0.8021, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.9345106696100074, | |
| "grad_norm": 3.02774977684021, | |
| "learning_rate": 1.24588649895257e-07, | |
| "loss": 0.857, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.9359823399558499, | |
| "grad_norm": 2.6817007064819336, | |
| "learning_rate": 1.19252034483342e-07, | |
| "loss": 0.7217, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.9374540103016924, | |
| "grad_norm": 3.296025037765503, | |
| "learning_rate": 1.1403084948067023e-07, | |
| "loss": 0.9432, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.9389256806475349, | |
| "grad_norm": 2.963472604751587, | |
| "learning_rate": 1.089252183690348e-07, | |
| "loss": 0.7671, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.9403973509933775, | |
| "grad_norm": 2.72013258934021, | |
| "learning_rate": 1.0393526189736602e-07, | |
| "loss": 0.725, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.94186902133922, | |
| "grad_norm": 2.7649495601654053, | |
| "learning_rate": 9.906109807887032e-08, | |
| "loss": 0.8975, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.9433406916850625, | |
| "grad_norm": 2.6894352436065674, | |
| "learning_rate": 9.430284218824026e-08, | |
| "loss": 0.832, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.9448123620309051, | |
| "grad_norm": 2.740673780441284, | |
| "learning_rate": 8.966060675892951e-08, | |
| "loss": 1.0232, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.9462840323767476, | |
| "grad_norm": 2.584526777267456, | |
| "learning_rate": 8.513450158049109e-08, | |
| "loss": 0.7357, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.9477557027225901, | |
| "grad_norm": 2.6124203205108643, | |
| "learning_rate": 8.072463369597994e-08, | |
| "loss": 0.7387, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.9492273730684326, | |
| "grad_norm": 2.9415154457092285, | |
| "learning_rate": 7.643110739942172e-08, | |
| "loss": 0.7183, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.9506990434142752, | |
| "grad_norm": 2.719373941421509, | |
| "learning_rate": 7.225402423334694e-08, | |
| "loss": 0.69, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.9521707137601177, | |
| "grad_norm": 2.724714517593384, | |
| "learning_rate": 6.819348298638839e-08, | |
| "loss": 0.9065, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.9536423841059603, | |
| "grad_norm": 2.491793155670166, | |
| "learning_rate": 6.424957969094536e-08, | |
| "loss": 0.6751, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.9551140544518028, | |
| "grad_norm": 2.8342292308807373, | |
| "learning_rate": 6.0422407620912e-08, | |
| "loss": 0.8441, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.9565857247976454, | |
| "grad_norm": 2.551548719406128, | |
| "learning_rate": 5.6712057289473047e-08, | |
| "loss": 0.7815, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.9580573951434879, | |
| "grad_norm": 2.459239959716797, | |
| "learning_rate": 5.3118616446960484e-08, | |
| "loss": 0.7014, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.9595290654893304, | |
| "grad_norm": 2.655364751815796, | |
| "learning_rate": 4.9642170078780804e-08, | |
| "loss": 0.7887, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.9610007358351729, | |
| "grad_norm": 3.058354616165161, | |
| "learning_rate": 4.628280040340272e-08, | |
| "loss": 0.8193, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.9624724061810155, | |
| "grad_norm": 2.8031606674194336, | |
| "learning_rate": 4.3040586870415346e-08, | |
| "loss": 0.8415, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.963944076526858, | |
| "grad_norm": 2.788810968399048, | |
| "learning_rate": 3.991560615864587e-08, | |
| "loss": 0.6981, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.9654157468727005, | |
| "grad_norm": 2.4134199619293213, | |
| "learning_rate": 3.690793217434985e-08, | |
| "loss": 0.6854, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.9668874172185431, | |
| "grad_norm": 2.4256997108459473, | |
| "learning_rate": 3.40176360494604e-08, | |
| "loss": 0.8412, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.9683590875643856, | |
| "grad_norm": 2.961256742477417, | |
| "learning_rate": 3.1244786139907334e-08, | |
| "loss": 0.8928, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.9698307579102281, | |
| "grad_norm": 3.0347156524658203, | |
| "learning_rate": 2.858944802399899e-08, | |
| "loss": 0.862, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.9713024282560706, | |
| "grad_norm": 2.7489945888519287, | |
| "learning_rate": 2.605168450087514e-08, | |
| "loss": 0.8402, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.9727740986019132, | |
| "grad_norm": 2.5865862369537354, | |
| "learning_rate": 2.363155558901542e-08, | |
| "loss": 0.7846, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.9742457689477557, | |
| "grad_norm": 2.7627742290496826, | |
| "learning_rate": 2.1329118524827662e-08, | |
| "loss": 0.8333, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.9757174392935982, | |
| "grad_norm": 2.5887227058410645, | |
| "learning_rate": 1.914442776128622e-08, | |
| "loss": 0.8554, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.9771891096394407, | |
| "grad_norm": 2.511735439300537, | |
| "learning_rate": 1.7077534966650767e-08, | |
| "loss": 0.705, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.9786607799852833, | |
| "grad_norm": 2.7016990184783936, | |
| "learning_rate": 1.51284890232406e-08, | |
| "loss": 0.7558, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.9801324503311258, | |
| "grad_norm": 2.836970329284668, | |
| "learning_rate": 1.3297336026280027e-08, | |
| "loss": 0.8053, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.9816041206769683, | |
| "grad_norm": 2.5258963108062744, | |
| "learning_rate": 1.158411928280645e-08, | |
| "loss": 0.5653, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.9830757910228108, | |
| "grad_norm": 2.4458508491516113, | |
| "learning_rate": 9.988879310649513e-09, | |
| "loss": 0.6538, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.9845474613686535, | |
| "grad_norm": 2.5343708992004395, | |
| "learning_rate": 8.511653837470212e-09, | |
| "loss": 0.703, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.986019131714496, | |
| "grad_norm": 2.5260910987854004, | |
| "learning_rate": 7.152477799867718e-09, | |
| "loss": 0.67, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.9874908020603385, | |
| "grad_norm": 2.7152626514434814, | |
| "learning_rate": 5.911383342556143e-09, | |
| "loss": 0.9093, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.9889624724061811, | |
| "grad_norm": 2.4940526485443115, | |
| "learning_rate": 4.788399817602929e-09, | |
| "loss": 0.6923, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.9904341427520236, | |
| "grad_norm": 2.8715314865112305, | |
| "learning_rate": 3.783553783733851e-09, | |
| "loss": 0.7952, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.9919058130978661, | |
| "grad_norm": 2.9330999851226807, | |
| "learning_rate": 2.896869005705183e-09, | |
| "loss": 0.9401, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.9933774834437086, | |
| "grad_norm": 2.560947895050049, | |
| "learning_rate": 2.128366453743591e-09, | |
| "loss": 0.7578, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.9948491537895512, | |
| "grad_norm": 2.549210786819458, | |
| "learning_rate": 1.4780643030476439e-09, | |
| "loss": 0.8439, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.9963208241353937, | |
| "grad_norm": 2.915217876434326, | |
| "learning_rate": 9.459779333587104e-10, | |
| "loss": 0.7632, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.9977924944812362, | |
| "grad_norm": 2.8781983852386475, | |
| "learning_rate": 5.321199285979184e-10, | |
| "loss": 0.8137, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.9992641648270787, | |
| "grad_norm": 2.7309377193450928, | |
| "learning_rate": 2.3650007656805804e-10, | |
| "loss": 0.6736, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 4.366690158843994, | |
| "learning_rate": 5.912536872321184e-11, | |
| "loss": 0.6984, | |
| "step": 680 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 680, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 3.879511026381619e+16, | |
| "train_batch_size": 6, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
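
Rather than eyeballing all 680 `log_history` entries above, the run can be sanity-checked programmatically. The following is a minimal sketch, not part of the checkpoint itself: it assumes the file is saved as `trainer_state.json` in the current directory (adjust the path to your checkpoint folder) and uses only the standard library to summarize the loss and gradient-norm trajectory.

```python
# Minimal sketch for inspecting a Hugging Face trainer_state.json.
# Assumption: the JSON above is saved as "trainer_state.json" locally.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]                 # one dict per logged step
steps = [h["step"] for h in history]
losses = [h["loss"] for h in history]

# Mean loss over the last 20 logged steps as a rough convergence check,
# since per-step loss is noisy (0.56-1.02 in the final stretch above).
tail = losses[-20:]
print(f"steps logged: {len(steps)} (max_steps={state['max_steps']})")
print(f"final loss: {losses[-1]:.4f}, mean of last {len(tail)}: {sum(tail)/len(tail):.4f}")
print(f"peak grad_norm: {max(h['grad_norm'] for h in history):.2f}")
print(f"final learning_rate: {history[-1]['learning_rate']:.3e}")
```

On this state the summary is consistent with a completed single-epoch run: the learning rate has decayed to ~5.9e-11 at step 680, the loss plateaus around 0.7-0.8, and `should_training_stop` is `true`.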