{ "best_global_step": null, "best_metric": null, "best_model_checkpoint": null, "epoch": 1.0, "eval_steps": 0, "global_step": 24129, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 4.1443905673670685e-05, "grad_norm": 0.7630084753036499, "learning_rate": 5e-06, "loss": 1.073, "step": 1 }, { "epoch": 8.288781134734137e-05, "grad_norm": 0.7379353046417236, "learning_rate": 4.999792780471633e-06, "loss": 1.0439, "step": 2 }, { "epoch": 0.00012433171702101205, "grad_norm": 0.5726526975631714, "learning_rate": 4.999585560943264e-06, "loss": 1.0112, "step": 3 }, { "epoch": 0.00016577562269468274, "grad_norm": 0.6247206330299377, "learning_rate": 4.999378341414895e-06, "loss": 1.0464, "step": 4 }, { "epoch": 0.00020721952836835343, "grad_norm": 0.5651471614837646, "learning_rate": 4.999171121886527e-06, "loss": 0.9998, "step": 5 }, { "epoch": 0.0002486634340420241, "grad_norm": 0.556253969669342, "learning_rate": 4.998963902358159e-06, "loss": 0.96, "step": 6 }, { "epoch": 0.0002901073397156948, "grad_norm": 0.5390304327011108, "learning_rate": 4.99875668282979e-06, "loss": 0.9778, "step": 7 }, { "epoch": 0.0003315512453893655, "grad_norm": 0.6542398929595947, "learning_rate": 4.998549463301422e-06, "loss": 1.0061, "step": 8 }, { "epoch": 0.0003729951510630362, "grad_norm": 0.7193869948387146, "learning_rate": 4.998342243773053e-06, "loss": 1.0396, "step": 9 }, { "epoch": 0.00041443905673670687, "grad_norm": 0.6612483859062195, "learning_rate": 4.998135024244685e-06, "loss": 0.9978, "step": 10 }, { "epoch": 0.00045588296241037753, "grad_norm": 0.6465231776237488, "learning_rate": 4.997927804716317e-06, "loss": 1.0269, "step": 11 }, { "epoch": 0.0004973268680840482, "grad_norm": 0.6349920034408569, "learning_rate": 4.997720585187948e-06, "loss": 0.958, "step": 12 }, { "epoch": 0.000538770773757719, "grad_norm": 0.6287173628807068, "learning_rate": 4.9975133656595804e-06, "loss": 1.0046, "step": 13 }, 
{ "epoch": 0.0005802146794313896, "grad_norm": 0.5536159873008728, "learning_rate": 4.997306146131212e-06, "loss": 0.9456, "step": 14 }, { "epoch": 0.0006216585851050603, "grad_norm": 0.5690981149673462, "learning_rate": 4.997098926602843e-06, "loss": 0.9475, "step": 15 }, { "epoch": 0.000663102490778731, "grad_norm": 0.512193500995636, "learning_rate": 4.996891707074475e-06, "loss": 0.9431, "step": 16 }, { "epoch": 0.0007045463964524016, "grad_norm": 0.5570981502532959, "learning_rate": 4.996684487546107e-06, "loss": 0.9458, "step": 17 }, { "epoch": 0.0007459903021260724, "grad_norm": 0.5056589245796204, "learning_rate": 4.996477268017738e-06, "loss": 0.925, "step": 18 }, { "epoch": 0.0007874342077997431, "grad_norm": 0.5428594350814819, "learning_rate": 4.9962700484893704e-06, "loss": 0.9546, "step": 19 }, { "epoch": 0.0008288781134734137, "grad_norm": 0.5670416355133057, "learning_rate": 4.996062828961002e-06, "loss": 0.9832, "step": 20 }, { "epoch": 0.0008703220191470844, "grad_norm": 0.5489663481712341, "learning_rate": 4.995855609432633e-06, "loss": 0.9514, "step": 21 }, { "epoch": 0.0009117659248207551, "grad_norm": 0.5362726449966431, "learning_rate": 4.9956483899042654e-06, "loss": 0.9153, "step": 22 }, { "epoch": 0.0009532098304944258, "grad_norm": 0.5166146755218506, "learning_rate": 4.995441170375897e-06, "loss": 0.9258, "step": 23 }, { "epoch": 0.0009946537361680964, "grad_norm": 0.5216695666313171, "learning_rate": 4.995233950847528e-06, "loss": 0.9104, "step": 24 }, { "epoch": 0.0010360976418417672, "grad_norm": 0.5722406506538391, "learning_rate": 4.99502673131916e-06, "loss": 0.9812, "step": 25 }, { "epoch": 0.001077541547515438, "grad_norm": 0.599118709564209, "learning_rate": 4.994819511790792e-06, "loss": 0.9851, "step": 26 }, { "epoch": 0.0011189854531891085, "grad_norm": 0.5487611293792725, "learning_rate": 4.994612292262423e-06, "loss": 0.9912, "step": 27 }, { "epoch": 0.0011604293588627793, "grad_norm": 0.5384929180145264, "learning_rate": 
4.994405072734055e-06, "loss": 0.9438, "step": 28 }, { "epoch": 0.0012018732645364498, "grad_norm": 0.5511430501937866, "learning_rate": 4.994197853205687e-06, "loss": 0.9172, "step": 29 }, { "epoch": 0.0012433171702101206, "grad_norm": 0.49488845467567444, "learning_rate": 4.993990633677318e-06, "loss": 0.8768, "step": 30 }, { "epoch": 0.0012847610758837914, "grad_norm": 0.4959312975406647, "learning_rate": 4.99378341414895e-06, "loss": 0.9048, "step": 31 }, { "epoch": 0.001326204981557462, "grad_norm": 0.531954824924469, "learning_rate": 4.993576194620581e-06, "loss": 0.9719, "step": 32 }, { "epoch": 0.0013676488872311327, "grad_norm": 0.5291857719421387, "learning_rate": 4.993368975092213e-06, "loss": 0.8948, "step": 33 }, { "epoch": 0.0014090927929048033, "grad_norm": 0.4953014552593231, "learning_rate": 4.993161755563845e-06, "loss": 0.8916, "step": 34 }, { "epoch": 0.001450536698578474, "grad_norm": 0.5361866354942322, "learning_rate": 4.992954536035477e-06, "loss": 0.9556, "step": 35 }, { "epoch": 0.0014919806042521448, "grad_norm": 0.5809301733970642, "learning_rate": 4.992747316507108e-06, "loss": 0.9189, "step": 36 }, { "epoch": 0.0015334245099258154, "grad_norm": 0.5245487689971924, "learning_rate": 4.99254009697874e-06, "loss": 0.9507, "step": 37 }, { "epoch": 0.0015748684155994861, "grad_norm": 0.5433439612388611, "learning_rate": 4.992332877450372e-06, "loss": 0.9312, "step": 38 }, { "epoch": 0.0016163123212731567, "grad_norm": 0.49423933029174805, "learning_rate": 4.992125657922003e-06, "loss": 0.8755, "step": 39 }, { "epoch": 0.0016577562269468275, "grad_norm": 0.5284749865531921, "learning_rate": 4.991918438393635e-06, "loss": 0.9126, "step": 40 }, { "epoch": 0.0016992001326204982, "grad_norm": 0.5137740969657898, "learning_rate": 4.991711218865266e-06, "loss": 0.9541, "step": 41 }, { "epoch": 0.0017406440382941688, "grad_norm": 0.5341567397117615, "learning_rate": 4.991503999336898e-06, "loss": 0.9265, "step": 42 }, { "epoch": 
0.0017820879439678396, "grad_norm": 0.5049672722816467, "learning_rate": 4.99129677980853e-06, "loss": 0.8508, "step": 43 }, { "epoch": 0.0018235318496415101, "grad_norm": 0.548191487789154, "learning_rate": 4.991089560280161e-06, "loss": 0.8953, "step": 44 }, { "epoch": 0.001864975755315181, "grad_norm": 0.7055763602256775, "learning_rate": 4.990882340751793e-06, "loss": 0.9331, "step": 45 }, { "epoch": 0.0019064196609888517, "grad_norm": 0.4973026514053345, "learning_rate": 4.990675121223425e-06, "loss": 0.9463, "step": 46 }, { "epoch": 0.0019478635666625222, "grad_norm": 0.45394378900527954, "learning_rate": 4.990467901695056e-06, "loss": 0.8792, "step": 47 }, { "epoch": 0.0019893074723361928, "grad_norm": 0.45260077714920044, "learning_rate": 4.990260682166687e-06, "loss": 0.8354, "step": 48 }, { "epoch": 0.0020307513780098638, "grad_norm": 0.502605140209198, "learning_rate": 4.99005346263832e-06, "loss": 0.8909, "step": 49 }, { "epoch": 0.0020721952836835343, "grad_norm": 0.46680155396461487, "learning_rate": 4.989846243109951e-06, "loss": 0.8728, "step": 50 }, { "epoch": 0.002113639189357205, "grad_norm": 0.4857476055622101, "learning_rate": 4.989639023581582e-06, "loss": 0.8711, "step": 51 }, { "epoch": 0.002155083095030876, "grad_norm": 0.49140453338623047, "learning_rate": 4.989431804053214e-06, "loss": 0.895, "step": 52 }, { "epoch": 0.0021965270007045464, "grad_norm": 0.4997093379497528, "learning_rate": 4.989224584524846e-06, "loss": 0.9133, "step": 53 }, { "epoch": 0.002237970906378217, "grad_norm": 0.5029782652854919, "learning_rate": 4.989017364996478e-06, "loss": 0.8987, "step": 54 }, { "epoch": 0.002279414812051888, "grad_norm": 0.5551053881645203, "learning_rate": 4.98881014546811e-06, "loss": 0.9167, "step": 55 }, { "epoch": 0.0023208587177255585, "grad_norm": 0.49238938093185425, "learning_rate": 4.988602925939741e-06, "loss": 0.865, "step": 56 }, { "epoch": 0.002362302623399229, "grad_norm": 0.49992960691452026, "learning_rate": 
4.988395706411372e-06, "loss": 0.9646, "step": 57 }, { "epoch": 0.0024037465290728996, "grad_norm": 0.555106520652771, "learning_rate": 4.988188486883005e-06, "loss": 0.812, "step": 58 }, { "epoch": 0.0024451904347465706, "grad_norm": 0.5102022886276245, "learning_rate": 4.987981267354636e-06, "loss": 0.8608, "step": 59 }, { "epoch": 0.002486634340420241, "grad_norm": 0.5039694309234619, "learning_rate": 4.987774047826267e-06, "loss": 0.9617, "step": 60 }, { "epoch": 0.0025280782460939118, "grad_norm": 0.5221696496009827, "learning_rate": 4.987566828297899e-06, "loss": 0.8892, "step": 61 }, { "epoch": 0.0025695221517675827, "grad_norm": 0.5181254744529724, "learning_rate": 4.987359608769531e-06, "loss": 0.991, "step": 62 }, { "epoch": 0.0026109660574412533, "grad_norm": 0.5053512454032898, "learning_rate": 4.987152389241162e-06, "loss": 0.9097, "step": 63 }, { "epoch": 0.002652409963114924, "grad_norm": 0.49161139130592346, "learning_rate": 4.986945169712794e-06, "loss": 0.8901, "step": 64 }, { "epoch": 0.002693853868788595, "grad_norm": 0.4739881753921509, "learning_rate": 4.986737950184426e-06, "loss": 0.8699, "step": 65 }, { "epoch": 0.0027352977744622654, "grad_norm": 0.5461541414260864, "learning_rate": 4.986530730656057e-06, "loss": 0.8945, "step": 66 }, { "epoch": 0.002776741680135936, "grad_norm": 0.5793424844741821, "learning_rate": 4.986323511127689e-06, "loss": 0.936, "step": 67 }, { "epoch": 0.0028181855858096065, "grad_norm": 0.4656437039375305, "learning_rate": 4.98611629159932e-06, "loss": 0.8499, "step": 68 }, { "epoch": 0.0028596294914832775, "grad_norm": 0.5063554644584656, "learning_rate": 4.985909072070952e-06, "loss": 0.8713, "step": 69 }, { "epoch": 0.002901073397156948, "grad_norm": 0.545987606048584, "learning_rate": 4.985701852542584e-06, "loss": 0.957, "step": 70 }, { "epoch": 0.0029425173028306186, "grad_norm": 0.5510537028312683, "learning_rate": 4.985494633014216e-06, "loss": 0.8796, "step": 71 }, { "epoch": 0.0029839612085042896, 
"grad_norm": 0.5463219285011292, "learning_rate": 4.9852874134858474e-06, "loss": 0.9336, "step": 72 }, { "epoch": 0.00302540511417796, "grad_norm": 0.5715978145599365, "learning_rate": 4.985080193957479e-06, "loss": 0.9844, "step": 73 }, { "epoch": 0.0030668490198516307, "grad_norm": 0.5243675112724304, "learning_rate": 4.984872974429111e-06, "loss": 0.8474, "step": 74 }, { "epoch": 0.0031082929255253017, "grad_norm": 0.47537675499916077, "learning_rate": 4.9846657549007424e-06, "loss": 0.8713, "step": 75 }, { "epoch": 0.0031497368311989723, "grad_norm": 0.48445916175842285, "learning_rate": 4.984458535372374e-06, "loss": 0.9302, "step": 76 }, { "epoch": 0.003191180736872643, "grad_norm": 0.5203693509101868, "learning_rate": 4.984251315844005e-06, "loss": 0.894, "step": 77 }, { "epoch": 0.0032326246425463134, "grad_norm": 0.48706889152526855, "learning_rate": 4.9840440963156374e-06, "loss": 0.8809, "step": 78 }, { "epoch": 0.0032740685482199844, "grad_norm": 0.4953436553478241, "learning_rate": 4.983836876787269e-06, "loss": 0.8833, "step": 79 }, { "epoch": 0.003315512453893655, "grad_norm": 0.5255720615386963, "learning_rate": 4.9836296572589e-06, "loss": 0.9133, "step": 80 }, { "epoch": 0.0033569563595673255, "grad_norm": 0.5064576864242554, "learning_rate": 4.9834224377305324e-06, "loss": 0.8767, "step": 81 }, { "epoch": 0.0033984002652409965, "grad_norm": 0.4973648190498352, "learning_rate": 4.983215218202164e-06, "loss": 0.8743, "step": 82 }, { "epoch": 0.003439844170914667, "grad_norm": 0.5787176489830017, "learning_rate": 4.983007998673795e-06, "loss": 0.9216, "step": 83 }, { "epoch": 0.0034812880765883376, "grad_norm": 0.5432944893836975, "learning_rate": 4.982800779145427e-06, "loss": 0.8425, "step": 84 }, { "epoch": 0.0035227319822620086, "grad_norm": 0.5159140825271606, "learning_rate": 4.982593559617059e-06, "loss": 0.8672, "step": 85 }, { "epoch": 0.003564175887935679, "grad_norm": 0.4964894950389862, "learning_rate": 4.98238634008869e-06, "loss": 
0.8904, "step": 86 }, { "epoch": 0.0036056197936093497, "grad_norm": 0.4951587915420532, "learning_rate": 4.9821791205603224e-06, "loss": 0.8574, "step": 87 }, { "epoch": 0.0036470636992830202, "grad_norm": 0.46900156140327454, "learning_rate": 4.981971901031953e-06, "loss": 0.8496, "step": 88 }, { "epoch": 0.0036885076049566912, "grad_norm": 0.5107403993606567, "learning_rate": 4.981764681503585e-06, "loss": 0.8232, "step": 89 }, { "epoch": 0.003729951510630362, "grad_norm": 0.4707816541194916, "learning_rate": 4.9815574619752174e-06, "loss": 0.8472, "step": 90 }, { "epoch": 0.0037713954163040323, "grad_norm": 0.5420551896095276, "learning_rate": 4.981350242446849e-06, "loss": 0.8201, "step": 91 }, { "epoch": 0.0038128393219777033, "grad_norm": 0.4953294098377228, "learning_rate": 4.98114302291848e-06, "loss": 0.8284, "step": 92 }, { "epoch": 0.003854283227651374, "grad_norm": 0.5386088490486145, "learning_rate": 4.980935803390112e-06, "loss": 0.9541, "step": 93 }, { "epoch": 0.0038957271333250445, "grad_norm": 0.5048432350158691, "learning_rate": 4.980728583861744e-06, "loss": 0.8738, "step": 94 }, { "epoch": 0.0039371710389987154, "grad_norm": 0.5135311484336853, "learning_rate": 4.980521364333375e-06, "loss": 0.8145, "step": 95 }, { "epoch": 0.0039786149446723856, "grad_norm": 0.5058515667915344, "learning_rate": 4.980314144805007e-06, "loss": 0.8142, "step": 96 }, { "epoch": 0.0040200588503460566, "grad_norm": 0.47229743003845215, "learning_rate": 4.980106925276639e-06, "loss": 0.8455, "step": 97 }, { "epoch": 0.0040615027560197275, "grad_norm": 0.485802561044693, "learning_rate": 4.97989970574827e-06, "loss": 0.8601, "step": 98 }, { "epoch": 0.004102946661693398, "grad_norm": 0.4353331923484802, "learning_rate": 4.979692486219902e-06, "loss": 0.8562, "step": 99 }, { "epoch": 0.004144390567367069, "grad_norm": 0.4869312644004822, "learning_rate": 4.979485266691533e-06, "loss": 0.8806, "step": 100 }, { "epoch": 0.00418583447304074, "grad_norm": 
0.5096659064292908, "learning_rate": 4.979278047163165e-06, "loss": 0.8943, "step": 101 }, { "epoch": 0.00422727837871441, "grad_norm": 0.5356626510620117, "learning_rate": 4.979070827634797e-06, "loss": 0.8645, "step": 102 }, { "epoch": 0.004268722284388081, "grad_norm": 0.5393323302268982, "learning_rate": 4.978863608106429e-06, "loss": 0.8652, "step": 103 }, { "epoch": 0.004310166190061752, "grad_norm": 0.4678276777267456, "learning_rate": 4.978656388578059e-06, "loss": 0.8943, "step": 104 }, { "epoch": 0.004351610095735422, "grad_norm": 0.5040932893753052, "learning_rate": 4.978449169049692e-06, "loss": 0.8877, "step": 105 }, { "epoch": 0.004393054001409093, "grad_norm": 0.4834267199039459, "learning_rate": 4.978241949521324e-06, "loss": 0.8384, "step": 106 }, { "epoch": 0.004434497907082764, "grad_norm": 0.555371105670929, "learning_rate": 4.978034729992955e-06, "loss": 0.9224, "step": 107 }, { "epoch": 0.004475941812756434, "grad_norm": 0.5358298420906067, "learning_rate": 4.977827510464587e-06, "loss": 0.874, "step": 108 }, { "epoch": 0.004517385718430105, "grad_norm": 0.49929314851760864, "learning_rate": 4.977620290936218e-06, "loss": 0.8464, "step": 109 }, { "epoch": 0.004558829624103776, "grad_norm": 0.5089736580848694, "learning_rate": 4.97741307140785e-06, "loss": 0.866, "step": 110 }, { "epoch": 0.004600273529777446, "grad_norm": 0.506629228591919, "learning_rate": 4.977205851879482e-06, "loss": 0.8469, "step": 111 }, { "epoch": 0.004641717435451117, "grad_norm": 0.4904714524745941, "learning_rate": 4.976998632351113e-06, "loss": 0.8372, "step": 112 }, { "epoch": 0.004683161341124787, "grad_norm": 0.4875148832798004, "learning_rate": 4.976791412822744e-06, "loss": 0.915, "step": 113 }, { "epoch": 0.004724605246798458, "grad_norm": 0.5229685306549072, "learning_rate": 4.976584193294377e-06, "loss": 0.8477, "step": 114 }, { "epoch": 0.004766049152472129, "grad_norm": 0.5013989210128784, "learning_rate": 4.976376973766008e-06, "loss": 0.8608, "step": 115 
}, { "epoch": 0.004807493058145799, "grad_norm": 0.5142388939857483, "learning_rate": 4.976169754237639e-06, "loss": 0.8477, "step": 116 }, { "epoch": 0.00484893696381947, "grad_norm": 0.5292401909828186, "learning_rate": 4.975962534709272e-06, "loss": 0.8997, "step": 117 }, { "epoch": 0.004890380869493141, "grad_norm": 0.5275249481201172, "learning_rate": 4.975755315180903e-06, "loss": 0.8364, "step": 118 }, { "epoch": 0.004931824775166811, "grad_norm": 0.5310258269309998, "learning_rate": 4.975548095652534e-06, "loss": 0.8779, "step": 119 }, { "epoch": 0.004973268680840482, "grad_norm": 0.4797632098197937, "learning_rate": 4.975340876124166e-06, "loss": 0.7776, "step": 120 }, { "epoch": 0.005014712586514153, "grad_norm": 0.5749223232269287, "learning_rate": 4.975133656595798e-06, "loss": 0.8945, "step": 121 }, { "epoch": 0.0050561564921878235, "grad_norm": 0.4948255717754364, "learning_rate": 4.974926437067429e-06, "loss": 0.8572, "step": 122 }, { "epoch": 0.0050976003978614945, "grad_norm": 0.4390929341316223, "learning_rate": 4.974719217539062e-06, "loss": 0.8337, "step": 123 }, { "epoch": 0.0051390443035351655, "grad_norm": 0.5416498184204102, "learning_rate": 4.974511998010693e-06, "loss": 0.9121, "step": 124 }, { "epoch": 0.005180488209208836, "grad_norm": 0.5238378643989563, "learning_rate": 4.974304778482324e-06, "loss": 0.8374, "step": 125 }, { "epoch": 0.005221932114882507, "grad_norm": 0.4952799677848816, "learning_rate": 4.974097558953957e-06, "loss": 0.8687, "step": 126 }, { "epoch": 0.005263376020556178, "grad_norm": 0.4925903379917145, "learning_rate": 4.973890339425588e-06, "loss": 0.8286, "step": 127 }, { "epoch": 0.005304819926229848, "grad_norm": 0.4815710783004761, "learning_rate": 4.973683119897219e-06, "loss": 0.8416, "step": 128 }, { "epoch": 0.005346263831903519, "grad_norm": 0.5659744739532471, "learning_rate": 4.973475900368851e-06, "loss": 0.8613, "step": 129 }, { "epoch": 0.00538770773757719, "grad_norm": 0.4779456853866577, 
"learning_rate": 4.973268680840483e-06, "loss": 0.8179, "step": 130 }, { "epoch": 0.00542915164325086, "grad_norm": 0.44191497564315796, "learning_rate": 4.9730614613121144e-06, "loss": 0.8591, "step": 131 }, { "epoch": 0.005470595548924531, "grad_norm": 0.4689691960811615, "learning_rate": 4.972854241783746e-06, "loss": 0.8972, "step": 132 }, { "epoch": 0.005512039454598201, "grad_norm": 0.4754927456378937, "learning_rate": 4.972647022255378e-06, "loss": 0.8386, "step": 133 }, { "epoch": 0.005553483360271872, "grad_norm": 0.5093626976013184, "learning_rate": 4.9724398027270094e-06, "loss": 0.9167, "step": 134 }, { "epoch": 0.005594927265945543, "grad_norm": 0.5663705468177795, "learning_rate": 4.972232583198641e-06, "loss": 0.9009, "step": 135 }, { "epoch": 0.005636371171619213, "grad_norm": 0.5459282398223877, "learning_rate": 4.972025363670272e-06, "loss": 0.9341, "step": 136 }, { "epoch": 0.005677815077292884, "grad_norm": 0.5546956658363342, "learning_rate": 4.9718181441419044e-06, "loss": 0.9043, "step": 137 }, { "epoch": 0.005719258982966555, "grad_norm": 0.5535392761230469, "learning_rate": 4.971610924613536e-06, "loss": 0.853, "step": 138 }, { "epoch": 0.005760702888640225, "grad_norm": 0.5206494927406311, "learning_rate": 4.971403705085168e-06, "loss": 0.9067, "step": 139 }, { "epoch": 0.005802146794313896, "grad_norm": 0.5393713712692261, "learning_rate": 4.971196485556799e-06, "loss": 0.8645, "step": 140 }, { "epoch": 0.005843590699987567, "grad_norm": 0.5049880146980286, "learning_rate": 4.970989266028431e-06, "loss": 0.8533, "step": 141 }, { "epoch": 0.005885034605661237, "grad_norm": 0.5119956731796265, "learning_rate": 4.970782046500063e-06, "loss": 0.7871, "step": 142 }, { "epoch": 0.005926478511334908, "grad_norm": 0.4828684329986572, "learning_rate": 4.9705748269716944e-06, "loss": 0.864, "step": 143 }, { "epoch": 0.005967922417008579, "grad_norm": 0.4998629689216614, "learning_rate": 4.970367607443326e-06, "loss": 0.9089, "step": 144 }, { 
"epoch": 0.006009366322682249, "grad_norm": 0.5004873871803284, "learning_rate": 4.970160387914957e-06, "loss": 0.8389, "step": 145 }, { "epoch": 0.00605081022835592, "grad_norm": 0.525513768196106, "learning_rate": 4.9699531683865894e-06, "loss": 0.9302, "step": 146 }, { "epoch": 0.006092254134029591, "grad_norm": 0.4276360273361206, "learning_rate": 4.969745948858221e-06, "loss": 0.7893, "step": 147 }, { "epoch": 0.0061336980397032614, "grad_norm": 0.5161129832267761, "learning_rate": 4.969538729329852e-06, "loss": 0.9001, "step": 148 }, { "epoch": 0.006175141945376932, "grad_norm": 0.5208853483200073, "learning_rate": 4.969331509801484e-06, "loss": 0.8379, "step": 149 }, { "epoch": 0.006216585851050603, "grad_norm": 0.4908878207206726, "learning_rate": 4.969124290273116e-06, "loss": 0.8953, "step": 150 }, { "epoch": 0.0062580297567242735, "grad_norm": 0.46791812777519226, "learning_rate": 4.968917070744747e-06, "loss": 0.8303, "step": 151 }, { "epoch": 0.0062994736623979445, "grad_norm": 0.4564230740070343, "learning_rate": 4.968709851216379e-06, "loss": 0.9202, "step": 152 }, { "epoch": 0.006340917568071615, "grad_norm": 0.4829869270324707, "learning_rate": 4.968502631688011e-06, "loss": 0.8589, "step": 153 }, { "epoch": 0.006382361473745286, "grad_norm": 0.5078172087669373, "learning_rate": 4.968295412159642e-06, "loss": 0.8279, "step": 154 }, { "epoch": 0.006423805379418957, "grad_norm": 0.4795092046260834, "learning_rate": 4.9680881926312745e-06, "loss": 0.8956, "step": 155 }, { "epoch": 0.006465249285092627, "grad_norm": 0.4985639452934265, "learning_rate": 4.967880973102905e-06, "loss": 0.8203, "step": 156 }, { "epoch": 0.006506693190766298, "grad_norm": 0.5322465300559998, "learning_rate": 4.967673753574537e-06, "loss": 0.9133, "step": 157 }, { "epoch": 0.006548137096439969, "grad_norm": 0.5147272348403931, "learning_rate": 4.9674665340461695e-06, "loss": 0.894, "step": 158 }, { "epoch": 0.006589581002113639, "grad_norm": 0.5311028361320496, 
"learning_rate": 4.967259314517801e-06, "loss": 0.9231, "step": 159 }, { "epoch": 0.00663102490778731, "grad_norm": 0.4999554753303528, "learning_rate": 4.967052094989432e-06, "loss": 0.8215, "step": 160 }, { "epoch": 0.006672468813460981, "grad_norm": 0.5298110246658325, "learning_rate": 4.966844875461064e-06, "loss": 0.8933, "step": 161 }, { "epoch": 0.006713912719134651, "grad_norm": 0.46060001850128174, "learning_rate": 4.966637655932696e-06, "loss": 0.7771, "step": 162 }, { "epoch": 0.006755356624808322, "grad_norm": 0.4887921214103699, "learning_rate": 4.966430436404327e-06, "loss": 0.8943, "step": 163 }, { "epoch": 0.006796800530481993, "grad_norm": 0.5011575222015381, "learning_rate": 4.966223216875959e-06, "loss": 0.9082, "step": 164 }, { "epoch": 0.006838244436155663, "grad_norm": 0.5304556488990784, "learning_rate": 4.96601599734759e-06, "loss": 0.8735, "step": 165 }, { "epoch": 0.006879688341829334, "grad_norm": 0.522753119468689, "learning_rate": 4.965808777819222e-06, "loss": 0.8738, "step": 166 }, { "epoch": 0.006921132247503005, "grad_norm": 0.4741937220096588, "learning_rate": 4.965601558290854e-06, "loss": 0.855, "step": 167 }, { "epoch": 0.006962576153176675, "grad_norm": 0.5090004205703735, "learning_rate": 4.965394338762485e-06, "loss": 0.8445, "step": 168 }, { "epoch": 0.007004020058850346, "grad_norm": 0.6183812618255615, "learning_rate": 4.965187119234117e-06, "loss": 0.9092, "step": 169 }, { "epoch": 0.007045463964524017, "grad_norm": 0.5028287768363953, "learning_rate": 4.964979899705749e-06, "loss": 0.8486, "step": 170 }, { "epoch": 0.007086907870197687, "grad_norm": 0.513696014881134, "learning_rate": 4.964772680177381e-06, "loss": 0.9048, "step": 171 }, { "epoch": 0.007128351775871358, "grad_norm": 0.44042113423347473, "learning_rate": 4.964565460649011e-06, "loss": 0.748, "step": 172 }, { "epoch": 0.007169795681545028, "grad_norm": 0.5216784477233887, "learning_rate": 4.964358241120644e-06, "loss": 0.8972, "step": 173 }, { "epoch": 
0.007211239587218699, "grad_norm": 0.5251628160476685, "learning_rate": 4.964151021592275e-06, "loss": 0.8831, "step": 174 }, { "epoch": 0.00725268349289237, "grad_norm": 0.5277590751647949, "learning_rate": 4.963943802063907e-06, "loss": 0.9287, "step": 175 }, { "epoch": 0.0072941273985660405, "grad_norm": 0.4672757387161255, "learning_rate": 4.963736582535539e-06, "loss": 0.8489, "step": 176 }, { "epoch": 0.0073355713042397115, "grad_norm": 0.5269532203674316, "learning_rate": 4.96352936300717e-06, "loss": 0.8674, "step": 177 }, { "epoch": 0.0073770152099133825, "grad_norm": 0.5004445910453796, "learning_rate": 4.963322143478802e-06, "loss": 0.8079, "step": 178 }, { "epoch": 0.007418459115587053, "grad_norm": 0.5009570717811584, "learning_rate": 4.963114923950434e-06, "loss": 0.8538, "step": 179 }, { "epoch": 0.007459903021260724, "grad_norm": 0.473165899515152, "learning_rate": 4.962907704422065e-06, "loss": 0.801, "step": 180 }, { "epoch": 0.007501346926934395, "grad_norm": 0.4999670088291168, "learning_rate": 4.962700484893696e-06, "loss": 0.8811, "step": 181 }, { "epoch": 0.007542790832608065, "grad_norm": 0.4970436692237854, "learning_rate": 4.962493265365329e-06, "loss": 0.9043, "step": 182 }, { "epoch": 0.007584234738281736, "grad_norm": 0.5126577615737915, "learning_rate": 4.96228604583696e-06, "loss": 0.9436, "step": 183 }, { "epoch": 0.007625678643955407, "grad_norm": 0.48738300800323486, "learning_rate": 4.962078826308591e-06, "loss": 0.8391, "step": 184 }, { "epoch": 0.007667122549629077, "grad_norm": 0.5486866235733032, "learning_rate": 4.961871606780224e-06, "loss": 0.8625, "step": 185 }, { "epoch": 0.007708566455302748, "grad_norm": 0.507525622844696, "learning_rate": 4.961664387251855e-06, "loss": 0.856, "step": 186 }, { "epoch": 0.007750010360976419, "grad_norm": 0.5589158535003662, "learning_rate": 4.961457167723486e-06, "loss": 0.8716, "step": 187 }, { "epoch": 0.007791454266650089, "grad_norm": 0.4950938820838928, "learning_rate": 
4.961249948195118e-06, "loss": 0.877, "step": 188 }, { "epoch": 0.007832898172323759, "grad_norm": 0.5249196887016296, "learning_rate": 4.96104272866675e-06, "loss": 0.9199, "step": 189 }, { "epoch": 0.007874342077997431, "grad_norm": 0.46564406156539917, "learning_rate": 4.9608355091383814e-06, "loss": 0.8347, "step": 190 }, { "epoch": 0.007915785983671101, "grad_norm": 0.5101157426834106, "learning_rate": 4.960628289610014e-06, "loss": 0.9353, "step": 191 }, { "epoch": 0.007957229889344771, "grad_norm": 0.4670466184616089, "learning_rate": 4.960421070081645e-06, "loss": 0.8596, "step": 192 }, { "epoch": 0.007998673795018443, "grad_norm": 0.45002996921539307, "learning_rate": 4.9602138505532764e-06, "loss": 0.8745, "step": 193 }, { "epoch": 0.008040117700692113, "grad_norm": 0.48973751068115234, "learning_rate": 4.960006631024909e-06, "loss": 0.8765, "step": 194 }, { "epoch": 0.008081561606365783, "grad_norm": 0.49519243836402893, "learning_rate": 4.95979941149654e-06, "loss": 0.8516, "step": 195 }, { "epoch": 0.008123005512039455, "grad_norm": 0.49807009100914, "learning_rate": 4.9595921919681714e-06, "loss": 0.8057, "step": 196 }, { "epoch": 0.008164449417713125, "grad_norm": 0.5044920444488525, "learning_rate": 4.959384972439803e-06, "loss": 0.8525, "step": 197 }, { "epoch": 0.008205893323386795, "grad_norm": 0.5080502033233643, "learning_rate": 4.959177752911435e-06, "loss": 0.8638, "step": 198 }, { "epoch": 0.008247337229060467, "grad_norm": 0.5046712160110474, "learning_rate": 4.9589705333830664e-06, "loss": 0.9036, "step": 199 }, { "epoch": 0.008288781134734137, "grad_norm": 0.4891860783100128, "learning_rate": 4.958763313854698e-06, "loss": 0.894, "step": 200 }, { "epoch": 0.008330225040407807, "grad_norm": 0.5205092430114746, "learning_rate": 4.958556094326329e-06, "loss": 0.812, "step": 201 }, { "epoch": 0.00837166894608148, "grad_norm": 0.4664292633533478, "learning_rate": 4.9583488747979614e-06, "loss": 0.8679, "step": 202 }, { "epoch": 
0.00841311285175515, "grad_norm": 0.48823976516723633, "learning_rate": 4.958141655269593e-06, "loss": 0.8948, "step": 203 }, { "epoch": 0.00845455675742882, "grad_norm": 0.4761791527271271, "learning_rate": 4.957934435741224e-06, "loss": 0.8499, "step": 204 }, { "epoch": 0.008496000663102491, "grad_norm": 0.4740946888923645, "learning_rate": 4.9577272162128564e-06, "loss": 0.8782, "step": 205 }, { "epoch": 0.008537444568776162, "grad_norm": 0.49799829721450806, "learning_rate": 4.957519996684488e-06, "loss": 0.8657, "step": 206 }, { "epoch": 0.008578888474449832, "grad_norm": 0.44548892974853516, "learning_rate": 4.95731277715612e-06, "loss": 0.8411, "step": 207 }, { "epoch": 0.008620332380123504, "grad_norm": 0.48993632197380066, "learning_rate": 4.9571055576277514e-06, "loss": 0.8479, "step": 208 }, { "epoch": 0.008661776285797174, "grad_norm": 0.5193896889686584, "learning_rate": 4.956898338099383e-06, "loss": 0.8127, "step": 209 }, { "epoch": 0.008703220191470844, "grad_norm": 0.4925740957260132, "learning_rate": 4.956691118571015e-06, "loss": 0.8674, "step": 210 }, { "epoch": 0.008744664097144516, "grad_norm": 0.4636530578136444, "learning_rate": 4.9564838990426465e-06, "loss": 0.8237, "step": 211 }, { "epoch": 0.008786108002818186, "grad_norm": 0.4979214668273926, "learning_rate": 4.956276679514278e-06, "loss": 0.7773, "step": 212 }, { "epoch": 0.008827551908491856, "grad_norm": 0.5074848532676697, "learning_rate": 4.956069459985909e-06, "loss": 0.7693, "step": 213 }, { "epoch": 0.008868995814165528, "grad_norm": 0.5405175685882568, "learning_rate": 4.9558622404575415e-06, "loss": 0.8757, "step": 214 }, { "epoch": 0.008910439719839198, "grad_norm": 0.4779643416404724, "learning_rate": 4.955655020929173e-06, "loss": 0.8401, "step": 215 }, { "epoch": 0.008951883625512868, "grad_norm": 0.474722683429718, "learning_rate": 4.955447801400804e-06, "loss": 0.7988, "step": 216 }, { "epoch": 0.00899332753118654, "grad_norm": 0.4955325722694397, "learning_rate": 
4.955240581872436e-06, "loss": 0.8997, "step": 217 }, { "epoch": 0.00903477143686021, "grad_norm": 0.5195847749710083, "learning_rate": 4.955033362344068e-06, "loss": 0.8855, "step": 218 }, { "epoch": 0.00907621534253388, "grad_norm": 0.4938107132911682, "learning_rate": 4.954826142815699e-06, "loss": 0.8835, "step": 219 }, { "epoch": 0.009117659248207552, "grad_norm": 0.5264998078346252, "learning_rate": 4.954618923287331e-06, "loss": 0.9331, "step": 220 }, { "epoch": 0.009159103153881222, "grad_norm": 0.5167268514633179, "learning_rate": 4.954411703758963e-06, "loss": 0.9111, "step": 221 }, { "epoch": 0.009200547059554892, "grad_norm": 0.4937087893486023, "learning_rate": 4.954204484230594e-06, "loss": 0.7659, "step": 222 }, { "epoch": 0.009241990965228562, "grad_norm": 0.5105080604553223, "learning_rate": 4.9539972647022265e-06, "loss": 0.8772, "step": 223 }, { "epoch": 0.009283434870902234, "grad_norm": 0.6784617900848389, "learning_rate": 4.953790045173857e-06, "loss": 0.9214, "step": 224 }, { "epoch": 0.009324878776575904, "grad_norm": 0.4829583168029785, "learning_rate": 4.953582825645489e-06, "loss": 0.9202, "step": 225 }, { "epoch": 0.009366322682249574, "grad_norm": 0.4833115339279175, "learning_rate": 4.953375606117121e-06, "loss": 0.8379, "step": 226 }, { "epoch": 0.009407766587923246, "grad_norm": 0.5154611468315125, "learning_rate": 4.953168386588753e-06, "loss": 0.9465, "step": 227 }, { "epoch": 0.009449210493596916, "grad_norm": 0.5287801623344421, "learning_rate": 4.952961167060384e-06, "loss": 0.8738, "step": 228 }, { "epoch": 0.009490654399270586, "grad_norm": 0.4534788131713867, "learning_rate": 4.952753947532016e-06, "loss": 0.8386, "step": 229 }, { "epoch": 0.009532098304944258, "grad_norm": 0.4852525293827057, "learning_rate": 4.952546728003648e-06, "loss": 0.8203, "step": 230 }, { "epoch": 0.009573542210617928, "grad_norm": 0.4988188147544861, "learning_rate": 4.952339508475279e-06, "loss": 0.8201, "step": 231 }, { "epoch": 
0.009614986116291599, "grad_norm": 0.5310726165771484, "learning_rate": 4.952132288946911e-06, "loss": 0.8464, "step": 232 }, { "epoch": 0.00965643002196527, "grad_norm": 0.48887568712234497, "learning_rate": 4.951925069418542e-06, "loss": 0.8406, "step": 233 }, { "epoch": 0.00969787392763894, "grad_norm": 0.483868807554245, "learning_rate": 4.951717849890174e-06, "loss": 0.8789, "step": 234 }, { "epoch": 0.00973931783331261, "grad_norm": 0.4487422704696655, "learning_rate": 4.951510630361806e-06, "loss": 0.8269, "step": 235 }, { "epoch": 0.009780761738986283, "grad_norm": 0.49090155959129333, "learning_rate": 4.951303410833437e-06, "loss": 0.8433, "step": 236 }, { "epoch": 0.009822205644659953, "grad_norm": 0.4861697852611542, "learning_rate": 4.951096191305069e-06, "loss": 0.8247, "step": 237 }, { "epoch": 0.009863649550333623, "grad_norm": 0.4558032751083374, "learning_rate": 4.950888971776701e-06, "loss": 0.8132, "step": 238 }, { "epoch": 0.009905093456007295, "grad_norm": 0.44360578060150146, "learning_rate": 4.950681752248333e-06, "loss": 0.8379, "step": 239 }, { "epoch": 0.009946537361680965, "grad_norm": 0.5036263465881348, "learning_rate": 4.950474532719963e-06, "loss": 0.8787, "step": 240 }, { "epoch": 0.009987981267354635, "grad_norm": 0.5028334259986877, "learning_rate": 4.950267313191596e-06, "loss": 0.8867, "step": 241 }, { "epoch": 0.010029425173028307, "grad_norm": 0.4768572449684143, "learning_rate": 4.950060093663227e-06, "loss": 0.9009, "step": 242 }, { "epoch": 0.010070869078701977, "grad_norm": 0.4231841266155243, "learning_rate": 4.949852874134859e-06, "loss": 0.8049, "step": 243 }, { "epoch": 0.010112312984375647, "grad_norm": 0.4906524419784546, "learning_rate": 4.949645654606491e-06, "loss": 0.8005, "step": 244 }, { "epoch": 0.010153756890049319, "grad_norm": 0.5032555460929871, "learning_rate": 4.949438435078122e-06, "loss": 0.8647, "step": 245 }, { "epoch": 0.010195200795722989, "grad_norm": 0.49362102150917053, "learning_rate": 
4.949231215549754e-06, "loss": 0.8459, "step": 246 }, { "epoch": 0.010236644701396659, "grad_norm": 0.48964157700538635, "learning_rate": 4.949023996021386e-06, "loss": 0.8621, "step": 247 }, { "epoch": 0.010278088607070331, "grad_norm": 0.5083989500999451, "learning_rate": 4.948816776493017e-06, "loss": 0.8601, "step": 248 }, { "epoch": 0.010319532512744001, "grad_norm": 0.5186017155647278, "learning_rate": 4.9486095569646484e-06, "loss": 0.8828, "step": 249 }, { "epoch": 0.010360976418417671, "grad_norm": 0.4820512533187866, "learning_rate": 4.948402337436281e-06, "loss": 0.8096, "step": 250 }, { "epoch": 0.010402420324091343, "grad_norm": 0.5429985523223877, "learning_rate": 4.948195117907912e-06, "loss": 0.9194, "step": 251 }, { "epoch": 0.010443864229765013, "grad_norm": 0.46637988090515137, "learning_rate": 4.9479878983795434e-06, "loss": 0.7842, "step": 252 }, { "epoch": 0.010485308135438683, "grad_norm": 0.5143275260925293, "learning_rate": 4.947780678851175e-06, "loss": 0.8152, "step": 253 }, { "epoch": 0.010526752041112355, "grad_norm": 0.4902860224246979, "learning_rate": 4.947573459322807e-06, "loss": 0.7815, "step": 254 }, { "epoch": 0.010568195946786025, "grad_norm": 0.49528950452804565, "learning_rate": 4.947366239794439e-06, "loss": 0.8345, "step": 255 }, { "epoch": 0.010609639852459695, "grad_norm": 0.4657568633556366, "learning_rate": 4.94715902026607e-06, "loss": 0.9146, "step": 256 }, { "epoch": 0.010651083758133367, "grad_norm": 0.49593663215637207, "learning_rate": 4.946951800737702e-06, "loss": 0.8413, "step": 257 }, { "epoch": 0.010692527663807037, "grad_norm": 0.47337037324905396, "learning_rate": 4.9467445812093334e-06, "loss": 0.7865, "step": 258 }, { "epoch": 0.010733971569480708, "grad_norm": 0.5408326387405396, "learning_rate": 4.946537361680966e-06, "loss": 0.8765, "step": 259 }, { "epoch": 0.01077541547515438, "grad_norm": 0.5662903785705566, "learning_rate": 4.946330142152597e-06, "loss": 0.9335, "step": 260 }, { "epoch": 
0.01081685938082805, "grad_norm": 0.4936153292655945, "learning_rate": 4.9461229226242284e-06, "loss": 0.8359, "step": 261 }, { "epoch": 0.01085830328650172, "grad_norm": 0.49883532524108887, "learning_rate": 4.94591570309586e-06, "loss": 0.8175, "step": 262 }, { "epoch": 0.01089974719217539, "grad_norm": 0.49196162819862366, "learning_rate": 4.945708483567492e-06, "loss": 0.8367, "step": 263 }, { "epoch": 0.010941191097849062, "grad_norm": 0.49908918142318726, "learning_rate": 4.9455012640391234e-06, "loss": 0.8755, "step": 264 }, { "epoch": 0.010982635003522732, "grad_norm": 0.47234293818473816, "learning_rate": 4.945294044510755e-06, "loss": 0.8337, "step": 265 }, { "epoch": 0.011024078909196402, "grad_norm": 0.486908882856369, "learning_rate": 4.945086824982387e-06, "loss": 0.8013, "step": 266 }, { "epoch": 0.011065522814870074, "grad_norm": 0.5201804041862488, "learning_rate": 4.9448796054540184e-06, "loss": 0.7981, "step": 267 }, { "epoch": 0.011106966720543744, "grad_norm": 0.5242292284965515, "learning_rate": 4.94467238592565e-06, "loss": 0.8794, "step": 268 }, { "epoch": 0.011148410626217414, "grad_norm": 0.4720354676246643, "learning_rate": 4.944465166397281e-06, "loss": 0.8882, "step": 269 }, { "epoch": 0.011189854531891086, "grad_norm": 0.512622594833374, "learning_rate": 4.9442579468689135e-06, "loss": 0.8042, "step": 270 }, { "epoch": 0.011231298437564756, "grad_norm": 0.5217183828353882, "learning_rate": 4.944050727340545e-06, "loss": 0.854, "step": 271 }, { "epoch": 0.011272742343238426, "grad_norm": 0.4697608947753906, "learning_rate": 4.943843507812176e-06, "loss": 0.8735, "step": 272 }, { "epoch": 0.011314186248912098, "grad_norm": 0.4464910626411438, "learning_rate": 4.9436362882838085e-06, "loss": 0.8435, "step": 273 }, { "epoch": 0.011355630154585768, "grad_norm": 0.4901544749736786, "learning_rate": 4.94342906875544e-06, "loss": 0.8521, "step": 274 }, { "epoch": 0.011397074060259438, "grad_norm": 0.4696155786514282, "learning_rate": 
4.943221849227072e-06, "loss": 0.8298, "step": 275 }, { "epoch": 0.01143851796593311, "grad_norm": 0.45748764276504517, "learning_rate": 4.9430146296987035e-06, "loss": 0.7368, "step": 276 }, { "epoch": 0.01147996187160678, "grad_norm": 0.48581135272979736, "learning_rate": 4.942807410170335e-06, "loss": 0.8708, "step": 277 }, { "epoch": 0.01152140577728045, "grad_norm": 0.473838746547699, "learning_rate": 4.942600190641966e-06, "loss": 0.8337, "step": 278 }, { "epoch": 0.011562849682954122, "grad_norm": 0.48033320903778076, "learning_rate": 4.9423929711135985e-06, "loss": 0.9041, "step": 279 }, { "epoch": 0.011604293588627792, "grad_norm": 0.4913444519042969, "learning_rate": 4.94218575158523e-06, "loss": 0.8762, "step": 280 }, { "epoch": 0.011645737494301462, "grad_norm": 0.584656298160553, "learning_rate": 4.941978532056861e-06, "loss": 0.9578, "step": 281 }, { "epoch": 0.011687181399975134, "grad_norm": 0.5384389162063599, "learning_rate": 4.9417713125284935e-06, "loss": 0.853, "step": 282 }, { "epoch": 0.011728625305648804, "grad_norm": 0.4899871349334717, "learning_rate": 4.941564093000125e-06, "loss": 0.8254, "step": 283 }, { "epoch": 0.011770069211322474, "grad_norm": 0.551979124546051, "learning_rate": 4.941356873471756e-06, "loss": 0.8796, "step": 284 }, { "epoch": 0.011811513116996146, "grad_norm": 0.5795146822929382, "learning_rate": 4.941149653943388e-06, "loss": 0.9009, "step": 285 }, { "epoch": 0.011852957022669816, "grad_norm": 0.47303688526153564, "learning_rate": 4.94094243441502e-06, "loss": 0.7842, "step": 286 }, { "epoch": 0.011894400928343487, "grad_norm": 0.4773743450641632, "learning_rate": 4.940735214886651e-06, "loss": 0.7971, "step": 287 }, { "epoch": 0.011935844834017158, "grad_norm": 0.5141909718513489, "learning_rate": 4.940527995358283e-06, "loss": 0.8618, "step": 288 }, { "epoch": 0.011977288739690829, "grad_norm": 0.4858902096748352, "learning_rate": 4.940320775829915e-06, "loss": 0.8523, "step": 289 }, { "epoch": 
0.012018732645364499, "grad_norm": 0.4706079661846161, "learning_rate": 4.940113556301546e-06, "loss": 0.8262, "step": 290 }, { "epoch": 0.01206017655103817, "grad_norm": 0.5480523705482483, "learning_rate": 4.9399063367731785e-06, "loss": 0.8411, "step": 291 }, { "epoch": 0.01210162045671184, "grad_norm": 0.48437756299972534, "learning_rate": 4.939699117244809e-06, "loss": 0.8318, "step": 292 }, { "epoch": 0.01214306436238551, "grad_norm": 0.47752273082733154, "learning_rate": 4.939491897716441e-06, "loss": 0.8462, "step": 293 }, { "epoch": 0.012184508268059183, "grad_norm": 0.4477703869342804, "learning_rate": 4.939284678188073e-06, "loss": 0.7792, "step": 294 }, { "epoch": 0.012225952173732853, "grad_norm": 0.48229581117630005, "learning_rate": 4.939077458659705e-06, "loss": 0.8062, "step": 295 }, { "epoch": 0.012267396079406523, "grad_norm": 0.4598805606365204, "learning_rate": 4.938870239131336e-06, "loss": 0.8247, "step": 296 }, { "epoch": 0.012308839985080195, "grad_norm": 0.48081985116004944, "learning_rate": 4.938663019602968e-06, "loss": 0.8345, "step": 297 }, { "epoch": 0.012350283890753865, "grad_norm": 0.5329732298851013, "learning_rate": 4.9384558000746e-06, "loss": 0.8604, "step": 298 }, { "epoch": 0.012391727796427535, "grad_norm": 0.4876191020011902, "learning_rate": 4.938248580546231e-06, "loss": 0.8289, "step": 299 }, { "epoch": 0.012433171702101207, "grad_norm": 0.46362125873565674, "learning_rate": 4.938041361017863e-06, "loss": 0.8381, "step": 300 }, { "epoch": 0.012474615607774877, "grad_norm": 0.5167970061302185, "learning_rate": 4.937834141489494e-06, "loss": 0.8306, "step": 301 }, { "epoch": 0.012516059513448547, "grad_norm": 0.5373725891113281, "learning_rate": 4.937626921961126e-06, "loss": 0.8604, "step": 302 }, { "epoch": 0.012557503419122219, "grad_norm": 0.5176645517349243, "learning_rate": 4.937419702432758e-06, "loss": 0.8745, "step": 303 }, { "epoch": 0.012598947324795889, "grad_norm": 0.5150823593139648, "learning_rate": 
4.937212482904389e-06, "loss": 0.8682, "step": 304 }, { "epoch": 0.01264039123046956, "grad_norm": 0.504682719707489, "learning_rate": 4.93700526337602e-06, "loss": 0.8591, "step": 305 }, { "epoch": 0.01268183513614323, "grad_norm": 0.49569401144981384, "learning_rate": 4.936798043847653e-06, "loss": 0.8188, "step": 306 }, { "epoch": 0.012723279041816901, "grad_norm": 0.4833688735961914, "learning_rate": 4.936590824319285e-06, "loss": 0.8728, "step": 307 }, { "epoch": 0.012764722947490571, "grad_norm": 0.4999264180660248, "learning_rate": 4.9363836047909154e-06, "loss": 0.9021, "step": 308 }, { "epoch": 0.012806166853164241, "grad_norm": 0.4826984703540802, "learning_rate": 4.936176385262548e-06, "loss": 0.8523, "step": 309 }, { "epoch": 0.012847610758837913, "grad_norm": 0.5844562649726868, "learning_rate": 4.935969165734179e-06, "loss": 0.8499, "step": 310 }, { "epoch": 0.012889054664511583, "grad_norm": 0.5145385265350342, "learning_rate": 4.935761946205811e-06, "loss": 0.8762, "step": 311 }, { "epoch": 0.012930498570185254, "grad_norm": 0.48962658643722534, "learning_rate": 4.935554726677443e-06, "loss": 0.8506, "step": 312 }, { "epoch": 0.012971942475858925, "grad_norm": 0.49469470977783203, "learning_rate": 4.935347507149074e-06, "loss": 0.8962, "step": 313 }, { "epoch": 0.013013386381532595, "grad_norm": 0.4592374265193939, "learning_rate": 4.9351402876207054e-06, "loss": 0.8123, "step": 314 }, { "epoch": 0.013054830287206266, "grad_norm": 0.47087806463241577, "learning_rate": 4.934933068092338e-06, "loss": 0.8069, "step": 315 }, { "epoch": 0.013096274192879937, "grad_norm": 0.49014976620674133, "learning_rate": 4.934725848563969e-06, "loss": 0.8447, "step": 316 }, { "epoch": 0.013137718098553608, "grad_norm": 0.511128842830658, "learning_rate": 4.9345186290356004e-06, "loss": 0.8772, "step": 317 }, { "epoch": 0.013179162004227278, "grad_norm": 0.5179100632667542, "learning_rate": 4.934311409507233e-06, "loss": 0.8306, "step": 318 }, { "epoch": 
0.01322060590990095, "grad_norm": 0.48943230509757996, "learning_rate": 4.934104189978864e-06, "loss": 0.8708, "step": 319 }, { "epoch": 0.01326204981557462, "grad_norm": 0.4891606569290161, "learning_rate": 4.9338969704504954e-06, "loss": 0.7983, "step": 320 }, { "epoch": 0.01330349372124829, "grad_norm": 0.5088717341423035, "learning_rate": 4.933689750922127e-06, "loss": 0.8728, "step": 321 }, { "epoch": 0.013344937626921962, "grad_norm": 0.48573896288871765, "learning_rate": 4.933482531393759e-06, "loss": 0.8342, "step": 322 }, { "epoch": 0.013386381532595632, "grad_norm": 0.47867557406425476, "learning_rate": 4.9332753118653904e-06, "loss": 0.8232, "step": 323 }, { "epoch": 0.013427825438269302, "grad_norm": 0.4582088589668274, "learning_rate": 4.933068092337022e-06, "loss": 0.8135, "step": 324 }, { "epoch": 0.013469269343942974, "grad_norm": 0.46959298849105835, "learning_rate": 4.932860872808654e-06, "loss": 0.8037, "step": 325 }, { "epoch": 0.013510713249616644, "grad_norm": 0.47610822319984436, "learning_rate": 4.9326536532802854e-06, "loss": 0.8269, "step": 326 }, { "epoch": 0.013552157155290314, "grad_norm": 0.5315168499946594, "learning_rate": 4.932446433751918e-06, "loss": 0.96, "step": 327 }, { "epoch": 0.013593601060963986, "grad_norm": 0.4996245801448822, "learning_rate": 4.932239214223549e-06, "loss": 0.8848, "step": 328 }, { "epoch": 0.013635044966637656, "grad_norm": 0.4850527048110962, "learning_rate": 4.9320319946951805e-06, "loss": 0.8176, "step": 329 }, { "epoch": 0.013676488872311326, "grad_norm": 0.5390617847442627, "learning_rate": 4.931824775166812e-06, "loss": 0.8579, "step": 330 }, { "epoch": 0.013717932777984998, "grad_norm": 0.4859105348587036, "learning_rate": 4.931617555638444e-06, "loss": 0.7898, "step": 331 }, { "epoch": 0.013759376683658668, "grad_norm": 0.4938487708568573, "learning_rate": 4.9314103361100755e-06, "loss": 0.8411, "step": 332 }, { "epoch": 0.013800820589332338, "grad_norm": 0.4921363890171051, "learning_rate": 
4.931203116581707e-06, "loss": 0.8577, "step": 333 }, { "epoch": 0.01384226449500601, "grad_norm": 0.44038423895835876, "learning_rate": 4.930995897053339e-06, "loss": 0.798, "step": 334 }, { "epoch": 0.01388370840067968, "grad_norm": 0.5288088917732239, "learning_rate": 4.9307886775249705e-06, "loss": 0.8899, "step": 335 }, { "epoch": 0.01392515230635335, "grad_norm": 0.4595298171043396, "learning_rate": 4.930581457996602e-06, "loss": 0.8213, "step": 336 }, { "epoch": 0.013966596212027022, "grad_norm": 0.47665566205978394, "learning_rate": 4.930374238468233e-06, "loss": 0.7621, "step": 337 }, { "epoch": 0.014008040117700692, "grad_norm": 0.4706921875476837, "learning_rate": 4.9301670189398655e-06, "loss": 0.7566, "step": 338 }, { "epoch": 0.014049484023374362, "grad_norm": 0.4992421567440033, "learning_rate": 4.929959799411497e-06, "loss": 0.7972, "step": 339 }, { "epoch": 0.014090927929048034, "grad_norm": 0.4747614562511444, "learning_rate": 4.929752579883128e-06, "loss": 0.811, "step": 340 }, { "epoch": 0.014132371834721704, "grad_norm": 0.4470059871673584, "learning_rate": 4.9295453603547605e-06, "loss": 0.7942, "step": 341 }, { "epoch": 0.014173815740395375, "grad_norm": 0.4530155658721924, "learning_rate": 4.929338140826392e-06, "loss": 0.8584, "step": 342 }, { "epoch": 0.014215259646069046, "grad_norm": 0.4985761344432831, "learning_rate": 4.929130921298024e-06, "loss": 0.8533, "step": 343 }, { "epoch": 0.014256703551742717, "grad_norm": 0.4647839367389679, "learning_rate": 4.9289237017696555e-06, "loss": 0.8103, "step": 344 }, { "epoch": 0.014298147457416387, "grad_norm": 0.5143153071403503, "learning_rate": 4.928716482241287e-06, "loss": 0.8511, "step": 345 }, { "epoch": 0.014339591363090057, "grad_norm": 0.45094966888427734, "learning_rate": 4.928509262712918e-06, "loss": 0.843, "step": 346 }, { "epoch": 0.014381035268763729, "grad_norm": 0.49729955196380615, "learning_rate": 4.9283020431845505e-06, "loss": 0.8755, "step": 347 }, { "epoch": 
0.014422479174437399, "grad_norm": 0.6007020473480225, "learning_rate": 4.928094823656182e-06, "loss": 0.8201, "step": 348 }, { "epoch": 0.014463923080111069, "grad_norm": 0.4756455421447754, "learning_rate": 4.927887604127813e-06, "loss": 0.8464, "step": 349 }, { "epoch": 0.01450536698578474, "grad_norm": 0.4875801205635071, "learning_rate": 4.9276803845994455e-06, "loss": 0.8767, "step": 350 }, { "epoch": 0.01454681089145841, "grad_norm": 0.5090059638023376, "learning_rate": 4.927473165071077e-06, "loss": 0.8137, "step": 351 }, { "epoch": 0.014588254797132081, "grad_norm": 0.4570600688457489, "learning_rate": 4.927265945542708e-06, "loss": 0.8076, "step": 352 }, { "epoch": 0.014629698702805753, "grad_norm": 0.5911429524421692, "learning_rate": 4.92705872601434e-06, "loss": 0.9153, "step": 353 }, { "epoch": 0.014671142608479423, "grad_norm": 0.46505096554756165, "learning_rate": 4.926851506485972e-06, "loss": 0.76, "step": 354 }, { "epoch": 0.014712586514153093, "grad_norm": 0.5193902850151062, "learning_rate": 4.926644286957603e-06, "loss": 0.8336, "step": 355 }, { "epoch": 0.014754030419826765, "grad_norm": 0.5347269773483276, "learning_rate": 4.926437067429235e-06, "loss": 0.9011, "step": 356 }, { "epoch": 0.014795474325500435, "grad_norm": 0.5115196108818054, "learning_rate": 4.926229847900866e-06, "loss": 0.9023, "step": 357 }, { "epoch": 0.014836918231174105, "grad_norm": 0.4776272177696228, "learning_rate": 4.926022628372498e-06, "loss": 0.8584, "step": 358 }, { "epoch": 0.014878362136847777, "grad_norm": 0.48909175395965576, "learning_rate": 4.9258154088441305e-06, "loss": 0.8142, "step": 359 }, { "epoch": 0.014919806042521447, "grad_norm": 0.47103431820869446, "learning_rate": 4.925608189315762e-06, "loss": 0.798, "step": 360 }, { "epoch": 0.014961249948195117, "grad_norm": 0.4640336036682129, "learning_rate": 4.925400969787393e-06, "loss": 0.7783, "step": 361 }, { "epoch": 0.01500269385386879, "grad_norm": 0.45157426595687866, "learning_rate": 
4.925193750259025e-06, "loss": 0.8083, "step": 362 }, { "epoch": 0.01504413775954246, "grad_norm": 0.48852115869522095, "learning_rate": 4.924986530730657e-06, "loss": 0.8718, "step": 363 }, { "epoch": 0.01508558166521613, "grad_norm": 0.5745199918746948, "learning_rate": 4.924779311202288e-06, "loss": 0.9255, "step": 364 }, { "epoch": 0.015127025570889801, "grad_norm": 0.47590112686157227, "learning_rate": 4.92457209167392e-06, "loss": 0.7534, "step": 365 }, { "epoch": 0.015168469476563471, "grad_norm": 0.5060416460037231, "learning_rate": 4.924364872145551e-06, "loss": 0.8748, "step": 366 }, { "epoch": 0.015209913382237141, "grad_norm": 0.47386276721954346, "learning_rate": 4.924157652617183e-06, "loss": 0.9031, "step": 367 }, { "epoch": 0.015251357287910813, "grad_norm": 0.5125477910041809, "learning_rate": 4.923950433088815e-06, "loss": 0.844, "step": 368 }, { "epoch": 0.015292801193584483, "grad_norm": 0.4907439351081848, "learning_rate": 4.923743213560446e-06, "loss": 0.856, "step": 369 }, { "epoch": 0.015334245099258154, "grad_norm": 0.4889748990535736, "learning_rate": 4.923535994032078e-06, "loss": 0.9004, "step": 370 }, { "epoch": 0.015375689004931825, "grad_norm": 0.479399710893631, "learning_rate": 4.92332877450371e-06, "loss": 0.7988, "step": 371 }, { "epoch": 0.015417132910605496, "grad_norm": 0.5161091685295105, "learning_rate": 4.923121554975341e-06, "loss": 0.8877, "step": 372 }, { "epoch": 0.015458576816279166, "grad_norm": 0.4821130931377411, "learning_rate": 4.9229143354469724e-06, "loss": 0.8564, "step": 373 }, { "epoch": 0.015500020721952838, "grad_norm": 0.47413793206214905, "learning_rate": 4.922707115918605e-06, "loss": 0.8455, "step": 374 }, { "epoch": 0.015541464627626508, "grad_norm": 0.46731749176979065, "learning_rate": 4.922499896390236e-06, "loss": 0.854, "step": 375 }, { "epoch": 0.015582908533300178, "grad_norm": 0.5367522239685059, "learning_rate": 4.9222926768618674e-06, "loss": 0.8652, "step": 376 }, { "epoch": 
0.01562435243897385, "grad_norm": 0.49957993626594543, "learning_rate": 4.9220854573335e-06, "loss": 0.8335, "step": 377 }, { "epoch": 0.015665796344647518, "grad_norm": 0.5024069547653198, "learning_rate": 4.921878237805131e-06, "loss": 0.7959, "step": 378 }, { "epoch": 0.01570724025032119, "grad_norm": 0.46874380111694336, "learning_rate": 4.921671018276763e-06, "loss": 0.8059, "step": 379 }, { "epoch": 0.015748684155994862, "grad_norm": 0.4658451974391937, "learning_rate": 4.921463798748395e-06, "loss": 0.8418, "step": 380 }, { "epoch": 0.015790128061668532, "grad_norm": 0.5046740770339966, "learning_rate": 4.921256579220026e-06, "loss": 0.8706, "step": 381 }, { "epoch": 0.015831571967342202, "grad_norm": 0.4836690425872803, "learning_rate": 4.9210493596916574e-06, "loss": 0.8196, "step": 382 }, { "epoch": 0.015873015873015872, "grad_norm": 0.48251408338546753, "learning_rate": 4.92084214016329e-06, "loss": 0.8528, "step": 383 }, { "epoch": 0.015914459778689542, "grad_norm": 0.4738592803478241, "learning_rate": 4.920634920634921e-06, "loss": 0.8, "step": 384 }, { "epoch": 0.015955903684363216, "grad_norm": 0.5402851700782776, "learning_rate": 4.9204277011065524e-06, "loss": 0.8914, "step": 385 }, { "epoch": 0.015997347590036886, "grad_norm": 0.49203255772590637, "learning_rate": 4.920220481578185e-06, "loss": 0.8574, "step": 386 }, { "epoch": 0.016038791495710556, "grad_norm": 0.4661460816860199, "learning_rate": 4.920013262049816e-06, "loss": 0.8533, "step": 387 }, { "epoch": 0.016080235401384226, "grad_norm": 0.48006075620651245, "learning_rate": 4.9198060425214475e-06, "loss": 0.803, "step": 388 }, { "epoch": 0.016121679307057896, "grad_norm": 0.4801277220249176, "learning_rate": 4.919598822993079e-06, "loss": 0.8674, "step": 389 }, { "epoch": 0.016163123212731566, "grad_norm": 0.44423675537109375, "learning_rate": 4.919391603464711e-06, "loss": 0.7825, "step": 390 }, { "epoch": 0.01620456711840524, "grad_norm": 0.4668760895729065, "learning_rate": 
4.9191843839363425e-06, "loss": 0.8147, "step": 391 }, { "epoch": 0.01624601102407891, "grad_norm": 0.5075100660324097, "learning_rate": 4.918977164407974e-06, "loss": 0.906, "step": 392 }, { "epoch": 0.01628745492975258, "grad_norm": 0.4797791540622711, "learning_rate": 4.918769944879606e-06, "loss": 0.8662, "step": 393 }, { "epoch": 0.01632889883542625, "grad_norm": 0.49664506316185, "learning_rate": 4.9185627253512375e-06, "loss": 0.7861, "step": 394 }, { "epoch": 0.01637034274109992, "grad_norm": 0.4621793329715729, "learning_rate": 4.91835550582287e-06, "loss": 0.8477, "step": 395 }, { "epoch": 0.01641178664677359, "grad_norm": 0.5162895917892456, "learning_rate": 4.918148286294501e-06, "loss": 0.8728, "step": 396 }, { "epoch": 0.016453230552447264, "grad_norm": 0.5187440514564514, "learning_rate": 4.9179410667661325e-06, "loss": 0.7927, "step": 397 }, { "epoch": 0.016494674458120934, "grad_norm": 0.48792213201522827, "learning_rate": 4.917733847237764e-06, "loss": 0.8474, "step": 398 }, { "epoch": 0.016536118363794605, "grad_norm": 0.4553443193435669, "learning_rate": 4.917526627709396e-06, "loss": 0.7991, "step": 399 }, { "epoch": 0.016577562269468275, "grad_norm": 0.4740474224090576, "learning_rate": 4.9173194081810275e-06, "loss": 0.8228, "step": 400 }, { "epoch": 0.016619006175141945, "grad_norm": 0.49421992897987366, "learning_rate": 4.917112188652659e-06, "loss": 0.8513, "step": 401 }, { "epoch": 0.016660450080815615, "grad_norm": 0.5225406885147095, "learning_rate": 4.916904969124291e-06, "loss": 0.9307, "step": 402 }, { "epoch": 0.01670189398648929, "grad_norm": 0.453609824180603, "learning_rate": 4.9166977495959225e-06, "loss": 0.7539, "step": 403 }, { "epoch": 0.01674333789216296, "grad_norm": 0.4644213914871216, "learning_rate": 4.916490530067554e-06, "loss": 0.8186, "step": 404 }, { "epoch": 0.01678478179783663, "grad_norm": 0.5403565168380737, "learning_rate": 4.916283310539185e-06, "loss": 0.842, "step": 405 }, { "epoch": 0.0168262257035103, 
"grad_norm": 0.4868334233760834, "learning_rate": 4.9160760910108175e-06, "loss": 0.7981, "step": 406 }, { "epoch": 0.01686766960918397, "grad_norm": 0.47065234184265137, "learning_rate": 4.915868871482449e-06, "loss": 0.8123, "step": 407 }, { "epoch": 0.01690911351485764, "grad_norm": 0.5169198513031006, "learning_rate": 4.91566165195408e-06, "loss": 0.8466, "step": 408 }, { "epoch": 0.01695055742053131, "grad_norm": 0.44264426827430725, "learning_rate": 4.915454432425712e-06, "loss": 0.7815, "step": 409 }, { "epoch": 0.016992001326204983, "grad_norm": 0.46051862835884094, "learning_rate": 4.915247212897344e-06, "loss": 0.8281, "step": 410 }, { "epoch": 0.017033445231878653, "grad_norm": 0.48056861758232117, "learning_rate": 4.915039993368976e-06, "loss": 0.7898, "step": 411 }, { "epoch": 0.017074889137552323, "grad_norm": 0.5130049586296082, "learning_rate": 4.9148327738406075e-06, "loss": 0.8306, "step": 412 }, { "epoch": 0.017116333043225993, "grad_norm": 0.45076465606689453, "learning_rate": 4.914625554312239e-06, "loss": 0.7935, "step": 413 }, { "epoch": 0.017157776948899663, "grad_norm": 0.46089985966682434, "learning_rate": 4.91441833478387e-06, "loss": 0.8059, "step": 414 }, { "epoch": 0.017199220854573333, "grad_norm": 0.45488303899765015, "learning_rate": 4.9142111152555025e-06, "loss": 0.7965, "step": 415 }, { "epoch": 0.017240664760247007, "grad_norm": 0.4720882773399353, "learning_rate": 4.914003895727134e-06, "loss": 0.8225, "step": 416 }, { "epoch": 0.017282108665920677, "grad_norm": 0.46068084239959717, "learning_rate": 4.913796676198765e-06, "loss": 0.804, "step": 417 }, { "epoch": 0.017323552571594347, "grad_norm": 0.5240599513053894, "learning_rate": 4.913589456670397e-06, "loss": 0.8711, "step": 418 }, { "epoch": 0.017364996477268017, "grad_norm": 0.6012427806854248, "learning_rate": 4.913382237142029e-06, "loss": 0.8191, "step": 419 }, { "epoch": 0.017406440382941687, "grad_norm": 0.5937158465385437, "learning_rate": 4.91317501761366e-06, 
"loss": 0.8796, "step": 420 }, { "epoch": 0.017447884288615358, "grad_norm": 0.48416364192962646, "learning_rate": 4.912967798085292e-06, "loss": 0.8274, "step": 421 }, { "epoch": 0.01748932819428903, "grad_norm": 0.44899284839630127, "learning_rate": 4.912760578556924e-06, "loss": 0.8484, "step": 422 }, { "epoch": 0.0175307720999627, "grad_norm": 0.4800012707710266, "learning_rate": 4.912553359028555e-06, "loss": 0.7979, "step": 423 }, { "epoch": 0.01757221600563637, "grad_norm": 0.4837320148944855, "learning_rate": 4.912346139500187e-06, "loss": 0.8186, "step": 424 }, { "epoch": 0.01761365991131004, "grad_norm": 0.4740530848503113, "learning_rate": 4.912138919971818e-06, "loss": 0.8291, "step": 425 }, { "epoch": 0.01765510381698371, "grad_norm": 0.4518044590950012, "learning_rate": 4.91193170044345e-06, "loss": 0.8049, "step": 426 }, { "epoch": 0.017696547722657382, "grad_norm": 0.4841671586036682, "learning_rate": 4.911724480915082e-06, "loss": 0.8516, "step": 427 }, { "epoch": 0.017737991628331055, "grad_norm": 0.4785279631614685, "learning_rate": 4.911517261386714e-06, "loss": 0.8169, "step": 428 }, { "epoch": 0.017779435534004726, "grad_norm": 0.4598132371902466, "learning_rate": 4.911310041858345e-06, "loss": 0.8069, "step": 429 }, { "epoch": 0.017820879439678396, "grad_norm": 0.5076266527175903, "learning_rate": 4.911102822329977e-06, "loss": 0.8569, "step": 430 }, { "epoch": 0.017862323345352066, "grad_norm": 0.45577993988990784, "learning_rate": 4.910895602801609e-06, "loss": 0.7512, "step": 431 }, { "epoch": 0.017903767251025736, "grad_norm": 0.5087863206863403, "learning_rate": 4.91068838327324e-06, "loss": 0.7761, "step": 432 }, { "epoch": 0.017945211156699406, "grad_norm": 0.4486319422721863, "learning_rate": 4.910481163744872e-06, "loss": 0.8982, "step": 433 }, { "epoch": 0.01798665506237308, "grad_norm": 0.5014278292655945, "learning_rate": 4.910273944216503e-06, "loss": 0.8162, "step": 434 }, { "epoch": 0.01802809896804675, "grad_norm": 
0.46839311718940735, "learning_rate": 4.910066724688135e-06, "loss": 0.7803, "step": 435 }, { "epoch": 0.01806954287372042, "grad_norm": 0.49171018600463867, "learning_rate": 4.909859505159767e-06, "loss": 0.8779, "step": 436 }, { "epoch": 0.01811098677939409, "grad_norm": 0.4889352023601532, "learning_rate": 4.909652285631398e-06, "loss": 0.8862, "step": 437 }, { "epoch": 0.01815243068506776, "grad_norm": 0.4969484806060791, "learning_rate": 4.90944506610303e-06, "loss": 0.8142, "step": 438 }, { "epoch": 0.01819387459074143, "grad_norm": 0.45526519417762756, "learning_rate": 4.909237846574662e-06, "loss": 0.8193, "step": 439 }, { "epoch": 0.018235318496415104, "grad_norm": 0.4844783544540405, "learning_rate": 4.909030627046293e-06, "loss": 0.8396, "step": 440 }, { "epoch": 0.018276762402088774, "grad_norm": 0.5045813918113708, "learning_rate": 4.9088234075179244e-06, "loss": 0.8481, "step": 441 }, { "epoch": 0.018318206307762444, "grad_norm": 0.547040581703186, "learning_rate": 4.908616187989557e-06, "loss": 0.8477, "step": 442 }, { "epoch": 0.018359650213436114, "grad_norm": 0.4526495039463043, "learning_rate": 4.908408968461188e-06, "loss": 0.8064, "step": 443 }, { "epoch": 0.018401094119109784, "grad_norm": 0.5089373588562012, "learning_rate": 4.9082017489328195e-06, "loss": 0.843, "step": 444 }, { "epoch": 0.018442538024783454, "grad_norm": 0.4830186367034912, "learning_rate": 4.907994529404451e-06, "loss": 0.8269, "step": 445 }, { "epoch": 0.018483981930457125, "grad_norm": 0.539989173412323, "learning_rate": 4.907787309876083e-06, "loss": 0.8621, "step": 446 }, { "epoch": 0.018525425836130798, "grad_norm": 0.4397867023944855, "learning_rate": 4.907580090347715e-06, "loss": 0.7822, "step": 447 }, { "epoch": 0.01856686974180447, "grad_norm": 0.48654913902282715, "learning_rate": 4.907372870819347e-06, "loss": 0.8457, "step": 448 }, { "epoch": 0.01860831364747814, "grad_norm": 0.4765969514846802, "learning_rate": 4.907165651290978e-06, "loss": 0.7732, "step": 
449 }, { "epoch": 0.01864975755315181, "grad_norm": 0.5010995268821716, "learning_rate": 4.9069584317626095e-06, "loss": 0.7888, "step": 450 }, { "epoch": 0.01869120145882548, "grad_norm": 0.45635828375816345, "learning_rate": 4.906751212234242e-06, "loss": 0.7874, "step": 451 }, { "epoch": 0.01873264536449915, "grad_norm": 0.5300329923629761, "learning_rate": 4.906543992705873e-06, "loss": 0.8276, "step": 452 }, { "epoch": 0.018774089270172822, "grad_norm": 0.4671211540699005, "learning_rate": 4.9063367731775045e-06, "loss": 0.8667, "step": 453 }, { "epoch": 0.018815533175846492, "grad_norm": 0.46540865302085876, "learning_rate": 4.906129553649137e-06, "loss": 0.8335, "step": 454 }, { "epoch": 0.018856977081520163, "grad_norm": 0.49816012382507324, "learning_rate": 4.905922334120768e-06, "loss": 0.8743, "step": 455 }, { "epoch": 0.018898420987193833, "grad_norm": 0.5244741439819336, "learning_rate": 4.9057151145923995e-06, "loss": 0.8552, "step": 456 }, { "epoch": 0.018939864892867503, "grad_norm": 0.4913051724433899, "learning_rate": 4.905507895064031e-06, "loss": 0.8618, "step": 457 }, { "epoch": 0.018981308798541173, "grad_norm": 0.5005473494529724, "learning_rate": 4.905300675535663e-06, "loss": 0.8523, "step": 458 }, { "epoch": 0.019022752704214847, "grad_norm": 0.4599267244338989, "learning_rate": 4.9050934560072945e-06, "loss": 0.7976, "step": 459 }, { "epoch": 0.019064196609888517, "grad_norm": 0.4852423369884491, "learning_rate": 4.904886236478926e-06, "loss": 0.8057, "step": 460 }, { "epoch": 0.019105640515562187, "grad_norm": 0.451443076133728, "learning_rate": 4.904679016950557e-06, "loss": 0.801, "step": 461 }, { "epoch": 0.019147084421235857, "grad_norm": 0.4907212555408478, "learning_rate": 4.9044717974221895e-06, "loss": 0.8125, "step": 462 }, { "epoch": 0.019188528326909527, "grad_norm": 0.46715471148490906, "learning_rate": 4.904264577893822e-06, "loss": 0.8167, "step": 463 }, { "epoch": 0.019229972232583197, "grad_norm": 0.47199976444244385, 
"learning_rate": 4.904057358365453e-06, "loss": 0.8157, "step": 464 }, { "epoch": 0.01927141613825687, "grad_norm": 0.46977463364601135, "learning_rate": 4.9038501388370845e-06, "loss": 0.8757, "step": 465 }, { "epoch": 0.01931286004393054, "grad_norm": 0.44438350200653076, "learning_rate": 4.903642919308716e-06, "loss": 0.76, "step": 466 }, { "epoch": 0.01935430394960421, "grad_norm": 0.5002657771110535, "learning_rate": 4.903435699780348e-06, "loss": 0.7944, "step": 467 }, { "epoch": 0.01939574785527788, "grad_norm": 0.5109201669692993, "learning_rate": 4.9032284802519795e-06, "loss": 0.8711, "step": 468 }, { "epoch": 0.01943719176095155, "grad_norm": 0.5137448310852051, "learning_rate": 4.903021260723611e-06, "loss": 0.752, "step": 469 }, { "epoch": 0.01947863566662522, "grad_norm": 0.5363143086433411, "learning_rate": 4.902814041195242e-06, "loss": 0.8176, "step": 470 }, { "epoch": 0.019520079572298895, "grad_norm": 0.4738991856575012, "learning_rate": 4.9026068216668745e-06, "loss": 0.7893, "step": 471 }, { "epoch": 0.019561523477972565, "grad_norm": 0.4510260224342346, "learning_rate": 4.902399602138506e-06, "loss": 0.7937, "step": 472 }, { "epoch": 0.019602967383646235, "grad_norm": 0.4918208420276642, "learning_rate": 4.902192382610137e-06, "loss": 0.7979, "step": 473 }, { "epoch": 0.019644411289319905, "grad_norm": 0.48828259110450745, "learning_rate": 4.9019851630817695e-06, "loss": 0.8721, "step": 474 }, { "epoch": 0.019685855194993575, "grad_norm": 0.43061384558677673, "learning_rate": 4.901777943553401e-06, "loss": 0.8055, "step": 475 }, { "epoch": 0.019727299100667246, "grad_norm": 0.4545750617980957, "learning_rate": 4.901570724025032e-06, "loss": 0.7976, "step": 476 }, { "epoch": 0.01976874300634092, "grad_norm": 0.5229188799858093, "learning_rate": 4.901363504496664e-06, "loss": 0.8301, "step": 477 }, { "epoch": 0.01981018691201459, "grad_norm": 0.44408050179481506, "learning_rate": 4.901156284968296e-06, "loss": 0.78, "step": 478 }, { "epoch": 
0.01985163081768826, "grad_norm": 0.5063157677650452, "learning_rate": 4.900949065439927e-06, "loss": 0.8621, "step": 479 }, { "epoch": 0.01989307472336193, "grad_norm": 0.4920934736728668, "learning_rate": 4.9007418459115595e-06, "loss": 0.7847, "step": 480 }, { "epoch": 0.0199345186290356, "grad_norm": 0.5848267674446106, "learning_rate": 4.900534626383191e-06, "loss": 0.8501, "step": 481 }, { "epoch": 0.01997596253470927, "grad_norm": 0.48913031816482544, "learning_rate": 4.900327406854822e-06, "loss": 0.8008, "step": 482 }, { "epoch": 0.020017406440382943, "grad_norm": 0.5036863088607788, "learning_rate": 4.9001201873264545e-06, "loss": 0.8018, "step": 483 }, { "epoch": 0.020058850346056614, "grad_norm": 0.46786680817604065, "learning_rate": 4.899912967798086e-06, "loss": 0.8413, "step": 484 }, { "epoch": 0.020100294251730284, "grad_norm": 0.47618743777275085, "learning_rate": 4.899705748269717e-06, "loss": 0.7832, "step": 485 }, { "epoch": 0.020141738157403954, "grad_norm": 0.4857902228832245, "learning_rate": 4.899498528741349e-06, "loss": 0.8784, "step": 486 }, { "epoch": 0.020183182063077624, "grad_norm": 0.5140090584754944, "learning_rate": 4.899291309212981e-06, "loss": 0.8889, "step": 487 }, { "epoch": 0.020224625968751294, "grad_norm": 0.4780973792076111, "learning_rate": 4.899084089684612e-06, "loss": 0.7751, "step": 488 }, { "epoch": 0.020266069874424964, "grad_norm": 0.46658629179000854, "learning_rate": 4.898876870156244e-06, "loss": 0.8184, "step": 489 }, { "epoch": 0.020307513780098638, "grad_norm": 0.47505757212638855, "learning_rate": 4.898669650627876e-06, "loss": 0.7725, "step": 490 }, { "epoch": 0.020348957685772308, "grad_norm": 0.4759451150894165, "learning_rate": 4.898462431099507e-06, "loss": 0.8167, "step": 491 }, { "epoch": 0.020390401591445978, "grad_norm": 0.4769792854785919, "learning_rate": 4.898255211571139e-06, "loss": 0.8152, "step": 492 }, { "epoch": 0.020431845497119648, "grad_norm": 0.5034412145614624, "learning_rate": 
4.89804799204277e-06, "loss": 0.8057, "step": 493 }, { "epoch": 0.020473289402793318, "grad_norm": 0.44975370168685913, "learning_rate": 4.897840772514402e-06, "loss": 0.7925, "step": 494 }, { "epoch": 0.02051473330846699, "grad_norm": 0.4678061306476593, "learning_rate": 4.897633552986034e-06, "loss": 0.804, "step": 495 }, { "epoch": 0.020556177214140662, "grad_norm": 0.4949014484882355, "learning_rate": 4.897426333457666e-06, "loss": 0.8298, "step": 496 }, { "epoch": 0.020597621119814332, "grad_norm": 0.5196720957756042, "learning_rate": 4.8972191139292964e-06, "loss": 0.8376, "step": 497 }, { "epoch": 0.020639065025488002, "grad_norm": 0.5207451581954956, "learning_rate": 4.897011894400929e-06, "loss": 0.8726, "step": 498 }, { "epoch": 0.020680508931161672, "grad_norm": 0.46578139066696167, "learning_rate": 4.896804674872561e-06, "loss": 0.7998, "step": 499 }, { "epoch": 0.020721952836835342, "grad_norm": 0.5228472948074341, "learning_rate": 4.896597455344192e-06, "loss": 0.8379, "step": 500 }, { "epoch": 0.020763396742509013, "grad_norm": 0.4439935088157654, "learning_rate": 4.896390235815824e-06, "loss": 0.7885, "step": 501 }, { "epoch": 0.020804840648182686, "grad_norm": 0.46296626329421997, "learning_rate": 4.896183016287455e-06, "loss": 0.7659, "step": 502 }, { "epoch": 0.020846284553856356, "grad_norm": 0.49447980523109436, "learning_rate": 4.895975796759087e-06, "loss": 0.8208, "step": 503 }, { "epoch": 0.020887728459530026, "grad_norm": 0.45511969923973083, "learning_rate": 4.895768577230719e-06, "loss": 0.7983, "step": 504 }, { "epoch": 0.020929172365203697, "grad_norm": 0.4478197991847992, "learning_rate": 4.89556135770235e-06, "loss": 0.7834, "step": 505 }, { "epoch": 0.020970616270877367, "grad_norm": 0.4402836263179779, "learning_rate": 4.8953541381739815e-06, "loss": 0.7676, "step": 506 }, { "epoch": 0.021012060176551037, "grad_norm": 0.44297662377357483, "learning_rate": 4.895146918645614e-06, "loss": 0.7883, "step": 507 }, { "epoch": 
0.02105350408222471, "grad_norm": 0.5047253966331482, "learning_rate": 4.894939699117245e-06, "loss": 0.9138, "step": 508 }, { "epoch": 0.02109494798789838, "grad_norm": 0.5168031454086304, "learning_rate": 4.8947324795888765e-06, "loss": 0.8809, "step": 509 }, { "epoch": 0.02113639189357205, "grad_norm": 0.5152113437652588, "learning_rate": 4.894525260060509e-06, "loss": 0.7847, "step": 510 }, { "epoch": 0.02117783579924572, "grad_norm": 0.46421441435813904, "learning_rate": 4.89431804053214e-06, "loss": 0.7681, "step": 511 }, { "epoch": 0.02121927970491939, "grad_norm": 0.46760526299476624, "learning_rate": 4.8941108210037715e-06, "loss": 0.79, "step": 512 }, { "epoch": 0.02126072361059306, "grad_norm": 0.470059871673584, "learning_rate": 4.893903601475403e-06, "loss": 0.8193, "step": 513 }, { "epoch": 0.021302167516266735, "grad_norm": 0.4791758358478546, "learning_rate": 4.893696381947035e-06, "loss": 0.8418, "step": 514 }, { "epoch": 0.021343611421940405, "grad_norm": 0.46390190720558167, "learning_rate": 4.893489162418667e-06, "loss": 0.8459, "step": 515 }, { "epoch": 0.021385055327614075, "grad_norm": 0.43868038058280945, "learning_rate": 4.893281942890299e-06, "loss": 0.7942, "step": 516 }, { "epoch": 0.021426499233287745, "grad_norm": 0.47130662202835083, "learning_rate": 4.89307472336193e-06, "loss": 0.854, "step": 517 }, { "epoch": 0.021467943138961415, "grad_norm": 0.4984039068222046, "learning_rate": 4.8928675038335615e-06, "loss": 0.8887, "step": 518 }, { "epoch": 0.021509387044635085, "grad_norm": 0.4585687220096588, "learning_rate": 4.892660284305194e-06, "loss": 0.8198, "step": 519 }, { "epoch": 0.02155083095030876, "grad_norm": 0.496217280626297, "learning_rate": 4.892453064776825e-06, "loss": 0.8025, "step": 520 }, { "epoch": 0.02159227485598243, "grad_norm": 0.47160789370536804, "learning_rate": 4.8922458452484565e-06, "loss": 0.7446, "step": 521 }, { "epoch": 0.0216337187616561, "grad_norm": 0.5480387210845947, "learning_rate": 
4.892038625720088e-06, "loss": 0.9136, "step": 522 }, { "epoch": 0.02167516266732977, "grad_norm": 0.44932806491851807, "learning_rate": 4.89183140619172e-06, "loss": 0.7328, "step": 523 }, { "epoch": 0.02171660657300344, "grad_norm": 0.4601422846317291, "learning_rate": 4.8916241866633515e-06, "loss": 0.7666, "step": 524 }, { "epoch": 0.02175805047867711, "grad_norm": 0.47914057970046997, "learning_rate": 4.891416967134983e-06, "loss": 0.799, "step": 525 }, { "epoch": 0.02179949438435078, "grad_norm": 0.47169601917266846, "learning_rate": 4.891209747606615e-06, "loss": 0.8933, "step": 526 }, { "epoch": 0.021840938290024453, "grad_norm": 0.4357249438762665, "learning_rate": 4.8910025280782465e-06, "loss": 0.7849, "step": 527 }, { "epoch": 0.021882382195698123, "grad_norm": 0.5225855708122253, "learning_rate": 4.890795308549878e-06, "loss": 0.793, "step": 528 }, { "epoch": 0.021923826101371793, "grad_norm": 0.5255203247070312, "learning_rate": 4.890588089021509e-06, "loss": 0.9143, "step": 529 }, { "epoch": 0.021965270007045463, "grad_norm": 0.47456568479537964, "learning_rate": 4.8903808694931415e-06, "loss": 0.8313, "step": 530 }, { "epoch": 0.022006713912719134, "grad_norm": 0.48161983489990234, "learning_rate": 4.890173649964773e-06, "loss": 0.8484, "step": 531 }, { "epoch": 0.022048157818392804, "grad_norm": 0.4773552417755127, "learning_rate": 4.889966430436405e-06, "loss": 0.8198, "step": 532 }, { "epoch": 0.022089601724066477, "grad_norm": 0.43980589509010315, "learning_rate": 4.8897592109080365e-06, "loss": 0.8496, "step": 533 }, { "epoch": 0.022131045629740147, "grad_norm": 0.4770891070365906, "learning_rate": 4.889551991379668e-06, "loss": 0.7791, "step": 534 }, { "epoch": 0.022172489535413818, "grad_norm": 0.4900587499141693, "learning_rate": 4.8893447718513e-06, "loss": 0.7479, "step": 535 }, { "epoch": 0.022213933441087488, "grad_norm": 0.48809048533439636, "learning_rate": 4.8891375523229315e-06, "loss": 0.8226, "step": 536 }, { "epoch": 
0.022255377346761158, "grad_norm": 0.48735225200653076, "learning_rate": 4.888930332794563e-06, "loss": 0.853, "step": 537 }, { "epoch": 0.022296821252434828, "grad_norm": 0.48354893922805786, "learning_rate": 4.888723113266194e-06, "loss": 0.8655, "step": 538 }, { "epoch": 0.0223382651581085, "grad_norm": 0.4572780728340149, "learning_rate": 4.8885158937378265e-06, "loss": 0.7361, "step": 539 }, { "epoch": 0.02237970906378217, "grad_norm": 0.505377471446991, "learning_rate": 4.888308674209458e-06, "loss": 0.8479, "step": 540 }, { "epoch": 0.022421152969455842, "grad_norm": 0.4808878004550934, "learning_rate": 4.888101454681089e-06, "loss": 0.7456, "step": 541 }, { "epoch": 0.022462596875129512, "grad_norm": 0.48792564868927, "learning_rate": 4.8878942351527215e-06, "loss": 0.834, "step": 542 }, { "epoch": 0.022504040780803182, "grad_norm": 0.4690813422203064, "learning_rate": 4.887687015624353e-06, "loss": 0.7578, "step": 543 }, { "epoch": 0.022545484686476852, "grad_norm": 0.5344796180725098, "learning_rate": 4.887479796095984e-06, "loss": 0.8955, "step": 544 }, { "epoch": 0.022586928592150526, "grad_norm": 0.47094985842704773, "learning_rate": 4.887272576567616e-06, "loss": 0.7598, "step": 545 }, { "epoch": 0.022628372497824196, "grad_norm": 0.5210912227630615, "learning_rate": 4.887065357039248e-06, "loss": 0.9221, "step": 546 }, { "epoch": 0.022669816403497866, "grad_norm": 0.4628584384918213, "learning_rate": 4.886858137510879e-06, "loss": 0.7937, "step": 547 }, { "epoch": 0.022711260309171536, "grad_norm": 0.48059824109077454, "learning_rate": 4.8866509179825115e-06, "loss": 0.7708, "step": 548 }, { "epoch": 0.022752704214845206, "grad_norm": 0.4434678852558136, "learning_rate": 4.886443698454142e-06, "loss": 0.7505, "step": 549 }, { "epoch": 0.022794148120518876, "grad_norm": 0.4747712016105652, "learning_rate": 4.886236478925774e-06, "loss": 0.8174, "step": 550 }, { "epoch": 0.02283559202619255, "grad_norm": 0.4849909842014313, "learning_rate": 
4.8860292593974065e-06, "loss": 0.8782, "step": 551 }, { "epoch": 0.02287703593186622, "grad_norm": 0.5049998760223389, "learning_rate": 4.885822039869038e-06, "loss": 0.8596, "step": 552 }, { "epoch": 0.02291847983753989, "grad_norm": 0.5053225755691528, "learning_rate": 4.885614820340669e-06, "loss": 0.7964, "step": 553 }, { "epoch": 0.02295992374321356, "grad_norm": 0.43197908997535706, "learning_rate": 4.885407600812301e-06, "loss": 0.8164, "step": 554 }, { "epoch": 0.02300136764888723, "grad_norm": 0.47206753492355347, "learning_rate": 4.885200381283933e-06, "loss": 0.8247, "step": 555 }, { "epoch": 0.0230428115545609, "grad_norm": 0.453747034072876, "learning_rate": 4.884993161755564e-06, "loss": 0.7861, "step": 556 }, { "epoch": 0.023084255460234574, "grad_norm": 0.4709733724594116, "learning_rate": 4.884785942227196e-06, "loss": 0.7666, "step": 557 }, { "epoch": 0.023125699365908244, "grad_norm": 0.4618700444698334, "learning_rate": 4.884578722698827e-06, "loss": 0.823, "step": 558 }, { "epoch": 0.023167143271581914, "grad_norm": 0.4612927734851837, "learning_rate": 4.884371503170459e-06, "loss": 0.8152, "step": 559 }, { "epoch": 0.023208587177255584, "grad_norm": 0.4989731013774872, "learning_rate": 4.884164283642091e-06, "loss": 0.833, "step": 560 }, { "epoch": 0.023250031082929255, "grad_norm": 0.4959282875061035, "learning_rate": 4.883957064113722e-06, "loss": 0.8604, "step": 561 }, { "epoch": 0.023291474988602925, "grad_norm": 0.46370023488998413, "learning_rate": 4.883749844585354e-06, "loss": 0.8252, "step": 562 }, { "epoch": 0.0233329188942766, "grad_norm": 0.46025994420051575, "learning_rate": 4.883542625056986e-06, "loss": 0.7876, "step": 563 }, { "epoch": 0.02337436279995027, "grad_norm": 0.4388412833213806, "learning_rate": 4.883335405528618e-06, "loss": 0.8208, "step": 564 }, { "epoch": 0.02341580670562394, "grad_norm": 0.4504384696483612, "learning_rate": 4.8831281860002485e-06, "loss": 0.7512, "step": 565 }, { "epoch": 0.02345725061129761, 
"grad_norm": 0.4930025637149811, "learning_rate": 4.882920966471881e-06, "loss": 0.803, "step": 566 }, { "epoch": 0.02349869451697128, "grad_norm": 0.4779817461967468, "learning_rate": 4.882713746943513e-06, "loss": 0.8284, "step": 567 }, { "epoch": 0.02354013842264495, "grad_norm": 0.48746493458747864, "learning_rate": 4.882506527415144e-06, "loss": 0.8496, "step": 568 }, { "epoch": 0.02358158232831862, "grad_norm": 0.4496462643146515, "learning_rate": 4.882299307886776e-06, "loss": 0.8191, "step": 569 }, { "epoch": 0.023623026233992293, "grad_norm": 0.4515238106250763, "learning_rate": 4.882092088358407e-06, "loss": 0.7618, "step": 570 }, { "epoch": 0.023664470139665963, "grad_norm": 0.49928152561187744, "learning_rate": 4.881884868830039e-06, "loss": 0.8364, "step": 571 }, { "epoch": 0.023705914045339633, "grad_norm": 0.4698212742805481, "learning_rate": 4.881677649301671e-06, "loss": 0.7612, "step": 572 }, { "epoch": 0.023747357951013303, "grad_norm": 0.4687686860561371, "learning_rate": 4.881470429773302e-06, "loss": 0.7778, "step": 573 }, { "epoch": 0.023788801856686973, "grad_norm": 0.5051504969596863, "learning_rate": 4.8812632102449335e-06, "loss": 0.845, "step": 574 }, { "epoch": 0.023830245762360643, "grad_norm": 0.4640558958053589, "learning_rate": 4.881055990716566e-06, "loss": 0.8052, "step": 575 }, { "epoch": 0.023871689668034317, "grad_norm": 0.47961655259132385, "learning_rate": 4.880848771188197e-06, "loss": 0.7686, "step": 576 }, { "epoch": 0.023913133573707987, "grad_norm": 0.4662036597728729, "learning_rate": 4.8806415516598285e-06, "loss": 0.8304, "step": 577 }, { "epoch": 0.023954577479381657, "grad_norm": 0.4470343291759491, "learning_rate": 4.880434332131461e-06, "loss": 0.822, "step": 578 }, { "epoch": 0.023996021385055327, "grad_norm": 0.4809962213039398, "learning_rate": 4.880227112603092e-06, "loss": 0.8489, "step": 579 }, { "epoch": 0.024037465290728997, "grad_norm": 0.49782678484916687, "learning_rate": 4.880019893074724e-06, "loss": 
0.864, "step": 580 }, { "epoch": 0.024078909196402667, "grad_norm": 0.4834621846675873, "learning_rate": 4.879812673546355e-06, "loss": 0.7886, "step": 581 }, { "epoch": 0.02412035310207634, "grad_norm": 0.4877339005470276, "learning_rate": 4.879605454017987e-06, "loss": 0.7717, "step": 582 }, { "epoch": 0.02416179700775001, "grad_norm": 0.45702728629112244, "learning_rate": 4.8793982344896185e-06, "loss": 0.8247, "step": 583 }, { "epoch": 0.02420324091342368, "grad_norm": 0.4746725261211395, "learning_rate": 4.879191014961251e-06, "loss": 0.7629, "step": 584 }, { "epoch": 0.02424468481909735, "grad_norm": 0.5049235224723816, "learning_rate": 4.878983795432882e-06, "loss": 0.8403, "step": 585 }, { "epoch": 0.02428612872477102, "grad_norm": 0.4915831685066223, "learning_rate": 4.8787765759045135e-06, "loss": 0.7683, "step": 586 }, { "epoch": 0.02432757263044469, "grad_norm": 0.48449084162712097, "learning_rate": 4.878569356376146e-06, "loss": 0.7861, "step": 587 }, { "epoch": 0.024369016536118365, "grad_norm": 0.5011218786239624, "learning_rate": 4.878362136847777e-06, "loss": 0.7993, "step": 588 }, { "epoch": 0.024410460441792035, "grad_norm": 0.47988468408584595, "learning_rate": 4.8781549173194085e-06, "loss": 0.7902, "step": 589 }, { "epoch": 0.024451904347465706, "grad_norm": 0.46121472120285034, "learning_rate": 4.87794769779104e-06, "loss": 0.8059, "step": 590 }, { "epoch": 0.024493348253139376, "grad_norm": 0.5387938022613525, "learning_rate": 4.877740478262672e-06, "loss": 0.8933, "step": 591 }, { "epoch": 0.024534792158813046, "grad_norm": 0.4928279519081116, "learning_rate": 4.8775332587343035e-06, "loss": 0.7759, "step": 592 }, { "epoch": 0.024576236064486716, "grad_norm": 0.4332732558250427, "learning_rate": 4.877326039205935e-06, "loss": 0.7856, "step": 593 }, { "epoch": 0.02461767997016039, "grad_norm": 0.5063583254814148, "learning_rate": 4.877118819677567e-06, "loss": 0.8286, "step": 594 }, { "epoch": 0.02465912387583406, "grad_norm": 
0.5039634108543396, "learning_rate": 4.8769116001491985e-06, "loss": 0.8147, "step": 595 }, { "epoch": 0.02470056778150773, "grad_norm": 0.48804840445518494, "learning_rate": 4.87670438062083e-06, "loss": 0.7988, "step": 596 }, { "epoch": 0.0247420116871814, "grad_norm": 0.48544737696647644, "learning_rate": 4.876497161092461e-06, "loss": 0.8467, "step": 597 }, { "epoch": 0.02478345559285507, "grad_norm": 0.49868589639663696, "learning_rate": 4.8762899415640935e-06, "loss": 0.8496, "step": 598 }, { "epoch": 0.02482489949852874, "grad_norm": 0.4890144169330597, "learning_rate": 4.876082722035725e-06, "loss": 0.8284, "step": 599 }, { "epoch": 0.024866343404202414, "grad_norm": 0.46914008259773254, "learning_rate": 4.875875502507357e-06, "loss": 0.7963, "step": 600 }, { "epoch": 0.024907787309876084, "grad_norm": 0.501280665397644, "learning_rate": 4.8756682829789885e-06, "loss": 0.814, "step": 601 }, { "epoch": 0.024949231215549754, "grad_norm": 0.487753689289093, "learning_rate": 4.87546106345062e-06, "loss": 0.791, "step": 602 }, { "epoch": 0.024990675121223424, "grad_norm": 0.47674545645713806, "learning_rate": 4.875253843922252e-06, "loss": 0.7776, "step": 603 }, { "epoch": 0.025032119026897094, "grad_norm": 0.491993248462677, "learning_rate": 4.8750466243938835e-06, "loss": 0.8093, "step": 604 }, { "epoch": 0.025073562932570764, "grad_norm": 0.5122238397598267, "learning_rate": 4.874839404865515e-06, "loss": 0.8975, "step": 605 }, { "epoch": 0.025115006838244438, "grad_norm": 0.5160649418830872, "learning_rate": 4.874632185337146e-06, "loss": 0.8159, "step": 606 }, { "epoch": 0.025156450743918108, "grad_norm": 0.49168211221694946, "learning_rate": 4.8744249658087785e-06, "loss": 0.8467, "step": 607 }, { "epoch": 0.025197894649591778, "grad_norm": 0.44403907656669617, "learning_rate": 4.87421774628041e-06, "loss": 0.823, "step": 608 }, { "epoch": 0.025239338555265448, "grad_norm": 0.4288485646247864, "learning_rate": 4.874010526752041e-06, "loss": 0.782, "step": 
609 }, { "epoch": 0.02528078246093912, "grad_norm": 0.47649186849594116, "learning_rate": 4.873803307223673e-06, "loss": 0.8376, "step": 610 }, { "epoch": 0.02532222636661279, "grad_norm": 0.46790993213653564, "learning_rate": 4.873596087695305e-06, "loss": 0.8308, "step": 611 }, { "epoch": 0.02536367027228646, "grad_norm": 0.4332045912742615, "learning_rate": 4.873388868166936e-06, "loss": 0.7708, "step": 612 }, { "epoch": 0.025405114177960132, "grad_norm": 0.4512092173099518, "learning_rate": 4.873181648638568e-06, "loss": 0.7969, "step": 613 }, { "epoch": 0.025446558083633802, "grad_norm": 0.49965643882751465, "learning_rate": 4.8729744291102e-06, "loss": 0.8408, "step": 614 }, { "epoch": 0.025488001989307472, "grad_norm": 0.47372087836265564, "learning_rate": 4.872767209581831e-06, "loss": 0.8291, "step": 615 }, { "epoch": 0.025529445894981143, "grad_norm": 0.4483170807361603, "learning_rate": 4.8725599900534635e-06, "loss": 0.7595, "step": 616 }, { "epoch": 0.025570889800654813, "grad_norm": 0.46808502078056335, "learning_rate": 4.872352770525094e-06, "loss": 0.8196, "step": 617 }, { "epoch": 0.025612333706328483, "grad_norm": 0.44011443853378296, "learning_rate": 4.872145550996726e-06, "loss": 0.7788, "step": 618 }, { "epoch": 0.025653777612002156, "grad_norm": 0.5084178447723389, "learning_rate": 4.871938331468358e-06, "loss": 0.8479, "step": 619 }, { "epoch": 0.025695221517675827, "grad_norm": 0.4346061646938324, "learning_rate": 4.87173111193999e-06, "loss": 0.783, "step": 620 }, { "epoch": 0.025736665423349497, "grad_norm": 0.44072747230529785, "learning_rate": 4.871523892411621e-06, "loss": 0.7644, "step": 621 }, { "epoch": 0.025778109329023167, "grad_norm": 0.45584553480148315, "learning_rate": 4.871316672883253e-06, "loss": 0.801, "step": 622 }, { "epoch": 0.025819553234696837, "grad_norm": 0.5316863059997559, "learning_rate": 4.871109453354885e-06, "loss": 0.8276, "step": 623 }, { "epoch": 0.025860997140370507, "grad_norm": 0.45720532536506653, 
"learning_rate": 4.870902233826516e-06, "loss": 0.8145, "step": 624 }, { "epoch": 0.02590244104604418, "grad_norm": 0.4098816215991974, "learning_rate": 4.870695014298148e-06, "loss": 0.7986, "step": 625 }, { "epoch": 0.02594388495171785, "grad_norm": 0.4784835875034332, "learning_rate": 4.870487794769779e-06, "loss": 0.7578, "step": 626 }, { "epoch": 0.02598532885739152, "grad_norm": 0.45547032356262207, "learning_rate": 4.870280575241411e-06, "loss": 0.7825, "step": 627 }, { "epoch": 0.02602677276306519, "grad_norm": 0.4823001027107239, "learning_rate": 4.870073355713043e-06, "loss": 0.7881, "step": 628 }, { "epoch": 0.02606821666873886, "grad_norm": 0.415988951921463, "learning_rate": 4.869866136184674e-06, "loss": 0.7856, "step": 629 }, { "epoch": 0.02610966057441253, "grad_norm": 0.4871703088283539, "learning_rate": 4.869658916656306e-06, "loss": 0.7993, "step": 630 }, { "epoch": 0.026151104480086205, "grad_norm": 0.4808284640312195, "learning_rate": 4.869451697127938e-06, "loss": 0.7407, "step": 631 }, { "epoch": 0.026192548385759875, "grad_norm": 0.458878755569458, "learning_rate": 4.86924447759957e-06, "loss": 0.7998, "step": 632 }, { "epoch": 0.026233992291433545, "grad_norm": 0.5022748112678528, "learning_rate": 4.8690372580712005e-06, "loss": 0.792, "step": 633 }, { "epoch": 0.026275436197107215, "grad_norm": 0.5150101780891418, "learning_rate": 4.868830038542833e-06, "loss": 0.8372, "step": 634 }, { "epoch": 0.026316880102780885, "grad_norm": 0.4635588228702545, "learning_rate": 4.868622819014464e-06, "loss": 0.7617, "step": 635 }, { "epoch": 0.026358324008454555, "grad_norm": 0.5002948045730591, "learning_rate": 4.868415599486096e-06, "loss": 0.8218, "step": 636 }, { "epoch": 0.02639976791412823, "grad_norm": 0.5043407678604126, "learning_rate": 4.868208379957728e-06, "loss": 0.8483, "step": 637 }, { "epoch": 0.0264412118198019, "grad_norm": 0.4270593523979187, "learning_rate": 4.868001160429359e-06, "loss": 0.7446, "step": 638 }, { "epoch": 
0.02648265572547557, "grad_norm": 0.5148329734802246, "learning_rate": 4.867793940900991e-06, "loss": 0.7773, "step": 639 }, { "epoch": 0.02652409963114924, "grad_norm": 0.514662504196167, "learning_rate": 4.867586721372623e-06, "loss": 0.8442, "step": 640 }, { "epoch": 0.02656554353682291, "grad_norm": 0.48593640327453613, "learning_rate": 4.867379501844254e-06, "loss": 0.8217, "step": 641 }, { "epoch": 0.02660698744249658, "grad_norm": 0.49880021810531616, "learning_rate": 4.8671722823158855e-06, "loss": 0.8069, "step": 642 }, { "epoch": 0.026648431348170253, "grad_norm": 0.4725196957588196, "learning_rate": 4.866965062787518e-06, "loss": 0.8081, "step": 643 }, { "epoch": 0.026689875253843923, "grad_norm": 0.4438782036304474, "learning_rate": 4.866757843259149e-06, "loss": 0.7932, "step": 644 }, { "epoch": 0.026731319159517593, "grad_norm": 0.47148048877716064, "learning_rate": 4.8665506237307805e-06, "loss": 0.8704, "step": 645 }, { "epoch": 0.026772763065191264, "grad_norm": 0.4726231098175049, "learning_rate": 4.866343404202413e-06, "loss": 0.79, "step": 646 }, { "epoch": 0.026814206970864934, "grad_norm": 0.4968494772911072, "learning_rate": 4.866136184674044e-06, "loss": 0.8047, "step": 647 }, { "epoch": 0.026855650876538604, "grad_norm": 0.43174630403518677, "learning_rate": 4.865928965145676e-06, "loss": 0.7822, "step": 648 }, { "epoch": 0.026897094782212274, "grad_norm": 0.43715959787368774, "learning_rate": 4.865721745617307e-06, "loss": 0.7703, "step": 649 }, { "epoch": 0.026938538687885948, "grad_norm": 0.4591558873653412, "learning_rate": 4.865514526088939e-06, "loss": 0.7905, "step": 650 }, { "epoch": 0.026979982593559618, "grad_norm": 0.5094489455223083, "learning_rate": 4.8653073065605705e-06, "loss": 0.8013, "step": 651 }, { "epoch": 0.027021426499233288, "grad_norm": 0.49252450466156006, "learning_rate": 4.865100087032203e-06, "loss": 0.9199, "step": 652 }, { "epoch": 0.027062870404906958, "grad_norm": 0.4657875597476959, "learning_rate": 
4.864892867503834e-06, "loss": 0.7932, "step": 653 }, { "epoch": 0.027104314310580628, "grad_norm": 0.501944363117218, "learning_rate": 4.8646856479754655e-06, "loss": 0.7993, "step": 654 }, { "epoch": 0.027145758216254298, "grad_norm": 0.48011940717697144, "learning_rate": 4.864478428447098e-06, "loss": 0.7837, "step": 655 }, { "epoch": 0.027187202121927972, "grad_norm": 0.5366302728652954, "learning_rate": 4.864271208918729e-06, "loss": 0.8665, "step": 656 }, { "epoch": 0.027228646027601642, "grad_norm": 0.4874062240123749, "learning_rate": 4.8640639893903605e-06, "loss": 0.781, "step": 657 }, { "epoch": 0.027270089933275312, "grad_norm": 0.4771879315376282, "learning_rate": 4.863856769861992e-06, "loss": 0.7959, "step": 658 }, { "epoch": 0.027311533838948982, "grad_norm": 0.46157220005989075, "learning_rate": 4.863649550333624e-06, "loss": 0.8113, "step": 659 }, { "epoch": 0.027352977744622652, "grad_norm": 0.4611596465110779, "learning_rate": 4.8634423308052555e-06, "loss": 0.7826, "step": 660 }, { "epoch": 0.027394421650296322, "grad_norm": 0.46256789565086365, "learning_rate": 4.863235111276887e-06, "loss": 0.833, "step": 661 }, { "epoch": 0.027435865555969996, "grad_norm": 0.4736841320991516, "learning_rate": 4.863027891748518e-06, "loss": 0.7576, "step": 662 }, { "epoch": 0.027477309461643666, "grad_norm": 0.47134703397750854, "learning_rate": 4.8628206722201505e-06, "loss": 0.7837, "step": 663 }, { "epoch": 0.027518753367317336, "grad_norm": 0.46176448464393616, "learning_rate": 4.862613452691782e-06, "loss": 0.7808, "step": 664 }, { "epoch": 0.027560197272991006, "grad_norm": 0.4663459062576294, "learning_rate": 4.862406233163413e-06, "loss": 0.7507, "step": 665 }, { "epoch": 0.027601641178664676, "grad_norm": 0.5323469042778015, "learning_rate": 4.8621990136350455e-06, "loss": 0.811, "step": 666 }, { "epoch": 0.027643085084338347, "grad_norm": 0.4284200072288513, "learning_rate": 4.861991794106677e-06, "loss": 0.7959, "step": 667 }, { "epoch": 
0.02768452899001202, "grad_norm": 0.43598589301109314, "learning_rate": 4.861784574578309e-06, "loss": 0.7664, "step": 668 }, { "epoch": 0.02772597289568569, "grad_norm": 0.5601446032524109, "learning_rate": 4.8615773550499405e-06, "loss": 0.8423, "step": 669 }, { "epoch": 0.02776741680135936, "grad_norm": 0.47742122411727905, "learning_rate": 4.861370135521572e-06, "loss": 0.7942, "step": 670 }, { "epoch": 0.02780886070703303, "grad_norm": 0.5415261387825012, "learning_rate": 4.861162915993203e-06, "loss": 0.7916, "step": 671 }, { "epoch": 0.0278503046127067, "grad_norm": 0.501384973526001, "learning_rate": 4.8609556964648355e-06, "loss": 0.7976, "step": 672 }, { "epoch": 0.02789174851838037, "grad_norm": 0.4537607431411743, "learning_rate": 4.860748476936467e-06, "loss": 0.7682, "step": 673 }, { "epoch": 0.027933192424054044, "grad_norm": 0.4841330051422119, "learning_rate": 4.860541257408098e-06, "loss": 0.7483, "step": 674 }, { "epoch": 0.027974636329727715, "grad_norm": 0.4894607961177826, "learning_rate": 4.8603340378797305e-06, "loss": 0.833, "step": 675 }, { "epoch": 0.028016080235401385, "grad_norm": 0.4641619920730591, "learning_rate": 4.860126818351362e-06, "loss": 0.74, "step": 676 }, { "epoch": 0.028057524141075055, "grad_norm": 0.4701530337333679, "learning_rate": 4.859919598822993e-06, "loss": 0.7727, "step": 677 }, { "epoch": 0.028098968046748725, "grad_norm": 0.46435436606407166, "learning_rate": 4.859712379294625e-06, "loss": 0.8499, "step": 678 }, { "epoch": 0.028140411952422395, "grad_norm": 0.4807320237159729, "learning_rate": 4.859505159766257e-06, "loss": 0.8267, "step": 679 }, { "epoch": 0.02818185585809607, "grad_norm": 0.46613964438438416, "learning_rate": 4.859297940237888e-06, "loss": 0.7937, "step": 680 }, { "epoch": 0.02822329976376974, "grad_norm": 0.4798445999622345, "learning_rate": 4.85909072070952e-06, "loss": 0.7693, "step": 681 }, { "epoch": 0.02826474366944341, "grad_norm": 0.4688875675201416, "learning_rate": 
4.858883501181152e-06, "loss": 0.7822, "step": 682 }, { "epoch": 0.02830618757511708, "grad_norm": 0.5104736685752869, "learning_rate": 4.858676281652783e-06, "loss": 0.8098, "step": 683 }, { "epoch": 0.02834763148079075, "grad_norm": 0.44255852699279785, "learning_rate": 4.8584690621244155e-06, "loss": 0.7878, "step": 684 }, { "epoch": 0.02838907538646442, "grad_norm": 0.461228609085083, "learning_rate": 4.858261842596046e-06, "loss": 0.8057, "step": 685 }, { "epoch": 0.028430519292138093, "grad_norm": 0.4532942771911621, "learning_rate": 4.858054623067678e-06, "loss": 0.7722, "step": 686 }, { "epoch": 0.028471963197811763, "grad_norm": 0.4623205363750458, "learning_rate": 4.85784740353931e-06, "loss": 0.7737, "step": 687 }, { "epoch": 0.028513407103485433, "grad_norm": 0.4836650788784027, "learning_rate": 4.857640184010942e-06, "loss": 0.8596, "step": 688 }, { "epoch": 0.028554851009159103, "grad_norm": 0.5195596814155579, "learning_rate": 4.857432964482573e-06, "loss": 0.8511, "step": 689 }, { "epoch": 0.028596294914832773, "grad_norm": 0.4888651669025421, "learning_rate": 4.857225744954205e-06, "loss": 0.7889, "step": 690 }, { "epoch": 0.028637738820506443, "grad_norm": 0.4511382281780243, "learning_rate": 4.857018525425837e-06, "loss": 0.7546, "step": 691 }, { "epoch": 0.028679182726180114, "grad_norm": 0.42539161443710327, "learning_rate": 4.856811305897468e-06, "loss": 0.8123, "step": 692 }, { "epoch": 0.028720626631853787, "grad_norm": 0.44170787930488586, "learning_rate": 4.8566040863691e-06, "loss": 0.8096, "step": 693 }, { "epoch": 0.028762070537527457, "grad_norm": 0.46986204385757446, "learning_rate": 4.856396866840731e-06, "loss": 0.814, "step": 694 }, { "epoch": 0.028803514443201127, "grad_norm": 0.48899999260902405, "learning_rate": 4.856189647312363e-06, "loss": 0.854, "step": 695 }, { "epoch": 0.028844958348874798, "grad_norm": 0.48885342478752136, "learning_rate": 4.855982427783995e-06, "loss": 0.7871, "step": 696 }, { "epoch": 
0.028886402254548468, "grad_norm": 0.4346855878829956, "learning_rate": 4.855775208255626e-06, "loss": 0.7156, "step": 697 }, { "epoch": 0.028927846160222138, "grad_norm": 0.4426824152469635, "learning_rate": 4.855567988727258e-06, "loss": 0.803, "step": 698 }, { "epoch": 0.02896929006589581, "grad_norm": 0.4890058636665344, "learning_rate": 4.85536076919889e-06, "loss": 0.8206, "step": 699 }, { "epoch": 0.02901073397156948, "grad_norm": 0.42204558849334717, "learning_rate": 4.855153549670522e-06, "loss": 0.7463, "step": 700 }, { "epoch": 0.02905217787724315, "grad_norm": 0.46971461176872253, "learning_rate": 4.8549463301421525e-06, "loss": 0.7766, "step": 701 }, { "epoch": 0.02909362178291682, "grad_norm": 0.5029205679893494, "learning_rate": 4.854739110613785e-06, "loss": 0.783, "step": 702 }, { "epoch": 0.029135065688590492, "grad_norm": 0.5121147632598877, "learning_rate": 4.854531891085416e-06, "loss": 0.8743, "step": 703 }, { "epoch": 0.029176509594264162, "grad_norm": 0.480033814907074, "learning_rate": 4.854324671557048e-06, "loss": 0.7969, "step": 704 }, { "epoch": 0.029217953499937836, "grad_norm": 0.47659072279930115, "learning_rate": 4.85411745202868e-06, "loss": 0.7544, "step": 705 }, { "epoch": 0.029259397405611506, "grad_norm": 0.4808111786842346, "learning_rate": 4.853910232500311e-06, "loss": 0.8296, "step": 706 }, { "epoch": 0.029300841311285176, "grad_norm": 0.45272013545036316, "learning_rate": 4.853703012971943e-06, "loss": 0.7883, "step": 707 }, { "epoch": 0.029342285216958846, "grad_norm": 0.478324830532074, "learning_rate": 4.853495793443575e-06, "loss": 0.813, "step": 708 }, { "epoch": 0.029383729122632516, "grad_norm": 0.4619821012020111, "learning_rate": 4.853288573915206e-06, "loss": 0.7922, "step": 709 }, { "epoch": 0.029425173028306186, "grad_norm": 0.44082069396972656, "learning_rate": 4.8530813543868375e-06, "loss": 0.7776, "step": 710 }, { "epoch": 0.02946661693397986, "grad_norm": 0.4187260866165161, "learning_rate": 
4.85287413485847e-06, "loss": 0.7288, "step": 711 }, { "epoch": 0.02950806083965353, "grad_norm": 0.4479549527168274, "learning_rate": 4.852666915330101e-06, "loss": 0.7932, "step": 712 }, { "epoch": 0.0295495047453272, "grad_norm": 0.4630022644996643, "learning_rate": 4.8524596958017325e-06, "loss": 0.7803, "step": 713 }, { "epoch": 0.02959094865100087, "grad_norm": 0.4839380085468292, "learning_rate": 4.852252476273364e-06, "loss": 0.8027, "step": 714 }, { "epoch": 0.02963239255667454, "grad_norm": 0.52446448802948, "learning_rate": 4.852045256744996e-06, "loss": 0.8098, "step": 715 }, { "epoch": 0.02967383646234821, "grad_norm": 0.4584200978279114, "learning_rate": 4.851838037216628e-06, "loss": 0.8459, "step": 716 }, { "epoch": 0.029715280368021884, "grad_norm": 0.45347079634666443, "learning_rate": 4.851630817688259e-06, "loss": 0.8269, "step": 717 }, { "epoch": 0.029756724273695554, "grad_norm": 0.46492689847946167, "learning_rate": 4.851423598159891e-06, "loss": 0.7617, "step": 718 }, { "epoch": 0.029798168179369224, "grad_norm": 0.45526203513145447, "learning_rate": 4.8512163786315225e-06, "loss": 0.7146, "step": 719 }, { "epoch": 0.029839612085042894, "grad_norm": 0.4939683973789215, "learning_rate": 4.851009159103155e-06, "loss": 0.7861, "step": 720 }, { "epoch": 0.029881055990716564, "grad_norm": 0.4907040596008301, "learning_rate": 4.850801939574786e-06, "loss": 0.8308, "step": 721 }, { "epoch": 0.029922499896390235, "grad_norm": 0.4499310553073883, "learning_rate": 4.8505947200464175e-06, "loss": 0.8, "step": 722 }, { "epoch": 0.029963943802063908, "grad_norm": 0.4763675034046173, "learning_rate": 4.850387500518049e-06, "loss": 0.7598, "step": 723 }, { "epoch": 0.03000538770773758, "grad_norm": 0.4945710003376007, "learning_rate": 4.850180280989681e-06, "loss": 0.8596, "step": 724 }, { "epoch": 0.03004683161341125, "grad_norm": 0.4911648631095886, "learning_rate": 4.8499730614613125e-06, "loss": 0.8088, "step": 725 }, { "epoch": 0.03008827551908492, 
"grad_norm": 0.4712153673171997, "learning_rate": 4.849765841932944e-06, "loss": 0.792, "step": 726 }, { "epoch": 0.03012971942475859, "grad_norm": 0.45330849289894104, "learning_rate": 4.849558622404576e-06, "loss": 0.8354, "step": 727 }, { "epoch": 0.03017116333043226, "grad_norm": 0.4708772599697113, "learning_rate": 4.8493514028762075e-06, "loss": 0.8049, "step": 728 }, { "epoch": 0.03021260723610593, "grad_norm": 0.4524977505207062, "learning_rate": 4.849144183347839e-06, "loss": 0.7917, "step": 729 }, { "epoch": 0.030254051141779603, "grad_norm": 0.5035803318023682, "learning_rate": 4.84893696381947e-06, "loss": 0.7781, "step": 730 }, { "epoch": 0.030295495047453273, "grad_norm": 0.47172626852989197, "learning_rate": 4.8487297442911025e-06, "loss": 0.7856, "step": 731 }, { "epoch": 0.030336938953126943, "grad_norm": 0.4861360192298889, "learning_rate": 4.848522524762734e-06, "loss": 0.8303, "step": 732 }, { "epoch": 0.030378382858800613, "grad_norm": 0.44855186343193054, "learning_rate": 4.848315305234365e-06, "loss": 0.7377, "step": 733 }, { "epoch": 0.030419826764474283, "grad_norm": 0.477257639169693, "learning_rate": 4.8481080857059975e-06, "loss": 0.7854, "step": 734 }, { "epoch": 0.030461270670147953, "grad_norm": 0.5123553276062012, "learning_rate": 4.847900866177629e-06, "loss": 0.8237, "step": 735 }, { "epoch": 0.030502714575821627, "grad_norm": 0.49826470017433167, "learning_rate": 4.847693646649261e-06, "loss": 0.8208, "step": 736 }, { "epoch": 0.030544158481495297, "grad_norm": 0.4987088441848755, "learning_rate": 4.8474864271208925e-06, "loss": 0.7612, "step": 737 }, { "epoch": 0.030585602387168967, "grad_norm": 0.5046583414077759, "learning_rate": 4.847279207592524e-06, "loss": 0.8271, "step": 738 }, { "epoch": 0.030627046292842637, "grad_norm": 0.5096154808998108, "learning_rate": 4.847071988064155e-06, "loss": 0.8513, "step": 739 }, { "epoch": 0.030668490198516307, "grad_norm": 0.43660619854927063, "learning_rate": 4.8468647685357875e-06, 
"loss": 0.8267, "step": 740 }, { "epoch": 0.030709934104189977, "grad_norm": 0.4789735674858093, "learning_rate": 4.846657549007419e-06, "loss": 0.7532, "step": 741 }, { "epoch": 0.03075137800986365, "grad_norm": 0.43951645493507385, "learning_rate": 4.84645032947905e-06, "loss": 0.7708, "step": 742 }, { "epoch": 0.03079282191553732, "grad_norm": 0.46978306770324707, "learning_rate": 4.8462431099506825e-06, "loss": 0.8218, "step": 743 }, { "epoch": 0.03083426582121099, "grad_norm": 0.5086688995361328, "learning_rate": 4.846035890422314e-06, "loss": 0.8218, "step": 744 }, { "epoch": 0.03087570972688466, "grad_norm": 0.4742758870124817, "learning_rate": 4.845828670893945e-06, "loss": 0.7444, "step": 745 }, { "epoch": 0.03091715363255833, "grad_norm": 0.4543149769306183, "learning_rate": 4.845621451365577e-06, "loss": 0.7594, "step": 746 }, { "epoch": 0.030958597538232, "grad_norm": 0.4872720241546631, "learning_rate": 4.845414231837209e-06, "loss": 0.7544, "step": 747 }, { "epoch": 0.031000041443905675, "grad_norm": 0.4720541536808014, "learning_rate": 4.84520701230884e-06, "loss": 0.8198, "step": 748 }, { "epoch": 0.031041485349579345, "grad_norm": 0.448688805103302, "learning_rate": 4.844999792780472e-06, "loss": 0.7834, "step": 749 }, { "epoch": 0.031082929255253015, "grad_norm": 0.4824693202972412, "learning_rate": 4.844792573252104e-06, "loss": 0.8193, "step": 750 }, { "epoch": 0.031124373160926685, "grad_norm": 0.5053086280822754, "learning_rate": 4.844585353723735e-06, "loss": 0.7954, "step": 751 }, { "epoch": 0.031165817066600356, "grad_norm": 0.46447333693504333, "learning_rate": 4.8443781341953675e-06, "loss": 0.7952, "step": 752 }, { "epoch": 0.031207260972274026, "grad_norm": 0.5440570712089539, "learning_rate": 4.844170914666999e-06, "loss": 0.8206, "step": 753 }, { "epoch": 0.0312487048779477, "grad_norm": 0.46322107315063477, "learning_rate": 4.84396369513863e-06, "loss": 0.7988, "step": 754 }, { "epoch": 0.031290148783621366, "grad_norm": 
0.5141944885253906, "learning_rate": 4.843756475610262e-06, "loss": 0.8601, "step": 755 }, { "epoch": 0.031331592689295036, "grad_norm": 0.47407636046409607, "learning_rate": 4.843549256081894e-06, "loss": 0.8118, "step": 756 }, { "epoch": 0.03137303659496871, "grad_norm": 0.502138614654541, "learning_rate": 4.843342036553525e-06, "loss": 0.8162, "step": 757 }, { "epoch": 0.03141448050064238, "grad_norm": 0.4771718382835388, "learning_rate": 4.843134817025157e-06, "loss": 0.8069, "step": 758 }, { "epoch": 0.03145592440631605, "grad_norm": 0.5140120387077332, "learning_rate": 4.842927597496789e-06, "loss": 0.781, "step": 759 }, { "epoch": 0.031497368311989724, "grad_norm": 0.4754839539527893, "learning_rate": 4.84272037796842e-06, "loss": 0.8174, "step": 760 }, { "epoch": 0.031538812217663394, "grad_norm": 0.4931987524032593, "learning_rate": 4.842513158440052e-06, "loss": 0.7605, "step": 761 }, { "epoch": 0.031580256123337064, "grad_norm": 0.49690335988998413, "learning_rate": 4.842305938911683e-06, "loss": 0.8198, "step": 762 }, { "epoch": 0.031621700029010734, "grad_norm": 0.4721209704875946, "learning_rate": 4.842098719383315e-06, "loss": 0.803, "step": 763 }, { "epoch": 0.031663143934684404, "grad_norm": 0.45114579796791077, "learning_rate": 4.841891499854947e-06, "loss": 0.8425, "step": 764 }, { "epoch": 0.031704587840358074, "grad_norm": 0.4510507583618164, "learning_rate": 4.841684280326578e-06, "loss": 0.7568, "step": 765 }, { "epoch": 0.031746031746031744, "grad_norm": 0.4513837993144989, "learning_rate": 4.8414770607982095e-06, "loss": 0.7849, "step": 766 }, { "epoch": 0.031787475651705414, "grad_norm": 0.510221004486084, "learning_rate": 4.841269841269842e-06, "loss": 0.7654, "step": 767 }, { "epoch": 0.031828919557379085, "grad_norm": 0.47325363755226135, "learning_rate": 4.841062621741474e-06, "loss": 0.803, "step": 768 }, { "epoch": 0.031870363463052755, "grad_norm": 0.46081799268722534, "learning_rate": 4.8408554022131045e-06, "loss": 0.7904, "step": 
769 }, { "epoch": 0.03191180736872643, "grad_norm": 0.5060776472091675, "learning_rate": 4.840648182684737e-06, "loss": 0.7927, "step": 770 }, { "epoch": 0.0319532512744001, "grad_norm": 0.47735685110092163, "learning_rate": 4.840440963156368e-06, "loss": 0.8318, "step": 771 }, { "epoch": 0.03199469518007377, "grad_norm": 0.4513167142868042, "learning_rate": 4.840233743628e-06, "loss": 0.7734, "step": 772 }, { "epoch": 0.03203613908574744, "grad_norm": 0.5181498527526855, "learning_rate": 4.840026524099632e-06, "loss": 0.8201, "step": 773 }, { "epoch": 0.03207758299142111, "grad_norm": 0.48776254057884216, "learning_rate": 4.839819304571263e-06, "loss": 0.7979, "step": 774 }, { "epoch": 0.03211902689709478, "grad_norm": 0.45880892872810364, "learning_rate": 4.8396120850428945e-06, "loss": 0.8087, "step": 775 }, { "epoch": 0.03216047080276845, "grad_norm": 0.4686993658542633, "learning_rate": 4.839404865514527e-06, "loss": 0.8337, "step": 776 }, { "epoch": 0.03220191470844212, "grad_norm": 0.4397232234477997, "learning_rate": 4.839197645986158e-06, "loss": 0.7578, "step": 777 }, { "epoch": 0.03224335861411579, "grad_norm": 0.4607049226760864, "learning_rate": 4.8389904264577895e-06, "loss": 0.8018, "step": 778 }, { "epoch": 0.03228480251978946, "grad_norm": 0.4511394500732422, "learning_rate": 4.838783206929422e-06, "loss": 0.8093, "step": 779 }, { "epoch": 0.03232624642546313, "grad_norm": 0.4307085871696472, "learning_rate": 4.838575987401053e-06, "loss": 0.7722, "step": 780 }, { "epoch": 0.0323676903311368, "grad_norm": 0.4876425862312317, "learning_rate": 4.8383687678726845e-06, "loss": 0.7537, "step": 781 }, { "epoch": 0.03240913423681048, "grad_norm": 0.4792354702949524, "learning_rate": 4.838161548344316e-06, "loss": 0.8357, "step": 782 }, { "epoch": 0.03245057814248415, "grad_norm": 0.4683007299900055, "learning_rate": 4.837954328815948e-06, "loss": 0.7688, "step": 783 }, { "epoch": 0.03249202204815782, "grad_norm": 0.46011966466903687, "learning_rate": 
4.8377471092875795e-06, "loss": 0.8152, "step": 784 }, { "epoch": 0.03253346595383149, "grad_norm": 0.48548996448516846, "learning_rate": 4.837539889759211e-06, "loss": 0.814, "step": 785 }, { "epoch": 0.03257490985950516, "grad_norm": 0.4813372492790222, "learning_rate": 4.837332670230843e-06, "loss": 0.8186, "step": 786 }, { "epoch": 0.03261635376517883, "grad_norm": 0.4809481203556061, "learning_rate": 4.8371254507024745e-06, "loss": 0.7795, "step": 787 }, { "epoch": 0.0326577976708525, "grad_norm": 0.47284194827079773, "learning_rate": 4.836918231174107e-06, "loss": 0.8435, "step": 788 }, { "epoch": 0.03269924157652617, "grad_norm": 0.48725366592407227, "learning_rate": 4.836711011645738e-06, "loss": 0.7795, "step": 789 }, { "epoch": 0.03274068548219984, "grad_norm": 0.5093293190002441, "learning_rate": 4.8365037921173695e-06, "loss": 0.8564, "step": 790 }, { "epoch": 0.03278212938787351, "grad_norm": 0.45030084252357483, "learning_rate": 4.836296572589001e-06, "loss": 0.7986, "step": 791 }, { "epoch": 0.03282357329354718, "grad_norm": 0.45919808745384216, "learning_rate": 4.836089353060633e-06, "loss": 0.8132, "step": 792 }, { "epoch": 0.03286501719922085, "grad_norm": 0.46075549721717834, "learning_rate": 4.8358821335322645e-06, "loss": 0.7678, "step": 793 }, { "epoch": 0.03290646110489453, "grad_norm": 0.5387317538261414, "learning_rate": 4.835674914003896e-06, "loss": 0.8026, "step": 794 }, { "epoch": 0.0329479050105682, "grad_norm": 0.446251779794693, "learning_rate": 4.835467694475528e-06, "loss": 0.7966, "step": 795 }, { "epoch": 0.03298934891624187, "grad_norm": 0.4801095426082611, "learning_rate": 4.8352604749471595e-06, "loss": 0.785, "step": 796 }, { "epoch": 0.03303079282191554, "grad_norm": 0.47974684834480286, "learning_rate": 4.835053255418791e-06, "loss": 0.7939, "step": 797 }, { "epoch": 0.03307223672758921, "grad_norm": 0.46524977684020996, "learning_rate": 4.834846035890422e-06, "loss": 0.8074, "step": 798 }, { "epoch": 0.03311368063326288, 
"grad_norm": 0.5188291072845459, "learning_rate": 4.8346388163620545e-06, "loss": 0.843, "step": 799 }, { "epoch": 0.03315512453893655, "grad_norm": 0.47121351957321167, "learning_rate": 4.834431596833686e-06, "loss": 0.843, "step": 800 }, { "epoch": 0.03319656844461022, "grad_norm": 0.4718429446220398, "learning_rate": 4.834224377305317e-06, "loss": 0.8169, "step": 801 }, { "epoch": 0.03323801235028389, "grad_norm": 0.6425647139549255, "learning_rate": 4.834017157776949e-06, "loss": 0.8171, "step": 802 }, { "epoch": 0.03327945625595756, "grad_norm": 0.5799872875213623, "learning_rate": 4.833809938248581e-06, "loss": 0.7239, "step": 803 }, { "epoch": 0.03332090016163123, "grad_norm": 0.46306636929512024, "learning_rate": 4.833602718720213e-06, "loss": 0.8054, "step": 804 }, { "epoch": 0.0333623440673049, "grad_norm": 0.4275192320346832, "learning_rate": 4.8333954991918445e-06, "loss": 0.7664, "step": 805 }, { "epoch": 0.03340378797297858, "grad_norm": 0.5045058131217957, "learning_rate": 4.833188279663476e-06, "loss": 0.7881, "step": 806 }, { "epoch": 0.03344523187865225, "grad_norm": 0.44459038972854614, "learning_rate": 4.832981060135107e-06, "loss": 0.853, "step": 807 }, { "epoch": 0.03348667578432592, "grad_norm": 0.40647628903388977, "learning_rate": 4.8327738406067395e-06, "loss": 0.7952, "step": 808 }, { "epoch": 0.03352811968999959, "grad_norm": 0.4865948557853699, "learning_rate": 4.832566621078371e-06, "loss": 0.8064, "step": 809 }, { "epoch": 0.03356956359567326, "grad_norm": 0.4718790650367737, "learning_rate": 4.832359401550002e-06, "loss": 0.8225, "step": 810 }, { "epoch": 0.03361100750134693, "grad_norm": 0.49259084463119507, "learning_rate": 4.8321521820216345e-06, "loss": 0.7886, "step": 811 }, { "epoch": 0.0336524514070206, "grad_norm": 0.48640403151512146, "learning_rate": 4.831944962493266e-06, "loss": 0.7993, "step": 812 }, { "epoch": 0.03369389531269427, "grad_norm": 0.43770548701286316, "learning_rate": 4.831737742964897e-06, "loss": 0.8164, 
"step": 813 }, { "epoch": 0.03373533921836794, "grad_norm": 0.49797001481056213, "learning_rate": 4.831530523436529e-06, "loss": 0.8281, "step": 814 }, { "epoch": 0.03377678312404161, "grad_norm": 0.4484914243221283, "learning_rate": 4.831323303908161e-06, "loss": 0.8237, "step": 815 }, { "epoch": 0.03381822702971528, "grad_norm": 0.514343798160553, "learning_rate": 4.831116084379792e-06, "loss": 0.7988, "step": 816 }, { "epoch": 0.03385967093538895, "grad_norm": 0.48368391394615173, "learning_rate": 4.830908864851424e-06, "loss": 0.8132, "step": 817 }, { "epoch": 0.03390111484106262, "grad_norm": 0.4572143256664276, "learning_rate": 4.830701645323055e-06, "loss": 0.7871, "step": 818 }, { "epoch": 0.033942558746736295, "grad_norm": 0.46438106894493103, "learning_rate": 4.830494425794687e-06, "loss": 0.7914, "step": 819 }, { "epoch": 0.033984002652409966, "grad_norm": 0.4726259708404541, "learning_rate": 4.8302872062663196e-06, "loss": 0.801, "step": 820 }, { "epoch": 0.034025446558083636, "grad_norm": 0.4818885624408722, "learning_rate": 4.830079986737951e-06, "loss": 0.8318, "step": 821 }, { "epoch": 0.034066890463757306, "grad_norm": 0.44731706380844116, "learning_rate": 4.829872767209582e-06, "loss": 0.7549, "step": 822 }, { "epoch": 0.034108334369430976, "grad_norm": 0.44706088304519653, "learning_rate": 4.829665547681214e-06, "loss": 0.7705, "step": 823 }, { "epoch": 0.034149778275104646, "grad_norm": 0.48286518454551697, "learning_rate": 4.829458328152846e-06, "loss": 0.8022, "step": 824 }, { "epoch": 0.034191222180778316, "grad_norm": 0.4611935615539551, "learning_rate": 4.829251108624477e-06, "loss": 0.8064, "step": 825 }, { "epoch": 0.034232666086451986, "grad_norm": 0.5443453192710876, "learning_rate": 4.829043889096109e-06, "loss": 0.8209, "step": 826 }, { "epoch": 0.034274109992125656, "grad_norm": 0.47493240237236023, "learning_rate": 4.82883666956774e-06, "loss": 0.7751, "step": 827 }, { "epoch": 0.03431555389779933, "grad_norm": 0.4742193818092346, 
"learning_rate": 4.828629450039372e-06, "loss": 0.7634, "step": 828 }, { "epoch": 0.034356997803473, "grad_norm": 0.44926828145980835, "learning_rate": 4.828422230511004e-06, "loss": 0.761, "step": 829 }, { "epoch": 0.03439844170914667, "grad_norm": 0.4580516219139099, "learning_rate": 4.828215010982635e-06, "loss": 0.7488, "step": 830 }, { "epoch": 0.034439885614820344, "grad_norm": 0.45133379101753235, "learning_rate": 4.828007791454267e-06, "loss": 0.749, "step": 831 }, { "epoch": 0.034481329520494014, "grad_norm": 0.43070828914642334, "learning_rate": 4.827800571925899e-06, "loss": 0.7842, "step": 832 }, { "epoch": 0.034522773426167684, "grad_norm": 0.463408499956131, "learning_rate": 4.82759335239753e-06, "loss": 0.804, "step": 833 }, { "epoch": 0.034564217331841354, "grad_norm": 0.424195796251297, "learning_rate": 4.8273861328691615e-06, "loss": 0.7493, "step": 834 }, { "epoch": 0.034605661237515024, "grad_norm": 0.4392853081226349, "learning_rate": 4.827178913340794e-06, "loss": 0.8049, "step": 835 }, { "epoch": 0.034647105143188694, "grad_norm": 0.5163056254386902, "learning_rate": 4.826971693812425e-06, "loss": 0.8274, "step": 836 }, { "epoch": 0.034688549048862365, "grad_norm": 0.46480703353881836, "learning_rate": 4.8267644742840565e-06, "loss": 0.8071, "step": 837 }, { "epoch": 0.034729992954536035, "grad_norm": 0.4739395081996918, "learning_rate": 4.826557254755689e-06, "loss": 0.7964, "step": 838 }, { "epoch": 0.034771436860209705, "grad_norm": 0.45199519395828247, "learning_rate": 4.82635003522732e-06, "loss": 0.803, "step": 839 }, { "epoch": 0.034812880765883375, "grad_norm": 0.44721484184265137, "learning_rate": 4.826142815698952e-06, "loss": 0.7593, "step": 840 }, { "epoch": 0.034854324671557045, "grad_norm": 0.4465482831001282, "learning_rate": 4.825935596170584e-06, "loss": 0.814, "step": 841 }, { "epoch": 0.034895768577230715, "grad_norm": 0.5203372836112976, "learning_rate": 4.825728376642215e-06, "loss": 0.8416, "step": 842 }, { "epoch": 
0.03493721248290439, "grad_norm": 0.48009631037712097, "learning_rate": 4.8255211571138465e-06, "loss": 0.905, "step": 843 }, { "epoch": 0.03497865638857806, "grad_norm": 0.48944076895713806, "learning_rate": 4.825313937585479e-06, "loss": 0.813, "step": 844 }, { "epoch": 0.03502010029425173, "grad_norm": 0.5161928534507751, "learning_rate": 4.82510671805711e-06, "loss": 0.8391, "step": 845 }, { "epoch": 0.0350615441999254, "grad_norm": 0.49111801385879517, "learning_rate": 4.8248994985287415e-06, "loss": 0.7148, "step": 846 }, { "epoch": 0.03510298810559907, "grad_norm": 0.452984482049942, "learning_rate": 4.824692279000374e-06, "loss": 0.7896, "step": 847 }, { "epoch": 0.03514443201127274, "grad_norm": 0.44530120491981506, "learning_rate": 4.824485059472005e-06, "loss": 0.7717, "step": 848 }, { "epoch": 0.03518587591694641, "grad_norm": 0.4604248106479645, "learning_rate": 4.8242778399436365e-06, "loss": 0.8225, "step": 849 }, { "epoch": 0.03522731982262008, "grad_norm": 0.49463069438934326, "learning_rate": 4.824070620415268e-06, "loss": 0.7812, "step": 850 }, { "epoch": 0.03526876372829375, "grad_norm": 0.4273708760738373, "learning_rate": 4.8238634008869e-06, "loss": 0.7625, "step": 851 }, { "epoch": 0.03531020763396742, "grad_norm": 0.5574250221252441, "learning_rate": 4.8236561813585315e-06, "loss": 0.8408, "step": 852 }, { "epoch": 0.035351651539641094, "grad_norm": 0.49560749530792236, "learning_rate": 4.823448961830163e-06, "loss": 0.8496, "step": 853 }, { "epoch": 0.035393095445314764, "grad_norm": 0.5219417810440063, "learning_rate": 4.823241742301794e-06, "loss": 0.814, "step": 854 }, { "epoch": 0.035434539350988434, "grad_norm": 0.4465983510017395, "learning_rate": 4.8230345227734265e-06, "loss": 0.8118, "step": 855 }, { "epoch": 0.03547598325666211, "grad_norm": 0.4661894142627716, "learning_rate": 4.822827303245059e-06, "loss": 0.7755, "step": 856 }, { "epoch": 0.03551742716233578, "grad_norm": 0.45798560976982117, "learning_rate": 
4.82262008371669e-06, "loss": 0.8206, "step": 857 }, { "epoch": 0.03555887106800945, "grad_norm": 0.447319358587265, "learning_rate": 4.8224128641883215e-06, "loss": 0.8215, "step": 858 }, { "epoch": 0.03560031497368312, "grad_norm": 0.47446373105049133, "learning_rate": 4.822205644659953e-06, "loss": 0.7686, "step": 859 }, { "epoch": 0.03564175887935679, "grad_norm": 0.5082666873931885, "learning_rate": 4.821998425131585e-06, "loss": 0.8584, "step": 860 }, { "epoch": 0.03568320278503046, "grad_norm": 0.4586051404476166, "learning_rate": 4.8217912056032165e-06, "loss": 0.7856, "step": 861 }, { "epoch": 0.03572464669070413, "grad_norm": 0.4524220824241638, "learning_rate": 4.821583986074848e-06, "loss": 0.7869, "step": 862 }, { "epoch": 0.0357660905963778, "grad_norm": 0.47431883215904236, "learning_rate": 4.821376766546479e-06, "loss": 0.8022, "step": 863 }, { "epoch": 0.03580753450205147, "grad_norm": 0.4569200277328491, "learning_rate": 4.8211695470181115e-06, "loss": 0.8242, "step": 864 }, { "epoch": 0.03584897840772514, "grad_norm": 0.4440664052963257, "learning_rate": 4.820962327489743e-06, "loss": 0.7637, "step": 865 }, { "epoch": 0.03589042231339881, "grad_norm": 0.4485827386379242, "learning_rate": 4.820755107961374e-06, "loss": 0.8101, "step": 866 }, { "epoch": 0.03593186621907248, "grad_norm": 0.46246907114982605, "learning_rate": 4.8205478884330065e-06, "loss": 0.7793, "step": 867 }, { "epoch": 0.03597331012474616, "grad_norm": 0.4496723413467407, "learning_rate": 4.820340668904638e-06, "loss": 0.7684, "step": 868 }, { "epoch": 0.03601475403041983, "grad_norm": 0.43587517738342285, "learning_rate": 4.820133449376269e-06, "loss": 0.7339, "step": 869 }, { "epoch": 0.0360561979360935, "grad_norm": 0.4498605728149414, "learning_rate": 4.819926229847901e-06, "loss": 0.8093, "step": 870 }, { "epoch": 0.03609764184176717, "grad_norm": 0.44379284977912903, "learning_rate": 4.819719010319533e-06, "loss": 0.7986, "step": 871 }, { "epoch": 0.03613908574744084, 
"grad_norm": 0.4766504466533661, "learning_rate": 4.819511790791165e-06, "loss": 0.7517, "step": 872 }, { "epoch": 0.03618052965311451, "grad_norm": 0.45311790704727173, "learning_rate": 4.8193045712627966e-06, "loss": 0.7595, "step": 873 }, { "epoch": 0.03622197355878818, "grad_norm": 0.4438081979751587, "learning_rate": 4.819097351734428e-06, "loss": 0.7576, "step": 874 }, { "epoch": 0.03626341746446185, "grad_norm": 0.4847230017185211, "learning_rate": 4.818890132206059e-06, "loss": 0.8088, "step": 875 }, { "epoch": 0.03630486137013552, "grad_norm": 0.43706944584846497, "learning_rate": 4.8186829126776916e-06, "loss": 0.7964, "step": 876 }, { "epoch": 0.03634630527580919, "grad_norm": 0.47320547699928284, "learning_rate": 4.818475693149323e-06, "loss": 0.783, "step": 877 }, { "epoch": 0.03638774918148286, "grad_norm": 0.4701194167137146, "learning_rate": 4.818268473620954e-06, "loss": 0.8491, "step": 878 }, { "epoch": 0.03642919308715653, "grad_norm": 0.44274404644966125, "learning_rate": 4.818061254092586e-06, "loss": 0.7834, "step": 879 }, { "epoch": 0.03647063699283021, "grad_norm": 0.4761819839477539, "learning_rate": 4.817854034564218e-06, "loss": 0.812, "step": 880 }, { "epoch": 0.03651208089850388, "grad_norm": 0.4779709577560425, "learning_rate": 4.817646815035849e-06, "loss": 0.781, "step": 881 }, { "epoch": 0.03655352480417755, "grad_norm": 0.42150968313217163, "learning_rate": 4.817439595507481e-06, "loss": 0.7827, "step": 882 }, { "epoch": 0.03659496870985122, "grad_norm": 0.46990767121315, "learning_rate": 4.817232375979113e-06, "loss": 0.8174, "step": 883 }, { "epoch": 0.03663641261552489, "grad_norm": 0.4755314588546753, "learning_rate": 4.817025156450744e-06, "loss": 0.7622, "step": 884 }, { "epoch": 0.03667785652119856, "grad_norm": 0.4565747082233429, "learning_rate": 4.816817936922376e-06, "loss": 0.7478, "step": 885 }, { "epoch": 0.03671930042687223, "grad_norm": 0.4576369822025299, "learning_rate": 4.816610717394007e-06, "loss": 0.8198, 
"step": 886 }, { "epoch": 0.0367607443325459, "grad_norm": 0.44579315185546875, "learning_rate": 4.816403497865639e-06, "loss": 0.8186, "step": 887 }, { "epoch": 0.03680218823821957, "grad_norm": 0.44514021277427673, "learning_rate": 4.816196278337271e-06, "loss": 0.8125, "step": 888 }, { "epoch": 0.03684363214389324, "grad_norm": 0.491281121969223, "learning_rate": 4.815989058808903e-06, "loss": 0.8308, "step": 889 }, { "epoch": 0.03688507604956691, "grad_norm": 0.4317649006843567, "learning_rate": 4.815781839280534e-06, "loss": 0.8181, "step": 890 }, { "epoch": 0.03692651995524058, "grad_norm": 0.4579673707485199, "learning_rate": 4.815574619752166e-06, "loss": 0.7024, "step": 891 }, { "epoch": 0.03696796386091425, "grad_norm": 0.4602883756160736, "learning_rate": 4.815367400223798e-06, "loss": 0.7803, "step": 892 }, { "epoch": 0.037009407766587926, "grad_norm": 0.517935037612915, "learning_rate": 4.815160180695429e-06, "loss": 0.8113, "step": 893 }, { "epoch": 0.037050851672261596, "grad_norm": 0.48947110772132874, "learning_rate": 4.814952961167061e-06, "loss": 0.8264, "step": 894 }, { "epoch": 0.037092295577935266, "grad_norm": 0.47166675329208374, "learning_rate": 4.814745741638692e-06, "loss": 0.7656, "step": 895 }, { "epoch": 0.03713373948360894, "grad_norm": 0.44415104389190674, "learning_rate": 4.814538522110324e-06, "loss": 0.7576, "step": 896 }, { "epoch": 0.03717518338928261, "grad_norm": 0.4769923686981201, "learning_rate": 4.814331302581956e-06, "loss": 0.7905, "step": 897 }, { "epoch": 0.03721662729495628, "grad_norm": 0.4704188406467438, "learning_rate": 4.814124083053587e-06, "loss": 0.7661, "step": 898 }, { "epoch": 0.03725807120062995, "grad_norm": 0.4227563142776489, "learning_rate": 4.813916863525219e-06, "loss": 0.7949, "step": 899 }, { "epoch": 0.03729951510630362, "grad_norm": 0.4472195506095886, "learning_rate": 4.813709643996851e-06, "loss": 0.7991, "step": 900 }, { "epoch": 0.03734095901197729, "grad_norm": 0.4472038447856903, 
"learning_rate": 4.813502424468482e-06, "loss": 0.7644, "step": 901 }, { "epoch": 0.03738240291765096, "grad_norm": 0.4717763364315033, "learning_rate": 4.8132952049401135e-06, "loss": 0.8101, "step": 902 }, { "epoch": 0.03742384682332463, "grad_norm": 0.5045163631439209, "learning_rate": 4.813087985411746e-06, "loss": 0.7876, "step": 903 }, { "epoch": 0.0374652907289983, "grad_norm": 0.439333975315094, "learning_rate": 4.812880765883377e-06, "loss": 0.7568, "step": 904 }, { "epoch": 0.037506734634671975, "grad_norm": 0.47774791717529297, "learning_rate": 4.812673546355009e-06, "loss": 0.8582, "step": 905 }, { "epoch": 0.037548178540345645, "grad_norm": 0.4379725456237793, "learning_rate": 4.81246632682664e-06, "loss": 0.7554, "step": 906 }, { "epoch": 0.037589622446019315, "grad_norm": 0.45828449726104736, "learning_rate": 4.812259107298272e-06, "loss": 0.8049, "step": 907 }, { "epoch": 0.037631066351692985, "grad_norm": 0.5110654234886169, "learning_rate": 4.812051887769904e-06, "loss": 0.782, "step": 908 }, { "epoch": 0.037672510257366655, "grad_norm": 0.436849981546402, "learning_rate": 4.811844668241536e-06, "loss": 0.7869, "step": 909 }, { "epoch": 0.037713954163040325, "grad_norm": 0.5075635313987732, "learning_rate": 4.811637448713167e-06, "loss": 0.8484, "step": 910 }, { "epoch": 0.037755398068713995, "grad_norm": 0.4612642228603363, "learning_rate": 4.8114302291847985e-06, "loss": 0.8284, "step": 911 }, { "epoch": 0.037796841974387665, "grad_norm": 0.4357260763645172, "learning_rate": 4.811223009656431e-06, "loss": 0.7539, "step": 912 }, { "epoch": 0.037838285880061336, "grad_norm": 0.41013216972351074, "learning_rate": 4.811015790128062e-06, "loss": 0.7336, "step": 913 }, { "epoch": 0.037879729785735006, "grad_norm": 0.5076503157615662, "learning_rate": 4.8108085705996935e-06, "loss": 0.8237, "step": 914 }, { "epoch": 0.037921173691408676, "grad_norm": 0.5303014516830444, "learning_rate": 4.810601351071325e-06, "loss": 0.8384, "step": 915 }, { "epoch": 
0.037962617597082346, "grad_norm": 0.5182031393051147, "learning_rate": 4.810394131542957e-06, "loss": 0.8159, "step": 916 }, { "epoch": 0.03800406150275602, "grad_norm": 0.4546484053134918, "learning_rate": 4.8101869120145885e-06, "loss": 0.7715, "step": 917 }, { "epoch": 0.03804550540842969, "grad_norm": 0.4660073220729828, "learning_rate": 4.80997969248622e-06, "loss": 0.8127, "step": 918 }, { "epoch": 0.03808694931410336, "grad_norm": 0.5112962126731873, "learning_rate": 4.809772472957852e-06, "loss": 0.8281, "step": 919 }, { "epoch": 0.03812839321977703, "grad_norm": 0.467479407787323, "learning_rate": 4.8095652534294835e-06, "loss": 0.7578, "step": 920 }, { "epoch": 0.038169837125450704, "grad_norm": 0.4521731436252594, "learning_rate": 4.809358033901115e-06, "loss": 0.7488, "step": 921 }, { "epoch": 0.038211281031124374, "grad_norm": 0.42674127221107483, "learning_rate": 4.809150814372746e-06, "loss": 0.752, "step": 922 }, { "epoch": 0.038252724936798044, "grad_norm": 0.42088428139686584, "learning_rate": 4.8089435948443785e-06, "loss": 0.7764, "step": 923 }, { "epoch": 0.038294168842471714, "grad_norm": 0.47706201672554016, "learning_rate": 4.80873637531601e-06, "loss": 0.7947, "step": 924 }, { "epoch": 0.038335612748145384, "grad_norm": 0.4748612940311432, "learning_rate": 4.808529155787642e-06, "loss": 0.8188, "step": 925 }, { "epoch": 0.038377056653819054, "grad_norm": 0.45237982273101807, "learning_rate": 4.8083219362592735e-06, "loss": 0.7661, "step": 926 }, { "epoch": 0.038418500559492724, "grad_norm": 0.4897715747356415, "learning_rate": 4.808114716730905e-06, "loss": 0.7974, "step": 927 }, { "epoch": 0.038459944465166394, "grad_norm": 0.4391297996044159, "learning_rate": 4.807907497202537e-06, "loss": 0.7759, "step": 928 }, { "epoch": 0.03850138837084007, "grad_norm": 0.4632616341114044, "learning_rate": 4.8077002776741685e-06, "loss": 0.7887, "step": 929 }, { "epoch": 0.03854283227651374, "grad_norm": 0.4474062919616699, "learning_rate": 
4.8074930581458e-06, "loss": 0.8311, "step": 930 }, { "epoch": 0.03858427618218741, "grad_norm": 0.48291000723838806, "learning_rate": 4.807285838617431e-06, "loss": 0.7312, "step": 931 }, { "epoch": 0.03862572008786108, "grad_norm": 0.4543563723564148, "learning_rate": 4.8070786190890636e-06, "loss": 0.8115, "step": 932 }, { "epoch": 0.03866716399353475, "grad_norm": 0.5148612260818481, "learning_rate": 4.806871399560695e-06, "loss": 0.8621, "step": 933 }, { "epoch": 0.03870860789920842, "grad_norm": 0.4792311489582062, "learning_rate": 4.806664180032326e-06, "loss": 0.8262, "step": 934 }, { "epoch": 0.03875005180488209, "grad_norm": 0.46342992782592773, "learning_rate": 4.8064569605039586e-06, "loss": 0.8003, "step": 935 }, { "epoch": 0.03879149571055576, "grad_norm": 0.45019346475601196, "learning_rate": 4.80624974097559e-06, "loss": 0.7874, "step": 936 }, { "epoch": 0.03883293961622943, "grad_norm": 0.5109577775001526, "learning_rate": 4.806042521447221e-06, "loss": 0.8142, "step": 937 }, { "epoch": 0.0388743835219031, "grad_norm": 0.44418174028396606, "learning_rate": 4.805835301918853e-06, "loss": 0.7892, "step": 938 }, { "epoch": 0.03891582742757677, "grad_norm": 0.4462260901927948, "learning_rate": 4.805628082390485e-06, "loss": 0.7908, "step": 939 }, { "epoch": 0.03895727133325044, "grad_norm": 0.4844035804271698, "learning_rate": 4.805420862862116e-06, "loss": 0.7781, "step": 940 }, { "epoch": 0.03899871523892411, "grad_norm": 0.4537885785102844, "learning_rate": 4.8052136433337486e-06, "loss": 0.7194, "step": 941 }, { "epoch": 0.03904015914459779, "grad_norm": 0.43595224618911743, "learning_rate": 4.80500642380538e-06, "loss": 0.7346, "step": 942 }, { "epoch": 0.03908160305027146, "grad_norm": 0.4676819443702698, "learning_rate": 4.804799204277011e-06, "loss": 0.7646, "step": 943 }, { "epoch": 0.03912304695594513, "grad_norm": 0.5424928665161133, "learning_rate": 4.8045919847486436e-06, "loss": 0.8176, "step": 944 }, { "epoch": 0.0391644908616188, 
"grad_norm": 0.5223216414451599, "learning_rate": 4.804384765220275e-06, "loss": 0.8591, "step": 945 }, { "epoch": 0.03920593476729247, "grad_norm": 0.4513929784297943, "learning_rate": 4.804177545691906e-06, "loss": 0.7903, "step": 946 }, { "epoch": 0.03924737867296614, "grad_norm": 0.4726882576942444, "learning_rate": 4.803970326163538e-06, "loss": 0.8093, "step": 947 }, { "epoch": 0.03928882257863981, "grad_norm": 0.4442755877971649, "learning_rate": 4.80376310663517e-06, "loss": 0.822, "step": 948 }, { "epoch": 0.03933026648431348, "grad_norm": 0.48815596103668213, "learning_rate": 4.803555887106801e-06, "loss": 0.7595, "step": 949 }, { "epoch": 0.03937171038998715, "grad_norm": 0.4548647999763489, "learning_rate": 4.803348667578433e-06, "loss": 0.7561, "step": 950 }, { "epoch": 0.03941315429566082, "grad_norm": 0.48345446586608887, "learning_rate": 4.803141448050065e-06, "loss": 0.7722, "step": 951 }, { "epoch": 0.03945459820133449, "grad_norm": 0.553205132484436, "learning_rate": 4.802934228521696e-06, "loss": 0.8356, "step": 952 }, { "epoch": 0.03949604210700816, "grad_norm": 0.506557822227478, "learning_rate": 4.802727008993328e-06, "loss": 0.8259, "step": 953 }, { "epoch": 0.03953748601268184, "grad_norm": 0.46744129061698914, "learning_rate": 4.802519789464959e-06, "loss": 0.7456, "step": 954 }, { "epoch": 0.03957892991835551, "grad_norm": 0.47676709294319153, "learning_rate": 4.802312569936591e-06, "loss": 0.802, "step": 955 }, { "epoch": 0.03962037382402918, "grad_norm": 0.47377076745033264, "learning_rate": 4.802105350408223e-06, "loss": 0.8062, "step": 956 }, { "epoch": 0.03966181772970285, "grad_norm": 0.4776807427406311, "learning_rate": 4.801898130879855e-06, "loss": 0.7788, "step": 957 }, { "epoch": 0.03970326163537652, "grad_norm": 0.4366399049758911, "learning_rate": 4.8016909113514855e-06, "loss": 0.8247, "step": 958 }, { "epoch": 0.03974470554105019, "grad_norm": 0.4870122969150543, "learning_rate": 4.801483691823118e-06, "loss": 0.8557, 
"step": 959 }, { "epoch": 0.03978614944672386, "grad_norm": 0.4365279972553253, "learning_rate": 4.80127647229475e-06, "loss": 0.7952, "step": 960 }, { "epoch": 0.03982759335239753, "grad_norm": 0.4440559148788452, "learning_rate": 4.801069252766381e-06, "loss": 0.8191, "step": 961 }, { "epoch": 0.0398690372580712, "grad_norm": 0.45365670323371887, "learning_rate": 4.800862033238013e-06, "loss": 0.7848, "step": 962 }, { "epoch": 0.03991048116374487, "grad_norm": 0.43977826833724976, "learning_rate": 4.800654813709644e-06, "loss": 0.8115, "step": 963 }, { "epoch": 0.03995192506941854, "grad_norm": 0.4278276562690735, "learning_rate": 4.800447594181276e-06, "loss": 0.7417, "step": 964 }, { "epoch": 0.03999336897509221, "grad_norm": 0.45921963453292847, "learning_rate": 4.800240374652908e-06, "loss": 0.8319, "step": 965 }, { "epoch": 0.04003481288076589, "grad_norm": 0.4661577641963959, "learning_rate": 4.800033155124539e-06, "loss": 0.7759, "step": 966 }, { "epoch": 0.04007625678643956, "grad_norm": 0.4300996661186218, "learning_rate": 4.7998259355961705e-06, "loss": 0.7546, "step": 967 }, { "epoch": 0.04011770069211323, "grad_norm": 0.4361669719219208, "learning_rate": 4.799618716067803e-06, "loss": 0.7178, "step": 968 }, { "epoch": 0.0401591445977869, "grad_norm": 0.4488246738910675, "learning_rate": 4.799411496539434e-06, "loss": 0.7783, "step": 969 }, { "epoch": 0.04020058850346057, "grad_norm": 0.4444413483142853, "learning_rate": 4.7992042770110655e-06, "loss": 0.8276, "step": 970 }, { "epoch": 0.04024203240913424, "grad_norm": 0.4681646227836609, "learning_rate": 4.798997057482698e-06, "loss": 0.7952, "step": 971 }, { "epoch": 0.04028347631480791, "grad_norm": 0.4517576992511749, "learning_rate": 4.798789837954329e-06, "loss": 0.7209, "step": 972 }, { "epoch": 0.04032492022048158, "grad_norm": 0.4871593415737152, "learning_rate": 4.798582618425961e-06, "loss": 0.7661, "step": 973 }, { "epoch": 0.04036636412615525, "grad_norm": 0.46669653058052063, 
"learning_rate": 4.798375398897592e-06, "loss": 0.7603, "step": 974 }, { "epoch": 0.04040780803182892, "grad_norm": 0.4552239775657654, "learning_rate": 4.798168179369224e-06, "loss": 0.8652, "step": 975 }, { "epoch": 0.04044925193750259, "grad_norm": 0.44540682435035706, "learning_rate": 4.7979609598408555e-06, "loss": 0.7534, "step": 976 }, { "epoch": 0.04049069584317626, "grad_norm": 0.46907296776771545, "learning_rate": 4.797753740312488e-06, "loss": 0.7979, "step": 977 }, { "epoch": 0.04053213974884993, "grad_norm": 0.46654975414276123, "learning_rate": 4.797546520784119e-06, "loss": 0.7939, "step": 978 }, { "epoch": 0.040573583654523605, "grad_norm": 0.45213475823402405, "learning_rate": 4.7973393012557505e-06, "loss": 0.804, "step": 979 }, { "epoch": 0.040615027560197275, "grad_norm": 0.4843692183494568, "learning_rate": 4.797132081727383e-06, "loss": 0.8472, "step": 980 }, { "epoch": 0.040656471465870946, "grad_norm": 0.4351000189781189, "learning_rate": 4.796924862199014e-06, "loss": 0.7732, "step": 981 }, { "epoch": 0.040697915371544616, "grad_norm": 0.46643754839897156, "learning_rate": 4.7967176426706455e-06, "loss": 0.8257, "step": 982 }, { "epoch": 0.040739359277218286, "grad_norm": 0.47618910670280457, "learning_rate": 4.796510423142277e-06, "loss": 0.8284, "step": 983 }, { "epoch": 0.040780803182891956, "grad_norm": 0.4954012334346771, "learning_rate": 4.796303203613909e-06, "loss": 0.8013, "step": 984 }, { "epoch": 0.040822247088565626, "grad_norm": 0.5031293034553528, "learning_rate": 4.7960959840855405e-06, "loss": 0.8403, "step": 985 }, { "epoch": 0.040863690994239296, "grad_norm": 0.4458840787410736, "learning_rate": 4.795888764557172e-06, "loss": 0.7712, "step": 986 }, { "epoch": 0.040905134899912966, "grad_norm": 0.46724194288253784, "learning_rate": 4.795681545028804e-06, "loss": 0.7683, "step": 987 }, { "epoch": 0.040946578805586636, "grad_norm": 0.45755645632743835, "learning_rate": 4.7954743255004355e-06, "loss": 0.7847, "step": 988 }, { 
"epoch": 0.04098802271126031, "grad_norm": 0.4850478768348694, "learning_rate": 4.795267105972067e-06, "loss": 0.8101, "step": 989 }, { "epoch": 0.04102946661693398, "grad_norm": 0.48655596375465393, "learning_rate": 4.795059886443698e-06, "loss": 0.8138, "step": 990 }, { "epoch": 0.041070910522607654, "grad_norm": 0.4782755672931671, "learning_rate": 4.7948526669153306e-06, "loss": 0.7961, "step": 991 }, { "epoch": 0.041112354428281324, "grad_norm": 0.49097394943237305, "learning_rate": 4.794645447386962e-06, "loss": 0.7705, "step": 992 }, { "epoch": 0.041153798333954994, "grad_norm": 0.48887911438941956, "learning_rate": 4.794438227858594e-06, "loss": 0.8474, "step": 993 }, { "epoch": 0.041195242239628664, "grad_norm": 0.5012791156768799, "learning_rate": 4.7942310083302256e-06, "loss": 0.7839, "step": 994 }, { "epoch": 0.041236686145302334, "grad_norm": 0.4260593056678772, "learning_rate": 4.794023788801857e-06, "loss": 0.7913, "step": 995 }, { "epoch": 0.041278130050976004, "grad_norm": 0.47090914845466614, "learning_rate": 4.793816569273489e-06, "loss": 0.7939, "step": 996 }, { "epoch": 0.041319573956649674, "grad_norm": 0.43993470072746277, "learning_rate": 4.7936093497451206e-06, "loss": 0.7917, "step": 997 }, { "epoch": 0.041361017862323345, "grad_norm": 0.4615001082420349, "learning_rate": 4.793402130216752e-06, "loss": 0.7561, "step": 998 }, { "epoch": 0.041402461767997015, "grad_norm": 0.4298277199268341, "learning_rate": 4.793194910688383e-06, "loss": 0.8127, "step": 999 }, { "epoch": 0.041443905673670685, "grad_norm": 0.4404458999633789, "learning_rate": 4.7929876911600156e-06, "loss": 0.7771, "step": 1000 }, { "epoch": 0.041485349579344355, "grad_norm": 0.5292378664016724, "learning_rate": 4.792780471631647e-06, "loss": 0.8579, "step": 1001 }, { "epoch": 0.041526793485018025, "grad_norm": 0.4654186964035034, "learning_rate": 4.792573252103278e-06, "loss": 0.8076, "step": 1002 }, { "epoch": 0.0415682373906917, "grad_norm": 0.45650607347488403, 
"learning_rate": 4.7923660325749106e-06, "loss": 0.8188, "step": 1003 }, { "epoch": 0.04160968129636537, "grad_norm": 0.45740291476249695, "learning_rate": 4.792158813046542e-06, "loss": 0.8037, "step": 1004 }, { "epoch": 0.04165112520203904, "grad_norm": 0.44465282559394836, "learning_rate": 4.791951593518173e-06, "loss": 0.7495, "step": 1005 }, { "epoch": 0.04169256910771271, "grad_norm": 0.4487740099430084, "learning_rate": 4.791744373989805e-06, "loss": 0.7794, "step": 1006 }, { "epoch": 0.04173401301338638, "grad_norm": 0.4635974168777466, "learning_rate": 4.791537154461437e-06, "loss": 0.7729, "step": 1007 }, { "epoch": 0.04177545691906005, "grad_norm": 0.4493637979030609, "learning_rate": 4.791329934933068e-06, "loss": 0.7737, "step": 1008 }, { "epoch": 0.04181690082473372, "grad_norm": 0.4791603982448578, "learning_rate": 4.7911227154047006e-06, "loss": 0.7959, "step": 1009 }, { "epoch": 0.04185834473040739, "grad_norm": 0.4576243758201599, "learning_rate": 4.790915495876331e-06, "loss": 0.8398, "step": 1010 }, { "epoch": 0.04189978863608106, "grad_norm": 0.47298121452331543, "learning_rate": 4.790708276347963e-06, "loss": 0.7998, "step": 1011 }, { "epoch": 0.04194123254175473, "grad_norm": 0.48323437571525574, "learning_rate": 4.790501056819596e-06, "loss": 0.8804, "step": 1012 }, { "epoch": 0.0419826764474284, "grad_norm": 0.474826455116272, "learning_rate": 4.790293837291227e-06, "loss": 0.8137, "step": 1013 }, { "epoch": 0.042024120353102073, "grad_norm": 0.4563380777835846, "learning_rate": 4.790086617762858e-06, "loss": 0.7656, "step": 1014 }, { "epoch": 0.042065564258775744, "grad_norm": 0.47896111011505127, "learning_rate": 4.78987939823449e-06, "loss": 0.8835, "step": 1015 }, { "epoch": 0.04210700816444942, "grad_norm": 0.4392525851726532, "learning_rate": 4.789672178706122e-06, "loss": 0.7024, "step": 1016 }, { "epoch": 0.04214845207012309, "grad_norm": 0.5214576721191406, "learning_rate": 4.789464959177753e-06, "loss": 0.7979, "step": 1017 }, { 
"epoch": 0.04218989597579676, "grad_norm": 0.4837241768836975, "learning_rate": 4.789257739649385e-06, "loss": 0.7976, "step": 1018 }, { "epoch": 0.04223133988147043, "grad_norm": 0.4692282974720001, "learning_rate": 4.789050520121016e-06, "loss": 0.8574, "step": 1019 }, { "epoch": 0.0422727837871441, "grad_norm": 0.5051727890968323, "learning_rate": 4.788843300592648e-06, "loss": 0.764, "step": 1020 }, { "epoch": 0.04231422769281777, "grad_norm": 0.4650651216506958, "learning_rate": 4.78863608106428e-06, "loss": 0.803, "step": 1021 }, { "epoch": 0.04235567159849144, "grad_norm": 0.4585110545158386, "learning_rate": 4.788428861535911e-06, "loss": 0.8154, "step": 1022 }, { "epoch": 0.04239711550416511, "grad_norm": 0.46456143260002136, "learning_rate": 4.788221642007543e-06, "loss": 0.8394, "step": 1023 }, { "epoch": 0.04243855940983878, "grad_norm": 0.470569372177124, "learning_rate": 4.788014422479175e-06, "loss": 0.7769, "step": 1024 }, { "epoch": 0.04248000331551245, "grad_norm": 0.47860991954803467, "learning_rate": 4.787807202950807e-06, "loss": 0.8289, "step": 1025 }, { "epoch": 0.04252144722118612, "grad_norm": 0.4554358720779419, "learning_rate": 4.7875999834224375e-06, "loss": 0.7986, "step": 1026 }, { "epoch": 0.04256289112685979, "grad_norm": 0.45716437697410583, "learning_rate": 4.78739276389407e-06, "loss": 0.7822, "step": 1027 }, { "epoch": 0.04260433503253347, "grad_norm": 0.4630057215690613, "learning_rate": 4.787185544365701e-06, "loss": 0.7949, "step": 1028 }, { "epoch": 0.04264577893820714, "grad_norm": 0.4765913188457489, "learning_rate": 4.786978324837333e-06, "loss": 0.8516, "step": 1029 }, { "epoch": 0.04268722284388081, "grad_norm": 0.4592381417751312, "learning_rate": 4.786771105308965e-06, "loss": 0.7993, "step": 1030 }, { "epoch": 0.04272866674955448, "grad_norm": 0.5326821208000183, "learning_rate": 4.786563885780596e-06, "loss": 0.8147, "step": 1031 }, { "epoch": 0.04277011065522815, "grad_norm": 0.43418946862220764, "learning_rate": 
4.786356666252228e-06, "loss": 0.7595, "step": 1032 }, { "epoch": 0.04281155456090182, "grad_norm": 0.4404500126838684, "learning_rate": 4.78614944672386e-06, "loss": 0.8386, "step": 1033 }, { "epoch": 0.04285299846657549, "grad_norm": 0.4906412363052368, "learning_rate": 4.785942227195491e-06, "loss": 0.7761, "step": 1034 }, { "epoch": 0.04289444237224916, "grad_norm": 0.4952963888645172, "learning_rate": 4.7857350076671225e-06, "loss": 0.7893, "step": 1035 }, { "epoch": 0.04293588627792283, "grad_norm": 0.436569482088089, "learning_rate": 4.785527788138755e-06, "loss": 0.7563, "step": 1036 }, { "epoch": 0.0429773301835965, "grad_norm": 0.5171957015991211, "learning_rate": 4.785320568610386e-06, "loss": 0.8477, "step": 1037 }, { "epoch": 0.04301877408927017, "grad_norm": 0.4241662621498108, "learning_rate": 4.7851133490820175e-06, "loss": 0.738, "step": 1038 }, { "epoch": 0.04306021799494384, "grad_norm": 0.4370529055595398, "learning_rate": 4.78490612955365e-06, "loss": 0.7903, "step": 1039 }, { "epoch": 0.04310166190061752, "grad_norm": 0.4420294761657715, "learning_rate": 4.784698910025281e-06, "loss": 0.7524, "step": 1040 }, { "epoch": 0.04314310580629119, "grad_norm": 0.46341052651405334, "learning_rate": 4.784491690496913e-06, "loss": 0.7883, "step": 1041 }, { "epoch": 0.04318454971196486, "grad_norm": 0.43772614002227783, "learning_rate": 4.784284470968544e-06, "loss": 0.7148, "step": 1042 }, { "epoch": 0.04322599361763853, "grad_norm": 0.4657709002494812, "learning_rate": 4.784077251440176e-06, "loss": 0.8296, "step": 1043 }, { "epoch": 0.0432674375233122, "grad_norm": 0.41048768162727356, "learning_rate": 4.7838700319118075e-06, "loss": 0.7358, "step": 1044 }, { "epoch": 0.04330888142898587, "grad_norm": 0.48912936449050903, "learning_rate": 4.78366281238344e-06, "loss": 0.8477, "step": 1045 }, { "epoch": 0.04335032533465954, "grad_norm": 0.515285313129425, "learning_rate": 4.783455592855071e-06, "loss": 0.7952, "step": 1046 }, { "epoch": 
0.04339176924033321, "grad_norm": 0.4189876616001129, "learning_rate": 4.7832483733267025e-06, "loss": 0.7969, "step": 1047 }, { "epoch": 0.04343321314600688, "grad_norm": 0.4508560299873352, "learning_rate": 4.783041153798335e-06, "loss": 0.7974, "step": 1048 }, { "epoch": 0.04347465705168055, "grad_norm": 0.47887948155403137, "learning_rate": 4.782833934269966e-06, "loss": 0.7988, "step": 1049 }, { "epoch": 0.04351610095735422, "grad_norm": 0.44417762756347656, "learning_rate": 4.7826267147415976e-06, "loss": 0.7744, "step": 1050 }, { "epoch": 0.04355754486302789, "grad_norm": 0.4393226206302643, "learning_rate": 4.782419495213229e-06, "loss": 0.7338, "step": 1051 }, { "epoch": 0.04359898876870156, "grad_norm": 0.46549293398857117, "learning_rate": 4.782212275684861e-06, "loss": 0.8191, "step": 1052 }, { "epoch": 0.043640432674375236, "grad_norm": 0.45024704933166504, "learning_rate": 4.7820050561564926e-06, "loss": 0.7842, "step": 1053 }, { "epoch": 0.043681876580048906, "grad_norm": 0.45627233386039734, "learning_rate": 4.781797836628124e-06, "loss": 0.7979, "step": 1054 }, { "epoch": 0.043723320485722576, "grad_norm": 0.46247386932373047, "learning_rate": 4.781590617099756e-06, "loss": 0.8213, "step": 1055 }, { "epoch": 0.043764764391396246, "grad_norm": 0.5227290987968445, "learning_rate": 4.7813833975713876e-06, "loss": 0.8562, "step": 1056 }, { "epoch": 0.04380620829706992, "grad_norm": 0.4721840023994446, "learning_rate": 4.781176178043019e-06, "loss": 0.7395, "step": 1057 }, { "epoch": 0.04384765220274359, "grad_norm": 0.46023762226104736, "learning_rate": 4.78096895851465e-06, "loss": 0.8157, "step": 1058 }, { "epoch": 0.04388909610841726, "grad_norm": 0.4536685049533844, "learning_rate": 4.7807617389862826e-06, "loss": 0.7578, "step": 1059 }, { "epoch": 0.04393054001409093, "grad_norm": 0.43217524886131287, "learning_rate": 4.780554519457914e-06, "loss": 0.8279, "step": 1060 }, { "epoch": 0.0439719839197646, "grad_norm": 0.456112802028656, 
"learning_rate": 4.780347299929546e-06, "loss": 0.8474, "step": 1061 }, { "epoch": 0.04401342782543827, "grad_norm": 0.4474206864833832, "learning_rate": 4.7801400804011776e-06, "loss": 0.7642, "step": 1062 }, { "epoch": 0.04405487173111194, "grad_norm": 0.47980812191963196, "learning_rate": 4.779932860872809e-06, "loss": 0.8501, "step": 1063 }, { "epoch": 0.04409631563678561, "grad_norm": 0.4759003818035126, "learning_rate": 4.779725641344441e-06, "loss": 0.7739, "step": 1064 }, { "epoch": 0.044137759542459284, "grad_norm": 0.43127790093421936, "learning_rate": 4.7795184218160726e-06, "loss": 0.7468, "step": 1065 }, { "epoch": 0.044179203448132955, "grad_norm": 0.45588958263397217, "learning_rate": 4.779311202287704e-06, "loss": 0.7634, "step": 1066 }, { "epoch": 0.044220647353806625, "grad_norm": 0.44599223136901855, "learning_rate": 4.779103982759335e-06, "loss": 0.7515, "step": 1067 }, { "epoch": 0.044262091259480295, "grad_norm": 0.4741554260253906, "learning_rate": 4.7788967632309676e-06, "loss": 0.7725, "step": 1068 }, { "epoch": 0.044303535165153965, "grad_norm": 0.4776347577571869, "learning_rate": 4.778689543702599e-06, "loss": 0.7939, "step": 1069 }, { "epoch": 0.044344979070827635, "grad_norm": 0.4661857485771179, "learning_rate": 4.77848232417423e-06, "loss": 0.8621, "step": 1070 }, { "epoch": 0.044386422976501305, "grad_norm": 0.48425886034965515, "learning_rate": 4.778275104645862e-06, "loss": 0.7522, "step": 1071 }, { "epoch": 0.044427866882174975, "grad_norm": 0.48456665873527527, "learning_rate": 4.778067885117494e-06, "loss": 0.8384, "step": 1072 }, { "epoch": 0.044469310787848645, "grad_norm": 0.47244054079055786, "learning_rate": 4.777860665589125e-06, "loss": 0.7417, "step": 1073 }, { "epoch": 0.044510754693522316, "grad_norm": 0.42493709921836853, "learning_rate": 4.777653446060757e-06, "loss": 0.7424, "step": 1074 }, { "epoch": 0.044552198599195986, "grad_norm": 0.4758439064025879, "learning_rate": 4.777446226532389e-06, "loss": 0.8054, 
"step": 1075 }, { "epoch": 0.044593642504869656, "grad_norm": 0.489058256149292, "learning_rate": 4.77723900700402e-06, "loss": 0.8093, "step": 1076 }, { "epoch": 0.04463508641054333, "grad_norm": 0.4572688639163971, "learning_rate": 4.777031787475653e-06, "loss": 0.7811, "step": 1077 }, { "epoch": 0.044676530316217, "grad_norm": 0.4598316252231598, "learning_rate": 4.776824567947284e-06, "loss": 0.7764, "step": 1078 }, { "epoch": 0.04471797422189067, "grad_norm": 0.48176220059394836, "learning_rate": 4.776617348418915e-06, "loss": 0.8389, "step": 1079 }, { "epoch": 0.04475941812756434, "grad_norm": 0.4744527339935303, "learning_rate": 4.776410128890547e-06, "loss": 0.8042, "step": 1080 }, { "epoch": 0.04480086203323801, "grad_norm": 0.49460116028785706, "learning_rate": 4.776202909362179e-06, "loss": 0.8757, "step": 1081 }, { "epoch": 0.044842305938911683, "grad_norm": 0.44487905502319336, "learning_rate": 4.77599568983381e-06, "loss": 0.7737, "step": 1082 }, { "epoch": 0.044883749844585354, "grad_norm": 0.48597803711891174, "learning_rate": 4.775788470305442e-06, "loss": 0.7971, "step": 1083 }, { "epoch": 0.044925193750259024, "grad_norm": 0.4437675476074219, "learning_rate": 4.775581250777074e-06, "loss": 0.8245, "step": 1084 }, { "epoch": 0.044966637655932694, "grad_norm": 0.45028436183929443, "learning_rate": 4.775374031248705e-06, "loss": 0.7422, "step": 1085 }, { "epoch": 0.045008081561606364, "grad_norm": 0.46277710795402527, "learning_rate": 4.775166811720337e-06, "loss": 0.8352, "step": 1086 }, { "epoch": 0.045049525467280034, "grad_norm": 0.5086259841918945, "learning_rate": 4.774959592191968e-06, "loss": 0.8068, "step": 1087 }, { "epoch": 0.045090969372953704, "grad_norm": 0.5544159412384033, "learning_rate": 4.7747523726636e-06, "loss": 0.8794, "step": 1088 }, { "epoch": 0.04513241327862738, "grad_norm": 0.4555202126502991, "learning_rate": 4.774545153135232e-06, "loss": 0.8228, "step": 1089 }, { "epoch": 0.04517385718430105, "grad_norm": 
0.4569913148880005, "learning_rate": 4.774337933606863e-06, "loss": 0.8098, "step": 1090 }, { "epoch": 0.04521530108997472, "grad_norm": 0.425127774477005, "learning_rate": 4.774130714078495e-06, "loss": 0.7759, "step": 1091 }, { "epoch": 0.04525674499564839, "grad_norm": 0.42579367756843567, "learning_rate": 4.773923494550127e-06, "loss": 0.7352, "step": 1092 }, { "epoch": 0.04529818890132206, "grad_norm": 0.4868384301662445, "learning_rate": 4.773716275021759e-06, "loss": 0.7891, "step": 1093 }, { "epoch": 0.04533963280699573, "grad_norm": 0.4713551104068756, "learning_rate": 4.7735090554933895e-06, "loss": 0.7776, "step": 1094 }, { "epoch": 0.0453810767126694, "grad_norm": 0.4447707533836365, "learning_rate": 4.773301835965022e-06, "loss": 0.7825, "step": 1095 }, { "epoch": 0.04542252061834307, "grad_norm": 0.4642535150051117, "learning_rate": 4.773094616436653e-06, "loss": 0.7607, "step": 1096 }, { "epoch": 0.04546396452401674, "grad_norm": 0.45153817534446716, "learning_rate": 4.772887396908285e-06, "loss": 0.8589, "step": 1097 }, { "epoch": 0.04550540842969041, "grad_norm": 0.4268827736377716, "learning_rate": 4.772680177379917e-06, "loss": 0.7378, "step": 1098 }, { "epoch": 0.04554685233536408, "grad_norm": 0.4873831868171692, "learning_rate": 4.772472957851548e-06, "loss": 0.8479, "step": 1099 }, { "epoch": 0.04558829624103775, "grad_norm": 0.4293820261955261, "learning_rate": 4.77226573832318e-06, "loss": 0.7791, "step": 1100 }, { "epoch": 0.04562974014671142, "grad_norm": 0.5006012320518494, "learning_rate": 4.772058518794812e-06, "loss": 0.7483, "step": 1101 }, { "epoch": 0.0456711840523851, "grad_norm": 0.4601713716983795, "learning_rate": 4.771851299266443e-06, "loss": 0.7852, "step": 1102 }, { "epoch": 0.04571262795805877, "grad_norm": 0.4780215322971344, "learning_rate": 4.7716440797380745e-06, "loss": 0.7512, "step": 1103 }, { "epoch": 0.04575407186373244, "grad_norm": 0.46182432770729065, "learning_rate": 4.771436860209707e-06, "loss": 0.8018, 
"step": 1104 }, { "epoch": 0.04579551576940611, "grad_norm": 0.5341752171516418, "learning_rate": 4.771229640681338e-06, "loss": 0.7598, "step": 1105 }, { "epoch": 0.04583695967507978, "grad_norm": 0.4713796079158783, "learning_rate": 4.7710224211529695e-06, "loss": 0.821, "step": 1106 }, { "epoch": 0.04587840358075345, "grad_norm": 0.5094240307807922, "learning_rate": 4.770815201624602e-06, "loss": 0.8196, "step": 1107 }, { "epoch": 0.04591984748642712, "grad_norm": 0.48558828234672546, "learning_rate": 4.770607982096233e-06, "loss": 0.7471, "step": 1108 }, { "epoch": 0.04596129139210079, "grad_norm": 0.4777097702026367, "learning_rate": 4.770400762567865e-06, "loss": 0.7449, "step": 1109 }, { "epoch": 0.04600273529777446, "grad_norm": 0.45582717657089233, "learning_rate": 4.770193543039496e-06, "loss": 0.7649, "step": 1110 }, { "epoch": 0.04604417920344813, "grad_norm": 0.4614855945110321, "learning_rate": 4.769986323511128e-06, "loss": 0.7795, "step": 1111 }, { "epoch": 0.0460856231091218, "grad_norm": 0.4478304386138916, "learning_rate": 4.7697791039827596e-06, "loss": 0.8113, "step": 1112 }, { "epoch": 0.04612706701479547, "grad_norm": 0.48237845301628113, "learning_rate": 4.769571884454392e-06, "loss": 0.8135, "step": 1113 }, { "epoch": 0.04616851092046915, "grad_norm": 0.5383651852607727, "learning_rate": 4.769364664926023e-06, "loss": 0.8545, "step": 1114 }, { "epoch": 0.04620995482614282, "grad_norm": 0.4325060248374939, "learning_rate": 4.7691574453976546e-06, "loss": 0.8008, "step": 1115 }, { "epoch": 0.04625139873181649, "grad_norm": 0.4721400737762451, "learning_rate": 4.768950225869287e-06, "loss": 0.8584, "step": 1116 }, { "epoch": 0.04629284263749016, "grad_norm": 0.4709514379501343, "learning_rate": 4.768743006340918e-06, "loss": 0.8298, "step": 1117 }, { "epoch": 0.04633428654316383, "grad_norm": 0.501682698726654, "learning_rate": 4.7685357868125496e-06, "loss": 0.8245, "step": 1118 }, { "epoch": 0.0463757304488375, "grad_norm": 
0.4643377661705017, "learning_rate": 4.768328567284181e-06, "loss": 0.7642, "step": 1119 }, { "epoch": 0.04641717435451117, "grad_norm": 0.48200082778930664, "learning_rate": 4.768121347755813e-06, "loss": 0.7991, "step": 1120 }, { "epoch": 0.04645861826018484, "grad_norm": 0.4852246046066284, "learning_rate": 4.7679141282274446e-06, "loss": 0.8018, "step": 1121 }, { "epoch": 0.04650006216585851, "grad_norm": 0.45635098218917847, "learning_rate": 4.767706908699076e-06, "loss": 0.748, "step": 1122 }, { "epoch": 0.04654150607153218, "grad_norm": 0.4639163315296173, "learning_rate": 4.767499689170707e-06, "loss": 0.7729, "step": 1123 }, { "epoch": 0.04658294997720585, "grad_norm": 0.4940907657146454, "learning_rate": 4.7672924696423396e-06, "loss": 0.813, "step": 1124 }, { "epoch": 0.04662439388287952, "grad_norm": 0.4561811089515686, "learning_rate": 4.767085250113972e-06, "loss": 0.7661, "step": 1125 }, { "epoch": 0.0466658377885532, "grad_norm": 0.41685640811920166, "learning_rate": 4.766878030585602e-06, "loss": 0.7219, "step": 1126 }, { "epoch": 0.04670728169422687, "grad_norm": 0.4310116767883301, "learning_rate": 4.7666708110572346e-06, "loss": 0.7629, "step": 1127 }, { "epoch": 0.04674872559990054, "grad_norm": 0.4604714810848236, "learning_rate": 4.766463591528866e-06, "loss": 0.7888, "step": 1128 }, { "epoch": 0.04679016950557421, "grad_norm": 0.5103821754455566, "learning_rate": 4.766256372000498e-06, "loss": 0.7864, "step": 1129 }, { "epoch": 0.04683161341124788, "grad_norm": 0.5306897759437561, "learning_rate": 4.76604915247213e-06, "loss": 0.8103, "step": 1130 }, { "epoch": 0.04687305731692155, "grad_norm": 0.4589296579360962, "learning_rate": 4.765841932943761e-06, "loss": 0.7878, "step": 1131 }, { "epoch": 0.04691450122259522, "grad_norm": 0.4608319103717804, "learning_rate": 4.765634713415392e-06, "loss": 0.7869, "step": 1132 }, { "epoch": 0.04695594512826889, "grad_norm": 0.4795214533805847, "learning_rate": 4.765427493887025e-06, "loss": 0.7703, 
"step": 1133 }, { "epoch": 0.04699738903394256, "grad_norm": 0.4532718360424042, "learning_rate": 4.765220274358656e-06, "loss": 0.7974, "step": 1134 }, { "epoch": 0.04703883293961623, "grad_norm": 0.47184133529663086, "learning_rate": 4.765013054830287e-06, "loss": 0.7329, "step": 1135 }, { "epoch": 0.0470802768452899, "grad_norm": 0.4561813771724701, "learning_rate": 4.76480583530192e-06, "loss": 0.8391, "step": 1136 }, { "epoch": 0.04712172075096357, "grad_norm": 0.48163068294525146, "learning_rate": 4.764598615773551e-06, "loss": 0.8845, "step": 1137 }, { "epoch": 0.04716316465663724, "grad_norm": 0.5012065768241882, "learning_rate": 4.764391396245182e-06, "loss": 0.7657, "step": 1138 }, { "epoch": 0.047204608562310915, "grad_norm": 0.4675280749797821, "learning_rate": 4.764184176716814e-06, "loss": 0.7937, "step": 1139 }, { "epoch": 0.047246052467984585, "grad_norm": 0.4500986337661743, "learning_rate": 4.763976957188446e-06, "loss": 0.7605, "step": 1140 }, { "epoch": 0.047287496373658255, "grad_norm": 0.47017499804496765, "learning_rate": 4.763769737660077e-06, "loss": 0.7668, "step": 1141 }, { "epoch": 0.047328940279331926, "grad_norm": 0.4638185203075409, "learning_rate": 4.763562518131709e-06, "loss": 0.7778, "step": 1142 }, { "epoch": 0.047370384185005596, "grad_norm": 0.425403356552124, "learning_rate": 4.763355298603341e-06, "loss": 0.7761, "step": 1143 }, { "epoch": 0.047411828090679266, "grad_norm": 0.48976048827171326, "learning_rate": 4.763148079074972e-06, "loss": 0.7825, "step": 1144 }, { "epoch": 0.047453271996352936, "grad_norm": 0.4575006067752838, "learning_rate": 4.762940859546605e-06, "loss": 0.7527, "step": 1145 }, { "epoch": 0.047494715902026606, "grad_norm": 0.47396737337112427, "learning_rate": 4.762733640018236e-06, "loss": 0.7181, "step": 1146 }, { "epoch": 0.047536159807700276, "grad_norm": 0.4640709459781647, "learning_rate": 4.762526420489867e-06, "loss": 0.8, "step": 1147 }, { "epoch": 0.047577603713373946, "grad_norm": 
0.4709440767765045, "learning_rate": 4.762319200961499e-06, "loss": 0.7522, "step": 1148 }, { "epoch": 0.047619047619047616, "grad_norm": 0.47328272461891174, "learning_rate": 4.762111981433131e-06, "loss": 0.7909, "step": 1149 }, { "epoch": 0.047660491524721287, "grad_norm": 0.4334677457809448, "learning_rate": 4.761904761904762e-06, "loss": 0.7891, "step": 1150 }, { "epoch": 0.047701935430394964, "grad_norm": 0.4913560748100281, "learning_rate": 4.761697542376394e-06, "loss": 0.8271, "step": 1151 }, { "epoch": 0.047743379336068634, "grad_norm": 0.49631425738334656, "learning_rate": 4.761490322848026e-06, "loss": 0.8167, "step": 1152 }, { "epoch": 0.047784823241742304, "grad_norm": 0.47891414165496826, "learning_rate": 4.761283103319657e-06, "loss": 0.8074, "step": 1153 }, { "epoch": 0.047826267147415974, "grad_norm": 0.5080652832984924, "learning_rate": 4.761075883791289e-06, "loss": 0.8113, "step": 1154 }, { "epoch": 0.047867711053089644, "grad_norm": 0.44767627120018005, "learning_rate": 4.76086866426292e-06, "loss": 0.7991, "step": 1155 }, { "epoch": 0.047909154958763314, "grad_norm": 0.4517739713191986, "learning_rate": 4.760661444734552e-06, "loss": 0.7751, "step": 1156 }, { "epoch": 0.047950598864436984, "grad_norm": 0.4694949686527252, "learning_rate": 4.760454225206184e-06, "loss": 0.7852, "step": 1157 }, { "epoch": 0.047992042770110654, "grad_norm": 0.46059003472328186, "learning_rate": 4.760247005677815e-06, "loss": 0.7761, "step": 1158 }, { "epoch": 0.048033486675784325, "grad_norm": 0.43597954511642456, "learning_rate": 4.7600397861494465e-06, "loss": 0.7859, "step": 1159 }, { "epoch": 0.048074930581457995, "grad_norm": 0.45111414790153503, "learning_rate": 4.759832566621079e-06, "loss": 0.7434, "step": 1160 }, { "epoch": 0.048116374487131665, "grad_norm": 0.4659668207168579, "learning_rate": 4.759625347092711e-06, "loss": 0.8137, "step": 1161 }, { "epoch": 0.048157818392805335, "grad_norm": 0.4754352271556854, "learning_rate": 4.7594181275643415e-06, 
"loss": 0.8413, "step": 1162 }, { "epoch": 0.04819926229847901, "grad_norm": 0.4473358392715454, "learning_rate": 4.759210908035974e-06, "loss": 0.8242, "step": 1163 }, { "epoch": 0.04824070620415268, "grad_norm": 0.4281471073627472, "learning_rate": 4.759003688507605e-06, "loss": 0.7441, "step": 1164 }, { "epoch": 0.04828215010982635, "grad_norm": 0.480766624212265, "learning_rate": 4.758796468979237e-06, "loss": 0.8167, "step": 1165 }, { "epoch": 0.04832359401550002, "grad_norm": 0.4934529960155487, "learning_rate": 4.758589249450869e-06, "loss": 0.7783, "step": 1166 }, { "epoch": 0.04836503792117369, "grad_norm": 0.5014682412147522, "learning_rate": 4.7583820299225e-06, "loss": 0.8022, "step": 1167 }, { "epoch": 0.04840648182684736, "grad_norm": 0.45076847076416016, "learning_rate": 4.758174810394132e-06, "loss": 0.7622, "step": 1168 }, { "epoch": 0.04844792573252103, "grad_norm": 0.486648827791214, "learning_rate": 4.757967590865764e-06, "loss": 0.7939, "step": 1169 }, { "epoch": 0.0484893696381947, "grad_norm": 0.446209192276001, "learning_rate": 4.757760371337395e-06, "loss": 0.7676, "step": 1170 }, { "epoch": 0.04853081354386837, "grad_norm": 0.4450940787792206, "learning_rate": 4.7575531518090266e-06, "loss": 0.7261, "step": 1171 }, { "epoch": 0.04857225744954204, "grad_norm": 0.4777759313583374, "learning_rate": 4.757345932280659e-06, "loss": 0.7751, "step": 1172 }, { "epoch": 0.04861370135521571, "grad_norm": 0.46364954113960266, "learning_rate": 4.75713871275229e-06, "loss": 0.8276, "step": 1173 }, { "epoch": 0.04865514526088938, "grad_norm": 0.4810357689857483, "learning_rate": 4.7569314932239216e-06, "loss": 0.8169, "step": 1174 }, { "epoch": 0.048696589166563053, "grad_norm": 0.4100513756275177, "learning_rate": 4.756724273695553e-06, "loss": 0.7263, "step": 1175 }, { "epoch": 0.04873803307223673, "grad_norm": 0.4895865321159363, "learning_rate": 4.756517054167185e-06, "loss": 0.7684, "step": 1176 }, { "epoch": 0.0487794769779104, "grad_norm": 
0.43320727348327637, "learning_rate": 4.756309834638817e-06, "loss": 0.741, "step": 1177 }, { "epoch": 0.04882092088358407, "grad_norm": 0.4820486307144165, "learning_rate": 4.756102615110448e-06, "loss": 0.8364, "step": 1178 }, { "epoch": 0.04886236478925774, "grad_norm": 0.4768984317779541, "learning_rate": 4.75589539558208e-06, "loss": 0.7727, "step": 1179 }, { "epoch": 0.04890380869493141, "grad_norm": 0.4264664053916931, "learning_rate": 4.7556881760537116e-06, "loss": 0.8025, "step": 1180 }, { "epoch": 0.04894525260060508, "grad_norm": 0.43624719977378845, "learning_rate": 4.755480956525344e-06, "loss": 0.7637, "step": 1181 }, { "epoch": 0.04898669650627875, "grad_norm": 0.4732251763343811, "learning_rate": 4.755273736996975e-06, "loss": 0.8291, "step": 1182 }, { "epoch": 0.04902814041195242, "grad_norm": 0.485446035861969, "learning_rate": 4.7550665174686066e-06, "loss": 0.8235, "step": 1183 }, { "epoch": 0.04906958431762609, "grad_norm": 0.46888467669487, "learning_rate": 4.754859297940238e-06, "loss": 0.7854, "step": 1184 }, { "epoch": 0.04911102822329976, "grad_norm": 0.5527017116546631, "learning_rate": 4.75465207841187e-06, "loss": 0.8191, "step": 1185 }, { "epoch": 0.04915247212897343, "grad_norm": 0.4443214535713196, "learning_rate": 4.7544448588835016e-06, "loss": 0.7739, "step": 1186 }, { "epoch": 0.0491939160346471, "grad_norm": 0.505150556564331, "learning_rate": 4.754237639355133e-06, "loss": 0.7915, "step": 1187 }, { "epoch": 0.04923535994032078, "grad_norm": 0.4519720673561096, "learning_rate": 4.754030419826765e-06, "loss": 0.8313, "step": 1188 }, { "epoch": 0.04927680384599445, "grad_norm": 0.46611085534095764, "learning_rate": 4.753823200298397e-06, "loss": 0.7517, "step": 1189 }, { "epoch": 0.04931824775166812, "grad_norm": 0.45731350779533386, "learning_rate": 4.753615980770028e-06, "loss": 0.7847, "step": 1190 }, { "epoch": 0.04935969165734179, "grad_norm": 0.4220495820045471, "learning_rate": 4.753408761241659e-06, "loss": 0.8071, 
"step": 1191 }, { "epoch": 0.04940113556301546, "grad_norm": 0.44269758462905884, "learning_rate": 4.753201541713292e-06, "loss": 0.7178, "step": 1192 }, { "epoch": 0.04944257946868913, "grad_norm": 0.4705568552017212, "learning_rate": 4.752994322184923e-06, "loss": 0.7759, "step": 1193 }, { "epoch": 0.0494840233743628, "grad_norm": 0.4717589020729065, "learning_rate": 4.752787102656554e-06, "loss": 0.7947, "step": 1194 }, { "epoch": 0.04952546728003647, "grad_norm": 0.42914721369743347, "learning_rate": 4.752579883128187e-06, "loss": 0.7551, "step": 1195 }, { "epoch": 0.04956691118571014, "grad_norm": 0.45622196793556213, "learning_rate": 4.752372663599818e-06, "loss": 0.8096, "step": 1196 }, { "epoch": 0.04960835509138381, "grad_norm": 0.4357914924621582, "learning_rate": 4.75216544407145e-06, "loss": 0.7366, "step": 1197 }, { "epoch": 0.04964979899705748, "grad_norm": 0.45871320366859436, "learning_rate": 4.751958224543082e-06, "loss": 0.7644, "step": 1198 }, { "epoch": 0.04969124290273115, "grad_norm": 0.4517674446105957, "learning_rate": 4.751751005014713e-06, "loss": 0.7742, "step": 1199 }, { "epoch": 0.04973268680840483, "grad_norm": 0.4565979540348053, "learning_rate": 4.751543785486344e-06, "loss": 0.7725, "step": 1200 }, { "epoch": 0.0497741307140785, "grad_norm": 0.5017029643058777, "learning_rate": 4.751336565957977e-06, "loss": 0.7979, "step": 1201 }, { "epoch": 0.04981557461975217, "grad_norm": 0.4316957890987396, "learning_rate": 4.751129346429608e-06, "loss": 0.7595, "step": 1202 }, { "epoch": 0.04985701852542584, "grad_norm": 0.44598981738090515, "learning_rate": 4.750922126901239e-06, "loss": 0.8191, "step": 1203 }, { "epoch": 0.04989846243109951, "grad_norm": 0.4582918882369995, "learning_rate": 4.750714907372872e-06, "loss": 0.7502, "step": 1204 }, { "epoch": 0.04993990633677318, "grad_norm": 0.44995737075805664, "learning_rate": 4.750507687844503e-06, "loss": 0.8379, "step": 1205 }, { "epoch": 0.04998135024244685, "grad_norm": 
0.4665464162826538, "learning_rate": 4.750300468316134e-06, "loss": 0.7756, "step": 1206 }, { "epoch": 0.05002279414812052, "grad_norm": 0.4967416226863861, "learning_rate": 4.750093248787766e-06, "loss": 0.7883, "step": 1207 }, { "epoch": 0.05006423805379419, "grad_norm": 0.4364146590232849, "learning_rate": 4.749886029259398e-06, "loss": 0.7576, "step": 1208 }, { "epoch": 0.05010568195946786, "grad_norm": 0.510164201259613, "learning_rate": 4.749678809731029e-06, "loss": 0.7432, "step": 1209 }, { "epoch": 0.05014712586514153, "grad_norm": 0.4519222676753998, "learning_rate": 4.749471590202661e-06, "loss": 0.7375, "step": 1210 }, { "epoch": 0.0501885697708152, "grad_norm": 0.49874672293663025, "learning_rate": 4.749264370674292e-06, "loss": 0.7998, "step": 1211 }, { "epoch": 0.050230013676488876, "grad_norm": 0.45865997672080994, "learning_rate": 4.749057151145924e-06, "loss": 0.7561, "step": 1212 }, { "epoch": 0.050271457582162546, "grad_norm": 0.4214809834957123, "learning_rate": 4.748849931617557e-06, "loss": 0.783, "step": 1213 }, { "epoch": 0.050312901487836216, "grad_norm": 0.4474555253982544, "learning_rate": 4.748642712089188e-06, "loss": 0.7888, "step": 1214 }, { "epoch": 0.050354345393509886, "grad_norm": 0.44800856709480286, "learning_rate": 4.748435492560819e-06, "loss": 0.7622, "step": 1215 }, { "epoch": 0.050395789299183556, "grad_norm": 0.4633291959762573, "learning_rate": 4.748228273032451e-06, "loss": 0.7732, "step": 1216 }, { "epoch": 0.050437233204857226, "grad_norm": 0.4754111170768738, "learning_rate": 4.748021053504083e-06, "loss": 0.7993, "step": 1217 }, { "epoch": 0.050478677110530897, "grad_norm": 0.4533904790878296, "learning_rate": 4.747813833975714e-06, "loss": 0.7434, "step": 1218 }, { "epoch": 0.05052012101620457, "grad_norm": 0.44537970423698425, "learning_rate": 4.747606614447346e-06, "loss": 0.7986, "step": 1219 }, { "epoch": 0.05056156492187824, "grad_norm": 0.48878997564315796, "learning_rate": 4.747399394918977e-06, "loss": 
0.7981, "step": 1220 }, { "epoch": 0.05060300882755191, "grad_norm": 0.48571038246154785, "learning_rate": 4.747192175390609e-06, "loss": 0.8184, "step": 1221 }, { "epoch": 0.05064445273322558, "grad_norm": 0.47738444805145264, "learning_rate": 4.746984955862241e-06, "loss": 0.8713, "step": 1222 }, { "epoch": 0.05068589663889925, "grad_norm": 0.5319210290908813, "learning_rate": 4.746777736333872e-06, "loss": 0.8184, "step": 1223 }, { "epoch": 0.05072734054457292, "grad_norm": 0.4766693115234375, "learning_rate": 4.746570516805504e-06, "loss": 0.8274, "step": 1224 }, { "epoch": 0.050768784450246594, "grad_norm": 0.4183281660079956, "learning_rate": 4.746363297277136e-06, "loss": 0.7517, "step": 1225 }, { "epoch": 0.050810228355920264, "grad_norm": 0.4512433111667633, "learning_rate": 4.746156077748767e-06, "loss": 0.7671, "step": 1226 }, { "epoch": 0.050851672261593935, "grad_norm": 0.43581053614616394, "learning_rate": 4.7459488582203986e-06, "loss": 0.7961, "step": 1227 }, { "epoch": 0.050893116167267605, "grad_norm": 0.47661325335502625, "learning_rate": 4.745741638692031e-06, "loss": 0.8318, "step": 1228 }, { "epoch": 0.050934560072941275, "grad_norm": 0.4405752718448639, "learning_rate": 4.745534419163663e-06, "loss": 0.7595, "step": 1229 }, { "epoch": 0.050976003978614945, "grad_norm": 0.4849138855934143, "learning_rate": 4.7453271996352936e-06, "loss": 0.7538, "step": 1230 }, { "epoch": 0.051017447884288615, "grad_norm": 0.4781374931335449, "learning_rate": 4.745119980106926e-06, "loss": 0.7668, "step": 1231 }, { "epoch": 0.051058891789962285, "grad_norm": 0.452385812997818, "learning_rate": 4.744912760578557e-06, "loss": 0.8054, "step": 1232 }, { "epoch": 0.051100335695635955, "grad_norm": 0.4391435980796814, "learning_rate": 4.744705541050189e-06, "loss": 0.8394, "step": 1233 }, { "epoch": 0.051141779601309625, "grad_norm": 0.44852590560913086, "learning_rate": 4.744498321521821e-06, "loss": 0.793, "step": 1234 }, { "epoch": 0.051183223506983296, 
"grad_norm": 0.4417193830013275, "learning_rate": 4.744291101993452e-06, "loss": 0.7629, "step": 1235 }, { "epoch": 0.051224667412656966, "grad_norm": 0.46197283267974854, "learning_rate": 4.7440838824650836e-06, "loss": 0.7734, "step": 1236 }, { "epoch": 0.05126611131833064, "grad_norm": 0.4928566813468933, "learning_rate": 4.743876662936716e-06, "loss": 0.8452, "step": 1237 }, { "epoch": 0.05130755522400431, "grad_norm": 0.4651973247528076, "learning_rate": 4.743669443408347e-06, "loss": 0.8003, "step": 1238 }, { "epoch": 0.05134899912967798, "grad_norm": 0.4768562912940979, "learning_rate": 4.7434622238799786e-06, "loss": 0.7642, "step": 1239 }, { "epoch": 0.05139044303535165, "grad_norm": 0.4419132173061371, "learning_rate": 4.743255004351611e-06, "loss": 0.7263, "step": 1240 }, { "epoch": 0.05143188694102532, "grad_norm": 0.4663984179496765, "learning_rate": 4.743047784823242e-06, "loss": 0.7424, "step": 1241 }, { "epoch": 0.05147333084669899, "grad_norm": 0.43316197395324707, "learning_rate": 4.7428405652948736e-06, "loss": 0.7874, "step": 1242 }, { "epoch": 0.05151477475237266, "grad_norm": 0.469791442155838, "learning_rate": 4.742633345766505e-06, "loss": 0.8474, "step": 1243 }, { "epoch": 0.051556218658046334, "grad_norm": 0.4579676389694214, "learning_rate": 4.742426126238137e-06, "loss": 0.7749, "step": 1244 }, { "epoch": 0.051597662563720004, "grad_norm": 0.4559544026851654, "learning_rate": 4.7422189067097686e-06, "loss": 0.7773, "step": 1245 }, { "epoch": 0.051639106469393674, "grad_norm": 0.46836841106414795, "learning_rate": 4.7420116871814e-06, "loss": 0.7803, "step": 1246 }, { "epoch": 0.051680550375067344, "grad_norm": 0.45971202850341797, "learning_rate": 4.741804467653032e-06, "loss": 0.7688, "step": 1247 }, { "epoch": 0.051721994280741014, "grad_norm": 0.4328509569168091, "learning_rate": 4.741597248124664e-06, "loss": 0.6812, "step": 1248 }, { "epoch": 0.05176343818641469, "grad_norm": 0.45725953578948975, "learning_rate": 
4.741390028596296e-06, "loss": 0.79, "step": 1249 }, { "epoch": 0.05180488209208836, "grad_norm": 0.4621913433074951, "learning_rate": 4.741182809067927e-06, "loss": 0.7808, "step": 1250 }, { "epoch": 0.05184632599776203, "grad_norm": 0.42693284153938293, "learning_rate": 4.740975589539559e-06, "loss": 0.7255, "step": 1251 }, { "epoch": 0.0518877699034357, "grad_norm": 0.4413866400718689, "learning_rate": 4.74076837001119e-06, "loss": 0.8127, "step": 1252 }, { "epoch": 0.05192921380910937, "grad_norm": 0.46110832691192627, "learning_rate": 4.740561150482822e-06, "loss": 0.7615, "step": 1253 }, { "epoch": 0.05197065771478304, "grad_norm": 0.46893593668937683, "learning_rate": 4.740353930954454e-06, "loss": 0.7832, "step": 1254 }, { "epoch": 0.05201210162045671, "grad_norm": 0.46650230884552, "learning_rate": 4.740146711426085e-06, "loss": 0.7595, "step": 1255 }, { "epoch": 0.05205354552613038, "grad_norm": 0.45120805501937866, "learning_rate": 4.739939491897717e-06, "loss": 0.7834, "step": 1256 }, { "epoch": 0.05209498943180405, "grad_norm": 0.4651395082473755, "learning_rate": 4.739732272369349e-06, "loss": 0.7977, "step": 1257 }, { "epoch": 0.05213643333747772, "grad_norm": 0.4553588330745697, "learning_rate": 4.73952505284098e-06, "loss": 0.7512, "step": 1258 }, { "epoch": 0.05217787724315139, "grad_norm": 0.42591726779937744, "learning_rate": 4.739317833312611e-06, "loss": 0.7185, "step": 1259 }, { "epoch": 0.05221932114882506, "grad_norm": 0.4449254274368286, "learning_rate": 4.739110613784244e-06, "loss": 0.8083, "step": 1260 }, { "epoch": 0.05226076505449873, "grad_norm": 0.48253604769706726, "learning_rate": 4.738903394255875e-06, "loss": 0.7739, "step": 1261 }, { "epoch": 0.05230220896017241, "grad_norm": 0.5065217614173889, "learning_rate": 4.738696174727506e-06, "loss": 0.8267, "step": 1262 }, { "epoch": 0.05234365286584608, "grad_norm": 0.4378811717033386, "learning_rate": 4.738488955199138e-06, "loss": 0.7341, "step": 1263 }, { "epoch": 
0.05238509677151975, "grad_norm": 0.46514174342155457, "learning_rate": 4.73828173567077e-06, "loss": 0.8027, "step": 1264 }, { "epoch": 0.05242654067719342, "grad_norm": 0.47746387124061584, "learning_rate": 4.738074516142402e-06, "loss": 0.7625, "step": 1265 }, { "epoch": 0.05246798458286709, "grad_norm": 0.44600263237953186, "learning_rate": 4.737867296614034e-06, "loss": 0.7877, "step": 1266 }, { "epoch": 0.05250942848854076, "grad_norm": 0.5016936659812927, "learning_rate": 4.737660077085665e-06, "loss": 0.8289, "step": 1267 }, { "epoch": 0.05255087239421443, "grad_norm": 0.451377809047699, "learning_rate": 4.737452857557296e-06, "loss": 0.7678, "step": 1268 }, { "epoch": 0.0525923162998881, "grad_norm": 0.48705771565437317, "learning_rate": 4.737245638028929e-06, "loss": 0.7784, "step": 1269 }, { "epoch": 0.05263376020556177, "grad_norm": 0.4315616190433502, "learning_rate": 4.73703841850056e-06, "loss": 0.7045, "step": 1270 }, { "epoch": 0.05267520411123544, "grad_norm": 0.44772353768348694, "learning_rate": 4.736831198972191e-06, "loss": 0.7651, "step": 1271 }, { "epoch": 0.05271664801690911, "grad_norm": 0.4597644805908203, "learning_rate": 4.736623979443823e-06, "loss": 0.7969, "step": 1272 }, { "epoch": 0.05275809192258278, "grad_norm": 0.4657128155231476, "learning_rate": 4.736416759915455e-06, "loss": 0.7544, "step": 1273 }, { "epoch": 0.05279953582825646, "grad_norm": 0.4601643681526184, "learning_rate": 4.736209540387086e-06, "loss": 0.7812, "step": 1274 }, { "epoch": 0.05284097973393013, "grad_norm": 0.44550570845603943, "learning_rate": 4.736002320858718e-06, "loss": 0.8123, "step": 1275 }, { "epoch": 0.0528824236396038, "grad_norm": 0.47838711738586426, "learning_rate": 4.73579510133035e-06, "loss": 0.7943, "step": 1276 }, { "epoch": 0.05292386754527747, "grad_norm": 0.4126342833042145, "learning_rate": 4.735587881801981e-06, "loss": 0.7927, "step": 1277 }, { "epoch": 0.05296531145095114, "grad_norm": 0.452780157327652, "learning_rate": 
4.735380662273613e-06, "loss": 0.8071, "step": 1278 }, { "epoch": 0.05300675535662481, "grad_norm": 0.4484234154224396, "learning_rate": 4.735173442745244e-06, "loss": 0.7646, "step": 1279 }, { "epoch": 0.05304819926229848, "grad_norm": 0.4484492838382721, "learning_rate": 4.734966223216876e-06, "loss": 0.8096, "step": 1280 }, { "epoch": 0.05308964316797215, "grad_norm": 0.4752931296825409, "learning_rate": 4.734759003688508e-06, "loss": 0.7748, "step": 1281 }, { "epoch": 0.05313108707364582, "grad_norm": 0.4608139991760254, "learning_rate": 4.73455178416014e-06, "loss": 0.7795, "step": 1282 }, { "epoch": 0.05317253097931949, "grad_norm": 0.46299976110458374, "learning_rate": 4.734344564631771e-06, "loss": 0.8064, "step": 1283 }, { "epoch": 0.05321397488499316, "grad_norm": 0.4294183552265167, "learning_rate": 4.734137345103403e-06, "loss": 0.7417, "step": 1284 }, { "epoch": 0.05325541879066683, "grad_norm": 0.4433351159095764, "learning_rate": 4.733930125575035e-06, "loss": 0.7664, "step": 1285 }, { "epoch": 0.053296862696340506, "grad_norm": 0.4322192072868347, "learning_rate": 4.733722906046666e-06, "loss": 0.7188, "step": 1286 }, { "epoch": 0.05333830660201418, "grad_norm": 0.47386589646339417, "learning_rate": 4.733515686518298e-06, "loss": 0.8521, "step": 1287 }, { "epoch": 0.05337975050768785, "grad_norm": 0.41507917642593384, "learning_rate": 4.733308466989929e-06, "loss": 0.7207, "step": 1288 }, { "epoch": 0.05342119441336152, "grad_norm": 0.4711005389690399, "learning_rate": 4.733101247461561e-06, "loss": 0.7949, "step": 1289 }, { "epoch": 0.05346263831903519, "grad_norm": 0.4546932578086853, "learning_rate": 4.732894027933193e-06, "loss": 0.7776, "step": 1290 }, { "epoch": 0.05350408222470886, "grad_norm": 0.44616714119911194, "learning_rate": 4.732686808404824e-06, "loss": 0.8046, "step": 1291 }, { "epoch": 0.05354552613038253, "grad_norm": 0.42783424258232117, "learning_rate": 4.732479588876456e-06, "loss": 0.8296, "step": 1292 }, { "epoch": 
0.0535869700360562, "grad_norm": 0.5227195620536804, "learning_rate": 4.732272369348088e-06, "loss": 0.7788, "step": 1293 }, { "epoch": 0.05362841394172987, "grad_norm": 0.4596068859100342, "learning_rate": 4.732065149819719e-06, "loss": 0.7849, "step": 1294 }, { "epoch": 0.05366985784740354, "grad_norm": 0.4606890082359314, "learning_rate": 4.7318579302913506e-06, "loss": 0.8018, "step": 1295 }, { "epoch": 0.05371130175307721, "grad_norm": 0.4518755078315735, "learning_rate": 4.731650710762983e-06, "loss": 0.761, "step": 1296 }, { "epoch": 0.05375274565875088, "grad_norm": 0.42724910378456116, "learning_rate": 4.731443491234614e-06, "loss": 0.7271, "step": 1297 }, { "epoch": 0.05379418956442455, "grad_norm": 0.4897063076496124, "learning_rate": 4.731236271706246e-06, "loss": 0.8608, "step": 1298 }, { "epoch": 0.053835633470098225, "grad_norm": 0.46976977586746216, "learning_rate": 4.731029052177878e-06, "loss": 0.7422, "step": 1299 }, { "epoch": 0.053877077375771895, "grad_norm": 0.4982362687587738, "learning_rate": 4.730821832649509e-06, "loss": 0.8499, "step": 1300 }, { "epoch": 0.053918521281445565, "grad_norm": 0.4878818690776825, "learning_rate": 4.730614613121141e-06, "loss": 0.7991, "step": 1301 }, { "epoch": 0.053959965187119235, "grad_norm": 0.46012625098228455, "learning_rate": 4.730407393592773e-06, "loss": 0.7606, "step": 1302 }, { "epoch": 0.054001409092792906, "grad_norm": 0.4629114270210266, "learning_rate": 4.730200174064404e-06, "loss": 0.7686, "step": 1303 }, { "epoch": 0.054042852998466576, "grad_norm": 0.4677286446094513, "learning_rate": 4.7299929545360356e-06, "loss": 0.7405, "step": 1304 }, { "epoch": 0.054084296904140246, "grad_norm": 0.49047672748565674, "learning_rate": 4.729785735007668e-06, "loss": 0.8142, "step": 1305 }, { "epoch": 0.054125740809813916, "grad_norm": 0.42805495858192444, "learning_rate": 4.729578515479299e-06, "loss": 0.7424, "step": 1306 }, { "epoch": 0.054167184715487586, "grad_norm": 0.44661861658096313, 
"learning_rate": 4.729371295950931e-06, "loss": 0.7452, "step": 1307 }, { "epoch": 0.054208628621161256, "grad_norm": 0.4593750834465027, "learning_rate": 4.729164076422563e-06, "loss": 0.7402, "step": 1308 }, { "epoch": 0.054250072526834926, "grad_norm": 0.4440130293369293, "learning_rate": 4.728956856894194e-06, "loss": 0.8062, "step": 1309 }, { "epoch": 0.054291516432508596, "grad_norm": 0.44352737069129944, "learning_rate": 4.728749637365826e-06, "loss": 0.7673, "step": 1310 }, { "epoch": 0.05433296033818227, "grad_norm": 0.4515160322189331, "learning_rate": 4.728542417837457e-06, "loss": 0.7649, "step": 1311 }, { "epoch": 0.054374404243855944, "grad_norm": 0.4359804689884186, "learning_rate": 4.728335198309089e-06, "loss": 0.7666, "step": 1312 }, { "epoch": 0.054415848149529614, "grad_norm": 0.4292433559894562, "learning_rate": 4.728127978780721e-06, "loss": 0.7646, "step": 1313 }, { "epoch": 0.054457292055203284, "grad_norm": 0.44275203347206116, "learning_rate": 4.727920759252352e-06, "loss": 0.782, "step": 1314 }, { "epoch": 0.054498735960876954, "grad_norm": 0.45347097516059875, "learning_rate": 4.727713539723983e-06, "loss": 0.7654, "step": 1315 }, { "epoch": 0.054540179866550624, "grad_norm": 0.5133408904075623, "learning_rate": 4.727506320195616e-06, "loss": 0.874, "step": 1316 }, { "epoch": 0.054581623772224294, "grad_norm": 0.47484713792800903, "learning_rate": 4.727299100667248e-06, "loss": 0.8022, "step": 1317 }, { "epoch": 0.054623067677897964, "grad_norm": 0.4249754846096039, "learning_rate": 4.727091881138879e-06, "loss": 0.7523, "step": 1318 }, { "epoch": 0.054664511583571634, "grad_norm": 0.46269604563713074, "learning_rate": 4.726884661610511e-06, "loss": 0.822, "step": 1319 }, { "epoch": 0.054705955489245305, "grad_norm": 0.5005534887313843, "learning_rate": 4.726677442082142e-06, "loss": 0.7722, "step": 1320 }, { "epoch": 0.054747399394918975, "grad_norm": 0.45653414726257324, "learning_rate": 4.726470222553774e-06, "loss": 0.8069, "step": 
1321 }, { "epoch": 0.054788843300592645, "grad_norm": 0.4670669436454773, "learning_rate": 4.726263003025406e-06, "loss": 0.8142, "step": 1322 }, { "epoch": 0.05483028720626632, "grad_norm": 0.41480040550231934, "learning_rate": 4.726055783497037e-06, "loss": 0.7656, "step": 1323 }, { "epoch": 0.05487173111193999, "grad_norm": 0.41693124175071716, "learning_rate": 4.725848563968668e-06, "loss": 0.7551, "step": 1324 }, { "epoch": 0.05491317501761366, "grad_norm": 0.4565029740333557, "learning_rate": 4.725641344440301e-06, "loss": 0.7603, "step": 1325 }, { "epoch": 0.05495461892328733, "grad_norm": 0.47091975808143616, "learning_rate": 4.725434124911932e-06, "loss": 0.7986, "step": 1326 }, { "epoch": 0.054996062828961, "grad_norm": 0.44375982880592346, "learning_rate": 4.725226905383563e-06, "loss": 0.7969, "step": 1327 }, { "epoch": 0.05503750673463467, "grad_norm": 0.4751598536968231, "learning_rate": 4.725019685855196e-06, "loss": 0.8193, "step": 1328 }, { "epoch": 0.05507895064030834, "grad_norm": 0.5229005813598633, "learning_rate": 4.724812466326827e-06, "loss": 0.8435, "step": 1329 }, { "epoch": 0.05512039454598201, "grad_norm": 0.48139140009880066, "learning_rate": 4.724605246798458e-06, "loss": 0.853, "step": 1330 }, { "epoch": 0.05516183845165568, "grad_norm": 0.43209391832351685, "learning_rate": 4.72439802727009e-06, "loss": 0.7439, "step": 1331 }, { "epoch": 0.05520328235732935, "grad_norm": 0.44706329703330994, "learning_rate": 4.724190807741722e-06, "loss": 0.7378, "step": 1332 }, { "epoch": 0.05524472626300302, "grad_norm": 0.43676483631134033, "learning_rate": 4.723983588213353e-06, "loss": 0.7466, "step": 1333 }, { "epoch": 0.05528617016867669, "grad_norm": 0.45234090089797974, "learning_rate": 4.723776368684986e-06, "loss": 0.7336, "step": 1334 }, { "epoch": 0.05532761407435036, "grad_norm": 0.47403037548065186, "learning_rate": 4.723569149156617e-06, "loss": 0.7788, "step": 1335 }, { "epoch": 0.05536905798002404, "grad_norm": 0.4357151389122009, 
"learning_rate": 4.723361929628248e-06, "loss": 0.7493, "step": 1336 }, { "epoch": 0.05541050188569771, "grad_norm": 0.47107189893722534, "learning_rate": 4.723154710099881e-06, "loss": 0.7499, "step": 1337 }, { "epoch": 0.05545194579137138, "grad_norm": 0.4392364025115967, "learning_rate": 4.722947490571512e-06, "loss": 0.7244, "step": 1338 }, { "epoch": 0.05549338969704505, "grad_norm": 0.43431559205055237, "learning_rate": 4.722740271043143e-06, "loss": 0.7168, "step": 1339 }, { "epoch": 0.05553483360271872, "grad_norm": 0.486935019493103, "learning_rate": 4.722533051514775e-06, "loss": 0.7734, "step": 1340 }, { "epoch": 0.05557627750839239, "grad_norm": 0.44165152311325073, "learning_rate": 4.722325831986407e-06, "loss": 0.7812, "step": 1341 }, { "epoch": 0.05561772141406606, "grad_norm": 0.4955763518810272, "learning_rate": 4.722118612458038e-06, "loss": 0.865, "step": 1342 }, { "epoch": 0.05565916531973973, "grad_norm": 0.47544750571250916, "learning_rate": 4.72191139292967e-06, "loss": 0.7437, "step": 1343 }, { "epoch": 0.0557006092254134, "grad_norm": 0.4376749098300934, "learning_rate": 4.721704173401302e-06, "loss": 0.7534, "step": 1344 }, { "epoch": 0.05574205313108707, "grad_norm": 0.43140000104904175, "learning_rate": 4.721496953872933e-06, "loss": 0.7615, "step": 1345 }, { "epoch": 0.05578349703676074, "grad_norm": 0.4696462154388428, "learning_rate": 4.721289734344565e-06, "loss": 0.729, "step": 1346 }, { "epoch": 0.05582494094243441, "grad_norm": 0.4473852515220642, "learning_rate": 4.721082514816196e-06, "loss": 0.802, "step": 1347 }, { "epoch": 0.05586638484810809, "grad_norm": 0.48599231243133545, "learning_rate": 4.720875295287828e-06, "loss": 0.783, "step": 1348 }, { "epoch": 0.05590782875378176, "grad_norm": 0.42560818791389465, "learning_rate": 4.72066807575946e-06, "loss": 0.7476, "step": 1349 }, { "epoch": 0.05594927265945543, "grad_norm": 0.46392273902893066, "learning_rate": 4.720460856231092e-06, "loss": 0.8218, "step": 1350 }, { 
"epoch": 0.0559907165651291, "grad_norm": 0.4391045868396759, "learning_rate": 4.720253636702723e-06, "loss": 0.7446, "step": 1351 }, { "epoch": 0.05603216047080277, "grad_norm": 0.45642098784446716, "learning_rate": 4.720046417174355e-06, "loss": 0.7983, "step": 1352 }, { "epoch": 0.05607360437647644, "grad_norm": 0.48248791694641113, "learning_rate": 4.719839197645987e-06, "loss": 0.8186, "step": 1353 }, { "epoch": 0.05611504828215011, "grad_norm": 0.4436752498149872, "learning_rate": 4.719631978117618e-06, "loss": 0.832, "step": 1354 }, { "epoch": 0.05615649218782378, "grad_norm": 0.4888905882835388, "learning_rate": 4.71942475858925e-06, "loss": 0.7773, "step": 1355 }, { "epoch": 0.05619793609349745, "grad_norm": 0.430061399936676, "learning_rate": 4.719217539060881e-06, "loss": 0.7517, "step": 1356 }, { "epoch": 0.05623937999917112, "grad_norm": 0.43851524591445923, "learning_rate": 4.719010319532513e-06, "loss": 0.7766, "step": 1357 }, { "epoch": 0.05628082390484479, "grad_norm": 0.4564257562160492, "learning_rate": 4.718803100004145e-06, "loss": 0.8306, "step": 1358 }, { "epoch": 0.05632226781051846, "grad_norm": 0.46868595480918884, "learning_rate": 4.718595880475776e-06, "loss": 0.7771, "step": 1359 }, { "epoch": 0.05636371171619214, "grad_norm": 0.4619618356227875, "learning_rate": 4.718388660947408e-06, "loss": 0.7883, "step": 1360 }, { "epoch": 0.05640515562186581, "grad_norm": 0.4858212471008301, "learning_rate": 4.71818144141904e-06, "loss": 0.8131, "step": 1361 }, { "epoch": 0.05644659952753948, "grad_norm": 0.46253591775894165, "learning_rate": 4.717974221890671e-06, "loss": 0.8906, "step": 1362 }, { "epoch": 0.05648804343321315, "grad_norm": 0.44040533900260925, "learning_rate": 4.7177670023623026e-06, "loss": 0.7595, "step": 1363 }, { "epoch": 0.05652948733888682, "grad_norm": 0.47029218077659607, "learning_rate": 4.717559782833935e-06, "loss": 0.8262, "step": 1364 }, { "epoch": 0.05657093124456049, "grad_norm": 0.4347769021987915, 
"learning_rate": 4.717352563305566e-06, "loss": 0.7627, "step": 1365 }, { "epoch": 0.05661237515023416, "grad_norm": 0.4234626293182373, "learning_rate": 4.7171453437771984e-06, "loss": 0.7991, "step": 1366 }, { "epoch": 0.05665381905590783, "grad_norm": 0.425837904214859, "learning_rate": 4.716938124248829e-06, "loss": 0.7546, "step": 1367 }, { "epoch": 0.0566952629615815, "grad_norm": 0.4850502908229828, "learning_rate": 4.716730904720461e-06, "loss": 0.8048, "step": 1368 }, { "epoch": 0.05673670686725517, "grad_norm": 0.4783625900745392, "learning_rate": 4.7165236851920934e-06, "loss": 0.7489, "step": 1369 }, { "epoch": 0.05677815077292884, "grad_norm": 0.4982050061225891, "learning_rate": 4.716316465663725e-06, "loss": 0.7686, "step": 1370 }, { "epoch": 0.05681959467860251, "grad_norm": 0.47850582003593445, "learning_rate": 4.716109246135356e-06, "loss": 0.8325, "step": 1371 }, { "epoch": 0.056861038584276186, "grad_norm": 0.4520390033721924, "learning_rate": 4.715902026606988e-06, "loss": 0.7834, "step": 1372 }, { "epoch": 0.056902482489949856, "grad_norm": 0.4395468235015869, "learning_rate": 4.71569480707862e-06, "loss": 0.7832, "step": 1373 }, { "epoch": 0.056943926395623526, "grad_norm": 0.49860715866088867, "learning_rate": 4.715487587550251e-06, "loss": 0.8125, "step": 1374 }, { "epoch": 0.056985370301297196, "grad_norm": 0.4712047576904297, "learning_rate": 4.715280368021883e-06, "loss": 0.7559, "step": 1375 }, { "epoch": 0.057026814206970866, "grad_norm": 0.43081262707710266, "learning_rate": 4.715073148493514e-06, "loss": 0.749, "step": 1376 }, { "epoch": 0.057068258112644536, "grad_norm": 0.49594810605049133, "learning_rate": 4.714865928965146e-06, "loss": 0.748, "step": 1377 }, { "epoch": 0.057109702018318206, "grad_norm": 0.4482758939266205, "learning_rate": 4.714658709436778e-06, "loss": 0.7896, "step": 1378 }, { "epoch": 0.057151145923991876, "grad_norm": 0.4533829391002655, "learning_rate": 4.714451489908409e-06, "loss": 0.7551, "step": 1379 }, 
{ "epoch": 0.05719258982966555, "grad_norm": 0.5021113157272339, "learning_rate": 4.714244270380041e-06, "loss": 0.7808, "step": 1380 }, { "epoch": 0.05723403373533922, "grad_norm": 0.5018630623817444, "learning_rate": 4.714037050851673e-06, "loss": 0.8, "step": 1381 }, { "epoch": 0.05727547764101289, "grad_norm": 0.5723501443862915, "learning_rate": 4.713829831323304e-06, "loss": 0.8782, "step": 1382 }, { "epoch": 0.05731692154668656, "grad_norm": 0.4557441473007202, "learning_rate": 4.713622611794935e-06, "loss": 0.8135, "step": 1383 }, { "epoch": 0.05735836545236023, "grad_norm": 0.49008315801620483, "learning_rate": 4.713415392266568e-06, "loss": 0.728, "step": 1384 }, { "epoch": 0.057399809358033904, "grad_norm": 0.49280646443367004, "learning_rate": 4.713208172738199e-06, "loss": 0.7808, "step": 1385 }, { "epoch": 0.057441253263707574, "grad_norm": 0.47247087955474854, "learning_rate": 4.713000953209831e-06, "loss": 0.7986, "step": 1386 }, { "epoch": 0.057482697169381244, "grad_norm": 0.4705398678779602, "learning_rate": 4.712793733681463e-06, "loss": 0.7925, "step": 1387 }, { "epoch": 0.057524141075054915, "grad_norm": 0.44505465030670166, "learning_rate": 4.712586514153094e-06, "loss": 0.7905, "step": 1388 }, { "epoch": 0.057565584980728585, "grad_norm": 0.5128839015960693, "learning_rate": 4.712379294624726e-06, "loss": 0.7319, "step": 1389 }, { "epoch": 0.057607028886402255, "grad_norm": 0.4779432713985443, "learning_rate": 4.712172075096358e-06, "loss": 0.7871, "step": 1390 }, { "epoch": 0.057648472792075925, "grad_norm": 0.4249486029148102, "learning_rate": 4.711964855567989e-06, "loss": 0.7109, "step": 1391 }, { "epoch": 0.057689916697749595, "grad_norm": 0.43019023537635803, "learning_rate": 4.71175763603962e-06, "loss": 0.8232, "step": 1392 }, { "epoch": 0.057731360603423265, "grad_norm": 0.43577319383621216, "learning_rate": 4.711550416511253e-06, "loss": 0.7368, "step": 1393 }, { "epoch": 0.057772804509096935, "grad_norm": 0.44867873191833496, 
"learning_rate": 4.711343196982884e-06, "loss": 0.7561, "step": 1394 }, { "epoch": 0.057814248414770605, "grad_norm": 0.45300158858299255, "learning_rate": 4.711135977454515e-06, "loss": 0.845, "step": 1395 }, { "epoch": 0.057855692320444276, "grad_norm": 0.45936110615730286, "learning_rate": 4.710928757926148e-06, "loss": 0.8066, "step": 1396 }, { "epoch": 0.05789713622611795, "grad_norm": 0.4553302228450775, "learning_rate": 4.710721538397779e-06, "loss": 0.7469, "step": 1397 }, { "epoch": 0.05793858013179162, "grad_norm": 0.4707634747028351, "learning_rate": 4.71051431886941e-06, "loss": 0.786, "step": 1398 }, { "epoch": 0.05798002403746529, "grad_norm": 0.49710139632225037, "learning_rate": 4.710307099341042e-06, "loss": 0.8093, "step": 1399 }, { "epoch": 0.05802146794313896, "grad_norm": 0.4399222135543823, "learning_rate": 4.710099879812674e-06, "loss": 0.7859, "step": 1400 }, { "epoch": 0.05806291184881263, "grad_norm": 0.4463959336280823, "learning_rate": 4.709892660284305e-06, "loss": 0.7429, "step": 1401 }, { "epoch": 0.0581043557544863, "grad_norm": 0.48263630270957947, "learning_rate": 4.709685440755938e-06, "loss": 0.7974, "step": 1402 }, { "epoch": 0.05814579966015997, "grad_norm": 0.47611480951309204, "learning_rate": 4.709478221227569e-06, "loss": 0.7676, "step": 1403 }, { "epoch": 0.05818724356583364, "grad_norm": 0.44558659195899963, "learning_rate": 4.7092710016992e-06, "loss": 0.7546, "step": 1404 }, { "epoch": 0.058228687471507314, "grad_norm": 0.4709329605102539, "learning_rate": 4.709063782170833e-06, "loss": 0.8303, "step": 1405 }, { "epoch": 0.058270131377180984, "grad_norm": 0.4659077227115631, "learning_rate": 4.708856562642464e-06, "loss": 0.8159, "step": 1406 }, { "epoch": 0.058311575282854654, "grad_norm": 0.4674285650253296, "learning_rate": 4.708649343114095e-06, "loss": 0.7412, "step": 1407 }, { "epoch": 0.058353019188528324, "grad_norm": 0.46319887042045593, "learning_rate": 4.708442123585727e-06, "loss": 0.8301, "step": 1408 }, { 
"epoch": 0.058394463094202, "grad_norm": 0.4718414545059204, "learning_rate": 4.708234904057359e-06, "loss": 0.7737, "step": 1409 }, { "epoch": 0.05843590699987567, "grad_norm": 0.42893242835998535, "learning_rate": 4.70802768452899e-06, "loss": 0.73, "step": 1410 }, { "epoch": 0.05847735090554934, "grad_norm": 0.42724114656448364, "learning_rate": 4.707820465000622e-06, "loss": 0.7969, "step": 1411 }, { "epoch": 0.05851879481122301, "grad_norm": 0.4840550422668457, "learning_rate": 4.707613245472254e-06, "loss": 0.8049, "step": 1412 }, { "epoch": 0.05856023871689668, "grad_norm": 0.4904533922672272, "learning_rate": 4.707406025943885e-06, "loss": 0.7834, "step": 1413 }, { "epoch": 0.05860168262257035, "grad_norm": 0.4311313033103943, "learning_rate": 4.707198806415517e-06, "loss": 0.6936, "step": 1414 }, { "epoch": 0.05864312652824402, "grad_norm": 0.45090553164482117, "learning_rate": 4.706991586887148e-06, "loss": 0.8176, "step": 1415 }, { "epoch": 0.05868457043391769, "grad_norm": 0.39051318168640137, "learning_rate": 4.70678436735878e-06, "loss": 0.752, "step": 1416 }, { "epoch": 0.05872601433959136, "grad_norm": 0.4844968616962433, "learning_rate": 4.706577147830412e-06, "loss": 0.7537, "step": 1417 }, { "epoch": 0.05876745824526503, "grad_norm": 0.4283495843410492, "learning_rate": 4.706369928302044e-06, "loss": 0.7722, "step": 1418 }, { "epoch": 0.0588089021509387, "grad_norm": 0.47521117329597473, "learning_rate": 4.7061627087736746e-06, "loss": 0.7988, "step": 1419 }, { "epoch": 0.05885034605661237, "grad_norm": 0.46391305327415466, "learning_rate": 4.705955489245307e-06, "loss": 0.8076, "step": 1420 }, { "epoch": 0.05889178996228604, "grad_norm": 0.45172983407974243, "learning_rate": 4.705748269716939e-06, "loss": 0.8005, "step": 1421 }, { "epoch": 0.05893323386795972, "grad_norm": 0.4898605942726135, "learning_rate": 4.7055410501885704e-06, "loss": 0.8145, "step": 1422 }, { "epoch": 0.05897467777363339, "grad_norm": 0.43830570578575134, "learning_rate": 
4.705333830660202e-06, "loss": 0.8438, "step": 1423 }, { "epoch": 0.05901612167930706, "grad_norm": 0.43851032853126526, "learning_rate": 4.705126611131833e-06, "loss": 0.7546, "step": 1424 }, { "epoch": 0.05905756558498073, "grad_norm": 0.44282862544059753, "learning_rate": 4.7049193916034654e-06, "loss": 0.7466, "step": 1425 }, { "epoch": 0.0590990094906544, "grad_norm": 0.5012243390083313, "learning_rate": 4.704712172075097e-06, "loss": 0.7932, "step": 1426 }, { "epoch": 0.05914045339632807, "grad_norm": 0.42376306653022766, "learning_rate": 4.704504952546728e-06, "loss": 0.7358, "step": 1427 }, { "epoch": 0.05918189730200174, "grad_norm": 0.46943458914756775, "learning_rate": 4.70429773301836e-06, "loss": 0.7771, "step": 1428 }, { "epoch": 0.05922334120767541, "grad_norm": 0.48581844568252563, "learning_rate": 4.704090513489992e-06, "loss": 0.7795, "step": 1429 }, { "epoch": 0.05926478511334908, "grad_norm": 0.5191372036933899, "learning_rate": 4.703883293961623e-06, "loss": 0.8181, "step": 1430 }, { "epoch": 0.05930622901902275, "grad_norm": 0.5243611931800842, "learning_rate": 4.703676074433255e-06, "loss": 0.8247, "step": 1431 }, { "epoch": 0.05934767292469642, "grad_norm": 0.5012795925140381, "learning_rate": 4.703468854904887e-06, "loss": 0.7383, "step": 1432 }, { "epoch": 0.05938911683037009, "grad_norm": 0.46006128191947937, "learning_rate": 4.703261635376518e-06, "loss": 0.823, "step": 1433 }, { "epoch": 0.05943056073604377, "grad_norm": 0.4853540062904358, "learning_rate": 4.7030544158481504e-06, "loss": 0.8584, "step": 1434 }, { "epoch": 0.05947200464171744, "grad_norm": 0.4474965035915375, "learning_rate": 4.702847196319781e-06, "loss": 0.7578, "step": 1435 }, { "epoch": 0.05951344854739111, "grad_norm": 0.45749303698539734, "learning_rate": 4.702639976791413e-06, "loss": 0.7471, "step": 1436 }, { "epoch": 0.05955489245306478, "grad_norm": 0.4620939791202545, "learning_rate": 4.702432757263045e-06, "loss": 0.7568, "step": 1437 }, { "epoch": 
0.05959633635873845, "grad_norm": 0.4508247673511505, "learning_rate": 4.702225537734677e-06, "loss": 0.7805, "step": 1438 }, { "epoch": 0.05963778026441212, "grad_norm": 0.43038907647132874, "learning_rate": 4.702018318206308e-06, "loss": 0.7759, "step": 1439 }, { "epoch": 0.05967922417008579, "grad_norm": 0.4614555239677429, "learning_rate": 4.70181109867794e-06, "loss": 0.7096, "step": 1440 }, { "epoch": 0.05972066807575946, "grad_norm": 0.5100172162055969, "learning_rate": 4.701603879149572e-06, "loss": 0.8169, "step": 1441 }, { "epoch": 0.05976211198143313, "grad_norm": 0.42111605405807495, "learning_rate": 4.701396659621203e-06, "loss": 0.7976, "step": 1442 }, { "epoch": 0.0598035558871068, "grad_norm": 0.46577054262161255, "learning_rate": 4.701189440092835e-06, "loss": 0.7642, "step": 1443 }, { "epoch": 0.05984499979278047, "grad_norm": 0.4743770956993103, "learning_rate": 4.700982220564466e-06, "loss": 0.8225, "step": 1444 }, { "epoch": 0.05988644369845414, "grad_norm": 0.4821048378944397, "learning_rate": 4.700775001036098e-06, "loss": 0.7988, "step": 1445 }, { "epoch": 0.059927887604127816, "grad_norm": 0.4471295475959778, "learning_rate": 4.70056778150773e-06, "loss": 0.78, "step": 1446 }, { "epoch": 0.059969331509801486, "grad_norm": 0.5131109356880188, "learning_rate": 4.700360561979361e-06, "loss": 0.813, "step": 1447 }, { "epoch": 0.06001077541547516, "grad_norm": 0.4415600895881653, "learning_rate": 4.700153342450993e-06, "loss": 0.7524, "step": 1448 }, { "epoch": 0.06005221932114883, "grad_norm": 0.4578064978122711, "learning_rate": 4.699946122922625e-06, "loss": 0.7898, "step": 1449 }, { "epoch": 0.0600936632268225, "grad_norm": 0.6379995942115784, "learning_rate": 4.699738903394257e-06, "loss": 0.8171, "step": 1450 }, { "epoch": 0.06013510713249617, "grad_norm": 0.4599008858203888, "learning_rate": 4.699531683865887e-06, "loss": 0.7585, "step": 1451 }, { "epoch": 0.06017655103816984, "grad_norm": 0.49943751096725464, "learning_rate": 
4.69932446433752e-06, "loss": 0.77, "step": 1452 }, { "epoch": 0.06021799494384351, "grad_norm": 0.45110073685646057, "learning_rate": 4.699117244809151e-06, "loss": 0.7832, "step": 1453 }, { "epoch": 0.06025943884951718, "grad_norm": 0.4260523021221161, "learning_rate": 4.698910025280783e-06, "loss": 0.761, "step": 1454 }, { "epoch": 0.06030088275519085, "grad_norm": 0.4733874499797821, "learning_rate": 4.698702805752415e-06, "loss": 0.7107, "step": 1455 }, { "epoch": 0.06034232666086452, "grad_norm": 0.46258240938186646, "learning_rate": 4.698495586224046e-06, "loss": 0.7787, "step": 1456 }, { "epoch": 0.06038377056653819, "grad_norm": 0.4139939546585083, "learning_rate": 4.698288366695678e-06, "loss": 0.7402, "step": 1457 }, { "epoch": 0.06042521447221186, "grad_norm": 0.45899829268455505, "learning_rate": 4.69808114716731e-06, "loss": 0.7964, "step": 1458 }, { "epoch": 0.060466658377885535, "grad_norm": 0.41487041115760803, "learning_rate": 4.697873927638941e-06, "loss": 0.7874, "step": 1459 }, { "epoch": 0.060508102283559205, "grad_norm": 0.44744035601615906, "learning_rate": 4.697666708110572e-06, "loss": 0.7849, "step": 1460 }, { "epoch": 0.060549546189232875, "grad_norm": 0.4636276066303253, "learning_rate": 4.697459488582205e-06, "loss": 0.7988, "step": 1461 }, { "epoch": 0.060590990094906545, "grad_norm": 0.4395328462123871, "learning_rate": 4.697252269053836e-06, "loss": 0.7308, "step": 1462 }, { "epoch": 0.060632434000580215, "grad_norm": 0.47965776920318604, "learning_rate": 4.697045049525467e-06, "loss": 0.8169, "step": 1463 }, { "epoch": 0.060673877906253885, "grad_norm": 0.4725888967514038, "learning_rate": 4.6968378299971e-06, "loss": 0.7573, "step": 1464 }, { "epoch": 0.060715321811927556, "grad_norm": 0.459410160779953, "learning_rate": 4.696630610468731e-06, "loss": 0.7124, "step": 1465 }, { "epoch": 0.060756765717601226, "grad_norm": 0.47637781500816345, "learning_rate": 4.696423390940362e-06, "loss": 0.7988, "step": 1466 }, { "epoch": 
0.060798209623274896, "grad_norm": 0.463990181684494, "learning_rate": 4.696216171411994e-06, "loss": 0.7771, "step": 1467 }, { "epoch": 0.060839653528948566, "grad_norm": 0.4259353280067444, "learning_rate": 4.696008951883626e-06, "loss": 0.7422, "step": 1468 }, { "epoch": 0.060881097434622236, "grad_norm": 0.443270206451416, "learning_rate": 4.695801732355257e-06, "loss": 0.8042, "step": 1469 }, { "epoch": 0.060922541340295906, "grad_norm": 0.44445404410362244, "learning_rate": 4.69559451282689e-06, "loss": 0.7572, "step": 1470 }, { "epoch": 0.06096398524596958, "grad_norm": 0.48976948857307434, "learning_rate": 4.695387293298521e-06, "loss": 0.8477, "step": 1471 }, { "epoch": 0.06100542915164325, "grad_norm": 0.437501460313797, "learning_rate": 4.695180073770152e-06, "loss": 0.7725, "step": 1472 }, { "epoch": 0.061046873057316924, "grad_norm": 0.4239104092121124, "learning_rate": 4.694972854241785e-06, "loss": 0.7419, "step": 1473 }, { "epoch": 0.061088316962990594, "grad_norm": 0.42657554149627686, "learning_rate": 4.694765634713416e-06, "loss": 0.7693, "step": 1474 }, { "epoch": 0.061129760868664264, "grad_norm": 0.4616900682449341, "learning_rate": 4.694558415185047e-06, "loss": 0.8027, "step": 1475 }, { "epoch": 0.061171204774337934, "grad_norm": 0.4385174512863159, "learning_rate": 4.694351195656679e-06, "loss": 0.824, "step": 1476 }, { "epoch": 0.061212648680011604, "grad_norm": 0.5222707390785217, "learning_rate": 4.694143976128311e-06, "loss": 0.7588, "step": 1477 }, { "epoch": 0.061254092585685274, "grad_norm": 0.46972206234931946, "learning_rate": 4.693936756599942e-06, "loss": 0.7766, "step": 1478 }, { "epoch": 0.061295536491358944, "grad_norm": 0.47943025827407837, "learning_rate": 4.693729537071574e-06, "loss": 0.8193, "step": 1479 }, { "epoch": 0.061336980397032614, "grad_norm": 0.42880117893218994, "learning_rate": 4.693522317543205e-06, "loss": 0.7551, "step": 1480 }, { "epoch": 0.061378424302706285, "grad_norm": 0.4733676314353943, 
"learning_rate": 4.6933150980148374e-06, "loss": 0.7756, "step": 1481 }, { "epoch": 0.061419868208379955, "grad_norm": 0.47677120566368103, "learning_rate": 4.693107878486469e-06, "loss": 0.7781, "step": 1482 }, { "epoch": 0.06146131211405363, "grad_norm": 0.465616375207901, "learning_rate": 4.6929006589581e-06, "loss": 0.7251, "step": 1483 }, { "epoch": 0.0615027560197273, "grad_norm": 0.4498819410800934, "learning_rate": 4.6926934394297324e-06, "loss": 0.7742, "step": 1484 }, { "epoch": 0.06154419992540097, "grad_norm": 0.46503645181655884, "learning_rate": 4.692486219901364e-06, "loss": 0.7786, "step": 1485 }, { "epoch": 0.06158564383107464, "grad_norm": 0.4819374978542328, "learning_rate": 4.692279000372996e-06, "loss": 0.7644, "step": 1486 }, { "epoch": 0.06162708773674831, "grad_norm": 0.4457657039165497, "learning_rate": 4.692071780844627e-06, "loss": 0.7695, "step": 1487 }, { "epoch": 0.06166853164242198, "grad_norm": 0.44114214181900024, "learning_rate": 4.691864561316259e-06, "loss": 0.793, "step": 1488 }, { "epoch": 0.06170997554809565, "grad_norm": 0.4471187889575958, "learning_rate": 4.69165734178789e-06, "loss": 0.7827, "step": 1489 }, { "epoch": 0.06175141945376932, "grad_norm": 0.4443762004375458, "learning_rate": 4.6914501222595224e-06, "loss": 0.7593, "step": 1490 }, { "epoch": 0.06179286335944299, "grad_norm": 0.4737187922000885, "learning_rate": 4.691242902731154e-06, "loss": 0.8752, "step": 1491 }, { "epoch": 0.06183430726511666, "grad_norm": 0.4137257933616638, "learning_rate": 4.691035683202785e-06, "loss": 0.715, "step": 1492 }, { "epoch": 0.06187575117079033, "grad_norm": 0.4365899860858917, "learning_rate": 4.6908284636744174e-06, "loss": 0.7819, "step": 1493 }, { "epoch": 0.061917195076464, "grad_norm": 0.48087775707244873, "learning_rate": 4.690621244146049e-06, "loss": 0.7505, "step": 1494 }, { "epoch": 0.06195863898213768, "grad_norm": 0.4672144651412964, "learning_rate": 4.69041402461768e-06, "loss": 0.7654, "step": 1495 }, { "epoch": 
0.06200008288781135, "grad_norm": 0.4776240885257721, "learning_rate": 4.690206805089312e-06, "loss": 0.8257, "step": 1496 }, { "epoch": 0.06204152679348502, "grad_norm": 0.4997063875198364, "learning_rate": 4.689999585560944e-06, "loss": 0.806, "step": 1497 }, { "epoch": 0.06208297069915869, "grad_norm": 0.502731204032898, "learning_rate": 4.689792366032575e-06, "loss": 0.8081, "step": 1498 }, { "epoch": 0.06212441460483236, "grad_norm": 0.4546010494232178, "learning_rate": 4.689585146504207e-06, "loss": 0.8054, "step": 1499 }, { "epoch": 0.06216585851050603, "grad_norm": 0.43346622586250305, "learning_rate": 4.689377926975839e-06, "loss": 0.8323, "step": 1500 }, { "epoch": 0.0622073024161797, "grad_norm": 0.410091757774353, "learning_rate": 4.68917070744747e-06, "loss": 0.7571, "step": 1501 }, { "epoch": 0.06224874632185337, "grad_norm": 0.43019333481788635, "learning_rate": 4.6889634879191025e-06, "loss": 0.783, "step": 1502 }, { "epoch": 0.06229019022752704, "grad_norm": 0.47395995259284973, "learning_rate": 4.688756268390733e-06, "loss": 0.8215, "step": 1503 }, { "epoch": 0.06233163413320071, "grad_norm": 0.4449435770511627, "learning_rate": 4.688549048862365e-06, "loss": 0.7222, "step": 1504 }, { "epoch": 0.06237307803887438, "grad_norm": 0.476831316947937, "learning_rate": 4.688341829333997e-06, "loss": 0.7849, "step": 1505 }, { "epoch": 0.06241452194454805, "grad_norm": 0.45852184295654297, "learning_rate": 4.688134609805629e-06, "loss": 0.8142, "step": 1506 }, { "epoch": 0.06245596585022172, "grad_norm": 0.4783462882041931, "learning_rate": 4.68792739027726e-06, "loss": 0.7932, "step": 1507 }, { "epoch": 0.0624974097558954, "grad_norm": 0.4608703553676605, "learning_rate": 4.687720170748892e-06, "loss": 0.8369, "step": 1508 }, { "epoch": 0.06253885366156907, "grad_norm": 0.5077956318855286, "learning_rate": 4.687512951220524e-06, "loss": 0.8015, "step": 1509 }, { "epoch": 0.06258029756724273, "grad_norm": 0.4172244071960449, "learning_rate": 
4.687305731692155e-06, "loss": 0.7051, "step": 1510 }, { "epoch": 0.06262174147291641, "grad_norm": 0.4445269703865051, "learning_rate": 4.687098512163787e-06, "loss": 0.7339, "step": 1511 }, { "epoch": 0.06266318537859007, "grad_norm": 0.4446474313735962, "learning_rate": 4.686891292635418e-06, "loss": 0.7917, "step": 1512 }, { "epoch": 0.06270462928426375, "grad_norm": 0.4380851089954376, "learning_rate": 4.68668407310705e-06, "loss": 0.7114, "step": 1513 }, { "epoch": 0.06274607318993743, "grad_norm": 0.4798642694950104, "learning_rate": 4.686476853578682e-06, "loss": 0.8422, "step": 1514 }, { "epoch": 0.06278751709561109, "grad_norm": 0.43944793939590454, "learning_rate": 4.686269634050313e-06, "loss": 0.7322, "step": 1515 }, { "epoch": 0.06282896100128477, "grad_norm": 0.5210661292076111, "learning_rate": 4.686062414521944e-06, "loss": 0.7836, "step": 1516 }, { "epoch": 0.06287040490695843, "grad_norm": 0.4589276611804962, "learning_rate": 4.685855194993577e-06, "loss": 0.783, "step": 1517 }, { "epoch": 0.0629118488126321, "grad_norm": 0.4895331561565399, "learning_rate": 4.685647975465209e-06, "loss": 0.8013, "step": 1518 }, { "epoch": 0.06295329271830577, "grad_norm": 0.43782681226730347, "learning_rate": 4.685440755936839e-06, "loss": 0.8188, "step": 1519 }, { "epoch": 0.06299473662397945, "grad_norm": 0.4066360294818878, "learning_rate": 4.685233536408472e-06, "loss": 0.7422, "step": 1520 }, { "epoch": 0.06303618052965311, "grad_norm": 0.4670545160770416, "learning_rate": 4.685026316880103e-06, "loss": 0.7659, "step": 1521 }, { "epoch": 0.06307762443532679, "grad_norm": 0.47140517830848694, "learning_rate": 4.684819097351735e-06, "loss": 0.761, "step": 1522 }, { "epoch": 0.06311906834100045, "grad_norm": 0.4558713138103485, "learning_rate": 4.684611877823367e-06, "loss": 0.7257, "step": 1523 }, { "epoch": 0.06316051224667413, "grad_norm": 0.39512622356414795, "learning_rate": 4.684404658294998e-06, "loss": 0.7466, "step": 1524 }, { "epoch": 
0.06320195615234779, "grad_norm": 0.43386945128440857, "learning_rate": 4.68419743876663e-06, "loss": 0.7727, "step": 1525 }, { "epoch": 0.06324340005802147, "grad_norm": 0.4203483760356903, "learning_rate": 4.683990219238262e-06, "loss": 0.7289, "step": 1526 }, { "epoch": 0.06328484396369514, "grad_norm": 0.4208204746246338, "learning_rate": 4.683782999709893e-06, "loss": 0.7422, "step": 1527 }, { "epoch": 0.06332628786936881, "grad_norm": 0.4615058898925781, "learning_rate": 4.683575780181524e-06, "loss": 0.7581, "step": 1528 }, { "epoch": 0.06336773177504249, "grad_norm": 0.48876938223838806, "learning_rate": 4.683368560653157e-06, "loss": 0.7644, "step": 1529 }, { "epoch": 0.06340917568071615, "grad_norm": 0.42914554476737976, "learning_rate": 4.683161341124788e-06, "loss": 0.8068, "step": 1530 }, { "epoch": 0.06345061958638983, "grad_norm": 0.47351449728012085, "learning_rate": 4.682954121596419e-06, "loss": 0.7629, "step": 1531 }, { "epoch": 0.06349206349206349, "grad_norm": 0.4491868019104004, "learning_rate": 4.682746902068051e-06, "loss": 0.8113, "step": 1532 }, { "epoch": 0.06353350739773717, "grad_norm": 0.4416409134864807, "learning_rate": 4.682539682539683e-06, "loss": 0.731, "step": 1533 }, { "epoch": 0.06357495130341083, "grad_norm": 0.4276033341884613, "learning_rate": 4.682332463011314e-06, "loss": 0.7473, "step": 1534 }, { "epoch": 0.0636163952090845, "grad_norm": 0.4465705454349518, "learning_rate": 4.682125243482946e-06, "loss": 0.7928, "step": 1535 }, { "epoch": 0.06365783911475817, "grad_norm": 0.4503258168697357, "learning_rate": 4.681918023954578e-06, "loss": 0.8105, "step": 1536 }, { "epoch": 0.06369928302043185, "grad_norm": 0.4535083472728729, "learning_rate": 4.681710804426209e-06, "loss": 0.7063, "step": 1537 }, { "epoch": 0.06374072692610551, "grad_norm": 0.441040575504303, "learning_rate": 4.681503584897842e-06, "loss": 0.7783, "step": 1538 }, { "epoch": 0.06378217083177919, "grad_norm": 0.4295855760574341, "learning_rate": 
4.681296365369473e-06, "loss": 0.748, "step": 1539 }, { "epoch": 0.06382361473745286, "grad_norm": 0.42273619771003723, "learning_rate": 4.6810891458411044e-06, "loss": 0.7822, "step": 1540 }, { "epoch": 0.06386505864312653, "grad_norm": 0.4403887987136841, "learning_rate": 4.680881926312736e-06, "loss": 0.7822, "step": 1541 }, { "epoch": 0.0639065025488002, "grad_norm": 0.42560821771621704, "learning_rate": 4.680674706784368e-06, "loss": 0.7515, "step": 1542 }, { "epoch": 0.06394794645447387, "grad_norm": 0.4214054346084595, "learning_rate": 4.6804674872559994e-06, "loss": 0.7244, "step": 1543 }, { "epoch": 0.06398939036014754, "grad_norm": 0.4270678758621216, "learning_rate": 4.680260267727631e-06, "loss": 0.7324, "step": 1544 }, { "epoch": 0.06403083426582121, "grad_norm": 0.45748379826545715, "learning_rate": 4.680053048199263e-06, "loss": 0.7888, "step": 1545 }, { "epoch": 0.06407227817149488, "grad_norm": 0.45477867126464844, "learning_rate": 4.6798458286708944e-06, "loss": 0.7793, "step": 1546 }, { "epoch": 0.06411372207716855, "grad_norm": 0.42887723445892334, "learning_rate": 4.679638609142526e-06, "loss": 0.7788, "step": 1547 }, { "epoch": 0.06415516598284222, "grad_norm": 0.44434836506843567, "learning_rate": 4.679431389614157e-06, "loss": 0.7415, "step": 1548 }, { "epoch": 0.06419660988851589, "grad_norm": 0.46402639150619507, "learning_rate": 4.6792241700857894e-06, "loss": 0.7993, "step": 1549 }, { "epoch": 0.06423805379418956, "grad_norm": 0.45409196615219116, "learning_rate": 4.679016950557421e-06, "loss": 0.8044, "step": 1550 }, { "epoch": 0.06427949769986324, "grad_norm": 0.4705032408237457, "learning_rate": 4.678809731029052e-06, "loss": 0.8037, "step": 1551 }, { "epoch": 0.0643209416055369, "grad_norm": 0.4447742998600006, "learning_rate": 4.6786025115006844e-06, "loss": 0.8101, "step": 1552 }, { "epoch": 0.06436238551121058, "grad_norm": 0.4853380620479584, "learning_rate": 4.678395291972316e-06, "loss": 0.8069, "step": 1553 }, { "epoch": 
0.06440382941688425, "grad_norm": 0.4272081255912781, "learning_rate": 4.678188072443948e-06, "loss": 0.7444, "step": 1554 }, { "epoch": 0.06444527332255792, "grad_norm": 0.48527634143829346, "learning_rate": 4.677980852915579e-06, "loss": 0.8231, "step": 1555 }, { "epoch": 0.06448671722823159, "grad_norm": 0.4676365852355957, "learning_rate": 4.677773633387211e-06, "loss": 0.7561, "step": 1556 }, { "epoch": 0.06452816113390526, "grad_norm": 0.47746527194976807, "learning_rate": 4.677566413858842e-06, "loss": 0.7898, "step": 1557 }, { "epoch": 0.06456960503957893, "grad_norm": 0.44547533988952637, "learning_rate": 4.6773591943304744e-06, "loss": 0.7888, "step": 1558 }, { "epoch": 0.0646110489452526, "grad_norm": 0.4056016504764557, "learning_rate": 4.677151974802106e-06, "loss": 0.6843, "step": 1559 }, { "epoch": 0.06465249285092627, "grad_norm": 0.44302716851234436, "learning_rate": 4.676944755273737e-06, "loss": 0.821, "step": 1560 }, { "epoch": 0.06469393675659994, "grad_norm": 0.4118632674217224, "learning_rate": 4.6767375357453695e-06, "loss": 0.7083, "step": 1561 }, { "epoch": 0.0647353806622736, "grad_norm": 0.46236279606819153, "learning_rate": 4.676530316217001e-06, "loss": 0.7646, "step": 1562 }, { "epoch": 0.06477682456794728, "grad_norm": 0.49624866247177124, "learning_rate": 4.676323096688632e-06, "loss": 0.7404, "step": 1563 }, { "epoch": 0.06481826847362096, "grad_norm": 0.4422208070755005, "learning_rate": 4.676115877160264e-06, "loss": 0.7632, "step": 1564 }, { "epoch": 0.06485971237929462, "grad_norm": 0.48854729533195496, "learning_rate": 4.675908657631896e-06, "loss": 0.7744, "step": 1565 }, { "epoch": 0.0649011562849683, "grad_norm": 0.44912952184677124, "learning_rate": 4.675701438103527e-06, "loss": 0.7324, "step": 1566 }, { "epoch": 0.06494260019064196, "grad_norm": 0.468426376581192, "learning_rate": 4.675494218575159e-06, "loss": 0.7429, "step": 1567 }, { "epoch": 0.06498404409631564, "grad_norm": 0.4519636332988739, "learning_rate": 
4.67528699904679e-06, "loss": 0.8245, "step": 1568 }, { "epoch": 0.0650254880019893, "grad_norm": 0.49408769607543945, "learning_rate": 4.675079779518422e-06, "loss": 0.7715, "step": 1569 }, { "epoch": 0.06506693190766298, "grad_norm": 0.46566078066825867, "learning_rate": 4.6748725599900545e-06, "loss": 0.7288, "step": 1570 }, { "epoch": 0.06510837581333664, "grad_norm": 0.43638646602630615, "learning_rate": 4.674665340461685e-06, "loss": 0.75, "step": 1571 }, { "epoch": 0.06514981971901032, "grad_norm": 0.4693780541419983, "learning_rate": 4.674458120933317e-06, "loss": 0.748, "step": 1572 }, { "epoch": 0.06519126362468398, "grad_norm": 0.4606259763240814, "learning_rate": 4.674250901404949e-06, "loss": 0.7391, "step": 1573 }, { "epoch": 0.06523270753035766, "grad_norm": 0.45809102058410645, "learning_rate": 4.674043681876581e-06, "loss": 0.7793, "step": 1574 }, { "epoch": 0.06527415143603134, "grad_norm": 0.43349623680114746, "learning_rate": 4.673836462348212e-06, "loss": 0.7623, "step": 1575 }, { "epoch": 0.065315595341705, "grad_norm": 0.4302397668361664, "learning_rate": 4.673629242819844e-06, "loss": 0.7102, "step": 1576 }, { "epoch": 0.06535703924737868, "grad_norm": 0.4467158317565918, "learning_rate": 4.673422023291475e-06, "loss": 0.7842, "step": 1577 }, { "epoch": 0.06539848315305234, "grad_norm": 0.5204900503158569, "learning_rate": 4.673214803763107e-06, "loss": 0.7776, "step": 1578 }, { "epoch": 0.06543992705872602, "grad_norm": 0.4935821294784546, "learning_rate": 4.673007584234739e-06, "loss": 0.8323, "step": 1579 }, { "epoch": 0.06548137096439968, "grad_norm": 0.450651079416275, "learning_rate": 4.67280036470637e-06, "loss": 0.7993, "step": 1580 }, { "epoch": 0.06552281487007336, "grad_norm": 0.41631844639778137, "learning_rate": 4.672593145178002e-06, "loss": 0.7034, "step": 1581 }, { "epoch": 0.06556425877574702, "grad_norm": 0.45050907135009766, "learning_rate": 4.672385925649634e-06, "loss": 0.7585, "step": 1582 }, { "epoch": 
0.0656057026814207, "grad_norm": 0.4495653808116913, "learning_rate": 4.672178706121265e-06, "loss": 0.7335, "step": 1583 }, { "epoch": 0.06564714658709436, "grad_norm": 0.42504242062568665, "learning_rate": 4.671971486592896e-06, "loss": 0.7749, "step": 1584 }, { "epoch": 0.06568859049276804, "grad_norm": 0.44387149810791016, "learning_rate": 4.671764267064529e-06, "loss": 0.7383, "step": 1585 }, { "epoch": 0.0657300343984417, "grad_norm": 0.45584166049957275, "learning_rate": 4.671557047536161e-06, "loss": 0.744, "step": 1586 }, { "epoch": 0.06577147830411538, "grad_norm": 0.4470459520816803, "learning_rate": 4.671349828007791e-06, "loss": 0.7817, "step": 1587 }, { "epoch": 0.06581292220978906, "grad_norm": 0.45685678720474243, "learning_rate": 4.671142608479424e-06, "loss": 0.7222, "step": 1588 }, { "epoch": 0.06585436611546272, "grad_norm": 0.4680252969264984, "learning_rate": 4.670935388951055e-06, "loss": 0.7336, "step": 1589 }, { "epoch": 0.0658958100211364, "grad_norm": 0.42901307344436646, "learning_rate": 4.670728169422687e-06, "loss": 0.7434, "step": 1590 }, { "epoch": 0.06593725392681006, "grad_norm": 0.45285874605178833, "learning_rate": 4.670520949894319e-06, "loss": 0.7166, "step": 1591 }, { "epoch": 0.06597869783248374, "grad_norm": 0.4365731477737427, "learning_rate": 4.67031373036595e-06, "loss": 0.7371, "step": 1592 }, { "epoch": 0.0660201417381574, "grad_norm": 0.44257187843322754, "learning_rate": 4.670106510837581e-06, "loss": 0.7249, "step": 1593 }, { "epoch": 0.06606158564383108, "grad_norm": 0.4507882297039032, "learning_rate": 4.669899291309214e-06, "loss": 0.762, "step": 1594 }, { "epoch": 0.06610302954950474, "grad_norm": 0.43328067660331726, "learning_rate": 4.669692071780845e-06, "loss": 0.7585, "step": 1595 }, { "epoch": 0.06614447345517842, "grad_norm": 0.44777682423591614, "learning_rate": 4.669484852252476e-06, "loss": 0.7573, "step": 1596 }, { "epoch": 0.06618591736085208, "grad_norm": 0.4552536606788635, "learning_rate": 
4.669277632724109e-06, "loss": 0.7507, "step": 1597 }, { "epoch": 0.06622736126652576, "grad_norm": 0.48245152831077576, "learning_rate": 4.66907041319574e-06, "loss": 0.7755, "step": 1598 }, { "epoch": 0.06626880517219942, "grad_norm": 0.47234493494033813, "learning_rate": 4.6688631936673714e-06, "loss": 0.7712, "step": 1599 }, { "epoch": 0.0663102490778731, "grad_norm": 0.5068262815475464, "learning_rate": 4.668655974139003e-06, "loss": 0.7896, "step": 1600 }, { "epoch": 0.06635169298354678, "grad_norm": 0.45288634300231934, "learning_rate": 4.668448754610635e-06, "loss": 0.7129, "step": 1601 }, { "epoch": 0.06639313688922044, "grad_norm": 0.4196891486644745, "learning_rate": 4.6682415350822664e-06, "loss": 0.718, "step": 1602 }, { "epoch": 0.06643458079489412, "grad_norm": 0.473163902759552, "learning_rate": 4.668034315553898e-06, "loss": 0.7795, "step": 1603 }, { "epoch": 0.06647602470056778, "grad_norm": 0.4324408769607544, "learning_rate": 4.66782709602553e-06, "loss": 0.7424, "step": 1604 }, { "epoch": 0.06651746860624146, "grad_norm": 0.46043696999549866, "learning_rate": 4.6676198764971614e-06, "loss": 0.8027, "step": 1605 }, { "epoch": 0.06655891251191512, "grad_norm": 0.4144105315208435, "learning_rate": 4.667412656968794e-06, "loss": 0.7634, "step": 1606 }, { "epoch": 0.0666003564175888, "grad_norm": 0.4830254912376404, "learning_rate": 4.667205437440425e-06, "loss": 0.7607, "step": 1607 }, { "epoch": 0.06664180032326246, "grad_norm": 0.4808370769023895, "learning_rate": 4.6669982179120564e-06, "loss": 0.8357, "step": 1608 }, { "epoch": 0.06668324422893614, "grad_norm": 0.4250320792198181, "learning_rate": 4.666790998383688e-06, "loss": 0.7776, "step": 1609 }, { "epoch": 0.0667246881346098, "grad_norm": 0.4177250862121582, "learning_rate": 4.66658377885532e-06, "loss": 0.7646, "step": 1610 }, { "epoch": 0.06676613204028348, "grad_norm": 0.3989412486553192, "learning_rate": 4.6663765593269514e-06, "loss": 0.7439, "step": 1611 }, { "epoch": 
0.06680757594595715, "grad_norm": 0.4994300305843353, "learning_rate": 4.666169339798583e-06, "loss": 0.8162, "step": 1612 }, { "epoch": 0.06684901985163082, "grad_norm": 0.422446072101593, "learning_rate": 4.665962120270215e-06, "loss": 0.743, "step": 1613 }, { "epoch": 0.0668904637573045, "grad_norm": 0.4232836067676544, "learning_rate": 4.6657549007418464e-06, "loss": 0.7207, "step": 1614 }, { "epoch": 0.06693190766297816, "grad_norm": 0.45700791478157043, "learning_rate": 4.665547681213478e-06, "loss": 0.7759, "step": 1615 }, { "epoch": 0.06697335156865183, "grad_norm": 0.4804055690765381, "learning_rate": 4.665340461685109e-06, "loss": 0.76, "step": 1616 }, { "epoch": 0.0670147954743255, "grad_norm": 0.45987018942832947, "learning_rate": 4.6651332421567414e-06, "loss": 0.7122, "step": 1617 }, { "epoch": 0.06705623937999917, "grad_norm": 0.477420836687088, "learning_rate": 4.664926022628373e-06, "loss": 0.8225, "step": 1618 }, { "epoch": 0.06709768328567284, "grad_norm": 0.4248233437538147, "learning_rate": 4.664718803100004e-06, "loss": 0.7201, "step": 1619 }, { "epoch": 0.06713912719134651, "grad_norm": 0.4365939497947693, "learning_rate": 4.664511583571636e-06, "loss": 0.7185, "step": 1620 }, { "epoch": 0.06718057109702018, "grad_norm": 0.4280262887477875, "learning_rate": 4.664304364043268e-06, "loss": 0.8062, "step": 1621 }, { "epoch": 0.06722201500269386, "grad_norm": 0.43703097105026245, "learning_rate": 4.6640971445149e-06, "loss": 0.7522, "step": 1622 }, { "epoch": 0.06726345890836752, "grad_norm": 0.4505535066127777, "learning_rate": 4.6638899249865315e-06, "loss": 0.7996, "step": 1623 }, { "epoch": 0.0673049028140412, "grad_norm": 0.4685426354408264, "learning_rate": 4.663682705458163e-06, "loss": 0.781, "step": 1624 }, { "epoch": 0.06734634671971487, "grad_norm": 0.43711674213409424, "learning_rate": 4.663475485929794e-06, "loss": 0.7649, "step": 1625 }, { "epoch": 0.06738779062538854, "grad_norm": 0.43566495180130005, "learning_rate": 
4.6632682664014265e-06, "loss": 0.733, "step": 1626 }, { "epoch": 0.06742923453106221, "grad_norm": 0.4581296741962433, "learning_rate": 4.663061046873058e-06, "loss": 0.7876, "step": 1627 }, { "epoch": 0.06747067843673588, "grad_norm": 0.5490488409996033, "learning_rate": 4.662853827344689e-06, "loss": 0.9067, "step": 1628 }, { "epoch": 0.06751212234240955, "grad_norm": 0.46907538175582886, "learning_rate": 4.662646607816321e-06, "loss": 0.7646, "step": 1629 }, { "epoch": 0.06755356624808322, "grad_norm": 0.49457135796546936, "learning_rate": 4.662439388287953e-06, "loss": 0.7527, "step": 1630 }, { "epoch": 0.0675950101537569, "grad_norm": 0.4529842436313629, "learning_rate": 4.662232168759584e-06, "loss": 0.7402, "step": 1631 }, { "epoch": 0.06763645405943056, "grad_norm": 0.45160120725631714, "learning_rate": 4.662024949231216e-06, "loss": 0.7551, "step": 1632 }, { "epoch": 0.06767789796510423, "grad_norm": 0.4262199103832245, "learning_rate": 4.661817729702848e-06, "loss": 0.7288, "step": 1633 }, { "epoch": 0.0677193418707779, "grad_norm": 0.454715758562088, "learning_rate": 4.661610510174479e-06, "loss": 0.7764, "step": 1634 }, { "epoch": 0.06776078577645157, "grad_norm": 0.5298017263412476, "learning_rate": 4.661403290646111e-06, "loss": 0.8652, "step": 1635 }, { "epoch": 0.06780222968212524, "grad_norm": 0.4710478186607361, "learning_rate": 4.661196071117742e-06, "loss": 0.7661, "step": 1636 }, { "epoch": 0.06784367358779891, "grad_norm": 0.5291531085968018, "learning_rate": 4.660988851589374e-06, "loss": 0.7936, "step": 1637 }, { "epoch": 0.06788511749347259, "grad_norm": 0.4823746979236603, "learning_rate": 4.660781632061006e-06, "loss": 0.7834, "step": 1638 }, { "epoch": 0.06792656139914625, "grad_norm": 0.43640822172164917, "learning_rate": 4.660574412532637e-06, "loss": 0.8044, "step": 1639 }, { "epoch": 0.06796800530481993, "grad_norm": 0.48759734630584717, "learning_rate": 4.660367193004269e-06, "loss": 0.7856, "step": 1640 }, { "epoch": 
0.0680094492104936, "grad_norm": 0.49422430992126465, "learning_rate": 4.660159973475901e-06, "loss": 0.8447, "step": 1641 }, { "epoch": 0.06805089311616727, "grad_norm": 0.48106130957603455, "learning_rate": 4.659952753947533e-06, "loss": 0.7454, "step": 1642 }, { "epoch": 0.06809233702184093, "grad_norm": 0.4398304224014282, "learning_rate": 4.659745534419164e-06, "loss": 0.7585, "step": 1643 }, { "epoch": 0.06813378092751461, "grad_norm": 0.448781818151474, "learning_rate": 4.659538314890796e-06, "loss": 0.7178, "step": 1644 }, { "epoch": 0.06817522483318827, "grad_norm": 0.43553951382637024, "learning_rate": 4.659331095362427e-06, "loss": 0.7378, "step": 1645 }, { "epoch": 0.06821666873886195, "grad_norm": 0.4915466904640198, "learning_rate": 4.659123875834059e-06, "loss": 0.8108, "step": 1646 }, { "epoch": 0.06825811264453562, "grad_norm": 0.43068745732307434, "learning_rate": 4.658916656305691e-06, "loss": 0.759, "step": 1647 }, { "epoch": 0.06829955655020929, "grad_norm": 0.48811671137809753, "learning_rate": 4.658709436777322e-06, "loss": 0.7252, "step": 1648 }, { "epoch": 0.06834100045588297, "grad_norm": 0.42943674325942993, "learning_rate": 4.658502217248954e-06, "loss": 0.7346, "step": 1649 }, { "epoch": 0.06838244436155663, "grad_norm": 0.45456504821777344, "learning_rate": 4.658294997720586e-06, "loss": 0.8611, "step": 1650 }, { "epoch": 0.06842388826723031, "grad_norm": 0.471125990152359, "learning_rate": 4.658087778192217e-06, "loss": 0.7593, "step": 1651 }, { "epoch": 0.06846533217290397, "grad_norm": 0.42458441853523254, "learning_rate": 4.657880558663848e-06, "loss": 0.7579, "step": 1652 }, { "epoch": 0.06850677607857765, "grad_norm": 0.48126089572906494, "learning_rate": 4.657673339135481e-06, "loss": 0.7642, "step": 1653 }, { "epoch": 0.06854821998425131, "grad_norm": 0.48993566632270813, "learning_rate": 4.657466119607112e-06, "loss": 0.8167, "step": 1654 }, { "epoch": 0.06858966388992499, "grad_norm": 0.4605776369571686, "learning_rate": 
4.657258900078743e-06, "loss": 0.7513, "step": 1655 }, { "epoch": 0.06863110779559865, "grad_norm": 0.4563843905925751, "learning_rate": 4.657051680550376e-06, "loss": 0.7415, "step": 1656 }, { "epoch": 0.06867255170127233, "grad_norm": 0.47484487295150757, "learning_rate": 4.656844461022007e-06, "loss": 0.7703, "step": 1657 }, { "epoch": 0.068713995606946, "grad_norm": 0.41562286019325256, "learning_rate": 4.656637241493639e-06, "loss": 0.7227, "step": 1658 }, { "epoch": 0.06875543951261967, "grad_norm": 0.46570515632629395, "learning_rate": 4.656430021965271e-06, "loss": 0.772, "step": 1659 }, { "epoch": 0.06879688341829333, "grad_norm": 0.407827228307724, "learning_rate": 4.656222802436902e-06, "loss": 0.7377, "step": 1660 }, { "epoch": 0.06883832732396701, "grad_norm": 0.46641311049461365, "learning_rate": 4.6560155829085334e-06, "loss": 0.7974, "step": 1661 }, { "epoch": 0.06887977122964069, "grad_norm": 0.45002833008766174, "learning_rate": 4.655808363380166e-06, "loss": 0.7939, "step": 1662 }, { "epoch": 0.06892121513531435, "grad_norm": 0.4493491053581238, "learning_rate": 4.655601143851797e-06, "loss": 0.8389, "step": 1663 }, { "epoch": 0.06896265904098803, "grad_norm": 0.42593252658843994, "learning_rate": 4.6553939243234284e-06, "loss": 0.6902, "step": 1664 }, { "epoch": 0.06900410294666169, "grad_norm": 0.4662606120109558, "learning_rate": 4.655186704795061e-06, "loss": 0.7375, "step": 1665 }, { "epoch": 0.06904554685233537, "grad_norm": 0.4615080654621124, "learning_rate": 4.654979485266692e-06, "loss": 0.7437, "step": 1666 }, { "epoch": 0.06908699075800903, "grad_norm": 0.44211065769195557, "learning_rate": 4.6547722657383234e-06, "loss": 0.7905, "step": 1667 }, { "epoch": 0.06912843466368271, "grad_norm": 0.465715616941452, "learning_rate": 4.654565046209955e-06, "loss": 0.7839, "step": 1668 }, { "epoch": 0.06916987856935637, "grad_norm": 0.4777056872844696, "learning_rate": 4.654357826681587e-06, "loss": 0.7595, "step": 1669 }, { "epoch": 
0.06921132247503005, "grad_norm": 0.4435918629169464, "learning_rate": 4.6541506071532184e-06, "loss": 0.8022, "step": 1670 }, { "epoch": 0.06925276638070371, "grad_norm": 0.4559552073478699, "learning_rate": 4.65394338762485e-06, "loss": 0.761, "step": 1671 }, { "epoch": 0.06929421028637739, "grad_norm": 0.444330632686615, "learning_rate": 4.653736168096481e-06, "loss": 0.7439, "step": 1672 }, { "epoch": 0.06933565419205105, "grad_norm": 0.4851124584674835, "learning_rate": 4.6535289485681134e-06, "loss": 0.8171, "step": 1673 }, { "epoch": 0.06937709809772473, "grad_norm": 0.43949106335639954, "learning_rate": 4.653321729039746e-06, "loss": 0.8054, "step": 1674 }, { "epoch": 0.0694185420033984, "grad_norm": 0.435026079416275, "learning_rate": 4.653114509511377e-06, "loss": 0.8062, "step": 1675 }, { "epoch": 0.06945998590907207, "grad_norm": 0.43054988980293274, "learning_rate": 4.6529072899830084e-06, "loss": 0.7554, "step": 1676 }, { "epoch": 0.06950142981474575, "grad_norm": 0.4451897144317627, "learning_rate": 4.65270007045464e-06, "loss": 0.7673, "step": 1677 }, { "epoch": 0.06954287372041941, "grad_norm": 0.4691118896007538, "learning_rate": 4.652492850926272e-06, "loss": 0.781, "step": 1678 }, { "epoch": 0.06958431762609309, "grad_norm": 0.4606602191925049, "learning_rate": 4.6522856313979035e-06, "loss": 0.8108, "step": 1679 }, { "epoch": 0.06962576153176675, "grad_norm": 0.47613826394081116, "learning_rate": 4.652078411869535e-06, "loss": 0.8213, "step": 1680 }, { "epoch": 0.06966720543744043, "grad_norm": 0.45324766635894775, "learning_rate": 4.651871192341166e-06, "loss": 0.8015, "step": 1681 }, { "epoch": 0.06970864934311409, "grad_norm": 0.48258739709854126, "learning_rate": 4.6516639728127985e-06, "loss": 0.8423, "step": 1682 }, { "epoch": 0.06975009324878777, "grad_norm": 0.40911394357681274, "learning_rate": 4.65145675328443e-06, "loss": 0.6725, "step": 1683 }, { "epoch": 0.06979153715446143, "grad_norm": 0.45600974559783936, "learning_rate": 
4.651249533756061e-06, "loss": 0.7744, "step": 1684 }, { "epoch": 0.06983298106013511, "grad_norm": 0.4636862277984619, "learning_rate": 4.6510423142276935e-06, "loss": 0.7986, "step": 1685 }, { "epoch": 0.06987442496580878, "grad_norm": 0.44224169850349426, "learning_rate": 4.650835094699325e-06, "loss": 0.7793, "step": 1686 }, { "epoch": 0.06991586887148245, "grad_norm": 0.4245778024196625, "learning_rate": 4.650627875170956e-06, "loss": 0.7649, "step": 1687 }, { "epoch": 0.06995731277715612, "grad_norm": 0.4712643325328827, "learning_rate": 4.650420655642588e-06, "loss": 0.792, "step": 1688 }, { "epoch": 0.06999875668282979, "grad_norm": 0.41124558448791504, "learning_rate": 4.65021343611422e-06, "loss": 0.7478, "step": 1689 }, { "epoch": 0.07004020058850347, "grad_norm": 0.4189101457595825, "learning_rate": 4.650006216585851e-06, "loss": 0.7231, "step": 1690 }, { "epoch": 0.07008164449417713, "grad_norm": 0.44122588634490967, "learning_rate": 4.6497989970574835e-06, "loss": 0.7632, "step": 1691 }, { "epoch": 0.0701230883998508, "grad_norm": 0.44176021218299866, "learning_rate": 4.649591777529115e-06, "loss": 0.7637, "step": 1692 }, { "epoch": 0.07016453230552447, "grad_norm": 0.47611570358276367, "learning_rate": 4.649384558000746e-06, "loss": 0.79, "step": 1693 }, { "epoch": 0.07020597621119815, "grad_norm": 0.4398527145385742, "learning_rate": 4.6491773384723785e-06, "loss": 0.7966, "step": 1694 }, { "epoch": 0.07024742011687181, "grad_norm": 0.49485838413238525, "learning_rate": 4.64897011894401e-06, "loss": 0.7539, "step": 1695 }, { "epoch": 0.07028886402254549, "grad_norm": 0.46022871136665344, "learning_rate": 4.648762899415641e-06, "loss": 0.771, "step": 1696 }, { "epoch": 0.07033030792821915, "grad_norm": 0.4651918113231659, "learning_rate": 4.648555679887273e-06, "loss": 0.7654, "step": 1697 }, { "epoch": 0.07037175183389283, "grad_norm": 0.4170083701610565, "learning_rate": 4.648348460358905e-06, "loss": 0.73, "step": 1698 }, { "epoch": 
0.0704131957395665, "grad_norm": 0.43438443541526794, "learning_rate": 4.648141240830536e-06, "loss": 0.7209, "step": 1699 }, { "epoch": 0.07045463964524017, "grad_norm": 0.4811383783817291, "learning_rate": 4.647934021302168e-06, "loss": 0.7734, "step": 1700 }, { "epoch": 0.07049608355091384, "grad_norm": 0.47572991251945496, "learning_rate": 4.6477268017738e-06, "loss": 0.7439, "step": 1701 }, { "epoch": 0.0705375274565875, "grad_norm": 0.46861550211906433, "learning_rate": 4.647519582245431e-06, "loss": 0.8271, "step": 1702 }, { "epoch": 0.07057897136226118, "grad_norm": 0.534471333026886, "learning_rate": 4.647312362717063e-06, "loss": 0.7556, "step": 1703 }, { "epoch": 0.07062041526793485, "grad_norm": 0.5152859091758728, "learning_rate": 4.647105143188694e-06, "loss": 0.7727, "step": 1704 }, { "epoch": 0.07066185917360852, "grad_norm": 0.4691943824291229, "learning_rate": 4.646897923660326e-06, "loss": 0.7405, "step": 1705 }, { "epoch": 0.07070330307928219, "grad_norm": 0.45241427421569824, "learning_rate": 4.646690704131958e-06, "loss": 0.7932, "step": 1706 }, { "epoch": 0.07074474698495586, "grad_norm": 0.4873049855232239, "learning_rate": 4.646483484603589e-06, "loss": 0.781, "step": 1707 }, { "epoch": 0.07078619089062953, "grad_norm": 0.4366340637207031, "learning_rate": 4.646276265075221e-06, "loss": 0.7665, "step": 1708 }, { "epoch": 0.0708276347963032, "grad_norm": 0.4557400047779083, "learning_rate": 4.646069045546853e-06, "loss": 0.7502, "step": 1709 }, { "epoch": 0.07086907870197687, "grad_norm": 0.44708120822906494, "learning_rate": 4.645861826018485e-06, "loss": 0.7664, "step": 1710 }, { "epoch": 0.07091052260765054, "grad_norm": 0.4516519606113434, "learning_rate": 4.645654606490116e-06, "loss": 0.7607, "step": 1711 }, { "epoch": 0.07095196651332422, "grad_norm": 0.4604353904724121, "learning_rate": 4.645447386961748e-06, "loss": 0.7756, "step": 1712 }, { "epoch": 0.07099341041899788, "grad_norm": 0.47678330540657043, "learning_rate": 
4.645240167433379e-06, "loss": 0.7566, "step": 1713 }, { "epoch": 0.07103485432467156, "grad_norm": 0.4621085226535797, "learning_rate": 4.645032947905011e-06, "loss": 0.7588, "step": 1714 }, { "epoch": 0.07107629823034523, "grad_norm": 0.46358540654182434, "learning_rate": 4.644825728376643e-06, "loss": 0.7412, "step": 1715 }, { "epoch": 0.0711177421360189, "grad_norm": 0.4876124858856201, "learning_rate": 4.644618508848274e-06, "loss": 0.7544, "step": 1716 }, { "epoch": 0.07115918604169257, "grad_norm": 0.49796178936958313, "learning_rate": 4.644411289319906e-06, "loss": 0.8438, "step": 1717 }, { "epoch": 0.07120062994736624, "grad_norm": 0.44890812039375305, "learning_rate": 4.644204069791538e-06, "loss": 0.7877, "step": 1718 }, { "epoch": 0.0712420738530399, "grad_norm": 0.4732809066772461, "learning_rate": 4.643996850263169e-06, "loss": 0.748, "step": 1719 }, { "epoch": 0.07128351775871358, "grad_norm": 0.46018171310424805, "learning_rate": 4.6437896307348004e-06, "loss": 0.7466, "step": 1720 }, { "epoch": 0.07132496166438725, "grad_norm": 0.4570143222808838, "learning_rate": 4.643582411206433e-06, "loss": 0.7585, "step": 1721 }, { "epoch": 0.07136640557006092, "grad_norm": 0.4149571359157562, "learning_rate": 4.643375191678064e-06, "loss": 0.7573, "step": 1722 }, { "epoch": 0.0714078494757346, "grad_norm": 0.43422549962997437, "learning_rate": 4.6431679721496954e-06, "loss": 0.77, "step": 1723 }, { "epoch": 0.07144929338140826, "grad_norm": 0.49335604906082153, "learning_rate": 4.642960752621327e-06, "loss": 0.8105, "step": 1724 }, { "epoch": 0.07149073728708194, "grad_norm": 0.6594836115837097, "learning_rate": 4.642753533092959e-06, "loss": 0.7544, "step": 1725 }, { "epoch": 0.0715321811927556, "grad_norm": 0.44890618324279785, "learning_rate": 4.642546313564591e-06, "loss": 0.7498, "step": 1726 }, { "epoch": 0.07157362509842928, "grad_norm": 0.455856591463089, "learning_rate": 4.642339094036223e-06, "loss": 0.7451, "step": 1727 }, { "epoch": 
0.07161506900410294, "grad_norm": 0.4311956465244293, "learning_rate": 4.642131874507854e-06, "loss": 0.7832, "step": 1728 }, { "epoch": 0.07165651290977662, "grad_norm": 0.44108057022094727, "learning_rate": 4.6419246549794854e-06, "loss": 0.7881, "step": 1729 }, { "epoch": 0.07169795681545028, "grad_norm": 0.4821377396583557, "learning_rate": 4.641717435451118e-06, "loss": 0.8066, "step": 1730 }, { "epoch": 0.07173940072112396, "grad_norm": 0.46589818596839905, "learning_rate": 4.641510215922749e-06, "loss": 0.7408, "step": 1731 }, { "epoch": 0.07178084462679762, "grad_norm": 0.4932117164134979, "learning_rate": 4.6413029963943804e-06, "loss": 0.8564, "step": 1732 }, { "epoch": 0.0718222885324713, "grad_norm": 0.4159393906593323, "learning_rate": 4.641095776866012e-06, "loss": 0.7754, "step": 1733 }, { "epoch": 0.07186373243814496, "grad_norm": 0.5250783562660217, "learning_rate": 4.640888557337644e-06, "loss": 0.8098, "step": 1734 }, { "epoch": 0.07190517634381864, "grad_norm": 0.43870437145233154, "learning_rate": 4.6406813378092754e-06, "loss": 0.8259, "step": 1735 }, { "epoch": 0.07194662024949232, "grad_norm": 0.4944676160812378, "learning_rate": 4.640474118280907e-06, "loss": 0.7622, "step": 1736 }, { "epoch": 0.07198806415516598, "grad_norm": 0.46410930156707764, "learning_rate": 4.640266898752539e-06, "loss": 0.7576, "step": 1737 }, { "epoch": 0.07202950806083966, "grad_norm": 0.46144595742225647, "learning_rate": 4.6400596792241705e-06, "loss": 0.79, "step": 1738 }, { "epoch": 0.07207095196651332, "grad_norm": 0.4611121416091919, "learning_rate": 4.639852459695802e-06, "loss": 0.7866, "step": 1739 }, { "epoch": 0.072112395872187, "grad_norm": 0.4492151439189911, "learning_rate": 4.639645240167433e-06, "loss": 0.781, "step": 1740 }, { "epoch": 0.07215383977786066, "grad_norm": 0.4567330479621887, "learning_rate": 4.6394380206390655e-06, "loss": 0.7595, "step": 1741 }, { "epoch": 0.07219528368353434, "grad_norm": 0.4769268333911896, "learning_rate": 
4.639230801110697e-06, "loss": 0.8462, "step": 1742 }, { "epoch": 0.072236727589208, "grad_norm": 0.46909695863723755, "learning_rate": 4.639023581582329e-06, "loss": 0.7361, "step": 1743 }, { "epoch": 0.07227817149488168, "grad_norm": 0.4566887319087982, "learning_rate": 4.6388163620539605e-06, "loss": 0.7832, "step": 1744 }, { "epoch": 0.07231961540055534, "grad_norm": 0.4635995626449585, "learning_rate": 4.638609142525592e-06, "loss": 0.8091, "step": 1745 }, { "epoch": 0.07236105930622902, "grad_norm": 0.4519689083099365, "learning_rate": 4.638401922997224e-06, "loss": 0.7576, "step": 1746 }, { "epoch": 0.07240250321190268, "grad_norm": 0.476813405752182, "learning_rate": 4.6381947034688555e-06, "loss": 0.7925, "step": 1747 }, { "epoch": 0.07244394711757636, "grad_norm": 0.4610627293586731, "learning_rate": 4.637987483940487e-06, "loss": 0.7559, "step": 1748 }, { "epoch": 0.07248539102325004, "grad_norm": 0.4306154251098633, "learning_rate": 4.637780264412118e-06, "loss": 0.7178, "step": 1749 }, { "epoch": 0.0725268349289237, "grad_norm": 0.46092885732650757, "learning_rate": 4.6375730448837505e-06, "loss": 0.7737, "step": 1750 }, { "epoch": 0.07256827883459738, "grad_norm": 0.465017169713974, "learning_rate": 4.637365825355382e-06, "loss": 0.76, "step": 1751 }, { "epoch": 0.07260972274027104, "grad_norm": 0.4280245304107666, "learning_rate": 4.637158605827013e-06, "loss": 0.7446, "step": 1752 }, { "epoch": 0.07265116664594472, "grad_norm": 0.4563804268836975, "learning_rate": 4.6369513862986455e-06, "loss": 0.7339, "step": 1753 }, { "epoch": 0.07269261055161838, "grad_norm": 0.5036217570304871, "learning_rate": 4.636744166770277e-06, "loss": 0.7544, "step": 1754 }, { "epoch": 0.07273405445729206, "grad_norm": 0.4994726777076721, "learning_rate": 4.636536947241908e-06, "loss": 0.8147, "step": 1755 }, { "epoch": 0.07277549836296572, "grad_norm": 0.45011934638023376, "learning_rate": 4.63632972771354e-06, "loss": 0.8234, "step": 1756 }, { "epoch": 
0.0728169422686394, "grad_norm": 0.44011151790618896, "learning_rate": 4.636122508185172e-06, "loss": 0.7568, "step": 1757 }, { "epoch": 0.07285838617431306, "grad_norm": 0.4454973340034485, "learning_rate": 4.635915288656803e-06, "loss": 0.8315, "step": 1758 }, { "epoch": 0.07289983007998674, "grad_norm": 0.45458829402923584, "learning_rate": 4.6357080691284355e-06, "loss": 0.7703, "step": 1759 }, { "epoch": 0.07294127398566042, "grad_norm": 0.43965891003608704, "learning_rate": 4.635500849600067e-06, "loss": 0.7583, "step": 1760 }, { "epoch": 0.07298271789133408, "grad_norm": 0.43473947048187256, "learning_rate": 4.635293630071698e-06, "loss": 0.7438, "step": 1761 }, { "epoch": 0.07302416179700776, "grad_norm": 0.46284231543540955, "learning_rate": 4.6350864105433305e-06, "loss": 0.7578, "step": 1762 }, { "epoch": 0.07306560570268142, "grad_norm": 0.4338874816894531, "learning_rate": 4.634879191014962e-06, "loss": 0.7261, "step": 1763 }, { "epoch": 0.0731070496083551, "grad_norm": 0.47353309392929077, "learning_rate": 4.634671971486593e-06, "loss": 0.8005, "step": 1764 }, { "epoch": 0.07314849351402876, "grad_norm": 0.4663343131542206, "learning_rate": 4.634464751958225e-06, "loss": 0.7659, "step": 1765 }, { "epoch": 0.07318993741970244, "grad_norm": 0.4709503948688507, "learning_rate": 4.634257532429857e-06, "loss": 0.8142, "step": 1766 }, { "epoch": 0.0732313813253761, "grad_norm": 0.458511084318161, "learning_rate": 4.634050312901488e-06, "loss": 0.7346, "step": 1767 }, { "epoch": 0.07327282523104978, "grad_norm": 0.44481754302978516, "learning_rate": 4.63384309337312e-06, "loss": 0.7502, "step": 1768 }, { "epoch": 0.07331426913672344, "grad_norm": 0.44879600405693054, "learning_rate": 4.633635873844752e-06, "loss": 0.7341, "step": 1769 }, { "epoch": 0.07335571304239712, "grad_norm": 0.4418463110923767, "learning_rate": 4.633428654316383e-06, "loss": 0.7878, "step": 1770 }, { "epoch": 0.07339715694807078, "grad_norm": 0.487883985042572, "learning_rate": 
4.633221434788015e-06, "loss": 0.7795, "step": 1771 }, { "epoch": 0.07343860085374446, "grad_norm": 0.45064201951026917, "learning_rate": 4.633014215259646e-06, "loss": 0.7622, "step": 1772 }, { "epoch": 0.07348004475941813, "grad_norm": 0.415192574262619, "learning_rate": 4.632806995731278e-06, "loss": 0.7065, "step": 1773 }, { "epoch": 0.0735214886650918, "grad_norm": 0.4516944885253906, "learning_rate": 4.63259977620291e-06, "loss": 0.7307, "step": 1774 }, { "epoch": 0.07356293257076547, "grad_norm": 0.4642982482910156, "learning_rate": 4.632392556674542e-06, "loss": 0.791, "step": 1775 }, { "epoch": 0.07360437647643914, "grad_norm": 0.43663284182548523, "learning_rate": 4.6321853371461724e-06, "loss": 0.7566, "step": 1776 }, { "epoch": 0.07364582038211281, "grad_norm": 0.4271349608898163, "learning_rate": 4.631978117617805e-06, "loss": 0.7798, "step": 1777 }, { "epoch": 0.07368726428778648, "grad_norm": 0.43104761838912964, "learning_rate": 4.631770898089437e-06, "loss": 0.7344, "step": 1778 }, { "epoch": 0.07372870819346015, "grad_norm": 0.4554607570171356, "learning_rate": 4.631563678561068e-06, "loss": 0.7886, "step": 1779 }, { "epoch": 0.07377015209913382, "grad_norm": 0.45183441042900085, "learning_rate": 4.6313564590327e-06, "loss": 0.7346, "step": 1780 }, { "epoch": 0.0738115960048075, "grad_norm": 0.41788461804389954, "learning_rate": 4.631149239504331e-06, "loss": 0.7795, "step": 1781 }, { "epoch": 0.07385303991048116, "grad_norm": 0.4727994203567505, "learning_rate": 4.630942019975963e-06, "loss": 0.7151, "step": 1782 }, { "epoch": 0.07389448381615484, "grad_norm": 0.45103439688682556, "learning_rate": 4.630734800447595e-06, "loss": 0.7476, "step": 1783 }, { "epoch": 0.0739359277218285, "grad_norm": 0.4557344913482666, "learning_rate": 4.630527580919226e-06, "loss": 0.7764, "step": 1784 }, { "epoch": 0.07397737162750218, "grad_norm": 0.44310054183006287, "learning_rate": 4.6303203613908574e-06, "loss": 0.7083, "step": 1785 }, { "epoch": 
0.07401881553317585, "grad_norm": 0.4647631347179413, "learning_rate": 4.63011314186249e-06, "loss": 0.7305, "step": 1786 }, { "epoch": 0.07406025943884952, "grad_norm": 0.44456273317337036, "learning_rate": 4.629905922334121e-06, "loss": 0.8049, "step": 1787 }, { "epoch": 0.07410170334452319, "grad_norm": 0.41680532693862915, "learning_rate": 4.6296987028057524e-06, "loss": 0.7422, "step": 1788 }, { "epoch": 0.07414314725019686, "grad_norm": 0.4570443332195282, "learning_rate": 4.629491483277385e-06, "loss": 0.7717, "step": 1789 }, { "epoch": 0.07418459115587053, "grad_norm": 0.4430422782897949, "learning_rate": 4.629284263749016e-06, "loss": 0.7542, "step": 1790 }, { "epoch": 0.0742260350615442, "grad_norm": 0.4563167691230774, "learning_rate": 4.6290770442206474e-06, "loss": 0.7268, "step": 1791 }, { "epoch": 0.07426747896721787, "grad_norm": 0.47300484776496887, "learning_rate": 4.628869824692279e-06, "loss": 0.7957, "step": 1792 }, { "epoch": 0.07430892287289154, "grad_norm": 0.4505418539047241, "learning_rate": 4.628662605163911e-06, "loss": 0.7656, "step": 1793 }, { "epoch": 0.07435036677856521, "grad_norm": 0.42699867486953735, "learning_rate": 4.6284553856355424e-06, "loss": 0.7562, "step": 1794 }, { "epoch": 0.07439181068423888, "grad_norm": 0.44827938079833984, "learning_rate": 4.628248166107175e-06, "loss": 0.72, "step": 1795 }, { "epoch": 0.07443325458991255, "grad_norm": 0.5012211799621582, "learning_rate": 4.628040946578806e-06, "loss": 0.7952, "step": 1796 }, { "epoch": 0.07447469849558623, "grad_norm": 0.45306214690208435, "learning_rate": 4.6278337270504375e-06, "loss": 0.7749, "step": 1797 }, { "epoch": 0.0745161424012599, "grad_norm": 0.45056119561195374, "learning_rate": 4.62762650752207e-06, "loss": 0.7694, "step": 1798 }, { "epoch": 0.07455758630693357, "grad_norm": 0.4454525411128998, "learning_rate": 4.627419287993701e-06, "loss": 0.7412, "step": 1799 }, { "epoch": 0.07459903021260723, "grad_norm": 0.45956140756607056, "learning_rate": 
4.6272120684653325e-06, "loss": 0.7942, "step": 1800 }, { "epoch": 0.07464047411828091, "grad_norm": 0.4564327597618103, "learning_rate": 4.627004848936964e-06, "loss": 0.8018, "step": 1801 }, { "epoch": 0.07468191802395457, "grad_norm": 0.4510807693004608, "learning_rate": 4.626797629408596e-06, "loss": 0.8018, "step": 1802 }, { "epoch": 0.07472336192962825, "grad_norm": 0.5180459022521973, "learning_rate": 4.6265904098802275e-06, "loss": 0.8035, "step": 1803 }, { "epoch": 0.07476480583530191, "grad_norm": 0.44152164459228516, "learning_rate": 4.626383190351859e-06, "loss": 0.7864, "step": 1804 }, { "epoch": 0.07480624974097559, "grad_norm": 0.47599801421165466, "learning_rate": 4.626175970823491e-06, "loss": 0.8071, "step": 1805 }, { "epoch": 0.07484769364664925, "grad_norm": 0.4898378252983093, "learning_rate": 4.6259687512951225e-06, "loss": 0.7656, "step": 1806 }, { "epoch": 0.07488913755232293, "grad_norm": 0.4746813178062439, "learning_rate": 4.625761531766754e-06, "loss": 0.7423, "step": 1807 }, { "epoch": 0.0749305814579966, "grad_norm": 0.4731309115886688, "learning_rate": 4.625554312238385e-06, "loss": 0.7949, "step": 1808 }, { "epoch": 0.07497202536367027, "grad_norm": 0.43048739433288574, "learning_rate": 4.6253470927100175e-06, "loss": 0.7268, "step": 1809 }, { "epoch": 0.07501346926934395, "grad_norm": 0.4917040169239044, "learning_rate": 4.625139873181649e-06, "loss": 0.7804, "step": 1810 }, { "epoch": 0.07505491317501761, "grad_norm": 0.4528306722640991, "learning_rate": 4.624932653653281e-06, "loss": 0.7878, "step": 1811 }, { "epoch": 0.07509635708069129, "grad_norm": 0.4328532814979553, "learning_rate": 4.624725434124912e-06, "loss": 0.7419, "step": 1812 }, { "epoch": 0.07513780098636495, "grad_norm": 0.44404372572898865, "learning_rate": 4.624518214596544e-06, "loss": 0.7148, "step": 1813 }, { "epoch": 0.07517924489203863, "grad_norm": 0.43389609456062317, "learning_rate": 4.624310995068176e-06, "loss": 0.802, "step": 1814 }, { "epoch": 
0.07522068879771229, "grad_norm": 0.42758652567863464, "learning_rate": 4.6241037755398075e-06, "loss": 0.7297, "step": 1815 }, { "epoch": 0.07526213270338597, "grad_norm": 0.42928996682167053, "learning_rate": 4.623896556011439e-06, "loss": 0.7554, "step": 1816 }, { "epoch": 0.07530357660905963, "grad_norm": 0.4639790654182434, "learning_rate": 4.62368933648307e-06, "loss": 0.7373, "step": 1817 }, { "epoch": 0.07534502051473331, "grad_norm": 0.4239185154438019, "learning_rate": 4.6234821169547025e-06, "loss": 0.7074, "step": 1818 }, { "epoch": 0.07538646442040697, "grad_norm": 0.4425605237483978, "learning_rate": 4.623274897426334e-06, "loss": 0.7534, "step": 1819 }, { "epoch": 0.07542790832608065, "grad_norm": 0.45999810099601746, "learning_rate": 4.623067677897965e-06, "loss": 0.8037, "step": 1820 }, { "epoch": 0.07546935223175431, "grad_norm": 0.4081891179084778, "learning_rate": 4.6228604583695975e-06, "loss": 0.7026, "step": 1821 }, { "epoch": 0.07551079613742799, "grad_norm": 0.510579526424408, "learning_rate": 4.622653238841229e-06, "loss": 0.8203, "step": 1822 }, { "epoch": 0.07555224004310167, "grad_norm": 0.49238812923431396, "learning_rate": 4.62244601931286e-06, "loss": 0.7388, "step": 1823 }, { "epoch": 0.07559368394877533, "grad_norm": 0.46058303117752075, "learning_rate": 4.622238799784492e-06, "loss": 0.7537, "step": 1824 }, { "epoch": 0.07563512785444901, "grad_norm": 0.4655688405036926, "learning_rate": 4.622031580256124e-06, "loss": 0.7637, "step": 1825 }, { "epoch": 0.07567657176012267, "grad_norm": 0.44930219650268555, "learning_rate": 4.621824360727755e-06, "loss": 0.8131, "step": 1826 }, { "epoch": 0.07571801566579635, "grad_norm": 0.4302092492580414, "learning_rate": 4.6216171411993875e-06, "loss": 0.7528, "step": 1827 }, { "epoch": 0.07575945957147001, "grad_norm": 0.423718124628067, "learning_rate": 4.621409921671018e-06, "loss": 0.7457, "step": 1828 }, { "epoch": 0.07580090347714369, "grad_norm": 0.46975329518318176, "learning_rate": 
4.62120270214265e-06, "loss": 0.7742, "step": 1829 }, { "epoch": 0.07584234738281735, "grad_norm": 0.45333707332611084, "learning_rate": 4.6209954826142825e-06, "loss": 0.7683, "step": 1830 }, { "epoch": 0.07588379128849103, "grad_norm": 0.48876506090164185, "learning_rate": 4.620788263085914e-06, "loss": 0.8501, "step": 1831 }, { "epoch": 0.07592523519416469, "grad_norm": 0.46611636877059937, "learning_rate": 4.620581043557545e-06, "loss": 0.8022, "step": 1832 }, { "epoch": 0.07596667909983837, "grad_norm": 0.4619766175746918, "learning_rate": 4.620373824029177e-06, "loss": 0.7773, "step": 1833 }, { "epoch": 0.07600812300551205, "grad_norm": 0.46724531054496765, "learning_rate": 4.620166604500809e-06, "loss": 0.745, "step": 1834 }, { "epoch": 0.07604956691118571, "grad_norm": 0.46512022614479065, "learning_rate": 4.61995938497244e-06, "loss": 0.71, "step": 1835 }, { "epoch": 0.07609101081685939, "grad_norm": 0.4498942494392395, "learning_rate": 4.619752165444072e-06, "loss": 0.7638, "step": 1836 }, { "epoch": 0.07613245472253305, "grad_norm": 0.44740912318229675, "learning_rate": 4.619544945915703e-06, "loss": 0.7937, "step": 1837 }, { "epoch": 0.07617389862820673, "grad_norm": 0.4779653251171112, "learning_rate": 4.619337726387335e-06, "loss": 0.8081, "step": 1838 }, { "epoch": 0.07621534253388039, "grad_norm": 0.4262476861476898, "learning_rate": 4.619130506858967e-06, "loss": 0.7131, "step": 1839 }, { "epoch": 0.07625678643955407, "grad_norm": 0.4475859999656677, "learning_rate": 4.618923287330598e-06, "loss": 0.7463, "step": 1840 }, { "epoch": 0.07629823034522773, "grad_norm": 0.4014715850353241, "learning_rate": 4.61871606780223e-06, "loss": 0.6975, "step": 1841 }, { "epoch": 0.07633967425090141, "grad_norm": 0.4433557987213135, "learning_rate": 4.618508848273862e-06, "loss": 0.7993, "step": 1842 }, { "epoch": 0.07638111815657507, "grad_norm": 0.46803778409957886, "learning_rate": 4.618301628745494e-06, "loss": 0.7153, "step": 1843 }, { "epoch": 
0.07642256206224875, "grad_norm": 0.4221131503582001, "learning_rate": 4.6180944092171244e-06, "loss": 0.709, "step": 1844 }, { "epoch": 0.07646400596792241, "grad_norm": 0.4289880394935608, "learning_rate": 4.617887189688757e-06, "loss": 0.8154, "step": 1845 }, { "epoch": 0.07650544987359609, "grad_norm": 0.47242870926856995, "learning_rate": 4.617679970160388e-06, "loss": 0.8127, "step": 1846 }, { "epoch": 0.07654689377926976, "grad_norm": 0.4612913727760315, "learning_rate": 4.61747275063202e-06, "loss": 0.791, "step": 1847 }, { "epoch": 0.07658833768494343, "grad_norm": 0.4600946009159088, "learning_rate": 4.617265531103652e-06, "loss": 0.7964, "step": 1848 }, { "epoch": 0.0766297815906171, "grad_norm": 0.5173287987709045, "learning_rate": 4.617058311575283e-06, "loss": 0.8289, "step": 1849 }, { "epoch": 0.07667122549629077, "grad_norm": 0.4896432161331177, "learning_rate": 4.616851092046915e-06, "loss": 0.8408, "step": 1850 }, { "epoch": 0.07671266940196445, "grad_norm": 0.44647738337516785, "learning_rate": 4.616643872518547e-06, "loss": 0.7458, "step": 1851 }, { "epoch": 0.07675411330763811, "grad_norm": 0.4450191259384155, "learning_rate": 4.616436652990178e-06, "loss": 0.7839, "step": 1852 }, { "epoch": 0.07679555721331179, "grad_norm": 0.4258873462677002, "learning_rate": 4.6162294334618094e-06, "loss": 0.7537, "step": 1853 }, { "epoch": 0.07683700111898545, "grad_norm": 0.4854743480682373, "learning_rate": 4.616022213933442e-06, "loss": 0.7612, "step": 1854 }, { "epoch": 0.07687844502465913, "grad_norm": 0.45001545548439026, "learning_rate": 4.615814994405073e-06, "loss": 0.7522, "step": 1855 }, { "epoch": 0.07691988893033279, "grad_norm": 0.48317083716392517, "learning_rate": 4.6156077748767045e-06, "loss": 0.7351, "step": 1856 }, { "epoch": 0.07696133283600647, "grad_norm": 0.44324567914009094, "learning_rate": 4.615400555348337e-06, "loss": 0.7673, "step": 1857 }, { "epoch": 0.07700277674168014, "grad_norm": 0.47571250796318054, "learning_rate": 
4.615193335819968e-06, "loss": 0.772, "step": 1858 }, { "epoch": 0.0770442206473538, "grad_norm": 0.44750216603279114, "learning_rate": 4.6149861162915995e-06, "loss": 0.7661, "step": 1859 }, { "epoch": 0.07708566455302748, "grad_norm": 0.4616469442844391, "learning_rate": 4.614778896763231e-06, "loss": 0.7515, "step": 1860 }, { "epoch": 0.07712710845870115, "grad_norm": 0.44754815101623535, "learning_rate": 4.614571677234863e-06, "loss": 0.769, "step": 1861 }, { "epoch": 0.07716855236437482, "grad_norm": 0.4579787850379944, "learning_rate": 4.6143644577064945e-06, "loss": 0.7974, "step": 1862 }, { "epoch": 0.07720999627004849, "grad_norm": 0.4634620249271393, "learning_rate": 4.614157238178127e-06, "loss": 0.7599, "step": 1863 }, { "epoch": 0.07725144017572216, "grad_norm": 0.5181484222412109, "learning_rate": 4.613950018649758e-06, "loss": 0.8057, "step": 1864 }, { "epoch": 0.07729288408139583, "grad_norm": 0.4826219975948334, "learning_rate": 4.6137427991213895e-06, "loss": 0.7866, "step": 1865 }, { "epoch": 0.0773343279870695, "grad_norm": 0.4336509704589844, "learning_rate": 4.613535579593022e-06, "loss": 0.6987, "step": 1866 }, { "epoch": 0.07737577189274317, "grad_norm": 0.5099712014198303, "learning_rate": 4.613328360064653e-06, "loss": 0.7466, "step": 1867 }, { "epoch": 0.07741721579841684, "grad_norm": 0.44108128547668457, "learning_rate": 4.6131211405362845e-06, "loss": 0.7507, "step": 1868 }, { "epoch": 0.07745865970409051, "grad_norm": 0.46545690298080444, "learning_rate": 4.612913921007916e-06, "loss": 0.7551, "step": 1869 }, { "epoch": 0.07750010360976418, "grad_norm": 0.4285983443260193, "learning_rate": 4.612706701479548e-06, "loss": 0.7216, "step": 1870 }, { "epoch": 0.07754154751543786, "grad_norm": 0.45119625329971313, "learning_rate": 4.6124994819511795e-06, "loss": 0.7583, "step": 1871 }, { "epoch": 0.07758299142111152, "grad_norm": 0.4831332862377167, "learning_rate": 4.612292262422811e-06, "loss": 0.7666, "step": 1872 }, { "epoch": 
0.0776244353267852, "grad_norm": 0.43292492628097534, "learning_rate": 4.612085042894442e-06, "loss": 0.7573, "step": 1873 }, { "epoch": 0.07766587923245886, "grad_norm": 0.4782642722129822, "learning_rate": 4.6118778233660745e-06, "loss": 0.8037, "step": 1874 }, { "epoch": 0.07770732313813254, "grad_norm": 0.5037537217140198, "learning_rate": 4.611670603837706e-06, "loss": 0.783, "step": 1875 }, { "epoch": 0.0777487670438062, "grad_norm": 0.43975114822387695, "learning_rate": 4.611463384309337e-06, "loss": 0.7878, "step": 1876 }, { "epoch": 0.07779021094947988, "grad_norm": 0.467578649520874, "learning_rate": 4.6112561647809695e-06, "loss": 0.79, "step": 1877 }, { "epoch": 0.07783165485515355, "grad_norm": 0.44706177711486816, "learning_rate": 4.611048945252601e-06, "loss": 0.8015, "step": 1878 }, { "epoch": 0.07787309876082722, "grad_norm": 0.45218080282211304, "learning_rate": 4.610841725724233e-06, "loss": 0.7659, "step": 1879 }, { "epoch": 0.07791454266650089, "grad_norm": 0.43183889985084534, "learning_rate": 4.610634506195864e-06, "loss": 0.761, "step": 1880 }, { "epoch": 0.07795598657217456, "grad_norm": 0.4780930280685425, "learning_rate": 4.610427286667496e-06, "loss": 0.7717, "step": 1881 }, { "epoch": 0.07799743047784823, "grad_norm": 0.47216352820396423, "learning_rate": 4.610220067139128e-06, "loss": 0.7263, "step": 1882 }, { "epoch": 0.0780388743835219, "grad_norm": 0.4856502413749695, "learning_rate": 4.6100128476107595e-06, "loss": 0.7703, "step": 1883 }, { "epoch": 0.07808031828919558, "grad_norm": 0.4615204334259033, "learning_rate": 4.609805628082391e-06, "loss": 0.7324, "step": 1884 }, { "epoch": 0.07812176219486924, "grad_norm": 0.4522905945777893, "learning_rate": 4.609598408554022e-06, "loss": 0.8352, "step": 1885 }, { "epoch": 0.07816320610054292, "grad_norm": 0.4266384243965149, "learning_rate": 4.6093911890256545e-06, "loss": 0.7415, "step": 1886 }, { "epoch": 0.07820465000621658, "grad_norm": 0.4685969054698944, "learning_rate": 
4.609183969497286e-06, "loss": 0.7869, "step": 1887 }, { "epoch": 0.07824609391189026, "grad_norm": 0.47244465351104736, "learning_rate": 4.608976749968917e-06, "loss": 0.7898, "step": 1888 }, { "epoch": 0.07828753781756392, "grad_norm": 0.4805828332901001, "learning_rate": 4.608769530440549e-06, "loss": 0.7979, "step": 1889 }, { "epoch": 0.0783289817232376, "grad_norm": 0.45322510600090027, "learning_rate": 4.608562310912181e-06, "loss": 0.7227, "step": 1890 }, { "epoch": 0.07837042562891126, "grad_norm": 0.4402100443840027, "learning_rate": 4.608355091383812e-06, "loss": 0.7358, "step": 1891 }, { "epoch": 0.07841186953458494, "grad_norm": 0.463314414024353, "learning_rate": 4.608147871855444e-06, "loss": 0.844, "step": 1892 }, { "epoch": 0.0784533134402586, "grad_norm": 0.4943394958972931, "learning_rate": 4.607940652327076e-06, "loss": 0.8186, "step": 1893 }, { "epoch": 0.07849475734593228, "grad_norm": 0.4464211165904999, "learning_rate": 4.607733432798707e-06, "loss": 0.7534, "step": 1894 }, { "epoch": 0.07853620125160596, "grad_norm": 0.46243879199028015, "learning_rate": 4.6075262132703395e-06, "loss": 0.7307, "step": 1895 }, { "epoch": 0.07857764515727962, "grad_norm": 0.45432737469673157, "learning_rate": 4.60731899374197e-06, "loss": 0.7786, "step": 1896 }, { "epoch": 0.0786190890629533, "grad_norm": 0.45538121461868286, "learning_rate": 4.607111774213602e-06, "loss": 0.791, "step": 1897 }, { "epoch": 0.07866053296862696, "grad_norm": 0.4796019196510315, "learning_rate": 4.606904554685234e-06, "loss": 0.7864, "step": 1898 }, { "epoch": 0.07870197687430064, "grad_norm": 0.4167230427265167, "learning_rate": 4.606697335156866e-06, "loss": 0.7327, "step": 1899 }, { "epoch": 0.0787434207799743, "grad_norm": 0.4214766323566437, "learning_rate": 4.606490115628497e-06, "loss": 0.761, "step": 1900 }, { "epoch": 0.07878486468564798, "grad_norm": 0.4327312707901001, "learning_rate": 4.606282896100129e-06, "loss": 0.6953, "step": 1901 }, { "epoch": 
0.07882630859132164, "grad_norm": 0.44663581252098083, "learning_rate": 4.606075676571761e-06, "loss": 0.7932, "step": 1902 }, { "epoch": 0.07886775249699532, "grad_norm": 0.4363744258880615, "learning_rate": 4.605868457043392e-06, "loss": 0.7649, "step": 1903 }, { "epoch": 0.07890919640266898, "grad_norm": 0.4099828004837036, "learning_rate": 4.605661237515024e-06, "loss": 0.8247, "step": 1904 }, { "epoch": 0.07895064030834266, "grad_norm": 0.4296228885650635, "learning_rate": 4.605454017986655e-06, "loss": 0.7888, "step": 1905 }, { "epoch": 0.07899208421401632, "grad_norm": 0.434773713350296, "learning_rate": 4.605246798458287e-06, "loss": 0.7224, "step": 1906 }, { "epoch": 0.07903352811969, "grad_norm": 0.4375283122062683, "learning_rate": 4.605039578929919e-06, "loss": 0.7751, "step": 1907 }, { "epoch": 0.07907497202536368, "grad_norm": 0.4627740681171417, "learning_rate": 4.60483235940155e-06, "loss": 0.761, "step": 1908 }, { "epoch": 0.07911641593103734, "grad_norm": 0.42502644658088684, "learning_rate": 4.604625139873182e-06, "loss": 0.7346, "step": 1909 }, { "epoch": 0.07915785983671102, "grad_norm": 0.4524705111980438, "learning_rate": 4.604417920344814e-06, "loss": 0.7283, "step": 1910 }, { "epoch": 0.07919930374238468, "grad_norm": 0.4681034982204437, "learning_rate": 4.604210700816446e-06, "loss": 0.8347, "step": 1911 }, { "epoch": 0.07924074764805836, "grad_norm": 0.46250027418136597, "learning_rate": 4.6040034812880764e-06, "loss": 0.7784, "step": 1912 }, { "epoch": 0.07928219155373202, "grad_norm": 0.4581931531429291, "learning_rate": 4.603796261759709e-06, "loss": 0.8081, "step": 1913 }, { "epoch": 0.0793236354594057, "grad_norm": 0.4367046654224396, "learning_rate": 4.60358904223134e-06, "loss": 0.7017, "step": 1914 }, { "epoch": 0.07936507936507936, "grad_norm": 0.46020442247390747, "learning_rate": 4.603381822702972e-06, "loss": 0.7974, "step": 1915 }, { "epoch": 0.07940652327075304, "grad_norm": 0.41448089480400085, "learning_rate": 
4.603174603174604e-06, "loss": 0.7317, "step": 1916 }, { "epoch": 0.0794479671764267, "grad_norm": 0.4349333345890045, "learning_rate": 4.602967383646235e-06, "loss": 0.6948, "step": 1917 }, { "epoch": 0.07948941108210038, "grad_norm": 0.4636598527431488, "learning_rate": 4.602760164117867e-06, "loss": 0.7952, "step": 1918 }, { "epoch": 0.07953085498777404, "grad_norm": 0.48504671454429626, "learning_rate": 4.602552944589499e-06, "loss": 0.825, "step": 1919 }, { "epoch": 0.07957229889344772, "grad_norm": 0.49243369698524475, "learning_rate": 4.60234572506113e-06, "loss": 0.803, "step": 1920 }, { "epoch": 0.0796137427991214, "grad_norm": 0.4513554871082306, "learning_rate": 4.6021385055327615e-06, "loss": 0.7253, "step": 1921 }, { "epoch": 0.07965518670479506, "grad_norm": 0.4651351571083069, "learning_rate": 4.601931286004394e-06, "loss": 0.7645, "step": 1922 }, { "epoch": 0.07969663061046874, "grad_norm": 0.4624966084957123, "learning_rate": 4.601724066476025e-06, "loss": 0.7452, "step": 1923 }, { "epoch": 0.0797380745161424, "grad_norm": 0.4130507707595825, "learning_rate": 4.6015168469476565e-06, "loss": 0.7126, "step": 1924 }, { "epoch": 0.07977951842181608, "grad_norm": 0.4527469277381897, "learning_rate": 4.601309627419288e-06, "loss": 0.7476, "step": 1925 }, { "epoch": 0.07982096232748974, "grad_norm": 0.4475564956665039, "learning_rate": 4.60110240789092e-06, "loss": 0.7803, "step": 1926 }, { "epoch": 0.07986240623316342, "grad_norm": 0.44627103209495544, "learning_rate": 4.6008951883625515e-06, "loss": 0.689, "step": 1927 }, { "epoch": 0.07990385013883708, "grad_norm": 0.4628615081310272, "learning_rate": 4.600687968834183e-06, "loss": 0.8145, "step": 1928 }, { "epoch": 0.07994529404451076, "grad_norm": 0.4401334822177887, "learning_rate": 4.600480749305815e-06, "loss": 0.7498, "step": 1929 }, { "epoch": 0.07998673795018442, "grad_norm": 0.4138008654117584, "learning_rate": 4.6002735297774465e-06, "loss": 0.6868, "step": 1930 }, { "epoch": 
0.0800281818558581, "grad_norm": 0.4302521347999573, "learning_rate": 4.600066310249079e-06, "loss": 0.7698, "step": 1931 }, { "epoch": 0.08006962576153177, "grad_norm": 0.4618249535560608, "learning_rate": 4.59985909072071e-06, "loss": 0.8013, "step": 1932 }, { "epoch": 0.08011106966720544, "grad_norm": 0.441426157951355, "learning_rate": 4.5996518711923415e-06, "loss": 0.739, "step": 1933 }, { "epoch": 0.08015251357287911, "grad_norm": 0.4190951883792877, "learning_rate": 4.599444651663973e-06, "loss": 0.6686, "step": 1934 }, { "epoch": 0.08019395747855278, "grad_norm": 0.4135172367095947, "learning_rate": 4.599237432135605e-06, "loss": 0.7686, "step": 1935 }, { "epoch": 0.08023540138422645, "grad_norm": 0.44404852390289307, "learning_rate": 4.5990302126072365e-06, "loss": 0.6989, "step": 1936 }, { "epoch": 0.08027684528990012, "grad_norm": 0.4356656074523926, "learning_rate": 4.598822993078868e-06, "loss": 0.8096, "step": 1937 }, { "epoch": 0.0803182891955738, "grad_norm": 0.4899286925792694, "learning_rate": 4.5986157735505e-06, "loss": 0.8252, "step": 1938 }, { "epoch": 0.08035973310124746, "grad_norm": 0.445220023393631, "learning_rate": 4.5984085540221315e-06, "loss": 0.7866, "step": 1939 }, { "epoch": 0.08040117700692113, "grad_norm": 0.4810304641723633, "learning_rate": 4.598201334493763e-06, "loss": 0.772, "step": 1940 }, { "epoch": 0.0804426209125948, "grad_norm": 0.4239565134048462, "learning_rate": 4.597994114965394e-06, "loss": 0.7096, "step": 1941 }, { "epoch": 0.08048406481826847, "grad_norm": 0.456948846578598, "learning_rate": 4.5977868954370265e-06, "loss": 0.7961, "step": 1942 }, { "epoch": 0.08052550872394214, "grad_norm": 0.4533609449863434, "learning_rate": 4.597579675908658e-06, "loss": 0.7356, "step": 1943 }, { "epoch": 0.08056695262961582, "grad_norm": 0.4711451232433319, "learning_rate": 4.597372456380289e-06, "loss": 0.7578, "step": 1944 }, { "epoch": 0.08060839653528949, "grad_norm": 0.47080543637275696, "learning_rate": 
4.5971652368519215e-06, "loss": 0.8152, "step": 1945 }, { "epoch": 0.08064984044096316, "grad_norm": 0.4283369183540344, "learning_rate": 4.596958017323553e-06, "loss": 0.7065, "step": 1946 }, { "epoch": 0.08069128434663683, "grad_norm": 0.4214217960834503, "learning_rate": 4.596750797795185e-06, "loss": 0.7466, "step": 1947 }, { "epoch": 0.0807327282523105, "grad_norm": 0.4439764618873596, "learning_rate": 4.5965435782668165e-06, "loss": 0.8103, "step": 1948 }, { "epoch": 0.08077417215798417, "grad_norm": 0.44330283999443054, "learning_rate": 4.596336358738448e-06, "loss": 0.8318, "step": 1949 }, { "epoch": 0.08081561606365784, "grad_norm": 0.41693663597106934, "learning_rate": 4.596129139210079e-06, "loss": 0.7219, "step": 1950 }, { "epoch": 0.08085705996933151, "grad_norm": 0.4680829346179962, "learning_rate": 4.5959219196817115e-06, "loss": 0.7461, "step": 1951 }, { "epoch": 0.08089850387500518, "grad_norm": 0.43037012219429016, "learning_rate": 4.595714700153343e-06, "loss": 0.7157, "step": 1952 }, { "epoch": 0.08093994778067885, "grad_norm": 0.49243882298469543, "learning_rate": 4.595507480624974e-06, "loss": 0.7725, "step": 1953 }, { "epoch": 0.08098139168635252, "grad_norm": 0.44856080412864685, "learning_rate": 4.5953002610966065e-06, "loss": 0.7278, "step": 1954 }, { "epoch": 0.0810228355920262, "grad_norm": 0.4417436420917511, "learning_rate": 4.595093041568238e-06, "loss": 0.7793, "step": 1955 }, { "epoch": 0.08106427949769986, "grad_norm": 0.40614381432533264, "learning_rate": 4.594885822039869e-06, "loss": 0.7368, "step": 1956 }, { "epoch": 0.08110572340337353, "grad_norm": 0.4587932527065277, "learning_rate": 4.594678602511501e-06, "loss": 0.7842, "step": 1957 }, { "epoch": 0.08114716730904721, "grad_norm": 0.4725556969642639, "learning_rate": 4.594471382983133e-06, "loss": 0.7366, "step": 1958 }, { "epoch": 0.08118861121472087, "grad_norm": 0.44160541892051697, "learning_rate": 4.594264163454764e-06, "loss": 0.7676, "step": 1959 }, { "epoch": 
0.08123005512039455, "grad_norm": 0.43638449907302856, "learning_rate": 4.594056943926396e-06, "loss": 0.7231, "step": 1960 }, { "epoch": 0.08127149902606821, "grad_norm": 0.43149739503860474, "learning_rate": 4.593849724398028e-06, "loss": 0.6981, "step": 1961 }, { "epoch": 0.08131294293174189, "grad_norm": 0.47470149397850037, "learning_rate": 4.593642504869659e-06, "loss": 0.7316, "step": 1962 }, { "epoch": 0.08135438683741555, "grad_norm": 0.4233434796333313, "learning_rate": 4.5934352853412915e-06, "loss": 0.7559, "step": 1963 }, { "epoch": 0.08139583074308923, "grad_norm": 0.46559303998947144, "learning_rate": 4.593228065812922e-06, "loss": 0.8, "step": 1964 }, { "epoch": 0.0814372746487629, "grad_norm": 0.45025813579559326, "learning_rate": 4.593020846284554e-06, "loss": 0.7407, "step": 1965 }, { "epoch": 0.08147871855443657, "grad_norm": 0.4029502868652344, "learning_rate": 4.592813626756186e-06, "loss": 0.696, "step": 1966 }, { "epoch": 0.08152016246011023, "grad_norm": 0.4447357654571533, "learning_rate": 4.592606407227818e-06, "loss": 0.7544, "step": 1967 }, { "epoch": 0.08156160636578391, "grad_norm": 0.4972272217273712, "learning_rate": 4.592399187699449e-06, "loss": 0.7617, "step": 1968 }, { "epoch": 0.08160305027145759, "grad_norm": 0.4270951449871063, "learning_rate": 4.592191968171081e-06, "loss": 0.8064, "step": 1969 }, { "epoch": 0.08164449417713125, "grad_norm": 0.4376661777496338, "learning_rate": 4.591984748642713e-06, "loss": 0.6991, "step": 1970 }, { "epoch": 0.08168593808280493, "grad_norm": 0.4429546892642975, "learning_rate": 4.591777529114344e-06, "loss": 0.7368, "step": 1971 }, { "epoch": 0.08172738198847859, "grad_norm": 0.4448795020580292, "learning_rate": 4.591570309585976e-06, "loss": 0.7522, "step": 1972 }, { "epoch": 0.08176882589415227, "grad_norm": 0.43445849418640137, "learning_rate": 4.591363090057607e-06, "loss": 0.7544, "step": 1973 }, { "epoch": 0.08181026979982593, "grad_norm": 0.46531492471694946, "learning_rate": 
4.591155870529239e-06, "loss": 0.7952, "step": 1974 }, { "epoch": 0.08185171370549961, "grad_norm": 0.44272223114967346, "learning_rate": 4.590948651000871e-06, "loss": 0.7378, "step": 1975 }, { "epoch": 0.08189315761117327, "grad_norm": 0.4426511824131012, "learning_rate": 4.590741431472502e-06, "loss": 0.7012, "step": 1976 }, { "epoch": 0.08193460151684695, "grad_norm": 0.4502531588077545, "learning_rate": 4.5905342119441335e-06, "loss": 0.7981, "step": 1977 }, { "epoch": 0.08197604542252061, "grad_norm": 0.4311940371990204, "learning_rate": 4.590326992415766e-06, "loss": 0.7292, "step": 1978 }, { "epoch": 0.08201748932819429, "grad_norm": 0.44279080629348755, "learning_rate": 4.590119772887398e-06, "loss": 0.7612, "step": 1979 }, { "epoch": 0.08205893323386795, "grad_norm": 0.4665888845920563, "learning_rate": 4.5899125533590285e-06, "loss": 0.7866, "step": 1980 }, { "epoch": 0.08210037713954163, "grad_norm": 0.4285626709461212, "learning_rate": 4.589705333830661e-06, "loss": 0.8071, "step": 1981 }, { "epoch": 0.08214182104521531, "grad_norm": 0.38651043176651, "learning_rate": 4.589498114302292e-06, "loss": 0.7646, "step": 1982 }, { "epoch": 0.08218326495088897, "grad_norm": 0.4539370834827423, "learning_rate": 4.589290894773924e-06, "loss": 0.7781, "step": 1983 }, { "epoch": 0.08222470885656265, "grad_norm": 0.45575448870658875, "learning_rate": 4.589083675245556e-06, "loss": 0.7468, "step": 1984 }, { "epoch": 0.08226615276223631, "grad_norm": 0.4629756808280945, "learning_rate": 4.588876455717187e-06, "loss": 0.7571, "step": 1985 }, { "epoch": 0.08230759666790999, "grad_norm": 0.46269774436950684, "learning_rate": 4.5886692361888185e-06, "loss": 0.7686, "step": 1986 }, { "epoch": 0.08234904057358365, "grad_norm": 0.4966188073158264, "learning_rate": 4.588462016660451e-06, "loss": 0.7507, "step": 1987 }, { "epoch": 0.08239048447925733, "grad_norm": 0.41765865683555603, "learning_rate": 4.588254797132082e-06, "loss": 0.7181, "step": 1988 }, { "epoch": 
0.08243192838493099, "grad_norm": 0.4706251919269562, "learning_rate": 4.5880475776037135e-06, "loss": 0.7397, "step": 1989 }, { "epoch": 0.08247337229060467, "grad_norm": 0.4798257350921631, "learning_rate": 4.587840358075346e-06, "loss": 0.8026, "step": 1990 }, { "epoch": 0.08251481619627833, "grad_norm": 0.42262500524520874, "learning_rate": 4.587633138546977e-06, "loss": 0.7637, "step": 1991 }, { "epoch": 0.08255626010195201, "grad_norm": 0.4746358394622803, "learning_rate": 4.5874259190186085e-06, "loss": 0.7744, "step": 1992 }, { "epoch": 0.08259770400762567, "grad_norm": 0.4563295841217041, "learning_rate": 4.58721869949024e-06, "loss": 0.8027, "step": 1993 }, { "epoch": 0.08263914791329935, "grad_norm": 0.4500739276409149, "learning_rate": 4.587011479961872e-06, "loss": 0.7471, "step": 1994 }, { "epoch": 0.08268059181897303, "grad_norm": 0.47931191325187683, "learning_rate": 4.5868042604335035e-06, "loss": 0.8054, "step": 1995 }, { "epoch": 0.08272203572464669, "grad_norm": 0.44504377245903015, "learning_rate": 4.586597040905135e-06, "loss": 0.7583, "step": 1996 }, { "epoch": 0.08276347963032037, "grad_norm": 0.4895908832550049, "learning_rate": 4.586389821376767e-06, "loss": 0.7554, "step": 1997 }, { "epoch": 0.08280492353599403, "grad_norm": 0.43344560265541077, "learning_rate": 4.5861826018483985e-06, "loss": 0.7681, "step": 1998 }, { "epoch": 0.0828463674416677, "grad_norm": 0.43635421991348267, "learning_rate": 4.585975382320031e-06, "loss": 0.7266, "step": 1999 }, { "epoch": 0.08288781134734137, "grad_norm": 0.43765681982040405, "learning_rate": 4.585768162791662e-06, "loss": 0.7883, "step": 2000 }, { "epoch": 0.08292925525301505, "grad_norm": 0.47952449321746826, "learning_rate": 4.5855609432632935e-06, "loss": 0.7192, "step": 2001 }, { "epoch": 0.08297069915868871, "grad_norm": 0.47122427821159363, "learning_rate": 4.585353723734925e-06, "loss": 0.8179, "step": 2002 }, { "epoch": 0.08301214306436239, "grad_norm": 0.472246915102005, "learning_rate": 
4.585146504206557e-06, "loss": 0.79, "step": 2003 }, { "epoch": 0.08305358697003605, "grad_norm": 0.4613513946533203, "learning_rate": 4.5849392846781885e-06, "loss": 0.7603, "step": 2004 }, { "epoch": 0.08309503087570973, "grad_norm": 0.43824225664138794, "learning_rate": 4.58473206514982e-06, "loss": 0.7854, "step": 2005 }, { "epoch": 0.0831364747813834, "grad_norm": 0.4677543640136719, "learning_rate": 4.584524845621452e-06, "loss": 0.7914, "step": 2006 }, { "epoch": 0.08317791868705707, "grad_norm": 0.45726025104522705, "learning_rate": 4.5843176260930835e-06, "loss": 0.7512, "step": 2007 }, { "epoch": 0.08321936259273074, "grad_norm": 0.46152862906455994, "learning_rate": 4.584110406564715e-06, "loss": 0.78, "step": 2008 }, { "epoch": 0.08326080649840441, "grad_norm": 0.42357802391052246, "learning_rate": 4.583903187036346e-06, "loss": 0.7854, "step": 2009 }, { "epoch": 0.08330225040407808, "grad_norm": 0.3974681794643402, "learning_rate": 4.5836959675079785e-06, "loss": 0.7004, "step": 2010 }, { "epoch": 0.08334369430975175, "grad_norm": 0.4321729838848114, "learning_rate": 4.58348874797961e-06, "loss": 0.8076, "step": 2011 }, { "epoch": 0.08338513821542543, "grad_norm": 0.4288319945335388, "learning_rate": 4.583281528451241e-06, "loss": 0.7783, "step": 2012 }, { "epoch": 0.08342658212109909, "grad_norm": 0.4062556326389313, "learning_rate": 4.5830743089228735e-06, "loss": 0.6647, "step": 2013 }, { "epoch": 0.08346802602677277, "grad_norm": 0.4751129448413849, "learning_rate": 4.582867089394505e-06, "loss": 0.7788, "step": 2014 }, { "epoch": 0.08350946993244643, "grad_norm": 0.47318270802497864, "learning_rate": 4.582659869866137e-06, "loss": 0.8162, "step": 2015 }, { "epoch": 0.0835509138381201, "grad_norm": 0.42475032806396484, "learning_rate": 4.5824526503377685e-06, "loss": 0.7732, "step": 2016 }, { "epoch": 0.08359235774379377, "grad_norm": 0.4487524926662445, "learning_rate": 4.5822454308094e-06, "loss": 0.7549, "step": 2017 }, { "epoch": 
0.08363380164946745, "grad_norm": 0.44328489899635315, "learning_rate": 4.582038211281031e-06, "loss": 0.7993, "step": 2018 }, { "epoch": 0.08367524555514112, "grad_norm": 0.46115025877952576, "learning_rate": 4.5818309917526635e-06, "loss": 0.7607, "step": 2019 }, { "epoch": 0.08371668946081479, "grad_norm": 0.40650030970573425, "learning_rate": 4.581623772224295e-06, "loss": 0.6987, "step": 2020 }, { "epoch": 0.08375813336648846, "grad_norm": 0.41901496052742004, "learning_rate": 4.581416552695926e-06, "loss": 0.7, "step": 2021 }, { "epoch": 0.08379957727216213, "grad_norm": 0.446773886680603, "learning_rate": 4.5812093331675585e-06, "loss": 0.7585, "step": 2022 }, { "epoch": 0.0838410211778358, "grad_norm": 0.46564799547195435, "learning_rate": 4.58100211363919e-06, "loss": 0.7622, "step": 2023 }, { "epoch": 0.08388246508350947, "grad_norm": 0.49480322003364563, "learning_rate": 4.580794894110821e-06, "loss": 0.7839, "step": 2024 }, { "epoch": 0.08392390898918314, "grad_norm": 0.47816893458366394, "learning_rate": 4.580587674582453e-06, "loss": 0.7383, "step": 2025 }, { "epoch": 0.0839653528948568, "grad_norm": 0.4518311321735382, "learning_rate": 4.580380455054085e-06, "loss": 0.7544, "step": 2026 }, { "epoch": 0.08400679680053048, "grad_norm": 0.4600609242916107, "learning_rate": 4.580173235525716e-06, "loss": 0.8022, "step": 2027 }, { "epoch": 0.08404824070620415, "grad_norm": 0.43373215198516846, "learning_rate": 4.579966015997348e-06, "loss": 0.7205, "step": 2028 }, { "epoch": 0.08408968461187782, "grad_norm": 0.453763872385025, "learning_rate": 4.579758796468979e-06, "loss": 0.7444, "step": 2029 }, { "epoch": 0.08413112851755149, "grad_norm": 0.5405118465423584, "learning_rate": 4.579551576940611e-06, "loss": 0.7988, "step": 2030 }, { "epoch": 0.08417257242322516, "grad_norm": 0.45820483565330505, "learning_rate": 4.5793443574122435e-06, "loss": 0.8445, "step": 2031 }, { "epoch": 0.08421401632889884, "grad_norm": 0.4445345997810364, "learning_rate": 
4.579137137883874e-06, "loss": 0.7205, "step": 2032 }, { "epoch": 0.0842554602345725, "grad_norm": 0.4242554306983948, "learning_rate": 4.578929918355506e-06, "loss": 0.743, "step": 2033 }, { "epoch": 0.08429690414024618, "grad_norm": 0.4658140540122986, "learning_rate": 4.578722698827138e-06, "loss": 0.769, "step": 2034 }, { "epoch": 0.08433834804591984, "grad_norm": 0.4437236785888672, "learning_rate": 4.57851547929877e-06, "loss": 0.7383, "step": 2035 }, { "epoch": 0.08437979195159352, "grad_norm": 0.44685378670692444, "learning_rate": 4.578308259770401e-06, "loss": 0.7769, "step": 2036 }, { "epoch": 0.08442123585726719, "grad_norm": 0.4076836109161377, "learning_rate": 4.578101040242033e-06, "loss": 0.6995, "step": 2037 }, { "epoch": 0.08446267976294086, "grad_norm": 0.44023066759109497, "learning_rate": 4.577893820713664e-06, "loss": 0.7913, "step": 2038 }, { "epoch": 0.08450412366861453, "grad_norm": 0.4099765121936798, "learning_rate": 4.577686601185296e-06, "loss": 0.7917, "step": 2039 }, { "epoch": 0.0845455675742882, "grad_norm": 0.42275750637054443, "learning_rate": 4.577479381656928e-06, "loss": 0.7505, "step": 2040 }, { "epoch": 0.08458701147996187, "grad_norm": 0.46205073595046997, "learning_rate": 4.577272162128559e-06, "loss": 0.7925, "step": 2041 }, { "epoch": 0.08462845538563554, "grad_norm": 0.4566892385482788, "learning_rate": 4.577064942600191e-06, "loss": 0.7844, "step": 2042 }, { "epoch": 0.08466989929130922, "grad_norm": 0.420128732919693, "learning_rate": 4.576857723071823e-06, "loss": 0.7554, "step": 2043 }, { "epoch": 0.08471134319698288, "grad_norm": 0.46599262952804565, "learning_rate": 4.576650503543454e-06, "loss": 0.77, "step": 2044 }, { "epoch": 0.08475278710265656, "grad_norm": 0.44940847158432007, "learning_rate": 4.5764432840150855e-06, "loss": 0.7651, "step": 2045 }, { "epoch": 0.08479423100833022, "grad_norm": 0.46521294116973877, "learning_rate": 4.576236064486718e-06, "loss": 0.7649, "step": 2046 }, { "epoch": 
0.0848356749140039, "grad_norm": 0.47755691409111023, "learning_rate": 4.576028844958349e-06, "loss": 0.7881, "step": 2047 }, { "epoch": 0.08487711881967756, "grad_norm": 0.4413328766822815, "learning_rate": 4.5758216254299805e-06, "loss": 0.7373, "step": 2048 }, { "epoch": 0.08491856272535124, "grad_norm": 0.48594722151756287, "learning_rate": 4.575614405901613e-06, "loss": 0.7664, "step": 2049 }, { "epoch": 0.0849600066310249, "grad_norm": 0.4363350570201874, "learning_rate": 4.575407186373244e-06, "loss": 0.7007, "step": 2050 }, { "epoch": 0.08500145053669858, "grad_norm": 0.42702746391296387, "learning_rate": 4.575199966844876e-06, "loss": 0.7295, "step": 2051 }, { "epoch": 0.08504289444237224, "grad_norm": 0.45240429043769836, "learning_rate": 4.574992747316508e-06, "loss": 0.7747, "step": 2052 }, { "epoch": 0.08508433834804592, "grad_norm": 0.430084764957428, "learning_rate": 4.574785527788139e-06, "loss": 0.7402, "step": 2053 }, { "epoch": 0.08512578225371958, "grad_norm": 0.43184056878089905, "learning_rate": 4.5745783082597705e-06, "loss": 0.7717, "step": 2054 }, { "epoch": 0.08516722615939326, "grad_norm": 0.4326060116291046, "learning_rate": 4.574371088731403e-06, "loss": 0.7966, "step": 2055 }, { "epoch": 0.08520867006506694, "grad_norm": 0.41815802454948425, "learning_rate": 4.574163869203034e-06, "loss": 0.7358, "step": 2056 }, { "epoch": 0.0852501139707406, "grad_norm": 0.436719685792923, "learning_rate": 4.5739566496746655e-06, "loss": 0.7148, "step": 2057 }, { "epoch": 0.08529155787641428, "grad_norm": 0.4108443558216095, "learning_rate": 4.573749430146298e-06, "loss": 0.728, "step": 2058 }, { "epoch": 0.08533300178208794, "grad_norm": 0.43048134446144104, "learning_rate": 4.573542210617929e-06, "loss": 0.7246, "step": 2059 }, { "epoch": 0.08537444568776162, "grad_norm": 0.43298161029815674, "learning_rate": 4.5733349910895605e-06, "loss": 0.7622, "step": 2060 }, { "epoch": 0.08541588959343528, "grad_norm": 0.43782326579093933, "learning_rate": 
4.573127771561192e-06, "loss": 0.7415, "step": 2061 }, { "epoch": 0.08545733349910896, "grad_norm": 0.4883464574813843, "learning_rate": 4.572920552032824e-06, "loss": 0.7957, "step": 2062 }, { "epoch": 0.08549877740478262, "grad_norm": 0.4369872808456421, "learning_rate": 4.5727133325044555e-06, "loss": 0.8206, "step": 2063 }, { "epoch": 0.0855402213104563, "grad_norm": 0.4636523127555847, "learning_rate": 4.572506112976087e-06, "loss": 0.8057, "step": 2064 }, { "epoch": 0.08558166521612996, "grad_norm": 0.4862779378890991, "learning_rate": 4.572298893447719e-06, "loss": 0.7648, "step": 2065 }, { "epoch": 0.08562310912180364, "grad_norm": 0.44743070006370544, "learning_rate": 4.5720916739193505e-06, "loss": 0.7405, "step": 2066 }, { "epoch": 0.0856645530274773, "grad_norm": 0.5206807851791382, "learning_rate": 4.571884454390983e-06, "loss": 0.8411, "step": 2067 }, { "epoch": 0.08570599693315098, "grad_norm": 0.46740373969078064, "learning_rate": 4.571677234862614e-06, "loss": 0.7673, "step": 2068 }, { "epoch": 0.08574744083882466, "grad_norm": 0.44362175464630127, "learning_rate": 4.5714700153342455e-06, "loss": 0.8147, "step": 2069 }, { "epoch": 0.08578888474449832, "grad_norm": 0.4234970510005951, "learning_rate": 4.571262795805877e-06, "loss": 0.7571, "step": 2070 }, { "epoch": 0.085830328650172, "grad_norm": 0.4279879629611969, "learning_rate": 4.571055576277509e-06, "loss": 0.7905, "step": 2071 }, { "epoch": 0.08587177255584566, "grad_norm": 0.42760854959487915, "learning_rate": 4.5708483567491405e-06, "loss": 0.7793, "step": 2072 }, { "epoch": 0.08591321646151934, "grad_norm": 0.47730764746665955, "learning_rate": 4.570641137220772e-06, "loss": 0.7266, "step": 2073 }, { "epoch": 0.085954660367193, "grad_norm": 0.4440353810787201, "learning_rate": 4.570433917692404e-06, "loss": 0.7228, "step": 2074 }, { "epoch": 0.08599610427286668, "grad_norm": 0.442749559879303, "learning_rate": 4.5702266981640355e-06, "loss": 0.7861, "step": 2075 }, { "epoch": 
0.08603754817854034, "grad_norm": 0.47952306270599365, "learning_rate": 4.570019478635667e-06, "loss": 0.782, "step": 2076 }, { "epoch": 0.08607899208421402, "grad_norm": 0.4330749213695526, "learning_rate": 4.569812259107298e-06, "loss": 0.775, "step": 2077 }, { "epoch": 0.08612043598988768, "grad_norm": 0.4757136404514313, "learning_rate": 4.5696050395789305e-06, "loss": 0.8667, "step": 2078 }, { "epoch": 0.08616187989556136, "grad_norm": 0.4289725422859192, "learning_rate": 4.569397820050562e-06, "loss": 0.7312, "step": 2079 }, { "epoch": 0.08620332380123504, "grad_norm": 0.44398823380470276, "learning_rate": 4.569190600522193e-06, "loss": 0.7118, "step": 2080 }, { "epoch": 0.0862447677069087, "grad_norm": 0.4574585556983948, "learning_rate": 4.568983380993825e-06, "loss": 0.8228, "step": 2081 }, { "epoch": 0.08628621161258238, "grad_norm": 0.43872395157814026, "learning_rate": 4.568776161465457e-06, "loss": 0.7446, "step": 2082 }, { "epoch": 0.08632765551825604, "grad_norm": 0.48115092515945435, "learning_rate": 4.568568941937089e-06, "loss": 0.8088, "step": 2083 }, { "epoch": 0.08636909942392972, "grad_norm": 0.43788594007492065, "learning_rate": 4.5683617224087205e-06, "loss": 0.7666, "step": 2084 }, { "epoch": 0.08641054332960338, "grad_norm": 0.548052966594696, "learning_rate": 4.568154502880352e-06, "loss": 0.78, "step": 2085 }, { "epoch": 0.08645198723527706, "grad_norm": 0.45637160539627075, "learning_rate": 4.567947283351983e-06, "loss": 0.7542, "step": 2086 }, { "epoch": 0.08649343114095072, "grad_norm": 0.4704807698726654, "learning_rate": 4.5677400638236155e-06, "loss": 0.7786, "step": 2087 }, { "epoch": 0.0865348750466244, "grad_norm": 0.43890872597694397, "learning_rate": 4.567532844295247e-06, "loss": 0.7681, "step": 2088 }, { "epoch": 0.08657631895229806, "grad_norm": 0.47336286306381226, "learning_rate": 4.567325624766878e-06, "loss": 0.738, "step": 2089 }, { "epoch": 0.08661776285797174, "grad_norm": 0.5095197558403015, "learning_rate": 
4.56711840523851e-06, "loss": 0.7847, "step": 2090 }, { "epoch": 0.0866592067636454, "grad_norm": 0.4290327727794647, "learning_rate": 4.566911185710142e-06, "loss": 0.7085, "step": 2091 }, { "epoch": 0.08670065066931908, "grad_norm": 0.44808709621429443, "learning_rate": 4.566703966181773e-06, "loss": 0.7822, "step": 2092 }, { "epoch": 0.08674209457499275, "grad_norm": 0.4921129047870636, "learning_rate": 4.566496746653405e-06, "loss": 0.7795, "step": 2093 }, { "epoch": 0.08678353848066642, "grad_norm": 0.3895568251609802, "learning_rate": 4.566289527125037e-06, "loss": 0.7161, "step": 2094 }, { "epoch": 0.0868249823863401, "grad_norm": 0.43971338868141174, "learning_rate": 4.566082307596668e-06, "loss": 0.7393, "step": 2095 }, { "epoch": 0.08686642629201376, "grad_norm": 0.44493353366851807, "learning_rate": 4.5658750880683e-06, "loss": 0.7795, "step": 2096 }, { "epoch": 0.08690787019768743, "grad_norm": 0.4715208411216736, "learning_rate": 4.565667868539931e-06, "loss": 0.7412, "step": 2097 }, { "epoch": 0.0869493141033611, "grad_norm": 0.4596356451511383, "learning_rate": 4.565460649011563e-06, "loss": 0.7715, "step": 2098 }, { "epoch": 0.08699075800903477, "grad_norm": 0.4211691617965698, "learning_rate": 4.565253429483195e-06, "loss": 0.7393, "step": 2099 }, { "epoch": 0.08703220191470844, "grad_norm": 0.46153560280799866, "learning_rate": 4.565046209954826e-06, "loss": 0.7485, "step": 2100 }, { "epoch": 0.08707364582038211, "grad_norm": 0.4615577161312103, "learning_rate": 4.564838990426458e-06, "loss": 0.793, "step": 2101 }, { "epoch": 0.08711508972605578, "grad_norm": 0.4837665557861328, "learning_rate": 4.56463177089809e-06, "loss": 0.7795, "step": 2102 }, { "epoch": 0.08715653363172945, "grad_norm": 0.4686914384365082, "learning_rate": 4.564424551369722e-06, "loss": 0.8149, "step": 2103 }, { "epoch": 0.08719797753740312, "grad_norm": 0.45950695872306824, "learning_rate": 4.564217331841353e-06, "loss": 0.7915, "step": 2104 }, { "epoch": 
0.0872394214430768, "grad_norm": 0.4595943093299866, "learning_rate": 4.564010112312985e-06, "loss": 0.7483, "step": 2105 }, { "epoch": 0.08728086534875047, "grad_norm": 0.4625067710876465, "learning_rate": 4.563802892784616e-06, "loss": 0.8135, "step": 2106 }, { "epoch": 0.08732230925442414, "grad_norm": 0.42756184935569763, "learning_rate": 4.563595673256248e-06, "loss": 0.7593, "step": 2107 }, { "epoch": 0.08736375316009781, "grad_norm": 0.48455724120140076, "learning_rate": 4.56338845372788e-06, "loss": 0.8306, "step": 2108 }, { "epoch": 0.08740519706577148, "grad_norm": 0.41179582476615906, "learning_rate": 4.563181234199511e-06, "loss": 0.7173, "step": 2109 }, { "epoch": 0.08744664097144515, "grad_norm": 0.41084831953048706, "learning_rate": 4.562974014671143e-06, "loss": 0.6708, "step": 2110 }, { "epoch": 0.08748808487711882, "grad_norm": 0.4693790674209595, "learning_rate": 4.562766795142775e-06, "loss": 0.7507, "step": 2111 }, { "epoch": 0.08752952878279249, "grad_norm": 0.4720715582370758, "learning_rate": 4.562559575614406e-06, "loss": 0.8196, "step": 2112 }, { "epoch": 0.08757097268846616, "grad_norm": 0.4883628785610199, "learning_rate": 4.5623523560860375e-06, "loss": 0.7793, "step": 2113 }, { "epoch": 0.08761241659413983, "grad_norm": 0.4234831631183624, "learning_rate": 4.56214513655767e-06, "loss": 0.7026, "step": 2114 }, { "epoch": 0.0876538604998135, "grad_norm": 0.46147656440734863, "learning_rate": 4.561937917029301e-06, "loss": 0.7417, "step": 2115 }, { "epoch": 0.08769530440548717, "grad_norm": 0.4476986229419708, "learning_rate": 4.5617306975009325e-06, "loss": 0.7954, "step": 2116 }, { "epoch": 0.08773674831116085, "grad_norm": 0.42565757036209106, "learning_rate": 4.561523477972565e-06, "loss": 0.7241, "step": 2117 }, { "epoch": 0.08777819221683451, "grad_norm": 0.41643714904785156, "learning_rate": 4.561316258444196e-06, "loss": 0.761, "step": 2118 }, { "epoch": 0.08781963612250819, "grad_norm": 0.4193111062049866, "learning_rate": 
4.561109038915828e-06, "loss": 0.7162, "step": 2119 }, { "epoch": 0.08786108002818185, "grad_norm": 0.48522114753723145, "learning_rate": 4.56090181938746e-06, "loss": 0.7461, "step": 2120 }, { "epoch": 0.08790252393385553, "grad_norm": 0.4176945686340332, "learning_rate": 4.560694599859091e-06, "loss": 0.7898, "step": 2121 }, { "epoch": 0.0879439678395292, "grad_norm": 0.4507688283920288, "learning_rate": 4.5604873803307225e-06, "loss": 0.8208, "step": 2122 }, { "epoch": 0.08798541174520287, "grad_norm": 0.4388856589794159, "learning_rate": 4.560280160802355e-06, "loss": 0.7109, "step": 2123 }, { "epoch": 0.08802685565087653, "grad_norm": 0.4523058235645294, "learning_rate": 4.560072941273986e-06, "loss": 0.7888, "step": 2124 }, { "epoch": 0.08806829955655021, "grad_norm": 0.4143514037132263, "learning_rate": 4.5598657217456175e-06, "loss": 0.7422, "step": 2125 }, { "epoch": 0.08810974346222387, "grad_norm": 0.43963322043418884, "learning_rate": 4.55965850221725e-06, "loss": 0.7083, "step": 2126 }, { "epoch": 0.08815118736789755, "grad_norm": 0.43000903725624084, "learning_rate": 4.559451282688881e-06, "loss": 0.7068, "step": 2127 }, { "epoch": 0.08819263127357121, "grad_norm": 0.4487351179122925, "learning_rate": 4.5592440631605125e-06, "loss": 0.7871, "step": 2128 }, { "epoch": 0.08823407517924489, "grad_norm": 0.4382845163345337, "learning_rate": 4.559036843632144e-06, "loss": 0.7869, "step": 2129 }, { "epoch": 0.08827551908491857, "grad_norm": 0.4721006155014038, "learning_rate": 4.558829624103776e-06, "loss": 0.6833, "step": 2130 }, { "epoch": 0.08831696299059223, "grad_norm": 0.4833557903766632, "learning_rate": 4.5586224045754075e-06, "loss": 0.7559, "step": 2131 }, { "epoch": 0.08835840689626591, "grad_norm": 0.4151139259338379, "learning_rate": 4.558415185047039e-06, "loss": 0.7314, "step": 2132 }, { "epoch": 0.08839985080193957, "grad_norm": 0.4292290210723877, "learning_rate": 4.55820796551867e-06, "loss": 0.7209, "step": 2133 }, { "epoch": 
0.08844129470761325, "grad_norm": 0.392171710729599, "learning_rate": 4.5580007459903025e-06, "loss": 0.7556, "step": 2134 }, { "epoch": 0.08848273861328691, "grad_norm": 0.40045082569122314, "learning_rate": 4.557793526461935e-06, "loss": 0.7462, "step": 2135 }, { "epoch": 0.08852418251896059, "grad_norm": 0.46485257148742676, "learning_rate": 4.557586306933566e-06, "loss": 0.7937, "step": 2136 }, { "epoch": 0.08856562642463425, "grad_norm": 0.42961594462394714, "learning_rate": 4.5573790874051975e-06, "loss": 0.7849, "step": 2137 }, { "epoch": 0.08860707033030793, "grad_norm": 0.46037158370018005, "learning_rate": 4.557171867876829e-06, "loss": 0.7925, "step": 2138 }, { "epoch": 0.0886485142359816, "grad_norm": 0.4338686764240265, "learning_rate": 4.556964648348461e-06, "loss": 0.7146, "step": 2139 }, { "epoch": 0.08868995814165527, "grad_norm": 0.4479596018791199, "learning_rate": 4.5567574288200925e-06, "loss": 0.7203, "step": 2140 }, { "epoch": 0.08873140204732895, "grad_norm": 0.421063095331192, "learning_rate": 4.556550209291724e-06, "loss": 0.7537, "step": 2141 }, { "epoch": 0.08877284595300261, "grad_norm": 0.3934994339942932, "learning_rate": 4.556342989763355e-06, "loss": 0.7366, "step": 2142 }, { "epoch": 0.08881428985867629, "grad_norm": 0.4762936532497406, "learning_rate": 4.5561357702349875e-06, "loss": 0.7483, "step": 2143 }, { "epoch": 0.08885573376434995, "grad_norm": 0.4404817223548889, "learning_rate": 4.555928550706619e-06, "loss": 0.7859, "step": 2144 }, { "epoch": 0.08889717767002363, "grad_norm": 0.444450318813324, "learning_rate": 4.55572133117825e-06, "loss": 0.7773, "step": 2145 }, { "epoch": 0.08893862157569729, "grad_norm": 0.41657501459121704, "learning_rate": 4.5555141116498825e-06, "loss": 0.7781, "step": 2146 }, { "epoch": 0.08898006548137097, "grad_norm": 0.462635338306427, "learning_rate": 4.555306892121514e-06, "loss": 0.7537, "step": 2147 }, { "epoch": 0.08902150938704463, "grad_norm": 0.44731810688972473, "learning_rate": 
4.555099672593145e-06, "loss": 0.7394, "step": 2148 }, { "epoch": 0.08906295329271831, "grad_norm": 0.4948531687259674, "learning_rate": 4.554892453064777e-06, "loss": 0.7473, "step": 2149 }, { "epoch": 0.08910439719839197, "grad_norm": 0.45920631289482117, "learning_rate": 4.554685233536409e-06, "loss": 0.7312, "step": 2150 }, { "epoch": 0.08914584110406565, "grad_norm": 0.4803909361362457, "learning_rate": 4.55447801400804e-06, "loss": 0.771, "step": 2151 }, { "epoch": 0.08918728500973931, "grad_norm": 0.4469764828681946, "learning_rate": 4.5542707944796725e-06, "loss": 0.8157, "step": 2152 }, { "epoch": 0.08922872891541299, "grad_norm": 0.44928717613220215, "learning_rate": 4.554063574951304e-06, "loss": 0.7478, "step": 2153 }, { "epoch": 0.08927017282108667, "grad_norm": 0.46665889024734497, "learning_rate": 4.553856355422935e-06, "loss": 0.7363, "step": 2154 }, { "epoch": 0.08931161672676033, "grad_norm": 0.4329316318035126, "learning_rate": 4.5536491358945675e-06, "loss": 0.7427, "step": 2155 }, { "epoch": 0.089353060632434, "grad_norm": 0.4584934711456299, "learning_rate": 4.553441916366199e-06, "loss": 0.7533, "step": 2156 }, { "epoch": 0.08939450453810767, "grad_norm": 0.41812360286712646, "learning_rate": 4.55323469683783e-06, "loss": 0.7803, "step": 2157 }, { "epoch": 0.08943594844378135, "grad_norm": 0.47068193554878235, "learning_rate": 4.553027477309462e-06, "loss": 0.7443, "step": 2158 }, { "epoch": 0.08947739234945501, "grad_norm": 0.44261103868484497, "learning_rate": 4.552820257781094e-06, "loss": 0.793, "step": 2159 }, { "epoch": 0.08951883625512869, "grad_norm": 0.4177280068397522, "learning_rate": 4.552613038252725e-06, "loss": 0.7266, "step": 2160 }, { "epoch": 0.08956028016080235, "grad_norm": 0.47246408462524414, "learning_rate": 4.552405818724357e-06, "loss": 0.7979, "step": 2161 }, { "epoch": 0.08960172406647603, "grad_norm": 0.4821568429470062, "learning_rate": 4.552198599195989e-06, "loss": 0.731, "step": 2162 }, { "epoch": 
0.08964316797214969, "grad_norm": 0.45064979791641235, "learning_rate": 4.55199137966762e-06, "loss": 0.7869, "step": 2163 }, { "epoch": 0.08968461187782337, "grad_norm": 0.44254612922668457, "learning_rate": 4.551784160139252e-06, "loss": 0.729, "step": 2164 }, { "epoch": 0.08972605578349703, "grad_norm": 0.41059446334838867, "learning_rate": 4.551576940610883e-06, "loss": 0.752, "step": 2165 }, { "epoch": 0.08976749968917071, "grad_norm": 0.4164467751979828, "learning_rate": 4.551369721082515e-06, "loss": 0.7522, "step": 2166 }, { "epoch": 0.08980894359484438, "grad_norm": 0.4167279303073883, "learning_rate": 4.551162501554147e-06, "loss": 0.7561, "step": 2167 }, { "epoch": 0.08985038750051805, "grad_norm": 0.47502654790878296, "learning_rate": 4.550955282025779e-06, "loss": 0.8206, "step": 2168 }, { "epoch": 0.08989183140619172, "grad_norm": 0.4153778553009033, "learning_rate": 4.5507480624974095e-06, "loss": 0.708, "step": 2169 }, { "epoch": 0.08993327531186539, "grad_norm": 0.5410426259040833, "learning_rate": 4.550540842969042e-06, "loss": 0.8738, "step": 2170 }, { "epoch": 0.08997471921753906, "grad_norm": 0.40579575300216675, "learning_rate": 4.550333623440674e-06, "loss": 0.7051, "step": 2171 }, { "epoch": 0.09001616312321273, "grad_norm": 0.4710105359554291, "learning_rate": 4.550126403912305e-06, "loss": 0.7837, "step": 2172 }, { "epoch": 0.0900576070288864, "grad_norm": 0.6112576127052307, "learning_rate": 4.549919184383937e-06, "loss": 0.7905, "step": 2173 }, { "epoch": 0.09009905093456007, "grad_norm": 0.4224766194820404, "learning_rate": 4.549711964855568e-06, "loss": 0.7351, "step": 2174 }, { "epoch": 0.09014049484023375, "grad_norm": 0.4324423670768738, "learning_rate": 4.5495047453272e-06, "loss": 0.762, "step": 2175 }, { "epoch": 0.09018193874590741, "grad_norm": 0.4714941382408142, "learning_rate": 4.549297525798832e-06, "loss": 0.8257, "step": 2176 }, { "epoch": 0.09022338265158109, "grad_norm": 0.45183053612709045, "learning_rate": 
4.549090306270463e-06, "loss": 0.7898, "step": 2177 }, { "epoch": 0.09026482655725476, "grad_norm": 0.4123820662498474, "learning_rate": 4.548883086742095e-06, "loss": 0.7273, "step": 2178 }, { "epoch": 0.09030627046292843, "grad_norm": 0.41791433095932007, "learning_rate": 4.548675867213727e-06, "loss": 0.7314, "step": 2179 }, { "epoch": 0.0903477143686021, "grad_norm": 0.4358455538749695, "learning_rate": 4.548468647685358e-06, "loss": 0.7366, "step": 2180 }, { "epoch": 0.09038915827427577, "grad_norm": 0.4254612624645233, "learning_rate": 4.5482614281569895e-06, "loss": 0.7024, "step": 2181 }, { "epoch": 0.09043060217994944, "grad_norm": 0.4269399642944336, "learning_rate": 4.548054208628622e-06, "loss": 0.7455, "step": 2182 }, { "epoch": 0.0904720460856231, "grad_norm": 0.443946897983551, "learning_rate": 4.547846989100253e-06, "loss": 0.75, "step": 2183 }, { "epoch": 0.09051348999129678, "grad_norm": 0.41850340366363525, "learning_rate": 4.5476397695718845e-06, "loss": 0.7671, "step": 2184 }, { "epoch": 0.09055493389697045, "grad_norm": 0.434459924697876, "learning_rate": 4.547432550043516e-06, "loss": 0.7163, "step": 2185 }, { "epoch": 0.09059637780264412, "grad_norm": 0.44378164410591125, "learning_rate": 4.547225330515148e-06, "loss": 0.7025, "step": 2186 }, { "epoch": 0.09063782170831779, "grad_norm": 0.45942020416259766, "learning_rate": 4.54701811098678e-06, "loss": 0.7581, "step": 2187 }, { "epoch": 0.09067926561399146, "grad_norm": 0.4831501245498657, "learning_rate": 4.546810891458412e-06, "loss": 0.79, "step": 2188 }, { "epoch": 0.09072070951966513, "grad_norm": 0.45938074588775635, "learning_rate": 4.546603671930043e-06, "loss": 0.782, "step": 2189 }, { "epoch": 0.0907621534253388, "grad_norm": 0.42186984419822693, "learning_rate": 4.5463964524016745e-06, "loss": 0.6826, "step": 2190 }, { "epoch": 0.09080359733101248, "grad_norm": 0.38702264428138733, "learning_rate": 4.546189232873307e-06, "loss": 0.7036, "step": 2191 }, { "epoch": 
0.09084504123668614, "grad_norm": 0.4260973632335663, "learning_rate": 4.545982013344938e-06, "loss": 0.7407, "step": 2192 }, { "epoch": 0.09088648514235982, "grad_norm": 0.40211766958236694, "learning_rate": 4.5457747938165695e-06, "loss": 0.7639, "step": 2193 }, { "epoch": 0.09092792904803348, "grad_norm": 0.43616944551467896, "learning_rate": 4.545567574288201e-06, "loss": 0.7733, "step": 2194 }, { "epoch": 0.09096937295370716, "grad_norm": 0.47748178243637085, "learning_rate": 4.545360354759833e-06, "loss": 0.7722, "step": 2195 }, { "epoch": 0.09101081685938082, "grad_norm": 0.4525977373123169, "learning_rate": 4.5451531352314645e-06, "loss": 0.7145, "step": 2196 }, { "epoch": 0.0910522607650545, "grad_norm": 0.42960894107818604, "learning_rate": 4.544945915703096e-06, "loss": 0.7783, "step": 2197 }, { "epoch": 0.09109370467072817, "grad_norm": 0.43034225702285767, "learning_rate": 4.544738696174728e-06, "loss": 0.7727, "step": 2198 }, { "epoch": 0.09113514857640184, "grad_norm": 0.43229594826698303, "learning_rate": 4.5445314766463595e-06, "loss": 0.8049, "step": 2199 }, { "epoch": 0.0911765924820755, "grad_norm": 0.4749002456665039, "learning_rate": 4.544324257117991e-06, "loss": 0.8025, "step": 2200 }, { "epoch": 0.09121803638774918, "grad_norm": 0.4332364499568939, "learning_rate": 4.544117037589622e-06, "loss": 0.748, "step": 2201 }, { "epoch": 0.09125948029342285, "grad_norm": 0.4769018292427063, "learning_rate": 4.5439098180612545e-06, "loss": 0.7388, "step": 2202 }, { "epoch": 0.09130092419909652, "grad_norm": 0.4501079320907593, "learning_rate": 4.543702598532886e-06, "loss": 0.7856, "step": 2203 }, { "epoch": 0.0913423681047702, "grad_norm": 0.41608110070228577, "learning_rate": 4.543495379004518e-06, "loss": 0.7788, "step": 2204 }, { "epoch": 0.09138381201044386, "grad_norm": 0.4151354730129242, "learning_rate": 4.5432881594761495e-06, "loss": 0.7214, "step": 2205 }, { "epoch": 0.09142525591611754, "grad_norm": 0.45879191160202026, "learning_rate": 
4.543080939947781e-06, "loss": 0.7197, "step": 2206 }, { "epoch": 0.0914666998217912, "grad_norm": 0.45620161294937134, "learning_rate": 4.542873720419413e-06, "loss": 0.7737, "step": 2207 }, { "epoch": 0.09150814372746488, "grad_norm": 0.43143945932388306, "learning_rate": 4.5426665008910445e-06, "loss": 0.7932, "step": 2208 }, { "epoch": 0.09154958763313854, "grad_norm": 0.44396746158599854, "learning_rate": 4.542459281362676e-06, "loss": 0.8022, "step": 2209 }, { "epoch": 0.09159103153881222, "grad_norm": 0.4247385263442993, "learning_rate": 4.542252061834307e-06, "loss": 0.7632, "step": 2210 }, { "epoch": 0.09163247544448588, "grad_norm": 0.44195446372032166, "learning_rate": 4.5420448423059395e-06, "loss": 0.7297, "step": 2211 }, { "epoch": 0.09167391935015956, "grad_norm": 0.4388570487499237, "learning_rate": 4.541837622777571e-06, "loss": 0.7844, "step": 2212 }, { "epoch": 0.09171536325583322, "grad_norm": 0.44064751267433167, "learning_rate": 4.541630403249202e-06, "loss": 0.7893, "step": 2213 }, { "epoch": 0.0917568071615069, "grad_norm": 0.4489535391330719, "learning_rate": 4.5414231837208345e-06, "loss": 0.7346, "step": 2214 }, { "epoch": 0.09179825106718058, "grad_norm": 0.43959829211235046, "learning_rate": 4.541215964192466e-06, "loss": 0.7675, "step": 2215 }, { "epoch": 0.09183969497285424, "grad_norm": 0.4443313181400299, "learning_rate": 4.541008744664097e-06, "loss": 0.7329, "step": 2216 }, { "epoch": 0.09188113887852792, "grad_norm": 0.4552377760410309, "learning_rate": 4.540801525135729e-06, "loss": 0.7402, "step": 2217 }, { "epoch": 0.09192258278420158, "grad_norm": 0.45374757051467896, "learning_rate": 4.540594305607361e-06, "loss": 0.7852, "step": 2218 }, { "epoch": 0.09196402668987526, "grad_norm": 0.40958738327026367, "learning_rate": 4.540387086078992e-06, "loss": 0.7463, "step": 2219 }, { "epoch": 0.09200547059554892, "grad_norm": 0.4430464208126068, "learning_rate": 4.5401798665506245e-06, "loss": 0.7616, "step": 2220 }, { "epoch": 
0.0920469145012226, "grad_norm": 0.4105941355228424, "learning_rate": 4.539972647022255e-06, "loss": 0.7471, "step": 2221 }, { "epoch": 0.09208835840689626, "grad_norm": 0.4176451861858368, "learning_rate": 4.539765427493887e-06, "loss": 0.7048, "step": 2222 }, { "epoch": 0.09212980231256994, "grad_norm": 0.42156854271888733, "learning_rate": 4.5395582079655195e-06, "loss": 0.7032, "step": 2223 }, { "epoch": 0.0921712462182436, "grad_norm": 0.42863553762435913, "learning_rate": 4.539350988437151e-06, "loss": 0.7466, "step": 2224 }, { "epoch": 0.09221269012391728, "grad_norm": 0.4090249836444855, "learning_rate": 4.539143768908782e-06, "loss": 0.7502, "step": 2225 }, { "epoch": 0.09225413402959094, "grad_norm": 0.4316571354866028, "learning_rate": 4.538936549380414e-06, "loss": 0.7739, "step": 2226 }, { "epoch": 0.09229557793526462, "grad_norm": 0.48924705386161804, "learning_rate": 4.538729329852046e-06, "loss": 0.8042, "step": 2227 }, { "epoch": 0.0923370218409383, "grad_norm": 0.4519100785255432, "learning_rate": 4.538522110323677e-06, "loss": 0.7473, "step": 2228 }, { "epoch": 0.09237846574661196, "grad_norm": 0.417803019285202, "learning_rate": 4.538314890795309e-06, "loss": 0.7415, "step": 2229 }, { "epoch": 0.09241990965228564, "grad_norm": 0.4330748915672302, "learning_rate": 4.53810767126694e-06, "loss": 0.7053, "step": 2230 }, { "epoch": 0.0924613535579593, "grad_norm": 0.43731456995010376, "learning_rate": 4.537900451738572e-06, "loss": 0.7944, "step": 2231 }, { "epoch": 0.09250279746363298, "grad_norm": 0.4673445224761963, "learning_rate": 4.537693232210204e-06, "loss": 0.7341, "step": 2232 }, { "epoch": 0.09254424136930664, "grad_norm": 0.4800216853618622, "learning_rate": 4.537486012681835e-06, "loss": 0.7487, "step": 2233 }, { "epoch": 0.09258568527498032, "grad_norm": 0.46607205271720886, "learning_rate": 4.537278793153467e-06, "loss": 0.8008, "step": 2234 }, { "epoch": 0.09262712918065398, "grad_norm": 0.4351932108402252, "learning_rate": 
4.537071573625099e-06, "loss": 0.7266, "step": 2235 }, { "epoch": 0.09266857308632766, "grad_norm": 0.4656636714935303, "learning_rate": 4.536864354096731e-06, "loss": 0.7661, "step": 2236 }, { "epoch": 0.09271001699200132, "grad_norm": 0.48204293847084045, "learning_rate": 4.5366571345683615e-06, "loss": 0.8252, "step": 2237 }, { "epoch": 0.092751460897675, "grad_norm": 0.4329359531402588, "learning_rate": 4.536449915039994e-06, "loss": 0.7908, "step": 2238 }, { "epoch": 0.09279290480334866, "grad_norm": 0.47167110443115234, "learning_rate": 4.536242695511626e-06, "loss": 0.7334, "step": 2239 }, { "epoch": 0.09283434870902234, "grad_norm": 0.43544137477874756, "learning_rate": 4.536035475983257e-06, "loss": 0.7808, "step": 2240 }, { "epoch": 0.09287579261469601, "grad_norm": 0.4410264790058136, "learning_rate": 4.535828256454889e-06, "loss": 0.748, "step": 2241 }, { "epoch": 0.09291723652036968, "grad_norm": 0.40531888604164124, "learning_rate": 4.53562103692652e-06, "loss": 0.7112, "step": 2242 }, { "epoch": 0.09295868042604336, "grad_norm": 0.45738229155540466, "learning_rate": 4.535413817398152e-06, "loss": 0.7966, "step": 2243 }, { "epoch": 0.09300012433171702, "grad_norm": 0.4197201728820801, "learning_rate": 4.535206597869784e-06, "loss": 0.7849, "step": 2244 }, { "epoch": 0.0930415682373907, "grad_norm": 0.4063815176486969, "learning_rate": 4.534999378341415e-06, "loss": 0.7271, "step": 2245 }, { "epoch": 0.09308301214306436, "grad_norm": 0.4056161642074585, "learning_rate": 4.5347921588130465e-06, "loss": 0.7253, "step": 2246 }, { "epoch": 0.09312445604873804, "grad_norm": 0.42396119236946106, "learning_rate": 4.534584939284679e-06, "loss": 0.7036, "step": 2247 }, { "epoch": 0.0931658999544117, "grad_norm": 0.448004812002182, "learning_rate": 4.53437771975631e-06, "loss": 0.761, "step": 2248 }, { "epoch": 0.09320734386008538, "grad_norm": 0.47135046124458313, "learning_rate": 4.5341705002279415e-06, "loss": 0.7759, "step": 2249 }, { "epoch": 
0.09324878776575904, "grad_norm": 0.44456666707992554, "learning_rate": 4.533963280699574e-06, "loss": 0.7004, "step": 2250 }, { "epoch": 0.09329023167143272, "grad_norm": 0.4362632930278778, "learning_rate": 4.533756061171205e-06, "loss": 0.7052, "step": 2251 }, { "epoch": 0.0933316755771064, "grad_norm": 0.43177878856658936, "learning_rate": 4.5335488416428365e-06, "loss": 0.761, "step": 2252 }, { "epoch": 0.09337311948278006, "grad_norm": 0.4715634882450104, "learning_rate": 4.533341622114468e-06, "loss": 0.7847, "step": 2253 }, { "epoch": 0.09341456338845373, "grad_norm": 0.433210551738739, "learning_rate": 4.5331344025861e-06, "loss": 0.7864, "step": 2254 }, { "epoch": 0.0934560072941274, "grad_norm": 0.4599769711494446, "learning_rate": 4.5329271830577315e-06, "loss": 0.7391, "step": 2255 }, { "epoch": 0.09349745119980107, "grad_norm": 0.45310524106025696, "learning_rate": 4.532719963529364e-06, "loss": 0.6971, "step": 2256 }, { "epoch": 0.09353889510547474, "grad_norm": 0.43435582518577576, "learning_rate": 4.532512744000995e-06, "loss": 0.7406, "step": 2257 }, { "epoch": 0.09358033901114841, "grad_norm": 0.43927106261253357, "learning_rate": 4.5323055244726265e-06, "loss": 0.7043, "step": 2258 }, { "epoch": 0.09362178291682208, "grad_norm": 0.4449627697467804, "learning_rate": 4.532098304944259e-06, "loss": 0.6885, "step": 2259 }, { "epoch": 0.09366322682249575, "grad_norm": 0.45201462507247925, "learning_rate": 4.53189108541589e-06, "loss": 0.8042, "step": 2260 }, { "epoch": 0.09370467072816942, "grad_norm": 0.4368429481983185, "learning_rate": 4.5316838658875215e-06, "loss": 0.7289, "step": 2261 }, { "epoch": 0.0937461146338431, "grad_norm": 0.45706290006637573, "learning_rate": 4.531476646359153e-06, "loss": 0.769, "step": 2262 }, { "epoch": 0.09378755853951676, "grad_norm": 0.42083480954170227, "learning_rate": 4.531269426830785e-06, "loss": 0.6844, "step": 2263 }, { "epoch": 0.09382900244519043, "grad_norm": 0.44023415446281433, "learning_rate": 
4.5310622073024165e-06, "loss": 0.7339, "step": 2264 }, { "epoch": 0.09387044635086411, "grad_norm": 0.42445361614227295, "learning_rate": 4.530854987774048e-06, "loss": 0.6924, "step": 2265 }, { "epoch": 0.09391189025653778, "grad_norm": 0.46486759185791016, "learning_rate": 4.53064776824568e-06, "loss": 0.734, "step": 2266 }, { "epoch": 0.09395333416221145, "grad_norm": 0.45771509408950806, "learning_rate": 4.5304405487173115e-06, "loss": 0.8447, "step": 2267 }, { "epoch": 0.09399477806788512, "grad_norm": 0.45528465509414673, "learning_rate": 4.530233329188943e-06, "loss": 0.7349, "step": 2268 }, { "epoch": 0.09403622197355879, "grad_norm": 0.44459179043769836, "learning_rate": 4.530026109660574e-06, "loss": 0.769, "step": 2269 }, { "epoch": 0.09407766587923246, "grad_norm": 0.4331132173538208, "learning_rate": 4.5298188901322065e-06, "loss": 0.6864, "step": 2270 }, { "epoch": 0.09411910978490613, "grad_norm": 0.4719201326370239, "learning_rate": 4.529611670603838e-06, "loss": 0.7505, "step": 2271 }, { "epoch": 0.0941605536905798, "grad_norm": 0.41887593269348145, "learning_rate": 4.52940445107547e-06, "loss": 0.7229, "step": 2272 }, { "epoch": 0.09420199759625347, "grad_norm": 0.4257392883300781, "learning_rate": 4.5291972315471015e-06, "loss": 0.7419, "step": 2273 }, { "epoch": 0.09424344150192714, "grad_norm": 0.4003089666366577, "learning_rate": 4.528990012018733e-06, "loss": 0.7562, "step": 2274 }, { "epoch": 0.09428488540760081, "grad_norm": 0.4327313303947449, "learning_rate": 4.528782792490365e-06, "loss": 0.7163, "step": 2275 }, { "epoch": 0.09432632931327448, "grad_norm": 0.4412200450897217, "learning_rate": 4.5285755729619965e-06, "loss": 0.7474, "step": 2276 }, { "epoch": 0.09436777321894815, "grad_norm": 0.48064202070236206, "learning_rate": 4.528368353433628e-06, "loss": 0.7634, "step": 2277 }, { "epoch": 0.09440921712462183, "grad_norm": 0.49322381615638733, "learning_rate": 4.528161133905259e-06, "loss": 0.7866, "step": 2278 }, { "epoch": 
0.0944506610302955, "grad_norm": 0.4353357255458832, "learning_rate": 4.5279539143768915e-06, "loss": 0.7461, "step": 2279 }, { "epoch": 0.09449210493596917, "grad_norm": 0.4630735516548157, "learning_rate": 4.527746694848523e-06, "loss": 0.7156, "step": 2280 }, { "epoch": 0.09453354884164283, "grad_norm": 0.4633691906929016, "learning_rate": 4.527539475320154e-06, "loss": 0.7847, "step": 2281 }, { "epoch": 0.09457499274731651, "grad_norm": 0.4207591712474823, "learning_rate": 4.527332255791786e-06, "loss": 0.7368, "step": 2282 }, { "epoch": 0.09461643665299017, "grad_norm": 0.4572641849517822, "learning_rate": 4.527125036263418e-06, "loss": 0.8069, "step": 2283 }, { "epoch": 0.09465788055866385, "grad_norm": 0.41714513301849365, "learning_rate": 4.526917816735049e-06, "loss": 0.7258, "step": 2284 }, { "epoch": 0.09469932446433751, "grad_norm": 0.45576101541519165, "learning_rate": 4.526710597206681e-06, "loss": 0.7556, "step": 2285 }, { "epoch": 0.09474076837001119, "grad_norm": 0.4351746141910553, "learning_rate": 4.526503377678313e-06, "loss": 0.7434, "step": 2286 }, { "epoch": 0.09478221227568485, "grad_norm": 0.4505622386932373, "learning_rate": 4.526296158149944e-06, "loss": 0.7644, "step": 2287 }, { "epoch": 0.09482365618135853, "grad_norm": 0.4147385060787201, "learning_rate": 4.5260889386215766e-06, "loss": 0.7148, "step": 2288 }, { "epoch": 0.09486510008703221, "grad_norm": 0.4238247275352478, "learning_rate": 4.525881719093207e-06, "loss": 0.7292, "step": 2289 }, { "epoch": 0.09490654399270587, "grad_norm": 0.44142287969589233, "learning_rate": 4.525674499564839e-06, "loss": 0.7595, "step": 2290 }, { "epoch": 0.09494798789837955, "grad_norm": 0.45950353145599365, "learning_rate": 4.525467280036471e-06, "loss": 0.7754, "step": 2291 }, { "epoch": 0.09498943180405321, "grad_norm": 0.43218994140625, "learning_rate": 4.525260060508103e-06, "loss": 0.7825, "step": 2292 }, { "epoch": 0.09503087570972689, "grad_norm": 0.4497007131576538, "learning_rate": 
4.525052840979734e-06, "loss": 0.7441, "step": 2293 }, { "epoch": 0.09507231961540055, "grad_norm": 0.43192440271377563, "learning_rate": 4.524845621451366e-06, "loss": 0.699, "step": 2294 }, { "epoch": 0.09511376352107423, "grad_norm": 0.5404394865036011, "learning_rate": 4.524638401922998e-06, "loss": 0.7585, "step": 2295 }, { "epoch": 0.09515520742674789, "grad_norm": 0.41696080565452576, "learning_rate": 4.524431182394629e-06, "loss": 0.7375, "step": 2296 }, { "epoch": 0.09519665133242157, "grad_norm": 0.43760812282562256, "learning_rate": 4.524223962866261e-06, "loss": 0.7527, "step": 2297 }, { "epoch": 0.09523809523809523, "grad_norm": 0.43164634704589844, "learning_rate": 4.524016743337892e-06, "loss": 0.7449, "step": 2298 }, { "epoch": 0.09527953914376891, "grad_norm": 0.42652374505996704, "learning_rate": 4.523809523809524e-06, "loss": 0.7286, "step": 2299 }, { "epoch": 0.09532098304944257, "grad_norm": 0.414614737033844, "learning_rate": 4.523602304281156e-06, "loss": 0.7874, "step": 2300 }, { "epoch": 0.09536242695511625, "grad_norm": 0.4192710518836975, "learning_rate": 4.523395084752787e-06, "loss": 0.8032, "step": 2301 }, { "epoch": 0.09540387086078993, "grad_norm": 0.4798049032688141, "learning_rate": 4.523187865224419e-06, "loss": 0.7407, "step": 2302 }, { "epoch": 0.09544531476646359, "grad_norm": 0.49399110674858093, "learning_rate": 4.522980645696051e-06, "loss": 0.7969, "step": 2303 }, { "epoch": 0.09548675867213727, "grad_norm": 0.4425923824310303, "learning_rate": 4.522773426167683e-06, "loss": 0.7661, "step": 2304 }, { "epoch": 0.09552820257781093, "grad_norm": 0.40333569049835205, "learning_rate": 4.5225662066393135e-06, "loss": 0.7089, "step": 2305 }, { "epoch": 0.09556964648348461, "grad_norm": 0.448172390460968, "learning_rate": 4.522358987110946e-06, "loss": 0.7136, "step": 2306 }, { "epoch": 0.09561109038915827, "grad_norm": 0.4313826858997345, "learning_rate": 4.522151767582577e-06, "loss": 0.738, "step": 2307 }, { "epoch": 
0.09565253429483195, "grad_norm": 0.45727795362472534, "learning_rate": 4.521944548054209e-06, "loss": 0.7377, "step": 2308 }, { "epoch": 0.09569397820050561, "grad_norm": 0.4521796405315399, "learning_rate": 4.521737328525841e-06, "loss": 0.7378, "step": 2309 }, { "epoch": 0.09573542210617929, "grad_norm": 0.4456719756126404, "learning_rate": 4.521530108997472e-06, "loss": 0.7361, "step": 2310 }, { "epoch": 0.09577686601185295, "grad_norm": 0.4329984486103058, "learning_rate": 4.521322889469104e-06, "loss": 0.7621, "step": 2311 }, { "epoch": 0.09581830991752663, "grad_norm": 0.42228633165359497, "learning_rate": 4.521115669940736e-06, "loss": 0.7156, "step": 2312 }, { "epoch": 0.09585975382320029, "grad_norm": 0.4969192445278168, "learning_rate": 4.520908450412367e-06, "loss": 0.7617, "step": 2313 }, { "epoch": 0.09590119772887397, "grad_norm": 0.4804287254810333, "learning_rate": 4.5207012308839985e-06, "loss": 0.8372, "step": 2314 }, { "epoch": 0.09594264163454765, "grad_norm": 0.43762508034706116, "learning_rate": 4.520494011355631e-06, "loss": 0.7146, "step": 2315 }, { "epoch": 0.09598408554022131, "grad_norm": 0.5054106712341309, "learning_rate": 4.520286791827262e-06, "loss": 0.8176, "step": 2316 }, { "epoch": 0.09602552944589499, "grad_norm": 0.4765777885913849, "learning_rate": 4.5200795722988935e-06, "loss": 0.7622, "step": 2317 }, { "epoch": 0.09606697335156865, "grad_norm": 0.4310450255870819, "learning_rate": 4.519872352770526e-06, "loss": 0.7227, "step": 2318 }, { "epoch": 0.09610841725724233, "grad_norm": 0.47286340594291687, "learning_rate": 4.519665133242157e-06, "loss": 0.8103, "step": 2319 }, { "epoch": 0.09614986116291599, "grad_norm": 0.43081432580947876, "learning_rate": 4.519457913713789e-06, "loss": 0.7185, "step": 2320 }, { "epoch": 0.09619130506858967, "grad_norm": 0.4538341164588928, "learning_rate": 4.51925069418542e-06, "loss": 0.7561, "step": 2321 }, { "epoch": 0.09623274897426333, "grad_norm": 0.44100120663642883, "learning_rate": 
4.519043474657052e-06, "loss": 0.7252, "step": 2322 }, { "epoch": 0.096274192879937, "grad_norm": 0.4621768891811371, "learning_rate": 4.5188362551286835e-06, "loss": 0.8247, "step": 2323 }, { "epoch": 0.09631563678561067, "grad_norm": 0.4036440849304199, "learning_rate": 4.518629035600316e-06, "loss": 0.6981, "step": 2324 }, { "epoch": 0.09635708069128435, "grad_norm": 0.4260554313659668, "learning_rate": 4.518421816071947e-06, "loss": 0.7581, "step": 2325 }, { "epoch": 0.09639852459695802, "grad_norm": 0.3968186378479004, "learning_rate": 4.5182145965435785e-06, "loss": 0.6887, "step": 2326 }, { "epoch": 0.09643996850263169, "grad_norm": 0.4328472912311554, "learning_rate": 4.518007377015211e-06, "loss": 0.7793, "step": 2327 }, { "epoch": 0.09648141240830536, "grad_norm": 0.40531015396118164, "learning_rate": 4.517800157486842e-06, "loss": 0.7085, "step": 2328 }, { "epoch": 0.09652285631397903, "grad_norm": 0.44980961084365845, "learning_rate": 4.5175929379584735e-06, "loss": 0.7505, "step": 2329 }, { "epoch": 0.0965643002196527, "grad_norm": 0.4177475571632385, "learning_rate": 4.517385718430105e-06, "loss": 0.7262, "step": 2330 }, { "epoch": 0.09660574412532637, "grad_norm": 0.4285491704940796, "learning_rate": 4.517178498901737e-06, "loss": 0.7157, "step": 2331 }, { "epoch": 0.09664718803100004, "grad_norm": 0.43489131331443787, "learning_rate": 4.5169712793733685e-06, "loss": 0.7301, "step": 2332 }, { "epoch": 0.09668863193667371, "grad_norm": 0.4530631899833679, "learning_rate": 4.516764059845e-06, "loss": 0.8218, "step": 2333 }, { "epoch": 0.09673007584234738, "grad_norm": 0.47835442423820496, "learning_rate": 4.516556840316631e-06, "loss": 0.7917, "step": 2334 }, { "epoch": 0.09677151974802105, "grad_norm": 0.4514983296394348, "learning_rate": 4.5163496207882635e-06, "loss": 0.7356, "step": 2335 }, { "epoch": 0.09681296365369473, "grad_norm": 0.45691269636154175, "learning_rate": 4.516142401259895e-06, "loss": 0.7798, "step": 2336 }, { "epoch": 
0.09685440755936839, "grad_norm": 0.543431282043457, "learning_rate": 4.515935181731526e-06, "loss": 0.7854, "step": 2337 }, { "epoch": 0.09689585146504207, "grad_norm": 0.44547513127326965, "learning_rate": 4.5157279622031585e-06, "loss": 0.7864, "step": 2338 }, { "epoch": 0.09693729537071574, "grad_norm": 0.42431867122650146, "learning_rate": 4.51552074267479e-06, "loss": 0.7405, "step": 2339 }, { "epoch": 0.0969787392763894, "grad_norm": 0.4645279347896576, "learning_rate": 4.515313523146422e-06, "loss": 0.7289, "step": 2340 }, { "epoch": 0.09702018318206308, "grad_norm": 0.46230387687683105, "learning_rate": 4.5151063036180535e-06, "loss": 0.7703, "step": 2341 }, { "epoch": 0.09706162708773675, "grad_norm": 0.4439159333705902, "learning_rate": 4.514899084089685e-06, "loss": 0.7275, "step": 2342 }, { "epoch": 0.09710307099341042, "grad_norm": 0.43524986505508423, "learning_rate": 4.514691864561316e-06, "loss": 0.7283, "step": 2343 }, { "epoch": 0.09714451489908409, "grad_norm": 0.4739815890789032, "learning_rate": 4.5144846450329486e-06, "loss": 0.7825, "step": 2344 }, { "epoch": 0.09718595880475776, "grad_norm": 0.4382423460483551, "learning_rate": 4.51427742550458e-06, "loss": 0.7793, "step": 2345 }, { "epoch": 0.09722740271043143, "grad_norm": 0.43471187353134155, "learning_rate": 4.514070205976211e-06, "loss": 0.7449, "step": 2346 }, { "epoch": 0.0972688466161051, "grad_norm": 0.4425707161426544, "learning_rate": 4.5138629864478436e-06, "loss": 0.7762, "step": 2347 }, { "epoch": 0.09731029052177877, "grad_norm": 0.43949663639068604, "learning_rate": 4.513655766919475e-06, "loss": 0.7577, "step": 2348 }, { "epoch": 0.09735173442745244, "grad_norm": 0.42473387718200684, "learning_rate": 4.513448547391106e-06, "loss": 0.6975, "step": 2349 }, { "epoch": 0.09739317833312611, "grad_norm": 0.42673492431640625, "learning_rate": 4.513241327862738e-06, "loss": 0.7512, "step": 2350 }, { "epoch": 0.09743462223879978, "grad_norm": 0.43722769618034363, "learning_rate": 
4.51303410833437e-06, "loss": 0.7368, "step": 2351 }, { "epoch": 0.09747606614447346, "grad_norm": 0.4997045397758484, "learning_rate": 4.512826888806001e-06, "loss": 0.8533, "step": 2352 }, { "epoch": 0.09751751005014712, "grad_norm": 0.45096832513809204, "learning_rate": 4.512619669277633e-06, "loss": 0.7732, "step": 2353 }, { "epoch": 0.0975589539558208, "grad_norm": 0.43369951844215393, "learning_rate": 4.512412449749265e-06, "loss": 0.6788, "step": 2354 }, { "epoch": 0.09760039786149446, "grad_norm": 0.42994019389152527, "learning_rate": 4.512205230220896e-06, "loss": 0.7388, "step": 2355 }, { "epoch": 0.09764184176716814, "grad_norm": 0.47742557525634766, "learning_rate": 4.5119980106925286e-06, "loss": 0.7717, "step": 2356 }, { "epoch": 0.0976832856728418, "grad_norm": 0.4480413794517517, "learning_rate": 4.511790791164159e-06, "loss": 0.8276, "step": 2357 }, { "epoch": 0.09772472957851548, "grad_norm": 0.46683523058891296, "learning_rate": 4.511583571635791e-06, "loss": 0.7778, "step": 2358 }, { "epoch": 0.09776617348418914, "grad_norm": 0.4650786221027374, "learning_rate": 4.511376352107423e-06, "loss": 0.7566, "step": 2359 }, { "epoch": 0.09780761738986282, "grad_norm": 0.4565683901309967, "learning_rate": 4.511169132579055e-06, "loss": 0.8088, "step": 2360 }, { "epoch": 0.09784906129553649, "grad_norm": 0.45051896572113037, "learning_rate": 4.510961913050686e-06, "loss": 0.7339, "step": 2361 }, { "epoch": 0.09789050520121016, "grad_norm": 0.47070810198783875, "learning_rate": 4.510754693522318e-06, "loss": 0.7905, "step": 2362 }, { "epoch": 0.09793194910688384, "grad_norm": 0.4410671293735504, "learning_rate": 4.51054747399395e-06, "loss": 0.7964, "step": 2363 }, { "epoch": 0.0979733930125575, "grad_norm": 0.47352078557014465, "learning_rate": 4.510340254465581e-06, "loss": 0.7673, "step": 2364 }, { "epoch": 0.09801483691823118, "grad_norm": 0.47699326276779175, "learning_rate": 4.510133034937213e-06, "loss": 0.8162, "step": 2365 }, { "epoch": 
0.09805628082390484, "grad_norm": 0.475406289100647, "learning_rate": 4.509925815408844e-06, "loss": 0.7203, "step": 2366 }, { "epoch": 0.09809772472957852, "grad_norm": 0.46560657024383545, "learning_rate": 4.509718595880476e-06, "loss": 0.7744, "step": 2367 }, { "epoch": 0.09813916863525218, "grad_norm": 0.42518529295921326, "learning_rate": 4.509511376352108e-06, "loss": 0.791, "step": 2368 }, { "epoch": 0.09818061254092586, "grad_norm": 0.4530797302722931, "learning_rate": 4.509304156823739e-06, "loss": 0.8135, "step": 2369 }, { "epoch": 0.09822205644659952, "grad_norm": 0.4168483018875122, "learning_rate": 4.509096937295371e-06, "loss": 0.6866, "step": 2370 }, { "epoch": 0.0982635003522732, "grad_norm": 0.42781397700309753, "learning_rate": 4.508889717767003e-06, "loss": 0.7295, "step": 2371 }, { "epoch": 0.09830494425794686, "grad_norm": 0.4907296597957611, "learning_rate": 4.508682498238635e-06, "loss": 0.8088, "step": 2372 }, { "epoch": 0.09834638816362054, "grad_norm": 0.4282696545124054, "learning_rate": 4.5084752787102655e-06, "loss": 0.6992, "step": 2373 }, { "epoch": 0.0983878320692942, "grad_norm": 0.4264950752258301, "learning_rate": 4.508268059181898e-06, "loss": 0.801, "step": 2374 }, { "epoch": 0.09842927597496788, "grad_norm": 0.45369046926498413, "learning_rate": 4.508060839653529e-06, "loss": 0.7729, "step": 2375 }, { "epoch": 0.09847071988064156, "grad_norm": 0.42282843589782715, "learning_rate": 4.507853620125161e-06, "loss": 0.7598, "step": 2376 }, { "epoch": 0.09851216378631522, "grad_norm": 0.41421714425086975, "learning_rate": 4.507646400596793e-06, "loss": 0.7455, "step": 2377 }, { "epoch": 0.0985536076919889, "grad_norm": 0.425515353679657, "learning_rate": 4.507439181068424e-06, "loss": 0.7725, "step": 2378 }, { "epoch": 0.09859505159766256, "grad_norm": 0.4112476110458374, "learning_rate": 4.507231961540056e-06, "loss": 0.7505, "step": 2379 }, { "epoch": 0.09863649550333624, "grad_norm": 0.43414306640625, "learning_rate": 
4.507024742011688e-06, "loss": 0.7224, "step": 2380 }, { "epoch": 0.0986779394090099, "grad_norm": 0.4688867926597595, "learning_rate": 4.506817522483319e-06, "loss": 0.7466, "step": 2381 }, { "epoch": 0.09871938331468358, "grad_norm": 0.4288109838962555, "learning_rate": 4.5066103029549505e-06, "loss": 0.7375, "step": 2382 }, { "epoch": 0.09876082722035724, "grad_norm": 0.44669681787490845, "learning_rate": 4.506403083426583e-06, "loss": 0.7356, "step": 2383 }, { "epoch": 0.09880227112603092, "grad_norm": 0.43237873911857605, "learning_rate": 4.506195863898214e-06, "loss": 0.75, "step": 2384 }, { "epoch": 0.09884371503170458, "grad_norm": 0.4640229046344757, "learning_rate": 4.5059886443698455e-06, "loss": 0.707, "step": 2385 }, { "epoch": 0.09888515893737826, "grad_norm": 0.4520900547504425, "learning_rate": 4.505781424841477e-06, "loss": 0.7847, "step": 2386 }, { "epoch": 0.09892660284305192, "grad_norm": 0.45799869298934937, "learning_rate": 4.505574205313109e-06, "loss": 0.8435, "step": 2387 }, { "epoch": 0.0989680467487256, "grad_norm": 0.4351831078529358, "learning_rate": 4.505366985784741e-06, "loss": 0.8125, "step": 2388 }, { "epoch": 0.09900949065439928, "grad_norm": 0.40404462814331055, "learning_rate": 4.505159766256372e-06, "loss": 0.7172, "step": 2389 }, { "epoch": 0.09905093456007294, "grad_norm": 0.4919203817844391, "learning_rate": 4.504952546728004e-06, "loss": 0.7803, "step": 2390 }, { "epoch": 0.09909237846574662, "grad_norm": 0.45305073261260986, "learning_rate": 4.5047453271996355e-06, "loss": 0.7959, "step": 2391 }, { "epoch": 0.09913382237142028, "grad_norm": 0.48433443903923035, "learning_rate": 4.504538107671268e-06, "loss": 0.7947, "step": 2392 }, { "epoch": 0.09917526627709396, "grad_norm": 0.44037485122680664, "learning_rate": 4.504330888142899e-06, "loss": 0.7468, "step": 2393 }, { "epoch": 0.09921671018276762, "grad_norm": 0.4270979166030884, "learning_rate": 4.5041236686145305e-06, "loss": 0.74, "step": 2394 }, { "epoch": 
0.0992581540884413, "grad_norm": 0.4232253134250641, "learning_rate": 4.503916449086162e-06, "loss": 0.7063, "step": 2395 }, { "epoch": 0.09929959799411496, "grad_norm": 0.42492538690567017, "learning_rate": 4.503709229557794e-06, "loss": 0.7073, "step": 2396 }, { "epoch": 0.09934104189978864, "grad_norm": 0.4379681944847107, "learning_rate": 4.5035020100294255e-06, "loss": 0.7483, "step": 2397 }, { "epoch": 0.0993824858054623, "grad_norm": 0.42816707491874695, "learning_rate": 4.503294790501057e-06, "loss": 0.7349, "step": 2398 }, { "epoch": 0.09942392971113598, "grad_norm": 0.44663336873054504, "learning_rate": 4.503087570972689e-06, "loss": 0.7542, "step": 2399 }, { "epoch": 0.09946537361680965, "grad_norm": 0.4171334505081177, "learning_rate": 4.5028803514443205e-06, "loss": 0.7378, "step": 2400 }, { "epoch": 0.09950681752248332, "grad_norm": 0.4079301655292511, "learning_rate": 4.502673131915952e-06, "loss": 0.6808, "step": 2401 }, { "epoch": 0.099548261428157, "grad_norm": 0.4644434154033661, "learning_rate": 4.502465912387583e-06, "loss": 0.7812, "step": 2402 }, { "epoch": 0.09958970533383066, "grad_norm": 0.4629955589771271, "learning_rate": 4.5022586928592156e-06, "loss": 0.7646, "step": 2403 }, { "epoch": 0.09963114923950434, "grad_norm": 0.4141670763492584, "learning_rate": 4.502051473330847e-06, "loss": 0.7648, "step": 2404 }, { "epoch": 0.099672593145178, "grad_norm": 0.41274768114089966, "learning_rate": 4.501844253802478e-06, "loss": 0.7202, "step": 2405 }, { "epoch": 0.09971403705085168, "grad_norm": 0.4692912697792053, "learning_rate": 4.5016370342741106e-06, "loss": 0.752, "step": 2406 }, { "epoch": 0.09975548095652534, "grad_norm": 0.46297743916511536, "learning_rate": 4.501429814745742e-06, "loss": 0.7844, "step": 2407 }, { "epoch": 0.09979692486219902, "grad_norm": 0.4384287893772125, "learning_rate": 4.501222595217374e-06, "loss": 0.6854, "step": 2408 }, { "epoch": 0.09983836876787268, "grad_norm": 0.4322988986968994, "learning_rate": 
4.5010153756890056e-06, "loss": 0.7126, "step": 2409 }, { "epoch": 0.09987981267354636, "grad_norm": 0.4194031059741974, "learning_rate": 4.500808156160637e-06, "loss": 0.7532, "step": 2410 }, { "epoch": 0.09992125657922002, "grad_norm": 0.4283793270587921, "learning_rate": 4.500600936632268e-06, "loss": 0.7795, "step": 2411 }, { "epoch": 0.0999627004848937, "grad_norm": 0.42768609523773193, "learning_rate": 4.5003937171039006e-06, "loss": 0.7563, "step": 2412 }, { "epoch": 0.10000414439056737, "grad_norm": 0.42699337005615234, "learning_rate": 4.500186497575532e-06, "loss": 0.717, "step": 2413 }, { "epoch": 0.10004558829624104, "grad_norm": 0.4601013660430908, "learning_rate": 4.499979278047163e-06, "loss": 0.8013, "step": 2414 }, { "epoch": 0.10008703220191471, "grad_norm": 0.4400237798690796, "learning_rate": 4.4997720585187956e-06, "loss": 0.7749, "step": 2415 }, { "epoch": 0.10012847610758838, "grad_norm": 0.4356423318386078, "learning_rate": 4.499564838990427e-06, "loss": 0.7533, "step": 2416 }, { "epoch": 0.10016992001326205, "grad_norm": 0.41153624653816223, "learning_rate": 4.499357619462058e-06, "loss": 0.6985, "step": 2417 }, { "epoch": 0.10021136391893572, "grad_norm": 0.4521646201610565, "learning_rate": 4.49915039993369e-06, "loss": 0.7161, "step": 2418 }, { "epoch": 0.1002528078246094, "grad_norm": 0.4561423659324646, "learning_rate": 4.498943180405322e-06, "loss": 0.7825, "step": 2419 }, { "epoch": 0.10029425173028306, "grad_norm": 0.4558728039264679, "learning_rate": 4.498735960876953e-06, "loss": 0.7307, "step": 2420 }, { "epoch": 0.10033569563595673, "grad_norm": 0.44516822695732117, "learning_rate": 4.498528741348585e-06, "loss": 0.802, "step": 2421 }, { "epoch": 0.1003771395416304, "grad_norm": 0.45842206478118896, "learning_rate": 4.498321521820217e-06, "loss": 0.7593, "step": 2422 }, { "epoch": 0.10041858344730407, "grad_norm": 0.42760998010635376, "learning_rate": 4.498114302291848e-06, "loss": 0.7222, "step": 2423 }, { "epoch": 
0.10046002735297775, "grad_norm": 0.43931469321250916, "learning_rate": 4.497907082763481e-06, "loss": 0.7515, "step": 2424 }, { "epoch": 0.10050147125865141, "grad_norm": 0.4742116928100586, "learning_rate": 4.497699863235111e-06, "loss": 0.7638, "step": 2425 }, { "epoch": 0.10054291516432509, "grad_norm": 0.43348821997642517, "learning_rate": 4.497492643706743e-06, "loss": 0.7791, "step": 2426 }, { "epoch": 0.10058435906999875, "grad_norm": 0.5565977692604065, "learning_rate": 4.497285424178375e-06, "loss": 0.8474, "step": 2427 }, { "epoch": 0.10062580297567243, "grad_norm": 0.45578351616859436, "learning_rate": 4.497078204650007e-06, "loss": 0.7788, "step": 2428 }, { "epoch": 0.1006672468813461, "grad_norm": 0.4438045024871826, "learning_rate": 4.496870985121638e-06, "loss": 0.7173, "step": 2429 }, { "epoch": 0.10070869078701977, "grad_norm": 0.4236430525779724, "learning_rate": 4.49666376559327e-06, "loss": 0.749, "step": 2430 }, { "epoch": 0.10075013469269344, "grad_norm": 0.455037385225296, "learning_rate": 4.496456546064902e-06, "loss": 0.7686, "step": 2431 }, { "epoch": 0.10079157859836711, "grad_norm": 0.42024797201156616, "learning_rate": 4.496249326536533e-06, "loss": 0.6854, "step": 2432 }, { "epoch": 0.10083302250404078, "grad_norm": 0.42516160011291504, "learning_rate": 4.496042107008165e-06, "loss": 0.7798, "step": 2433 }, { "epoch": 0.10087446640971445, "grad_norm": 0.43512436747550964, "learning_rate": 4.495834887479796e-06, "loss": 0.7808, "step": 2434 }, { "epoch": 0.10091591031538812, "grad_norm": 0.43528231978416443, "learning_rate": 4.495627667951428e-06, "loss": 0.708, "step": 2435 }, { "epoch": 0.10095735422106179, "grad_norm": 0.4744385778903961, "learning_rate": 4.49542044842306e-06, "loss": 0.761, "step": 2436 }, { "epoch": 0.10099879812673547, "grad_norm": 0.430096298456192, "learning_rate": 4.495213228894691e-06, "loss": 0.7249, "step": 2437 }, { "epoch": 0.10104024203240913, "grad_norm": 0.4591529071331024, "learning_rate": 
4.4950060093663225e-06, "loss": 0.7539, "step": 2438 }, { "epoch": 0.10108168593808281, "grad_norm": 0.431298166513443, "learning_rate": 4.494798789837955e-06, "loss": 0.7463, "step": 2439 }, { "epoch": 0.10112312984375647, "grad_norm": 0.4818033277988434, "learning_rate": 4.494591570309587e-06, "loss": 0.7974, "step": 2440 }, { "epoch": 0.10116457374943015, "grad_norm": 0.41909778118133545, "learning_rate": 4.4943843507812175e-06, "loss": 0.7341, "step": 2441 }, { "epoch": 0.10120601765510381, "grad_norm": 0.44373542070388794, "learning_rate": 4.49417713125285e-06, "loss": 0.8207, "step": 2442 }, { "epoch": 0.10124746156077749, "grad_norm": 0.4719321131706238, "learning_rate": 4.493969911724481e-06, "loss": 0.8121, "step": 2443 }, { "epoch": 0.10128890546645115, "grad_norm": 0.4662165939807892, "learning_rate": 4.493762692196113e-06, "loss": 0.7581, "step": 2444 }, { "epoch": 0.10133034937212483, "grad_norm": 0.44250357151031494, "learning_rate": 4.493555472667745e-06, "loss": 0.749, "step": 2445 }, { "epoch": 0.1013717932777985, "grad_norm": 0.4132310450077057, "learning_rate": 4.493348253139376e-06, "loss": 0.7676, "step": 2446 }, { "epoch": 0.10141323718347217, "grad_norm": 0.46850740909576416, "learning_rate": 4.4931410336110075e-06, "loss": 0.8042, "step": 2447 }, { "epoch": 0.10145468108914583, "grad_norm": 0.44116607308387756, "learning_rate": 4.49293381408264e-06, "loss": 0.7405, "step": 2448 }, { "epoch": 0.10149612499481951, "grad_norm": 0.46291738748550415, "learning_rate": 4.492726594554271e-06, "loss": 0.7917, "step": 2449 }, { "epoch": 0.10153756890049319, "grad_norm": 0.4250962436199188, "learning_rate": 4.4925193750259025e-06, "loss": 0.7881, "step": 2450 }, { "epoch": 0.10157901280616685, "grad_norm": 0.434390127658844, "learning_rate": 4.492312155497535e-06, "loss": 0.7727, "step": 2451 }, { "epoch": 0.10162045671184053, "grad_norm": 0.4137495458126068, "learning_rate": 4.492104935969166e-06, "loss": 0.7195, "step": 2452 }, { "epoch": 
0.10166190061751419, "grad_norm": 0.42863282561302185, "learning_rate": 4.4918977164407975e-06, "loss": 0.7402, "step": 2453 }, { "epoch": 0.10170334452318787, "grad_norm": 0.4046284556388855, "learning_rate": 4.491690496912429e-06, "loss": 0.7312, "step": 2454 }, { "epoch": 0.10174478842886153, "grad_norm": 0.47594335675239563, "learning_rate": 4.491483277384061e-06, "loss": 0.8127, "step": 2455 }, { "epoch": 0.10178623233453521, "grad_norm": 0.43427473306655884, "learning_rate": 4.4912760578556925e-06, "loss": 0.7078, "step": 2456 }, { "epoch": 0.10182767624020887, "grad_norm": 0.4573884606361389, "learning_rate": 4.491068838327324e-06, "loss": 0.7356, "step": 2457 }, { "epoch": 0.10186912014588255, "grad_norm": 0.5104098916053772, "learning_rate": 4.490861618798956e-06, "loss": 0.876, "step": 2458 }, { "epoch": 0.10191056405155621, "grad_norm": 0.43663156032562256, "learning_rate": 4.4906543992705876e-06, "loss": 0.722, "step": 2459 }, { "epoch": 0.10195200795722989, "grad_norm": 0.47033756971359253, "learning_rate": 4.49044717974222e-06, "loss": 0.7706, "step": 2460 }, { "epoch": 0.10199345186290357, "grad_norm": 0.4738774299621582, "learning_rate": 4.490239960213851e-06, "loss": 0.7686, "step": 2461 }, { "epoch": 0.10203489576857723, "grad_norm": 0.454360693693161, "learning_rate": 4.4900327406854826e-06, "loss": 0.7281, "step": 2462 }, { "epoch": 0.10207633967425091, "grad_norm": 0.38072669506073, "learning_rate": 4.489825521157114e-06, "loss": 0.6763, "step": 2463 }, { "epoch": 0.10211778357992457, "grad_norm": 0.4466675817966461, "learning_rate": 4.489618301628746e-06, "loss": 0.7584, "step": 2464 }, { "epoch": 0.10215922748559825, "grad_norm": 0.40937522053718567, "learning_rate": 4.4894110821003776e-06, "loss": 0.7222, "step": 2465 }, { "epoch": 0.10220067139127191, "grad_norm": 0.4338492751121521, "learning_rate": 4.489203862572009e-06, "loss": 0.7441, "step": 2466 }, { "epoch": 0.10224211529694559, "grad_norm": 0.414019376039505, "learning_rate": 
4.488996643043641e-06, "loss": 0.7166, "step": 2467 }, { "epoch": 0.10228355920261925, "grad_norm": 0.4475066065788269, "learning_rate": 4.4887894235152726e-06, "loss": 0.7474, "step": 2468 }, { "epoch": 0.10232500310829293, "grad_norm": 0.4825494885444641, "learning_rate": 4.488582203986904e-06, "loss": 0.8452, "step": 2469 }, { "epoch": 0.10236644701396659, "grad_norm": 0.4023198187351227, "learning_rate": 4.488374984458535e-06, "loss": 0.7152, "step": 2470 }, { "epoch": 0.10240789091964027, "grad_norm": 0.452576607465744, "learning_rate": 4.4881677649301676e-06, "loss": 0.7773, "step": 2471 }, { "epoch": 0.10244933482531393, "grad_norm": 0.45021331310272217, "learning_rate": 4.487960545401799e-06, "loss": 0.7952, "step": 2472 }, { "epoch": 0.10249077873098761, "grad_norm": 0.4413323998451233, "learning_rate": 4.48775332587343e-06, "loss": 0.7383, "step": 2473 }, { "epoch": 0.10253222263666129, "grad_norm": 0.5122082829475403, "learning_rate": 4.4875461063450626e-06, "loss": 0.783, "step": 2474 }, { "epoch": 0.10257366654233495, "grad_norm": 0.4322240650653839, "learning_rate": 4.487338886816694e-06, "loss": 0.6738, "step": 2475 }, { "epoch": 0.10261511044800863, "grad_norm": 0.44828489422798157, "learning_rate": 4.487131667288326e-06, "loss": 0.7644, "step": 2476 }, { "epoch": 0.10265655435368229, "grad_norm": 0.4050629138946533, "learning_rate": 4.4869244477599576e-06, "loss": 0.7172, "step": 2477 }, { "epoch": 0.10269799825935597, "grad_norm": 0.4359167814254761, "learning_rate": 4.486717228231589e-06, "loss": 0.7549, "step": 2478 }, { "epoch": 0.10273944216502963, "grad_norm": 0.43352094292640686, "learning_rate": 4.48651000870322e-06, "loss": 0.739, "step": 2479 }, { "epoch": 0.1027808860707033, "grad_norm": 0.45650553703308105, "learning_rate": 4.486302789174853e-06, "loss": 0.7534, "step": 2480 }, { "epoch": 0.10282232997637697, "grad_norm": 0.4786919951438904, "learning_rate": 4.486095569646484e-06, "loss": 0.7859, "step": 2481 }, { "epoch": 
0.10286377388205065, "grad_norm": 0.409066766500473, "learning_rate": 4.485888350118115e-06, "loss": 0.7078, "step": 2482 }, { "epoch": 0.10290521778772431, "grad_norm": 0.49375882744789124, "learning_rate": 4.485681130589748e-06, "loss": 0.7885, "step": 2483 }, { "epoch": 0.10294666169339799, "grad_norm": 0.4763132929801941, "learning_rate": 4.485473911061379e-06, "loss": 0.7661, "step": 2484 }, { "epoch": 0.10298810559907165, "grad_norm": 0.48109549283981323, "learning_rate": 4.48526669153301e-06, "loss": 0.751, "step": 2485 }, { "epoch": 0.10302954950474533, "grad_norm": 0.45813482999801636, "learning_rate": 4.485059472004642e-06, "loss": 0.7551, "step": 2486 }, { "epoch": 0.103070993410419, "grad_norm": 0.44686657190322876, "learning_rate": 4.484852252476274e-06, "loss": 0.7463, "step": 2487 }, { "epoch": 0.10311243731609267, "grad_norm": 0.48090413212776184, "learning_rate": 4.484645032947905e-06, "loss": 0.802, "step": 2488 }, { "epoch": 0.10315388122176634, "grad_norm": 0.4238318204879761, "learning_rate": 4.484437813419537e-06, "loss": 0.7263, "step": 2489 }, { "epoch": 0.10319532512744001, "grad_norm": 0.42219048738479614, "learning_rate": 4.484230593891168e-06, "loss": 0.771, "step": 2490 }, { "epoch": 0.10323676903311368, "grad_norm": 0.43518730998039246, "learning_rate": 4.4840233743628e-06, "loss": 0.7622, "step": 2491 }, { "epoch": 0.10327821293878735, "grad_norm": 0.4431527256965637, "learning_rate": 4.483816154834433e-06, "loss": 0.7168, "step": 2492 }, { "epoch": 0.10331965684446102, "grad_norm": 0.439017653465271, "learning_rate": 4.483608935306064e-06, "loss": 0.7205, "step": 2493 }, { "epoch": 0.10336110075013469, "grad_norm": 0.430141806602478, "learning_rate": 4.483401715777695e-06, "loss": 0.6853, "step": 2494 }, { "epoch": 0.10340254465580836, "grad_norm": 0.49170151352882385, "learning_rate": 4.483194496249327e-06, "loss": 0.7952, "step": 2495 }, { "epoch": 0.10344398856148203, "grad_norm": 0.43707892298698425, "learning_rate": 
4.482987276720959e-06, "loss": 0.7352, "step": 2496 }, { "epoch": 0.1034854324671557, "grad_norm": 0.4098874032497406, "learning_rate": 4.48278005719259e-06, "loss": 0.6731, "step": 2497 }, { "epoch": 0.10352687637282938, "grad_norm": 0.41782280802726746, "learning_rate": 4.482572837664222e-06, "loss": 0.7188, "step": 2498 }, { "epoch": 0.10356832027850305, "grad_norm": 0.5006802082061768, "learning_rate": 4.482365618135853e-06, "loss": 0.8198, "step": 2499 }, { "epoch": 0.10360976418417672, "grad_norm": 0.44261375069618225, "learning_rate": 4.482158398607485e-06, "loss": 0.7517, "step": 2500 }, { "epoch": 0.10365120808985039, "grad_norm": 0.3972148597240448, "learning_rate": 4.481951179079117e-06, "loss": 0.7292, "step": 2501 }, { "epoch": 0.10369265199552406, "grad_norm": 0.45424169301986694, "learning_rate": 4.481743959550748e-06, "loss": 0.7461, "step": 2502 }, { "epoch": 0.10373409590119773, "grad_norm": 0.420501708984375, "learning_rate": 4.48153674002238e-06, "loss": 0.7889, "step": 2503 }, { "epoch": 0.1037755398068714, "grad_norm": 0.42208561301231384, "learning_rate": 4.481329520494012e-06, "loss": 0.7844, "step": 2504 }, { "epoch": 0.10381698371254507, "grad_norm": 0.46895819902420044, "learning_rate": 4.481122300965643e-06, "loss": 0.8037, "step": 2505 }, { "epoch": 0.10385842761821874, "grad_norm": 0.39926281571388245, "learning_rate": 4.4809150814372745e-06, "loss": 0.6953, "step": 2506 }, { "epoch": 0.1038998715238924, "grad_norm": 0.45672452449798584, "learning_rate": 4.480707861908907e-06, "loss": 0.7646, "step": 2507 }, { "epoch": 0.10394131542956608, "grad_norm": 0.46605178713798523, "learning_rate": 4.480500642380538e-06, "loss": 0.8022, "step": 2508 }, { "epoch": 0.10398275933523975, "grad_norm": 0.4274391233921051, "learning_rate": 4.4802934228521695e-06, "loss": 0.7285, "step": 2509 }, { "epoch": 0.10402420324091342, "grad_norm": 0.4286547303199768, "learning_rate": 4.480086203323802e-06, "loss": 0.7651, "step": 2510 }, { "epoch": 
0.1040656471465871, "grad_norm": 0.43266093730926514, "learning_rate": 4.479878983795433e-06, "loss": 0.752, "step": 2511 }, { "epoch": 0.10410709105226076, "grad_norm": 0.4616123139858246, "learning_rate": 4.479671764267065e-06, "loss": 0.7871, "step": 2512 }, { "epoch": 0.10414853495793444, "grad_norm": 0.4442767798900604, "learning_rate": 4.479464544738697e-06, "loss": 0.7927, "step": 2513 }, { "epoch": 0.1041899788636081, "grad_norm": 0.41791102290153503, "learning_rate": 4.479257325210328e-06, "loss": 0.7212, "step": 2514 }, { "epoch": 0.10423142276928178, "grad_norm": 0.44849756360054016, "learning_rate": 4.4790501056819595e-06, "loss": 0.7461, "step": 2515 }, { "epoch": 0.10427286667495544, "grad_norm": 0.4391424059867859, "learning_rate": 4.478842886153592e-06, "loss": 0.7451, "step": 2516 }, { "epoch": 0.10431431058062912, "grad_norm": 0.43719181418418884, "learning_rate": 4.478635666625223e-06, "loss": 0.7454, "step": 2517 }, { "epoch": 0.10435575448630278, "grad_norm": 0.44565802812576294, "learning_rate": 4.4784284470968546e-06, "loss": 0.7488, "step": 2518 }, { "epoch": 0.10439719839197646, "grad_norm": 0.4534648656845093, "learning_rate": 4.478221227568487e-06, "loss": 0.7, "step": 2519 }, { "epoch": 0.10443864229765012, "grad_norm": 0.5323036313056946, "learning_rate": 4.478014008040118e-06, "loss": 0.8271, "step": 2520 }, { "epoch": 0.1044800862033238, "grad_norm": 0.43365198373794556, "learning_rate": 4.4778067885117496e-06, "loss": 0.8071, "step": 2521 }, { "epoch": 0.10452153010899747, "grad_norm": 0.38153836131095886, "learning_rate": 4.477599568983381e-06, "loss": 0.6445, "step": 2522 }, { "epoch": 0.10456297401467114, "grad_norm": 0.44764432311058044, "learning_rate": 4.477392349455013e-06, "loss": 0.7786, "step": 2523 }, { "epoch": 0.10460441792034482, "grad_norm": 0.454918771982193, "learning_rate": 4.4771851299266446e-06, "loss": 0.7927, "step": 2524 }, { "epoch": 0.10464586182601848, "grad_norm": 0.44914036989212036, "learning_rate": 
4.476977910398276e-06, "loss": 0.6968, "step": 2525 }, { "epoch": 0.10468730573169216, "grad_norm": 0.46323275566101074, "learning_rate": 4.476770690869907e-06, "loss": 0.7622, "step": 2526 }, { "epoch": 0.10472874963736582, "grad_norm": 0.42381998896598816, "learning_rate": 4.4765634713415396e-06, "loss": 0.6992, "step": 2527 }, { "epoch": 0.1047701935430395, "grad_norm": 0.47327014803886414, "learning_rate": 4.476356251813172e-06, "loss": 0.7434, "step": 2528 }, { "epoch": 0.10481163744871316, "grad_norm": 0.4108796715736389, "learning_rate": 4.476149032284803e-06, "loss": 0.7495, "step": 2529 }, { "epoch": 0.10485308135438684, "grad_norm": 0.45763590931892395, "learning_rate": 4.4759418127564346e-06, "loss": 0.7761, "step": 2530 }, { "epoch": 0.1048945252600605, "grad_norm": 0.43307146430015564, "learning_rate": 4.475734593228066e-06, "loss": 0.7324, "step": 2531 }, { "epoch": 0.10493596916573418, "grad_norm": 0.4290485382080078, "learning_rate": 4.475527373699698e-06, "loss": 0.7224, "step": 2532 }, { "epoch": 0.10497741307140784, "grad_norm": 0.4771115183830261, "learning_rate": 4.4753201541713296e-06, "loss": 0.7942, "step": 2533 }, { "epoch": 0.10501885697708152, "grad_norm": 0.4714314639568329, "learning_rate": 4.475112934642961e-06, "loss": 0.7698, "step": 2534 }, { "epoch": 0.1050603008827552, "grad_norm": 0.4226863980293274, "learning_rate": 4.474905715114593e-06, "loss": 0.7131, "step": 2535 }, { "epoch": 0.10510174478842886, "grad_norm": 0.4509071707725525, "learning_rate": 4.4746984955862246e-06, "loss": 0.8113, "step": 2536 }, { "epoch": 0.10514318869410254, "grad_norm": 0.43067076802253723, "learning_rate": 4.474491276057856e-06, "loss": 0.7808, "step": 2537 }, { "epoch": 0.1051846325997762, "grad_norm": 0.46068039536476135, "learning_rate": 4.474284056529487e-06, "loss": 0.8279, "step": 2538 }, { "epoch": 0.10522607650544988, "grad_norm": 0.43353769183158875, "learning_rate": 4.47407683700112e-06, "loss": 0.7732, "step": 2539 }, { "epoch": 
0.10526752041112354, "grad_norm": 0.46565356850624084, "learning_rate": 4.473869617472751e-06, "loss": 0.7446, "step": 2540 }, { "epoch": 0.10530896431679722, "grad_norm": 0.46216118335723877, "learning_rate": 4.473662397944382e-06, "loss": 0.8176, "step": 2541 }, { "epoch": 0.10535040822247088, "grad_norm": 0.44926711916923523, "learning_rate": 4.473455178416014e-06, "loss": 0.7461, "step": 2542 }, { "epoch": 0.10539185212814456, "grad_norm": 0.4275146424770355, "learning_rate": 4.473247958887646e-06, "loss": 0.6875, "step": 2543 }, { "epoch": 0.10543329603381822, "grad_norm": 0.4907032549381256, "learning_rate": 4.473040739359278e-06, "loss": 0.7419, "step": 2544 }, { "epoch": 0.1054747399394919, "grad_norm": 0.4054960012435913, "learning_rate": 4.47283351983091e-06, "loss": 0.7429, "step": 2545 }, { "epoch": 0.10551618384516556, "grad_norm": 0.42188334465026855, "learning_rate": 4.472626300302541e-06, "loss": 0.7524, "step": 2546 }, { "epoch": 0.10555762775083924, "grad_norm": 0.40305376052856445, "learning_rate": 4.472419080774172e-06, "loss": 0.7075, "step": 2547 }, { "epoch": 0.10559907165651292, "grad_norm": 0.4404488801956177, "learning_rate": 4.472211861245805e-06, "loss": 0.7319, "step": 2548 }, { "epoch": 0.10564051556218658, "grad_norm": 0.45241448283195496, "learning_rate": 4.472004641717436e-06, "loss": 0.7332, "step": 2549 }, { "epoch": 0.10568195946786026, "grad_norm": 0.43186691403388977, "learning_rate": 4.471797422189067e-06, "loss": 0.7551, "step": 2550 }, { "epoch": 0.10572340337353392, "grad_norm": 0.45685335993766785, "learning_rate": 4.471590202660699e-06, "loss": 0.782, "step": 2551 }, { "epoch": 0.1057648472792076, "grad_norm": 0.4209917485713959, "learning_rate": 4.471382983132331e-06, "loss": 0.6509, "step": 2552 }, { "epoch": 0.10580629118488126, "grad_norm": 0.4786608815193176, "learning_rate": 4.471175763603962e-06, "loss": 0.7712, "step": 2553 }, { "epoch": 0.10584773509055494, "grad_norm": 0.44296300411224365, "learning_rate": 
4.470968544075594e-06, "loss": 0.7753, "step": 2554 }, { "epoch": 0.1058891789962286, "grad_norm": 0.4304429292678833, "learning_rate": 4.470761324547226e-06, "loss": 0.7332, "step": 2555 }, { "epoch": 0.10593062290190228, "grad_norm": 0.47287848591804504, "learning_rate": 4.470554105018857e-06, "loss": 0.8389, "step": 2556 }, { "epoch": 0.10597206680757594, "grad_norm": 0.4448120594024658, "learning_rate": 4.470346885490489e-06, "loss": 0.7271, "step": 2557 }, { "epoch": 0.10601351071324962, "grad_norm": 0.41209185123443604, "learning_rate": 4.47013966596212e-06, "loss": 0.7312, "step": 2558 }, { "epoch": 0.10605495461892328, "grad_norm": 0.42535993456840515, "learning_rate": 4.469932446433752e-06, "loss": 0.79, "step": 2559 }, { "epoch": 0.10609639852459696, "grad_norm": 0.41931992769241333, "learning_rate": 4.469725226905384e-06, "loss": 0.7361, "step": 2560 }, { "epoch": 0.10613784243027063, "grad_norm": 0.4432069659233093, "learning_rate": 4.469518007377016e-06, "loss": 0.7776, "step": 2561 }, { "epoch": 0.1061792863359443, "grad_norm": 0.41144877672195435, "learning_rate": 4.469310787848647e-06, "loss": 0.7363, "step": 2562 }, { "epoch": 0.10622073024161797, "grad_norm": 0.3849984407424927, "learning_rate": 4.469103568320279e-06, "loss": 0.6924, "step": 2563 }, { "epoch": 0.10626217414729164, "grad_norm": 0.4466310143470764, "learning_rate": 4.468896348791911e-06, "loss": 0.7712, "step": 2564 }, { "epoch": 0.10630361805296532, "grad_norm": 0.4497566223144531, "learning_rate": 4.468689129263542e-06, "loss": 0.8071, "step": 2565 }, { "epoch": 0.10634506195863898, "grad_norm": 0.4391622245311737, "learning_rate": 4.468481909735174e-06, "loss": 0.7344, "step": 2566 }, { "epoch": 0.10638650586431266, "grad_norm": 0.4689805507659912, "learning_rate": 4.468274690206805e-06, "loss": 0.7126, "step": 2567 }, { "epoch": 0.10642794976998632, "grad_norm": 0.42441266775131226, "learning_rate": 4.468067470678437e-06, "loss": 0.7561, "step": 2568 }, { "epoch": 
0.10646939367566, "grad_norm": 0.4246194064617157, "learning_rate": 4.467860251150069e-06, "loss": 0.7844, "step": 2569 }, { "epoch": 0.10651083758133366, "grad_norm": 0.4340292811393738, "learning_rate": 4.4676530316217e-06, "loss": 0.7267, "step": 2570 }, { "epoch": 0.10655228148700734, "grad_norm": 0.4364875853061676, "learning_rate": 4.467445812093332e-06, "loss": 0.8528, "step": 2571 }, { "epoch": 0.10659372539268101, "grad_norm": 0.4468110203742981, "learning_rate": 4.467238592564964e-06, "loss": 0.7371, "step": 2572 }, { "epoch": 0.10663516929835468, "grad_norm": 0.42062655091285706, "learning_rate": 4.467031373036595e-06, "loss": 0.7671, "step": 2573 }, { "epoch": 0.10667661320402835, "grad_norm": 0.42927059531211853, "learning_rate": 4.4668241535082265e-06, "loss": 0.7045, "step": 2574 }, { "epoch": 0.10671805710970202, "grad_norm": 0.4020428955554962, "learning_rate": 4.466616933979859e-06, "loss": 0.7146, "step": 2575 }, { "epoch": 0.1067595010153757, "grad_norm": 0.4750811457633972, "learning_rate": 4.46640971445149e-06, "loss": 0.7996, "step": 2576 }, { "epoch": 0.10680094492104936, "grad_norm": 0.40622594952583313, "learning_rate": 4.4662024949231216e-06, "loss": 0.7219, "step": 2577 }, { "epoch": 0.10684238882672303, "grad_norm": 0.4146714210510254, "learning_rate": 4.465995275394753e-06, "loss": 0.7397, "step": 2578 }, { "epoch": 0.1068838327323967, "grad_norm": 0.42968955636024475, "learning_rate": 4.465788055866385e-06, "loss": 0.6786, "step": 2579 }, { "epoch": 0.10692527663807037, "grad_norm": 0.4560728073120117, "learning_rate": 4.465580836338017e-06, "loss": 0.7311, "step": 2580 }, { "epoch": 0.10696672054374404, "grad_norm": 0.44176554679870605, "learning_rate": 4.465373616809649e-06, "loss": 0.7356, "step": 2581 }, { "epoch": 0.10700816444941771, "grad_norm": 0.43821981549263, "learning_rate": 4.46516639728128e-06, "loss": 0.7476, "step": 2582 }, { "epoch": 0.10704960835509138, "grad_norm": 0.4196327328681946, "learning_rate": 
4.4649591777529116e-06, "loss": 0.7139, "step": 2583 }, { "epoch": 0.10709105226076505, "grad_norm": 0.4328017830848694, "learning_rate": 4.464751958224544e-06, "loss": 0.7554, "step": 2584 }, { "epoch": 0.10713249616643873, "grad_norm": 0.48729100823402405, "learning_rate": 4.464544738696175e-06, "loss": 0.8037, "step": 2585 }, { "epoch": 0.1071739400721124, "grad_norm": 0.45932716131210327, "learning_rate": 4.4643375191678066e-06, "loss": 0.7622, "step": 2586 }, { "epoch": 0.10721538397778607, "grad_norm": 0.41230276226997375, "learning_rate": 4.464130299639438e-06, "loss": 0.7251, "step": 2587 }, { "epoch": 0.10725682788345973, "grad_norm": 0.409277081489563, "learning_rate": 4.46392308011107e-06, "loss": 0.7498, "step": 2588 }, { "epoch": 0.10729827178913341, "grad_norm": 0.43532902002334595, "learning_rate": 4.4637158605827016e-06, "loss": 0.74, "step": 2589 }, { "epoch": 0.10733971569480708, "grad_norm": 0.4510020613670349, "learning_rate": 4.463508641054333e-06, "loss": 0.734, "step": 2590 }, { "epoch": 0.10738115960048075, "grad_norm": 0.45036837458610535, "learning_rate": 4.463301421525965e-06, "loss": 0.6987, "step": 2591 }, { "epoch": 0.10742260350615442, "grad_norm": 0.41393253207206726, "learning_rate": 4.4630942019975966e-06, "loss": 0.752, "step": 2592 }, { "epoch": 0.10746404741182809, "grad_norm": 0.467176228761673, "learning_rate": 4.462886982469228e-06, "loss": 0.8203, "step": 2593 }, { "epoch": 0.10750549131750176, "grad_norm": 0.4415111541748047, "learning_rate": 4.462679762940859e-06, "loss": 0.7368, "step": 2594 }, { "epoch": 0.10754693522317543, "grad_norm": 0.43596959114074707, "learning_rate": 4.4624725434124916e-06, "loss": 0.75, "step": 2595 }, { "epoch": 0.1075883791288491, "grad_norm": 0.42121055722236633, "learning_rate": 4.462265323884124e-06, "loss": 0.7039, "step": 2596 }, { "epoch": 0.10762982303452277, "grad_norm": 0.4978175163269043, "learning_rate": 4.462058104355755e-06, "loss": 0.8113, "step": 2597 }, { "epoch": 
0.10767126694019645, "grad_norm": 0.44776424765586853, "learning_rate": 4.461850884827387e-06, "loss": 0.731, "step": 2598 }, { "epoch": 0.10771271084587011, "grad_norm": 0.46529704332351685, "learning_rate": 4.461643665299018e-06, "loss": 0.7786, "step": 2599 }, { "epoch": 0.10775415475154379, "grad_norm": 0.4415530264377594, "learning_rate": 4.46143644577065e-06, "loss": 0.7673, "step": 2600 }, { "epoch": 0.10779559865721745, "grad_norm": 0.41878318786621094, "learning_rate": 4.461229226242282e-06, "loss": 0.7866, "step": 2601 }, { "epoch": 0.10783704256289113, "grad_norm": 0.4278019964694977, "learning_rate": 4.461022006713913e-06, "loss": 0.7476, "step": 2602 }, { "epoch": 0.1078784864685648, "grad_norm": 0.47304239869117737, "learning_rate": 4.460814787185544e-06, "loss": 0.7734, "step": 2603 }, { "epoch": 0.10791993037423847, "grad_norm": 0.4639924466609955, "learning_rate": 4.460607567657177e-06, "loss": 0.7527, "step": 2604 }, { "epoch": 0.10796137427991213, "grad_norm": 0.4383813142776489, "learning_rate": 4.460400348128808e-06, "loss": 0.729, "step": 2605 }, { "epoch": 0.10800281818558581, "grad_norm": 0.4432336986064911, "learning_rate": 4.460193128600439e-06, "loss": 0.8047, "step": 2606 }, { "epoch": 0.10804426209125947, "grad_norm": 0.4126124978065491, "learning_rate": 4.459985909072072e-06, "loss": 0.8184, "step": 2607 }, { "epoch": 0.10808570599693315, "grad_norm": 0.40564653277397156, "learning_rate": 4.459778689543703e-06, "loss": 0.7267, "step": 2608 }, { "epoch": 0.10812714990260683, "grad_norm": 0.4342767894268036, "learning_rate": 4.459571470015334e-06, "loss": 0.7693, "step": 2609 }, { "epoch": 0.10816859380828049, "grad_norm": 0.4603235125541687, "learning_rate": 4.459364250486966e-06, "loss": 0.7416, "step": 2610 }, { "epoch": 0.10821003771395417, "grad_norm": 0.4464719295501709, "learning_rate": 4.459157030958598e-06, "loss": 0.7688, "step": 2611 }, { "epoch": 0.10825148161962783, "grad_norm": 0.44505593180656433, "learning_rate": 
4.458949811430229e-06, "loss": 0.7732, "step": 2612 }, { "epoch": 0.10829292552530151, "grad_norm": 0.4624794125556946, "learning_rate": 4.458742591901862e-06, "loss": 0.7346, "step": 2613 }, { "epoch": 0.10833436943097517, "grad_norm": 0.44707027077674866, "learning_rate": 4.458535372373493e-06, "loss": 0.7065, "step": 2614 }, { "epoch": 0.10837581333664885, "grad_norm": 0.43216633796691895, "learning_rate": 4.458328152845124e-06, "loss": 0.7554, "step": 2615 }, { "epoch": 0.10841725724232251, "grad_norm": 0.44442763924598694, "learning_rate": 4.458120933316757e-06, "loss": 0.7781, "step": 2616 }, { "epoch": 0.10845870114799619, "grad_norm": 0.4309834837913513, "learning_rate": 4.457913713788388e-06, "loss": 0.7676, "step": 2617 }, { "epoch": 0.10850014505366985, "grad_norm": 0.4000563621520996, "learning_rate": 4.457706494260019e-06, "loss": 0.7036, "step": 2618 }, { "epoch": 0.10854158895934353, "grad_norm": 0.4388279318809509, "learning_rate": 4.457499274731651e-06, "loss": 0.783, "step": 2619 }, { "epoch": 0.10858303286501719, "grad_norm": 0.4665571451187134, "learning_rate": 4.457292055203283e-06, "loss": 0.782, "step": 2620 }, { "epoch": 0.10862447677069087, "grad_norm": 0.42799240350723267, "learning_rate": 4.457084835674914e-06, "loss": 0.7573, "step": 2621 }, { "epoch": 0.10866592067636455, "grad_norm": 0.4220544695854187, "learning_rate": 4.456877616146546e-06, "loss": 0.71, "step": 2622 }, { "epoch": 0.10870736458203821, "grad_norm": 0.4573543071746826, "learning_rate": 4.456670396618178e-06, "loss": 0.8184, "step": 2623 }, { "epoch": 0.10874880848771189, "grad_norm": 0.45465657114982605, "learning_rate": 4.456463177089809e-06, "loss": 0.7831, "step": 2624 }, { "epoch": 0.10879025239338555, "grad_norm": 0.4359979033470154, "learning_rate": 4.456255957561441e-06, "loss": 0.7766, "step": 2625 }, { "epoch": 0.10883169629905923, "grad_norm": 0.4265727996826172, "learning_rate": 4.456048738033072e-06, "loss": 0.7344, "step": 2626 }, { "epoch": 
0.10887314020473289, "grad_norm": 0.46706801652908325, "learning_rate": 4.455841518504704e-06, "loss": 0.8069, "step": 2627 }, { "epoch": 0.10891458411040657, "grad_norm": 0.4411037564277649, "learning_rate": 4.455634298976336e-06, "loss": 0.7567, "step": 2628 }, { "epoch": 0.10895602801608023, "grad_norm": 0.4615686237812042, "learning_rate": 4.455427079447968e-06, "loss": 0.7776, "step": 2629 }, { "epoch": 0.10899747192175391, "grad_norm": 0.44318488240242004, "learning_rate": 4.4552198599195985e-06, "loss": 0.7323, "step": 2630 }, { "epoch": 0.10903891582742757, "grad_norm": 0.43315544724464417, "learning_rate": 4.455012640391231e-06, "loss": 0.7627, "step": 2631 }, { "epoch": 0.10908035973310125, "grad_norm": 0.45497411489486694, "learning_rate": 4.454805420862863e-06, "loss": 0.7639, "step": 2632 }, { "epoch": 0.10912180363877491, "grad_norm": 0.4300709068775177, "learning_rate": 4.454598201334494e-06, "loss": 0.7358, "step": 2633 }, { "epoch": 0.10916324754444859, "grad_norm": 0.44174784421920776, "learning_rate": 4.454390981806126e-06, "loss": 0.7284, "step": 2634 }, { "epoch": 0.10920469145012227, "grad_norm": 0.4037221670150757, "learning_rate": 4.454183762277757e-06, "loss": 0.7273, "step": 2635 }, { "epoch": 0.10924613535579593, "grad_norm": 0.4601919949054718, "learning_rate": 4.453976542749389e-06, "loss": 0.8163, "step": 2636 }, { "epoch": 0.1092875792614696, "grad_norm": 0.4445796310901642, "learning_rate": 4.453769323221021e-06, "loss": 0.7301, "step": 2637 }, { "epoch": 0.10932902316714327, "grad_norm": 0.4262236952781677, "learning_rate": 4.453562103692652e-06, "loss": 0.7471, "step": 2638 }, { "epoch": 0.10937046707281695, "grad_norm": 0.4741750657558441, "learning_rate": 4.4533548841642836e-06, "loss": 0.7175, "step": 2639 }, { "epoch": 0.10941191097849061, "grad_norm": 0.45271116495132446, "learning_rate": 4.453147664635916e-06, "loss": 0.7458, "step": 2640 }, { "epoch": 0.10945335488416429, "grad_norm": 0.46410730481147766, "learning_rate": 
4.452940445107547e-06, "loss": 0.792, "step": 2641 }, { "epoch": 0.10949479878983795, "grad_norm": 0.47136595845222473, "learning_rate": 4.4527332255791786e-06, "loss": 0.7114, "step": 2642 }, { "epoch": 0.10953624269551163, "grad_norm": 0.4221239984035492, "learning_rate": 4.452526006050811e-06, "loss": 0.7295, "step": 2643 }, { "epoch": 0.10957768660118529, "grad_norm": 0.44076746702194214, "learning_rate": 4.452318786522442e-06, "loss": 0.7869, "step": 2644 }, { "epoch": 0.10961913050685897, "grad_norm": 0.45560532808303833, "learning_rate": 4.4521115669940736e-06, "loss": 0.6993, "step": 2645 }, { "epoch": 0.10966057441253264, "grad_norm": 0.44277799129486084, "learning_rate": 4.451904347465705e-06, "loss": 0.7393, "step": 2646 }, { "epoch": 0.1097020183182063, "grad_norm": 0.42285966873168945, "learning_rate": 4.451697127937337e-06, "loss": 0.7262, "step": 2647 }, { "epoch": 0.10974346222387998, "grad_norm": 0.461492121219635, "learning_rate": 4.4514899084089686e-06, "loss": 0.7725, "step": 2648 }, { "epoch": 0.10978490612955365, "grad_norm": 0.44727009534835815, "learning_rate": 4.451282688880601e-06, "loss": 0.7903, "step": 2649 }, { "epoch": 0.10982635003522732, "grad_norm": 0.4347189962863922, "learning_rate": 4.451075469352232e-06, "loss": 0.7607, "step": 2650 }, { "epoch": 0.10986779394090099, "grad_norm": 0.47462618350982666, "learning_rate": 4.4508682498238636e-06, "loss": 0.7822, "step": 2651 }, { "epoch": 0.10990923784657466, "grad_norm": 0.46686244010925293, "learning_rate": 4.450661030295496e-06, "loss": 0.791, "step": 2652 }, { "epoch": 0.10995068175224833, "grad_norm": 0.42717838287353516, "learning_rate": 4.450453810767127e-06, "loss": 0.759, "step": 2653 }, { "epoch": 0.109992125657922, "grad_norm": 0.48696795105934143, "learning_rate": 4.4502465912387586e-06, "loss": 0.7581, "step": 2654 }, { "epoch": 0.11003356956359567, "grad_norm": 0.44663336873054504, "learning_rate": 4.45003937171039e-06, "loss": 0.7292, "step": 2655 }, { "epoch": 
0.11007501346926934, "grad_norm": 0.46645960211753845, "learning_rate": 4.449832152182022e-06, "loss": 0.7485, "step": 2656 }, { "epoch": 0.11011645737494301, "grad_norm": 0.49968335032463074, "learning_rate": 4.449624932653654e-06, "loss": 0.7942, "step": 2657 }, { "epoch": 0.11015790128061669, "grad_norm": 0.43673422932624817, "learning_rate": 4.449417713125285e-06, "loss": 0.7932, "step": 2658 }, { "epoch": 0.11019934518629036, "grad_norm": 0.4820801615715027, "learning_rate": 4.449210493596917e-06, "loss": 0.7268, "step": 2659 }, { "epoch": 0.11024078909196403, "grad_norm": 0.445431113243103, "learning_rate": 4.449003274068549e-06, "loss": 0.7314, "step": 2660 }, { "epoch": 0.1102822329976377, "grad_norm": 0.44820666313171387, "learning_rate": 4.44879605454018e-06, "loss": 0.7737, "step": 2661 }, { "epoch": 0.11032367690331137, "grad_norm": 0.4587949514389038, "learning_rate": 4.448588835011811e-06, "loss": 0.7268, "step": 2662 }, { "epoch": 0.11036512080898504, "grad_norm": 0.4527418613433838, "learning_rate": 4.448381615483444e-06, "loss": 0.7917, "step": 2663 }, { "epoch": 0.1104065647146587, "grad_norm": 0.4519367516040802, "learning_rate": 4.448174395955075e-06, "loss": 0.7375, "step": 2664 }, { "epoch": 0.11044800862033238, "grad_norm": 0.4452589154243469, "learning_rate": 4.447967176426707e-06, "loss": 0.7505, "step": 2665 }, { "epoch": 0.11048945252600605, "grad_norm": 0.42448121309280396, "learning_rate": 4.447759956898339e-06, "loss": 0.7324, "step": 2666 }, { "epoch": 0.11053089643167972, "grad_norm": 0.4940919578075409, "learning_rate": 4.44755273736997e-06, "loss": 0.7766, "step": 2667 }, { "epoch": 0.11057234033735339, "grad_norm": 0.4402981698513031, "learning_rate": 4.447345517841602e-06, "loss": 0.7637, "step": 2668 }, { "epoch": 0.11061378424302706, "grad_norm": 0.4562647342681885, "learning_rate": 4.447138298313234e-06, "loss": 0.7572, "step": 2669 }, { "epoch": 0.11065522814870073, "grad_norm": 0.41179582476615906, "learning_rate": 
4.446931078784865e-06, "loss": 0.7583, "step": 2670 }, { "epoch": 0.1106966720543744, "grad_norm": 0.4757821261882782, "learning_rate": 4.446723859256496e-06, "loss": 0.7209, "step": 2671 }, { "epoch": 0.11073811596004808, "grad_norm": 0.41268208622932434, "learning_rate": 4.446516639728129e-06, "loss": 0.7495, "step": 2672 }, { "epoch": 0.11077955986572174, "grad_norm": 0.4295004606246948, "learning_rate": 4.44630942019976e-06, "loss": 0.7671, "step": 2673 }, { "epoch": 0.11082100377139542, "grad_norm": 0.45691704750061035, "learning_rate": 4.446102200671391e-06, "loss": 0.7629, "step": 2674 }, { "epoch": 0.11086244767706908, "grad_norm": 0.4245308041572571, "learning_rate": 4.445894981143024e-06, "loss": 0.7201, "step": 2675 }, { "epoch": 0.11090389158274276, "grad_norm": 0.42640846967697144, "learning_rate": 4.445687761614655e-06, "loss": 0.7725, "step": 2676 }, { "epoch": 0.11094533548841642, "grad_norm": 0.4370523989200592, "learning_rate": 4.445480542086286e-06, "loss": 0.731, "step": 2677 }, { "epoch": 0.1109867793940901, "grad_norm": 0.4884931147098541, "learning_rate": 4.445273322557918e-06, "loss": 0.8086, "step": 2678 }, { "epoch": 0.11102822329976376, "grad_norm": 0.4532877206802368, "learning_rate": 4.44506610302955e-06, "loss": 0.692, "step": 2679 }, { "epoch": 0.11106966720543744, "grad_norm": 0.452131986618042, "learning_rate": 4.444858883501181e-06, "loss": 0.7786, "step": 2680 }, { "epoch": 0.1111111111111111, "grad_norm": 0.4116024971008301, "learning_rate": 4.444651663972814e-06, "loss": 0.7418, "step": 2681 }, { "epoch": 0.11115255501678478, "grad_norm": 0.46495407819747925, "learning_rate": 4.444444444444444e-06, "loss": 0.7256, "step": 2682 }, { "epoch": 0.11119399892245846, "grad_norm": 0.403834730386734, "learning_rate": 4.444237224916076e-06, "loss": 0.6836, "step": 2683 }, { "epoch": 0.11123544282813212, "grad_norm": 0.47125089168548584, "learning_rate": 4.444030005387709e-06, "loss": 0.7793, "step": 2684 }, { "epoch": 0.1112768867338058, 
"grad_norm": 0.48455461859703064, "learning_rate": 4.44382278585934e-06, "loss": 0.8442, "step": 2685 }, { "epoch": 0.11131833063947946, "grad_norm": 0.42586570978164673, "learning_rate": 4.443615566330971e-06, "loss": 0.7323, "step": 2686 }, { "epoch": 0.11135977454515314, "grad_norm": 0.42276620864868164, "learning_rate": 4.443408346802603e-06, "loss": 0.6871, "step": 2687 }, { "epoch": 0.1114012184508268, "grad_norm": 0.45395201444625854, "learning_rate": 4.443201127274235e-06, "loss": 0.7664, "step": 2688 }, { "epoch": 0.11144266235650048, "grad_norm": 0.4246496856212616, "learning_rate": 4.442993907745866e-06, "loss": 0.748, "step": 2689 }, { "epoch": 0.11148410626217414, "grad_norm": 0.5120562314987183, "learning_rate": 4.442786688217498e-06, "loss": 0.7644, "step": 2690 }, { "epoch": 0.11152555016784782, "grad_norm": 0.41809457540512085, "learning_rate": 4.442579468689129e-06, "loss": 0.7563, "step": 2691 }, { "epoch": 0.11156699407352148, "grad_norm": 0.4900083839893341, "learning_rate": 4.442372249160761e-06, "loss": 0.7598, "step": 2692 }, { "epoch": 0.11160843797919516, "grad_norm": 0.4255141019821167, "learning_rate": 4.442165029632393e-06, "loss": 0.7058, "step": 2693 }, { "epoch": 0.11164988188486882, "grad_norm": 0.4236755073070526, "learning_rate": 4.441957810104024e-06, "loss": 0.7273, "step": 2694 }, { "epoch": 0.1116913257905425, "grad_norm": 0.45340660214424133, "learning_rate": 4.441750590575656e-06, "loss": 0.7192, "step": 2695 }, { "epoch": 0.11173276969621618, "grad_norm": 0.4422585666179657, "learning_rate": 4.441543371047288e-06, "loss": 0.7549, "step": 2696 }, { "epoch": 0.11177421360188984, "grad_norm": 0.4164329469203949, "learning_rate": 4.44133615151892e-06, "loss": 0.6973, "step": 2697 }, { "epoch": 0.11181565750756352, "grad_norm": 0.41723304986953735, "learning_rate": 4.4411289319905506e-06, "loss": 0.7424, "step": 2698 }, { "epoch": 0.11185710141323718, "grad_norm": 0.4329625368118286, "learning_rate": 4.440921712462183e-06, 
"loss": 0.7218, "step": 2699 }, { "epoch": 0.11189854531891086, "grad_norm": 0.44698718190193176, "learning_rate": 4.440714492933814e-06, "loss": 0.7212, "step": 2700 }, { "epoch": 0.11193998922458452, "grad_norm": 0.44116076827049255, "learning_rate": 4.440507273405446e-06, "loss": 0.7168, "step": 2701 }, { "epoch": 0.1119814331302582, "grad_norm": 0.4057162404060364, "learning_rate": 4.440300053877078e-06, "loss": 0.6975, "step": 2702 }, { "epoch": 0.11202287703593186, "grad_norm": 0.44595563411712646, "learning_rate": 4.440092834348709e-06, "loss": 0.7754, "step": 2703 }, { "epoch": 0.11206432094160554, "grad_norm": 0.43207693099975586, "learning_rate": 4.439885614820341e-06, "loss": 0.7507, "step": 2704 }, { "epoch": 0.1121057648472792, "grad_norm": 0.44918304681777954, "learning_rate": 4.439678395291973e-06, "loss": 0.7043, "step": 2705 }, { "epoch": 0.11214720875295288, "grad_norm": 0.4036732316017151, "learning_rate": 4.439471175763604e-06, "loss": 0.6873, "step": 2706 }, { "epoch": 0.11218865265862656, "grad_norm": 0.5025752186775208, "learning_rate": 4.4392639562352356e-06, "loss": 0.8108, "step": 2707 }, { "epoch": 0.11223009656430022, "grad_norm": 0.44829800724983215, "learning_rate": 4.439056736706868e-06, "loss": 0.7283, "step": 2708 }, { "epoch": 0.1122715404699739, "grad_norm": 0.42198845744132996, "learning_rate": 4.438849517178499e-06, "loss": 0.7668, "step": 2709 }, { "epoch": 0.11231298437564756, "grad_norm": 0.4700067937374115, "learning_rate": 4.4386422976501306e-06, "loss": 0.7439, "step": 2710 }, { "epoch": 0.11235442828132124, "grad_norm": 0.4713732600212097, "learning_rate": 4.438435078121763e-06, "loss": 0.7996, "step": 2711 }, { "epoch": 0.1123958721869949, "grad_norm": 0.4232773780822754, "learning_rate": 4.438227858593394e-06, "loss": 0.6786, "step": 2712 }, { "epoch": 0.11243731609266858, "grad_norm": 0.42234620451927185, "learning_rate": 4.438020639065026e-06, "loss": 0.746, "step": 2713 }, { "epoch": 0.11247875999834224, "grad_norm": 
0.4128069281578064, "learning_rate": 4.437813419536657e-06, "loss": 0.7758, "step": 2714 }, { "epoch": 0.11252020390401592, "grad_norm": 0.4280022382736206, "learning_rate": 4.437606200008289e-06, "loss": 0.7405, "step": 2715 }, { "epoch": 0.11256164780968958, "grad_norm": 0.44889023900032043, "learning_rate": 4.437398980479921e-06, "loss": 0.7471, "step": 2716 }, { "epoch": 0.11260309171536326, "grad_norm": 0.4560686945915222, "learning_rate": 4.437191760951553e-06, "loss": 0.7874, "step": 2717 }, { "epoch": 0.11264453562103692, "grad_norm": 0.42480576038360596, "learning_rate": 4.436984541423184e-06, "loss": 0.781, "step": 2718 }, { "epoch": 0.1126859795267106, "grad_norm": 0.45407724380493164, "learning_rate": 4.436777321894816e-06, "loss": 0.7649, "step": 2719 }, { "epoch": 0.11272742343238427, "grad_norm": 0.4099579453468323, "learning_rate": 4.436570102366448e-06, "loss": 0.7183, "step": 2720 }, { "epoch": 0.11276886733805794, "grad_norm": 0.4756443500518799, "learning_rate": 4.436362882838079e-06, "loss": 0.7568, "step": 2721 }, { "epoch": 0.11281031124373161, "grad_norm": 0.4558103084564209, "learning_rate": 4.436155663309711e-06, "loss": 0.7421, "step": 2722 }, { "epoch": 0.11285175514940528, "grad_norm": 0.4526768624782562, "learning_rate": 4.435948443781342e-06, "loss": 0.7441, "step": 2723 }, { "epoch": 0.11289319905507895, "grad_norm": 0.43101441860198975, "learning_rate": 4.435741224252974e-06, "loss": 0.7542, "step": 2724 }, { "epoch": 0.11293464296075262, "grad_norm": 0.45606544613838196, "learning_rate": 4.435534004724606e-06, "loss": 0.647, "step": 2725 }, { "epoch": 0.1129760868664263, "grad_norm": 0.4541940689086914, "learning_rate": 4.435326785196237e-06, "loss": 0.7512, "step": 2726 }, { "epoch": 0.11301753077209996, "grad_norm": 0.42886826395988464, "learning_rate": 4.435119565667869e-06, "loss": 0.7468, "step": 2727 }, { "epoch": 0.11305897467777364, "grad_norm": 0.43372687697410583, "learning_rate": 4.434912346139501e-06, "loss": 0.6733, 
"step": 2728 }, { "epoch": 0.1131004185834473, "grad_norm": 0.5088160634040833, "learning_rate": 4.434705126611132e-06, "loss": 0.8206, "step": 2729 }, { "epoch": 0.11314186248912098, "grad_norm": 0.4149557054042816, "learning_rate": 4.434497907082763e-06, "loss": 0.6901, "step": 2730 }, { "epoch": 0.11318330639479464, "grad_norm": 0.42920351028442383, "learning_rate": 4.434290687554396e-06, "loss": 0.7567, "step": 2731 }, { "epoch": 0.11322475030046832, "grad_norm": 0.440023809671402, "learning_rate": 4.434083468026027e-06, "loss": 0.7563, "step": 2732 }, { "epoch": 0.11326619420614199, "grad_norm": 0.43239036202430725, "learning_rate": 4.433876248497659e-06, "loss": 0.759, "step": 2733 }, { "epoch": 0.11330763811181566, "grad_norm": 0.4365358054637909, "learning_rate": 4.433669028969291e-06, "loss": 0.699, "step": 2734 }, { "epoch": 0.11334908201748933, "grad_norm": 0.42414575815200806, "learning_rate": 4.433461809440922e-06, "loss": 0.7576, "step": 2735 }, { "epoch": 0.113390525923163, "grad_norm": 0.4178504943847656, "learning_rate": 4.433254589912554e-06, "loss": 0.7292, "step": 2736 }, { "epoch": 0.11343196982883667, "grad_norm": 0.4506518542766571, "learning_rate": 4.433047370384186e-06, "loss": 0.7974, "step": 2737 }, { "epoch": 0.11347341373451034, "grad_norm": 0.41514337062835693, "learning_rate": 4.432840150855817e-06, "loss": 0.7666, "step": 2738 }, { "epoch": 0.11351485764018401, "grad_norm": 0.440890371799469, "learning_rate": 4.432632931327448e-06, "loss": 0.7747, "step": 2739 }, { "epoch": 0.11355630154585768, "grad_norm": 0.4055033028125763, "learning_rate": 4.432425711799081e-06, "loss": 0.7517, "step": 2740 }, { "epoch": 0.11359774545153135, "grad_norm": 0.4848836362361908, "learning_rate": 4.432218492270712e-06, "loss": 0.8174, "step": 2741 }, { "epoch": 0.11363918935720502, "grad_norm": 0.4597133696079254, "learning_rate": 4.432011272742343e-06, "loss": 0.7229, "step": 2742 }, { "epoch": 0.1136806332628787, "grad_norm": 0.4338879883289337, 
"learning_rate": 4.431804053213975e-06, "loss": 0.6809, "step": 2743 }, { "epoch": 0.11372207716855237, "grad_norm": 0.4469551146030426, "learning_rate": 4.431596833685607e-06, "loss": 0.8262, "step": 2744 }, { "epoch": 0.11376352107422603, "grad_norm": 0.42504581809043884, "learning_rate": 4.431389614157238e-06, "loss": 0.7744, "step": 2745 }, { "epoch": 0.11380496497989971, "grad_norm": 0.44543859362602234, "learning_rate": 4.43118239462887e-06, "loss": 0.7966, "step": 2746 }, { "epoch": 0.11384640888557337, "grad_norm": 0.428748220205307, "learning_rate": 4.430975175100502e-06, "loss": 0.7635, "step": 2747 }, { "epoch": 0.11388785279124705, "grad_norm": 0.41466960310935974, "learning_rate": 4.430767955572133e-06, "loss": 0.7266, "step": 2748 }, { "epoch": 0.11392929669692071, "grad_norm": 0.4365457594394684, "learning_rate": 4.430560736043766e-06, "loss": 0.7126, "step": 2749 }, { "epoch": 0.11397074060259439, "grad_norm": 0.41085758805274963, "learning_rate": 4.430353516515396e-06, "loss": 0.7722, "step": 2750 }, { "epoch": 0.11401218450826806, "grad_norm": 0.43684977293014526, "learning_rate": 4.430146296987028e-06, "loss": 0.74, "step": 2751 }, { "epoch": 0.11405362841394173, "grad_norm": 0.46030476689338684, "learning_rate": 4.42993907745866e-06, "loss": 0.7358, "step": 2752 }, { "epoch": 0.1140950723196154, "grad_norm": 0.45212092995643616, "learning_rate": 4.429731857930292e-06, "loss": 0.7379, "step": 2753 }, { "epoch": 0.11413651622528907, "grad_norm": 0.4332735538482666, "learning_rate": 4.429524638401923e-06, "loss": 0.7803, "step": 2754 }, { "epoch": 0.11417796013096274, "grad_norm": 0.5195847153663635, "learning_rate": 4.429317418873555e-06, "loss": 0.7593, "step": 2755 }, { "epoch": 0.11421940403663641, "grad_norm": 0.5018402934074402, "learning_rate": 4.429110199345187e-06, "loss": 0.7976, "step": 2756 }, { "epoch": 0.11426084794231009, "grad_norm": 0.4255725145339966, "learning_rate": 4.428902979816818e-06, "loss": 0.7102, "step": 2757 }, { 
"epoch": 0.11430229184798375, "grad_norm": 0.44767361879348755, "learning_rate": 4.42869576028845e-06, "loss": 0.7186, "step": 2758 }, { "epoch": 0.11434373575365743, "grad_norm": 0.44474491477012634, "learning_rate": 4.428488540760081e-06, "loss": 0.7628, "step": 2759 }, { "epoch": 0.1143851796593311, "grad_norm": 0.47939980030059814, "learning_rate": 4.428281321231713e-06, "loss": 0.7556, "step": 2760 }, { "epoch": 0.11442662356500477, "grad_norm": 0.4553447961807251, "learning_rate": 4.428074101703345e-06, "loss": 0.7471, "step": 2761 }, { "epoch": 0.11446806747067843, "grad_norm": 0.4238758385181427, "learning_rate": 4.427866882174976e-06, "loss": 0.7615, "step": 2762 }, { "epoch": 0.11450951137635211, "grad_norm": 0.4461573660373688, "learning_rate": 4.427659662646608e-06, "loss": 0.7463, "step": 2763 }, { "epoch": 0.11455095528202577, "grad_norm": 0.46187373995780945, "learning_rate": 4.42745244311824e-06, "loss": 0.7378, "step": 2764 }, { "epoch": 0.11459239918769945, "grad_norm": 0.4527425765991211, "learning_rate": 4.427245223589872e-06, "loss": 0.7766, "step": 2765 }, { "epoch": 0.11463384309337311, "grad_norm": 0.43725085258483887, "learning_rate": 4.4270380040615026e-06, "loss": 0.7151, "step": 2766 }, { "epoch": 0.11467528699904679, "grad_norm": 0.42665162682533264, "learning_rate": 4.426830784533135e-06, "loss": 0.677, "step": 2767 }, { "epoch": 0.11471673090472045, "grad_norm": 0.4208908677101135, "learning_rate": 4.426623565004766e-06, "loss": 0.7637, "step": 2768 }, { "epoch": 0.11475817481039413, "grad_norm": 0.46116581559181213, "learning_rate": 4.426416345476398e-06, "loss": 0.8081, "step": 2769 }, { "epoch": 0.11479961871606781, "grad_norm": 0.4412277638912201, "learning_rate": 4.42620912594803e-06, "loss": 0.7505, "step": 2770 }, { "epoch": 0.11484106262174147, "grad_norm": 0.39626502990722656, "learning_rate": 4.426001906419661e-06, "loss": 0.7441, "step": 2771 }, { "epoch": 0.11488250652741515, "grad_norm": 0.45137420296669006, 
"learning_rate": 4.425794686891293e-06, "loss": 0.7388, "step": 2772 }, { "epoch": 0.11492395043308881, "grad_norm": 0.45052120089530945, "learning_rate": 4.425587467362925e-06, "loss": 0.7671, "step": 2773 }, { "epoch": 0.11496539433876249, "grad_norm": 0.4616880416870117, "learning_rate": 4.425380247834556e-06, "loss": 0.7346, "step": 2774 }, { "epoch": 0.11500683824443615, "grad_norm": 0.4316191077232361, "learning_rate": 4.425173028306188e-06, "loss": 0.7233, "step": 2775 }, { "epoch": 0.11504828215010983, "grad_norm": 0.4336789846420288, "learning_rate": 4.42496580877782e-06, "loss": 0.7964, "step": 2776 }, { "epoch": 0.11508972605578349, "grad_norm": 0.7159467339515686, "learning_rate": 4.424758589249451e-06, "loss": 0.7932, "step": 2777 }, { "epoch": 0.11513116996145717, "grad_norm": 0.4159564971923828, "learning_rate": 4.424551369721083e-06, "loss": 0.7306, "step": 2778 }, { "epoch": 0.11517261386713083, "grad_norm": 0.43652257323265076, "learning_rate": 4.424344150192715e-06, "loss": 0.73, "step": 2779 }, { "epoch": 0.11521405777280451, "grad_norm": 0.43522194027900696, "learning_rate": 4.424136930664346e-06, "loss": 0.7019, "step": 2780 }, { "epoch": 0.11525550167847819, "grad_norm": 0.45654237270355225, "learning_rate": 4.4239297111359784e-06, "loss": 0.7343, "step": 2781 }, { "epoch": 0.11529694558415185, "grad_norm": 0.4355565309524536, "learning_rate": 4.423722491607609e-06, "loss": 0.7112, "step": 2782 }, { "epoch": 0.11533838948982553, "grad_norm": 0.44342443346977234, "learning_rate": 4.423515272079241e-06, "loss": 0.7437, "step": 2783 }, { "epoch": 0.11537983339549919, "grad_norm": 0.45237478613853455, "learning_rate": 4.423308052550873e-06, "loss": 0.7783, "step": 2784 }, { "epoch": 0.11542127730117287, "grad_norm": 0.4410804510116577, "learning_rate": 4.423100833022505e-06, "loss": 0.793, "step": 2785 }, { "epoch": 0.11546272120684653, "grad_norm": 0.40193304419517517, "learning_rate": 4.422893613494136e-06, "loss": 0.7175, "step": 2786 }, { 
"epoch": 0.11550416511252021, "grad_norm": 0.46401405334472656, "learning_rate": 4.422686393965768e-06, "loss": 0.7769, "step": 2787 }, { "epoch": 0.11554560901819387, "grad_norm": 0.4480639100074768, "learning_rate": 4.4224791744374e-06, "loss": 0.7383, "step": 2788 }, { "epoch": 0.11558705292386755, "grad_norm": 0.4836352467536926, "learning_rate": 4.422271954909031e-06, "loss": 0.8516, "step": 2789 }, { "epoch": 0.11562849682954121, "grad_norm": 0.4390290677547455, "learning_rate": 4.422064735380663e-06, "loss": 0.7268, "step": 2790 }, { "epoch": 0.11566994073521489, "grad_norm": 0.40692535042762756, "learning_rate": 4.421857515852294e-06, "loss": 0.7292, "step": 2791 }, { "epoch": 0.11571138464088855, "grad_norm": 0.41716626286506653, "learning_rate": 4.421650296323926e-06, "loss": 0.7427, "step": 2792 }, { "epoch": 0.11575282854656223, "grad_norm": 0.4251644015312195, "learning_rate": 4.421443076795558e-06, "loss": 0.7302, "step": 2793 }, { "epoch": 0.1157942724522359, "grad_norm": 0.45191988348960876, "learning_rate": 4.421235857267189e-06, "loss": 0.7212, "step": 2794 }, { "epoch": 0.11583571635790957, "grad_norm": 0.5190073251724243, "learning_rate": 4.42102863773882e-06, "loss": 0.7698, "step": 2795 }, { "epoch": 0.11587716026358325, "grad_norm": 0.44582998752593994, "learning_rate": 4.420821418210453e-06, "loss": 0.7231, "step": 2796 }, { "epoch": 0.11591860416925691, "grad_norm": 0.4789949059486389, "learning_rate": 4.420614198682084e-06, "loss": 0.802, "step": 2797 }, { "epoch": 0.11596004807493059, "grad_norm": 0.42170462012290955, "learning_rate": 4.420406979153715e-06, "loss": 0.7805, "step": 2798 }, { "epoch": 0.11600149198060425, "grad_norm": 0.46203818917274475, "learning_rate": 4.420199759625348e-06, "loss": 0.7177, "step": 2799 }, { "epoch": 0.11604293588627793, "grad_norm": 0.45809707045555115, "learning_rate": 4.419992540096979e-06, "loss": 0.8408, "step": 2800 }, { "epoch": 0.11608437979195159, "grad_norm": 0.4528385102748871, 
"learning_rate": 4.419785320568611e-06, "loss": 0.7649, "step": 2801 }, { "epoch": 0.11612582369762527, "grad_norm": 0.45841744542121887, "learning_rate": 4.419578101040243e-06, "loss": 0.749, "step": 2802 }, { "epoch": 0.11616726760329893, "grad_norm": 0.43136778473854065, "learning_rate": 4.419370881511874e-06, "loss": 0.7336, "step": 2803 }, { "epoch": 0.1162087115089726, "grad_norm": 0.4518454968929291, "learning_rate": 4.419163661983505e-06, "loss": 0.8184, "step": 2804 }, { "epoch": 0.11625015541464627, "grad_norm": 0.43808814883232117, "learning_rate": 4.418956442455138e-06, "loss": 0.7864, "step": 2805 }, { "epoch": 0.11629159932031995, "grad_norm": 0.4551939070224762, "learning_rate": 4.418749222926769e-06, "loss": 0.792, "step": 2806 }, { "epoch": 0.11633304322599362, "grad_norm": 0.4534491002559662, "learning_rate": 4.4185420033984e-06, "loss": 0.7656, "step": 2807 }, { "epoch": 0.11637448713166729, "grad_norm": 0.47964048385620117, "learning_rate": 4.418334783870033e-06, "loss": 0.7192, "step": 2808 }, { "epoch": 0.11641593103734096, "grad_norm": 0.602651834487915, "learning_rate": 4.418127564341664e-06, "loss": 0.811, "step": 2809 }, { "epoch": 0.11645737494301463, "grad_norm": 0.4457892179489136, "learning_rate": 4.417920344813295e-06, "loss": 0.7021, "step": 2810 }, { "epoch": 0.1164988188486883, "grad_norm": 0.44804519414901733, "learning_rate": 4.417713125284927e-06, "loss": 0.833, "step": 2811 }, { "epoch": 0.11654026275436197, "grad_norm": 0.42318880558013916, "learning_rate": 4.417505905756559e-06, "loss": 0.7114, "step": 2812 }, { "epoch": 0.11658170666003564, "grad_norm": 0.42301371693611145, "learning_rate": 4.41729868622819e-06, "loss": 0.6998, "step": 2813 }, { "epoch": 0.11662315056570931, "grad_norm": 0.44101518392562866, "learning_rate": 4.417091466699822e-06, "loss": 0.7778, "step": 2814 }, { "epoch": 0.11666459447138298, "grad_norm": 0.4298291802406311, "learning_rate": 4.416884247171454e-06, "loss": 0.7009, "step": 2815 }, { "epoch": 
0.11670603837705665, "grad_norm": 0.41529765725135803, "learning_rate": 4.416677027643085e-06, "loss": 0.7078, "step": 2816 }, { "epoch": 0.11674748228273032, "grad_norm": 0.4735003709793091, "learning_rate": 4.416469808114718e-06, "loss": 0.7148, "step": 2817 }, { "epoch": 0.116788926188404, "grad_norm": 0.47490695118904114, "learning_rate": 4.416262588586349e-06, "loss": 0.7284, "step": 2818 }, { "epoch": 0.11683037009407767, "grad_norm": 0.4374504089355469, "learning_rate": 4.41605536905798e-06, "loss": 0.7554, "step": 2819 }, { "epoch": 0.11687181399975134, "grad_norm": 0.4816664755344391, "learning_rate": 4.415848149529612e-06, "loss": 0.7041, "step": 2820 }, { "epoch": 0.116913257905425, "grad_norm": 0.4486456513404846, "learning_rate": 4.415640930001244e-06, "loss": 0.6736, "step": 2821 }, { "epoch": 0.11695470181109868, "grad_norm": 0.4763176739215851, "learning_rate": 4.415433710472875e-06, "loss": 0.8169, "step": 2822 }, { "epoch": 0.11699614571677235, "grad_norm": 0.4386517107486725, "learning_rate": 4.415226490944507e-06, "loss": 0.7712, "step": 2823 }, { "epoch": 0.11703758962244602, "grad_norm": 0.43346861004829407, "learning_rate": 4.415019271416139e-06, "loss": 0.7861, "step": 2824 }, { "epoch": 0.11707903352811969, "grad_norm": 0.44658470153808594, "learning_rate": 4.41481205188777e-06, "loss": 0.8118, "step": 2825 }, { "epoch": 0.11712047743379336, "grad_norm": 0.40515998005867004, "learning_rate": 4.414604832359402e-06, "loss": 0.6902, "step": 2826 }, { "epoch": 0.11716192133946703, "grad_norm": 0.4851025342941284, "learning_rate": 4.414397612831033e-06, "loss": 0.7261, "step": 2827 }, { "epoch": 0.1172033652451407, "grad_norm": 0.42829954624176025, "learning_rate": 4.414190393302665e-06, "loss": 0.7395, "step": 2828 }, { "epoch": 0.11724480915081437, "grad_norm": 0.4407811164855957, "learning_rate": 4.413983173774297e-06, "loss": 0.6882, "step": 2829 }, { "epoch": 0.11728625305648804, "grad_norm": 0.4513356685638428, "learning_rate": 
4.413775954245928e-06, "loss": 0.7505, "step": 2830 }, { "epoch": 0.11732769696216172, "grad_norm": 0.4447530210018158, "learning_rate": 4.41356873471756e-06, "loss": 0.7021, "step": 2831 }, { "epoch": 0.11736914086783538, "grad_norm": 0.4741172194480896, "learning_rate": 4.413361515189192e-06, "loss": 0.8032, "step": 2832 }, { "epoch": 0.11741058477350906, "grad_norm": 0.42143911123275757, "learning_rate": 4.413154295660824e-06, "loss": 0.7732, "step": 2833 }, { "epoch": 0.11745202867918272, "grad_norm": 0.4959280788898468, "learning_rate": 4.412947076132455e-06, "loss": 0.7542, "step": 2834 }, { "epoch": 0.1174934725848564, "grad_norm": 0.46807384490966797, "learning_rate": 4.412739856604087e-06, "loss": 0.7886, "step": 2835 }, { "epoch": 0.11753491649053006, "grad_norm": 0.44510769844055176, "learning_rate": 4.412532637075718e-06, "loss": 0.6924, "step": 2836 }, { "epoch": 0.11757636039620374, "grad_norm": 0.4515511393547058, "learning_rate": 4.4123254175473504e-06, "loss": 0.8188, "step": 2837 }, { "epoch": 0.1176178043018774, "grad_norm": 0.40653982758522034, "learning_rate": 4.412118198018982e-06, "loss": 0.762, "step": 2838 }, { "epoch": 0.11765924820755108, "grad_norm": 0.4183030128479004, "learning_rate": 4.411910978490613e-06, "loss": 0.7261, "step": 2839 }, { "epoch": 0.11770069211322474, "grad_norm": 0.4268110692501068, "learning_rate": 4.4117037589622454e-06, "loss": 0.8242, "step": 2840 }, { "epoch": 0.11774213601889842, "grad_norm": 0.44316720962524414, "learning_rate": 4.411496539433877e-06, "loss": 0.7043, "step": 2841 }, { "epoch": 0.11778357992457208, "grad_norm": 0.4742127060890198, "learning_rate": 4.411289319905508e-06, "loss": 0.8293, "step": 2842 }, { "epoch": 0.11782502383024576, "grad_norm": 0.48345068097114563, "learning_rate": 4.41108210037714e-06, "loss": 0.7239, "step": 2843 }, { "epoch": 0.11786646773591944, "grad_norm": 0.44981148838996887, "learning_rate": 4.410874880848772e-06, "loss": 0.7478, "step": 2844 }, { "epoch": 
0.1179079116415931, "grad_norm": 0.45307838916778564, "learning_rate": 4.410667661320403e-06, "loss": 0.7642, "step": 2845 }, { "epoch": 0.11794935554726678, "grad_norm": 0.42163315415382385, "learning_rate": 4.410460441792035e-06, "loss": 0.7073, "step": 2846 }, { "epoch": 0.11799079945294044, "grad_norm": 0.4323239028453827, "learning_rate": 4.410253222263666e-06, "loss": 0.7734, "step": 2847 }, { "epoch": 0.11803224335861412, "grad_norm": 0.45530304312705994, "learning_rate": 4.410046002735298e-06, "loss": 0.7803, "step": 2848 }, { "epoch": 0.11807368726428778, "grad_norm": 0.44923776388168335, "learning_rate": 4.4098387832069304e-06, "loss": 0.7141, "step": 2849 }, { "epoch": 0.11811513116996146, "grad_norm": 0.40777868032455444, "learning_rate": 4.409631563678561e-06, "loss": 0.7539, "step": 2850 }, { "epoch": 0.11815657507563512, "grad_norm": 0.44848841428756714, "learning_rate": 4.409424344150193e-06, "loss": 0.7744, "step": 2851 }, { "epoch": 0.1181980189813088, "grad_norm": 0.42930397391319275, "learning_rate": 4.409217124621825e-06, "loss": 0.7891, "step": 2852 }, { "epoch": 0.11823946288698246, "grad_norm": 0.42152366042137146, "learning_rate": 4.409009905093457e-06, "loss": 0.7692, "step": 2853 }, { "epoch": 0.11828090679265614, "grad_norm": 0.4435049295425415, "learning_rate": 4.408802685565088e-06, "loss": 0.7336, "step": 2854 }, { "epoch": 0.11832235069832982, "grad_norm": 0.4527696669101715, "learning_rate": 4.40859546603672e-06, "loss": 0.7751, "step": 2855 }, { "epoch": 0.11836379460400348, "grad_norm": 0.4073115885257721, "learning_rate": 4.408388246508351e-06, "loss": 0.7212, "step": 2856 }, { "epoch": 0.11840523850967716, "grad_norm": 0.4140501916408539, "learning_rate": 4.408181026979983e-06, "loss": 0.8445, "step": 2857 }, { "epoch": 0.11844668241535082, "grad_norm": 0.4351992607116699, "learning_rate": 4.407973807451615e-06, "loss": 0.7981, "step": 2858 }, { "epoch": 0.1184881263210245, "grad_norm": 0.4774436354637146, "learning_rate": 
4.407766587923246e-06, "loss": 0.6777, "step": 2859 }, { "epoch": 0.11852957022669816, "grad_norm": 0.4216829538345337, "learning_rate": 4.407559368394878e-06, "loss": 0.7344, "step": 2860 }, { "epoch": 0.11857101413237184, "grad_norm": 0.4740227162837982, "learning_rate": 4.40735214886651e-06, "loss": 0.7681, "step": 2861 }, { "epoch": 0.1186124580380455, "grad_norm": 0.45561811327934265, "learning_rate": 4.407144929338141e-06, "loss": 0.7295, "step": 2862 }, { "epoch": 0.11865390194371918, "grad_norm": 0.42300140857696533, "learning_rate": 4.406937709809772e-06, "loss": 0.7798, "step": 2863 }, { "epoch": 0.11869534584939284, "grad_norm": 0.448377400636673, "learning_rate": 4.406730490281405e-06, "loss": 0.7944, "step": 2864 }, { "epoch": 0.11873678975506652, "grad_norm": 0.4524001479148865, "learning_rate": 4.406523270753036e-06, "loss": 0.761, "step": 2865 }, { "epoch": 0.11877823366074018, "grad_norm": 0.4489184021949768, "learning_rate": 4.406316051224667e-06, "loss": 0.7761, "step": 2866 }, { "epoch": 0.11881967756641386, "grad_norm": 0.39889711141586304, "learning_rate": 4.4061088316963e-06, "loss": 0.7598, "step": 2867 }, { "epoch": 0.11886112147208754, "grad_norm": 0.5122438073158264, "learning_rate": 4.405901612167931e-06, "loss": 0.7888, "step": 2868 }, { "epoch": 0.1189025653777612, "grad_norm": 0.4299689531326294, "learning_rate": 4.405694392639563e-06, "loss": 0.761, "step": 2869 }, { "epoch": 0.11894400928343488, "grad_norm": 0.4325951039791107, "learning_rate": 4.405487173111195e-06, "loss": 0.7532, "step": 2870 }, { "epoch": 0.11898545318910854, "grad_norm": 0.43123406171798706, "learning_rate": 4.405279953582826e-06, "loss": 0.7488, "step": 2871 }, { "epoch": 0.11902689709478222, "grad_norm": 0.46627750992774963, "learning_rate": 4.405072734054457e-06, "loss": 0.703, "step": 2872 }, { "epoch": 0.11906834100045588, "grad_norm": 0.44895997643470764, "learning_rate": 4.40486551452609e-06, "loss": 0.7954, "step": 2873 }, { "epoch": 
0.11910978490612956, "grad_norm": 0.4298035800457001, "learning_rate": 4.404658294997721e-06, "loss": 0.7396, "step": 2874 }, { "epoch": 0.11915122881180322, "grad_norm": 0.4431382715702057, "learning_rate": 4.404451075469352e-06, "loss": 0.7332, "step": 2875 }, { "epoch": 0.1191926727174769, "grad_norm": 0.4455166459083557, "learning_rate": 4.404243855940985e-06, "loss": 0.7067, "step": 2876 }, { "epoch": 0.11923411662315056, "grad_norm": 0.5093527436256409, "learning_rate": 4.404036636412616e-06, "loss": 0.7581, "step": 2877 }, { "epoch": 0.11927556052882424, "grad_norm": 0.43303173780441284, "learning_rate": 4.403829416884247e-06, "loss": 0.7734, "step": 2878 }, { "epoch": 0.1193170044344979, "grad_norm": 0.4100220203399658, "learning_rate": 4.403622197355879e-06, "loss": 0.7384, "step": 2879 }, { "epoch": 0.11935844834017158, "grad_norm": 0.436322957277298, "learning_rate": 4.403414977827511e-06, "loss": 0.7747, "step": 2880 }, { "epoch": 0.11939989224584525, "grad_norm": 0.4316721260547638, "learning_rate": 4.403207758299142e-06, "loss": 0.6942, "step": 2881 }, { "epoch": 0.11944133615151892, "grad_norm": 0.4586374759674072, "learning_rate": 4.403000538770774e-06, "loss": 0.886, "step": 2882 }, { "epoch": 0.1194827800571926, "grad_norm": 0.46106719970703125, "learning_rate": 4.402793319242405e-06, "loss": 0.7458, "step": 2883 }, { "epoch": 0.11952422396286626, "grad_norm": 0.46807432174682617, "learning_rate": 4.402586099714037e-06, "loss": 0.7629, "step": 2884 }, { "epoch": 0.11956566786853993, "grad_norm": 0.4275927245616913, "learning_rate": 4.40237888018567e-06, "loss": 0.705, "step": 2885 }, { "epoch": 0.1196071117742136, "grad_norm": 0.4402529299259186, "learning_rate": 4.402171660657301e-06, "loss": 0.7563, "step": 2886 }, { "epoch": 0.11964855567988728, "grad_norm": 0.43256986141204834, "learning_rate": 4.401964441128932e-06, "loss": 0.7546, "step": 2887 }, { "epoch": 0.11968999958556094, "grad_norm": 0.42073407769203186, "learning_rate": 
4.401757221600564e-06, "loss": 0.7241, "step": 2888 }, { "epoch": 0.11973144349123462, "grad_norm": 0.4477077126502991, "learning_rate": 4.401550002072196e-06, "loss": 0.7366, "step": 2889 }, { "epoch": 0.11977288739690828, "grad_norm": 0.4193621873855591, "learning_rate": 4.401342782543827e-06, "loss": 0.6843, "step": 2890 }, { "epoch": 0.11981433130258196, "grad_norm": 0.4283328652381897, "learning_rate": 4.401135563015459e-06, "loss": 0.6659, "step": 2891 }, { "epoch": 0.11985577520825563, "grad_norm": 0.4206558167934418, "learning_rate": 4.400928343487091e-06, "loss": 0.7437, "step": 2892 }, { "epoch": 0.1198972191139293, "grad_norm": 0.4411073625087738, "learning_rate": 4.4007211239587224e-06, "loss": 0.7627, "step": 2893 }, { "epoch": 0.11993866301960297, "grad_norm": 0.44352224469184875, "learning_rate": 4.400513904430354e-06, "loss": 0.7817, "step": 2894 }, { "epoch": 0.11998010692527664, "grad_norm": 0.4222686290740967, "learning_rate": 4.400306684901985e-06, "loss": 0.7134, "step": 2895 }, { "epoch": 0.12002155083095031, "grad_norm": 0.45008957386016846, "learning_rate": 4.4000994653736174e-06, "loss": 0.71, "step": 2896 }, { "epoch": 0.12006299473662398, "grad_norm": 0.44572871923446655, "learning_rate": 4.399892245845249e-06, "loss": 0.73, "step": 2897 }, { "epoch": 0.12010443864229765, "grad_norm": 0.40540584921836853, "learning_rate": 4.39968502631688e-06, "loss": 0.7375, "step": 2898 }, { "epoch": 0.12014588254797132, "grad_norm": 0.4343511760234833, "learning_rate": 4.399477806788512e-06, "loss": 0.771, "step": 2899 }, { "epoch": 0.120187326453645, "grad_norm": 0.45251742005348206, "learning_rate": 4.399270587260144e-06, "loss": 0.7266, "step": 2900 }, { "epoch": 0.12022877035931866, "grad_norm": 0.4736974239349365, "learning_rate": 4.399063367731776e-06, "loss": 0.7695, "step": 2901 }, { "epoch": 0.12027021426499233, "grad_norm": 0.4555623233318329, "learning_rate": 4.398856148203407e-06, "loss": 0.7463, "step": 2902 }, { "epoch": 
0.120311658170666, "grad_norm": 0.4468677043914795, "learning_rate": 4.398648928675039e-06, "loss": 0.7612, "step": 2903 }, { "epoch": 0.12035310207633967, "grad_norm": 0.3920592665672302, "learning_rate": 4.39844170914667e-06, "loss": 0.6816, "step": 2904 }, { "epoch": 0.12039454598201335, "grad_norm": 0.44974249601364136, "learning_rate": 4.3982344896183024e-06, "loss": 0.7266, "step": 2905 }, { "epoch": 0.12043598988768701, "grad_norm": 0.47260984778404236, "learning_rate": 4.398027270089934e-06, "loss": 0.7517, "step": 2906 }, { "epoch": 0.12047743379336069, "grad_norm": 0.40345868468284607, "learning_rate": 4.397820050561565e-06, "loss": 0.713, "step": 2907 }, { "epoch": 0.12051887769903435, "grad_norm": 0.449064165353775, "learning_rate": 4.397612831033197e-06, "loss": 0.7798, "step": 2908 }, { "epoch": 0.12056032160470803, "grad_norm": 0.44487282633781433, "learning_rate": 4.397405611504829e-06, "loss": 0.7124, "step": 2909 }, { "epoch": 0.1206017655103817, "grad_norm": 0.41603511571884155, "learning_rate": 4.39719839197646e-06, "loss": 0.748, "step": 2910 }, { "epoch": 0.12064320941605537, "grad_norm": 0.4312427043914795, "learning_rate": 4.396991172448092e-06, "loss": 0.6926, "step": 2911 }, { "epoch": 0.12068465332172904, "grad_norm": 0.4089362323284149, "learning_rate": 4.396783952919724e-06, "loss": 0.707, "step": 2912 }, { "epoch": 0.12072609722740271, "grad_norm": 0.44800814986228943, "learning_rate": 4.396576733391355e-06, "loss": 0.7507, "step": 2913 }, { "epoch": 0.12076754113307638, "grad_norm": 0.40577617287635803, "learning_rate": 4.396369513862987e-06, "loss": 0.7172, "step": 2914 }, { "epoch": 0.12080898503875005, "grad_norm": 0.4601323902606964, "learning_rate": 4.396162294334618e-06, "loss": 0.7559, "step": 2915 }, { "epoch": 0.12085042894442372, "grad_norm": 0.4524902105331421, "learning_rate": 4.39595507480625e-06, "loss": 0.7922, "step": 2916 }, { "epoch": 0.12089187285009739, "grad_norm": 0.4456711411476135, "learning_rate": 
4.395747855277882e-06, "loss": 0.6899, "step": 2917 }, { "epoch": 0.12093331675577107, "grad_norm": 0.42603957653045654, "learning_rate": 4.395540635749513e-06, "loss": 0.7205, "step": 2918 }, { "epoch": 0.12097476066144473, "grad_norm": 0.45018836855888367, "learning_rate": 4.395333416221145e-06, "loss": 0.8123, "step": 2919 }, { "epoch": 0.12101620456711841, "grad_norm": 0.4259987473487854, "learning_rate": 4.395126196692777e-06, "loss": 0.7389, "step": 2920 }, { "epoch": 0.12105764847279207, "grad_norm": 0.42970335483551025, "learning_rate": 4.394918977164409e-06, "loss": 0.7197, "step": 2921 }, { "epoch": 0.12109909237846575, "grad_norm": 0.47988903522491455, "learning_rate": 4.39471175763604e-06, "loss": 0.7737, "step": 2922 }, { "epoch": 0.12114053628413941, "grad_norm": 0.4404818117618561, "learning_rate": 4.394504538107672e-06, "loss": 0.7515, "step": 2923 }, { "epoch": 0.12118198018981309, "grad_norm": 0.3940800428390503, "learning_rate": 4.394297318579303e-06, "loss": 0.75, "step": 2924 }, { "epoch": 0.12122342409548675, "grad_norm": 0.45118245482444763, "learning_rate": 4.394090099050935e-06, "loss": 0.7585, "step": 2925 }, { "epoch": 0.12126486800116043, "grad_norm": 0.4794446527957916, "learning_rate": 4.393882879522567e-06, "loss": 0.8066, "step": 2926 }, { "epoch": 0.1213063119068341, "grad_norm": 0.46741825342178345, "learning_rate": 4.393675659994198e-06, "loss": 0.7278, "step": 2927 }, { "epoch": 0.12134775581250777, "grad_norm": 0.444358766078949, "learning_rate": 4.39346844046583e-06, "loss": 0.7727, "step": 2928 }, { "epoch": 0.12138919971818145, "grad_norm": 0.45364388823509216, "learning_rate": 4.393261220937462e-06, "loss": 0.7749, "step": 2929 }, { "epoch": 0.12143064362385511, "grad_norm": 0.45744767785072327, "learning_rate": 4.393054001409093e-06, "loss": 0.771, "step": 2930 }, { "epoch": 0.12147208752952879, "grad_norm": 0.45419538021087646, "learning_rate": 4.392846781880724e-06, "loss": 0.7424, "step": 2931 }, { "epoch": 
0.12151353143520245, "grad_norm": 0.449069619178772, "learning_rate": 4.392639562352357e-06, "loss": 0.7705, "step": 2932 }, { "epoch": 0.12155497534087613, "grad_norm": 0.41920971870422363, "learning_rate": 4.392432342823988e-06, "loss": 0.7261, "step": 2933 }, { "epoch": 0.12159641924654979, "grad_norm": 0.4316352307796478, "learning_rate": 4.392225123295619e-06, "loss": 0.7712, "step": 2934 }, { "epoch": 0.12163786315222347, "grad_norm": 0.5340210199356079, "learning_rate": 4.392017903767251e-06, "loss": 0.8035, "step": 2935 }, { "epoch": 0.12167930705789713, "grad_norm": 0.43614745140075684, "learning_rate": 4.391810684238883e-06, "loss": 0.7351, "step": 2936 }, { "epoch": 0.12172075096357081, "grad_norm": 0.4065016210079193, "learning_rate": 4.391603464710515e-06, "loss": 0.7637, "step": 2937 }, { "epoch": 0.12176219486924447, "grad_norm": 0.4912908971309662, "learning_rate": 4.391396245182147e-06, "loss": 0.7786, "step": 2938 }, { "epoch": 0.12180363877491815, "grad_norm": 0.4159591495990753, "learning_rate": 4.391189025653778e-06, "loss": 0.719, "step": 2939 }, { "epoch": 0.12184508268059181, "grad_norm": 0.47345712780952454, "learning_rate": 4.390981806125409e-06, "loss": 0.7397, "step": 2940 }, { "epoch": 0.12188652658626549, "grad_norm": 0.4322415292263031, "learning_rate": 4.390774586597042e-06, "loss": 0.74, "step": 2941 }, { "epoch": 0.12192797049193917, "grad_norm": 0.46097829937934875, "learning_rate": 4.390567367068673e-06, "loss": 0.8188, "step": 2942 }, { "epoch": 0.12196941439761283, "grad_norm": 0.4559336304664612, "learning_rate": 4.390360147540304e-06, "loss": 0.7959, "step": 2943 }, { "epoch": 0.1220108583032865, "grad_norm": 0.44493523240089417, "learning_rate": 4.390152928011936e-06, "loss": 0.7668, "step": 2944 }, { "epoch": 0.12205230220896017, "grad_norm": 0.4219712018966675, "learning_rate": 4.389945708483568e-06, "loss": 0.7839, "step": 2945 }, { "epoch": 0.12209374611463385, "grad_norm": 0.43804484605789185, "learning_rate": 
4.389738488955199e-06, "loss": 0.7244, "step": 2946 }, { "epoch": 0.12213519002030751, "grad_norm": 0.4084610641002655, "learning_rate": 4.389531269426831e-06, "loss": 0.7383, "step": 2947 }, { "epoch": 0.12217663392598119, "grad_norm": 0.41283106803894043, "learning_rate": 4.389324049898463e-06, "loss": 0.731, "step": 2948 }, { "epoch": 0.12221807783165485, "grad_norm": 0.4139554798603058, "learning_rate": 4.389116830370094e-06, "loss": 0.6837, "step": 2949 }, { "epoch": 0.12225952173732853, "grad_norm": 0.4571232199668884, "learning_rate": 4.388909610841726e-06, "loss": 0.7466, "step": 2950 }, { "epoch": 0.12230096564300219, "grad_norm": 0.4528792202472687, "learning_rate": 4.388702391313357e-06, "loss": 0.8035, "step": 2951 }, { "epoch": 0.12234240954867587, "grad_norm": 0.4224989414215088, "learning_rate": 4.3884951717849894e-06, "loss": 0.7268, "step": 2952 }, { "epoch": 0.12238385345434953, "grad_norm": 0.41462570428848267, "learning_rate": 4.388287952256622e-06, "loss": 0.7725, "step": 2953 }, { "epoch": 0.12242529736002321, "grad_norm": 0.4372042119503021, "learning_rate": 4.388080732728253e-06, "loss": 0.7341, "step": 2954 }, { "epoch": 0.12246674126569689, "grad_norm": 0.4109656810760498, "learning_rate": 4.3878735131998844e-06, "loss": 0.6855, "step": 2955 }, { "epoch": 0.12250818517137055, "grad_norm": 0.4509662985801697, "learning_rate": 4.387666293671516e-06, "loss": 0.8091, "step": 2956 }, { "epoch": 0.12254962907704423, "grad_norm": 0.4340035021305084, "learning_rate": 4.387459074143148e-06, "loss": 0.7505, "step": 2957 }, { "epoch": 0.12259107298271789, "grad_norm": 0.43569880723953247, "learning_rate": 4.3872518546147794e-06, "loss": 0.7517, "step": 2958 }, { "epoch": 0.12263251688839157, "grad_norm": 0.4499976933002472, "learning_rate": 4.387044635086411e-06, "loss": 0.811, "step": 2959 }, { "epoch": 0.12267396079406523, "grad_norm": 0.43169182538986206, "learning_rate": 4.386837415558042e-06, "loss": 0.74, "step": 2960 }, { "epoch": 
0.1227154046997389, "grad_norm": 0.4279778301715851, "learning_rate": 4.3866301960296744e-06, "loss": 0.7776, "step": 2961 }, { "epoch": 0.12275684860541257, "grad_norm": 0.41094255447387695, "learning_rate": 4.386422976501306e-06, "loss": 0.7539, "step": 2962 }, { "epoch": 0.12279829251108625, "grad_norm": 0.4356067180633545, "learning_rate": 4.386215756972937e-06, "loss": 0.7899, "step": 2963 }, { "epoch": 0.12283973641675991, "grad_norm": 0.41189250349998474, "learning_rate": 4.3860085374445694e-06, "loss": 0.7063, "step": 2964 }, { "epoch": 0.12288118032243359, "grad_norm": 0.44539976119995117, "learning_rate": 4.385801317916201e-06, "loss": 0.7643, "step": 2965 }, { "epoch": 0.12292262422810726, "grad_norm": 0.4432009160518646, "learning_rate": 4.385594098387832e-06, "loss": 0.772, "step": 2966 }, { "epoch": 0.12296406813378093, "grad_norm": 0.4932974576950073, "learning_rate": 4.385386878859464e-06, "loss": 0.7695, "step": 2967 }, { "epoch": 0.1230055120394546, "grad_norm": 0.42760759592056274, "learning_rate": 4.385179659331096e-06, "loss": 0.67, "step": 2968 }, { "epoch": 0.12304695594512827, "grad_norm": 0.4448728859424591, "learning_rate": 4.384972439802727e-06, "loss": 0.7644, "step": 2969 }, { "epoch": 0.12308839985080194, "grad_norm": 0.4384644329547882, "learning_rate": 4.384765220274359e-06, "loss": 0.7247, "step": 2970 }, { "epoch": 0.12312984375647561, "grad_norm": 0.4520401358604431, "learning_rate": 4.384558000745991e-06, "loss": 0.7673, "step": 2971 }, { "epoch": 0.12317128766214928, "grad_norm": 0.45528051257133484, "learning_rate": 4.384350781217622e-06, "loss": 0.7649, "step": 2972 }, { "epoch": 0.12321273156782295, "grad_norm": 0.49318763613700867, "learning_rate": 4.3841435616892545e-06, "loss": 0.761, "step": 2973 }, { "epoch": 0.12325417547349662, "grad_norm": 0.4304576516151428, "learning_rate": 4.383936342160886e-06, "loss": 0.752, "step": 2974 }, { "epoch": 0.12329561937917029, "grad_norm": 0.4300905466079712, "learning_rate": 
4.383729122632517e-06, "loss": 0.7407, "step": 2975 }, { "epoch": 0.12333706328484396, "grad_norm": 0.4694576561450958, "learning_rate": 4.383521903104149e-06, "loss": 0.7551, "step": 2976 }, { "epoch": 0.12337850719051763, "grad_norm": 0.42495089769363403, "learning_rate": 4.383314683575781e-06, "loss": 0.783, "step": 2977 }, { "epoch": 0.1234199510961913, "grad_norm": 0.4327869415283203, "learning_rate": 4.383107464047412e-06, "loss": 0.7305, "step": 2978 }, { "epoch": 0.12346139500186498, "grad_norm": 0.4472733438014984, "learning_rate": 4.382900244519044e-06, "loss": 0.7358, "step": 2979 }, { "epoch": 0.12350283890753865, "grad_norm": 0.48325395584106445, "learning_rate": 4.382693024990676e-06, "loss": 0.7482, "step": 2980 }, { "epoch": 0.12354428281321232, "grad_norm": 0.5064083933830261, "learning_rate": 4.382485805462307e-06, "loss": 0.7468, "step": 2981 }, { "epoch": 0.12358572671888599, "grad_norm": 0.4461282193660736, "learning_rate": 4.382278585933939e-06, "loss": 0.7247, "step": 2982 }, { "epoch": 0.12362717062455966, "grad_norm": 0.4487856924533844, "learning_rate": 4.38207136640557e-06, "loss": 0.7207, "step": 2983 }, { "epoch": 0.12366861453023333, "grad_norm": 0.41898149251937866, "learning_rate": 4.381864146877202e-06, "loss": 0.7168, "step": 2984 }, { "epoch": 0.123710058435907, "grad_norm": 0.5153146982192993, "learning_rate": 4.381656927348834e-06, "loss": 0.8486, "step": 2985 }, { "epoch": 0.12375150234158067, "grad_norm": 0.4400433599948883, "learning_rate": 4.381449707820465e-06, "loss": 0.7878, "step": 2986 }, { "epoch": 0.12379294624725434, "grad_norm": 0.4535733461380005, "learning_rate": 4.381242488292096e-06, "loss": 0.7603, "step": 2987 }, { "epoch": 0.123834390152928, "grad_norm": 0.44199758768081665, "learning_rate": 4.381035268763729e-06, "loss": 0.7864, "step": 2988 }, { "epoch": 0.12387583405860168, "grad_norm": 0.43000081181526184, "learning_rate": 4.380828049235361e-06, "loss": 0.7812, "step": 2989 }, { "epoch": 
0.12391727796427536, "grad_norm": 0.434529572725296, "learning_rate": 4.380620829706992e-06, "loss": 0.7559, "step": 2990 }, { "epoch": 0.12395872186994902, "grad_norm": 0.39960378408432007, "learning_rate": 4.380413610178624e-06, "loss": 0.7512, "step": 2991 }, { "epoch": 0.1240001657756227, "grad_norm": 0.46752679347991943, "learning_rate": 4.380206390650255e-06, "loss": 0.7847, "step": 2992 }, { "epoch": 0.12404160968129636, "grad_norm": 0.44536927342414856, "learning_rate": 4.379999171121887e-06, "loss": 0.7485, "step": 2993 }, { "epoch": 0.12408305358697004, "grad_norm": 0.46931543946266174, "learning_rate": 4.379791951593519e-06, "loss": 0.7788, "step": 2994 }, { "epoch": 0.1241244974926437, "grad_norm": 0.450585275888443, "learning_rate": 4.37958473206515e-06, "loss": 0.7852, "step": 2995 }, { "epoch": 0.12416594139831738, "grad_norm": 0.4099261164665222, "learning_rate": 4.379377512536781e-06, "loss": 0.7432, "step": 2996 }, { "epoch": 0.12420738530399104, "grad_norm": 0.42129647731781006, "learning_rate": 4.379170293008414e-06, "loss": 0.7612, "step": 2997 }, { "epoch": 0.12424882920966472, "grad_norm": 0.4596455991268158, "learning_rate": 4.378963073480045e-06, "loss": 0.7452, "step": 2998 }, { "epoch": 0.12429027311533838, "grad_norm": 0.4385070502758026, "learning_rate": 4.378755853951676e-06, "loss": 0.6667, "step": 2999 }, { "epoch": 0.12433171702101206, "grad_norm": 0.4641198515892029, "learning_rate": 4.378548634423309e-06, "loss": 0.7365, "step": 3000 }, { "epoch": 0.12437316092668572, "grad_norm": 0.4399277865886688, "learning_rate": 4.37834141489494e-06, "loss": 0.738, "step": 3001 }, { "epoch": 0.1244146048323594, "grad_norm": 0.44207197427749634, "learning_rate": 4.378134195366571e-06, "loss": 0.8035, "step": 3002 }, { "epoch": 0.12445604873803308, "grad_norm": 0.4337393641471863, "learning_rate": 4.377926975838203e-06, "loss": 0.7451, "step": 3003 }, { "epoch": 0.12449749264370674, "grad_norm": 0.4571448564529419, "learning_rate": 
4.377719756309835e-06, "loss": 0.7678, "step": 3004 }, { "epoch": 0.12453893654938042, "grad_norm": 0.49137502908706665, "learning_rate": 4.377512536781466e-06, "loss": 0.8188, "step": 3005 }, { "epoch": 0.12458038045505408, "grad_norm": 0.44722428917884827, "learning_rate": 4.377305317253099e-06, "loss": 0.7583, "step": 3006 }, { "epoch": 0.12462182436072776, "grad_norm": 0.4372878968715668, "learning_rate": 4.37709809772473e-06, "loss": 0.7061, "step": 3007 }, { "epoch": 0.12466326826640142, "grad_norm": 0.3905053436756134, "learning_rate": 4.376890878196361e-06, "loss": 0.6547, "step": 3008 }, { "epoch": 0.1247047121720751, "grad_norm": 0.4260025918483734, "learning_rate": 4.376683658667994e-06, "loss": 0.7659, "step": 3009 }, { "epoch": 0.12474615607774876, "grad_norm": 0.4143962562084198, "learning_rate": 4.376476439139625e-06, "loss": 0.7529, "step": 3010 }, { "epoch": 0.12478759998342244, "grad_norm": 0.4323735535144806, "learning_rate": 4.3762692196112564e-06, "loss": 0.7195, "step": 3011 }, { "epoch": 0.1248290438890961, "grad_norm": 0.42522549629211426, "learning_rate": 4.376062000082888e-06, "loss": 0.7368, "step": 3012 }, { "epoch": 0.12487048779476978, "grad_norm": 0.4337555170059204, "learning_rate": 4.37585478055452e-06, "loss": 0.7361, "step": 3013 }, { "epoch": 0.12491193170044344, "grad_norm": 0.4288124740123749, "learning_rate": 4.3756475610261514e-06, "loss": 0.7161, "step": 3014 }, { "epoch": 0.12495337560611712, "grad_norm": 0.4593937397003174, "learning_rate": 4.375440341497783e-06, "loss": 0.7607, "step": 3015 }, { "epoch": 0.1249948195117908, "grad_norm": 0.4478203058242798, "learning_rate": 4.375233121969415e-06, "loss": 0.7432, "step": 3016 }, { "epoch": 0.12503626341746446, "grad_norm": 0.43422773480415344, "learning_rate": 4.3750259024410464e-06, "loss": 0.7393, "step": 3017 }, { "epoch": 0.12507770732313814, "grad_norm": 0.4143618047237396, "learning_rate": 4.374818682912678e-06, "loss": 0.7397, "step": 3018 }, { "epoch": 
0.12511915122881181, "grad_norm": 0.4719943404197693, "learning_rate": 4.374611463384309e-06, "loss": 0.7642, "step": 3019 }, { "epoch": 0.12516059513448546, "grad_norm": 0.4293724000453949, "learning_rate": 4.3744042438559414e-06, "loss": 0.78, "step": 3020 }, { "epoch": 0.12520203904015914, "grad_norm": 0.4163784980773926, "learning_rate": 4.374197024327573e-06, "loss": 0.7156, "step": 3021 }, { "epoch": 0.12524348294583282, "grad_norm": 0.4988276958465576, "learning_rate": 4.373989804799205e-06, "loss": 0.7073, "step": 3022 }, { "epoch": 0.1252849268515065, "grad_norm": 0.4419735074043274, "learning_rate": 4.3737825852708364e-06, "loss": 0.795, "step": 3023 }, { "epoch": 0.12532637075718014, "grad_norm": 0.4776926338672638, "learning_rate": 4.373575365742468e-06, "loss": 0.7587, "step": 3024 }, { "epoch": 0.12536781466285382, "grad_norm": 0.4149540364742279, "learning_rate": 4.3733681462141e-06, "loss": 0.7532, "step": 3025 }, { "epoch": 0.1254092585685275, "grad_norm": 0.3856103718280792, "learning_rate": 4.3731609266857314e-06, "loss": 0.7129, "step": 3026 }, { "epoch": 0.12545070247420118, "grad_norm": 0.4375776946544647, "learning_rate": 4.372953707157363e-06, "loss": 0.7985, "step": 3027 }, { "epoch": 0.12549214637987485, "grad_norm": 0.4199005365371704, "learning_rate": 4.372746487628994e-06, "loss": 0.7688, "step": 3028 }, { "epoch": 0.1255335902855485, "grad_norm": 0.44858938455581665, "learning_rate": 4.3725392681006265e-06, "loss": 0.7104, "step": 3029 }, { "epoch": 0.12557503419122218, "grad_norm": 0.44958579540252686, "learning_rate": 4.372332048572258e-06, "loss": 0.7324, "step": 3030 }, { "epoch": 0.12561647809689586, "grad_norm": 0.45155689120292664, "learning_rate": 4.372124829043889e-06, "loss": 0.7001, "step": 3031 }, { "epoch": 0.12565792200256953, "grad_norm": 0.490447461605072, "learning_rate": 4.3719176095155215e-06, "loss": 0.8486, "step": 3032 }, { "epoch": 0.12569936590824318, "grad_norm": 0.4496222734451294, "learning_rate": 
4.371710389987153e-06, "loss": 0.7847, "step": 3033 }, { "epoch": 0.12574080981391686, "grad_norm": 0.4434768855571747, "learning_rate": 4.371503170458784e-06, "loss": 0.7484, "step": 3034 }, { "epoch": 0.12578225371959054, "grad_norm": 0.39802369475364685, "learning_rate": 4.371295950930416e-06, "loss": 0.7393, "step": 3035 }, { "epoch": 0.1258236976252642, "grad_norm": 0.4425632357597351, "learning_rate": 4.371088731402048e-06, "loss": 0.7412, "step": 3036 }, { "epoch": 0.12586514153093786, "grad_norm": 0.41171756386756897, "learning_rate": 4.370881511873679e-06, "loss": 0.718, "step": 3037 }, { "epoch": 0.12590658543661154, "grad_norm": 0.43169206380844116, "learning_rate": 4.3706742923453115e-06, "loss": 0.7114, "step": 3038 }, { "epoch": 0.12594802934228522, "grad_norm": 0.4423796534538269, "learning_rate": 4.370467072816942e-06, "loss": 0.8245, "step": 3039 }, { "epoch": 0.1259894732479589, "grad_norm": 0.45997801423072815, "learning_rate": 4.370259853288574e-06, "loss": 0.8053, "step": 3040 }, { "epoch": 0.12603091715363257, "grad_norm": 0.41697803139686584, "learning_rate": 4.3700526337602065e-06, "loss": 0.7527, "step": 3041 }, { "epoch": 0.12607236105930622, "grad_norm": 0.41332074999809265, "learning_rate": 4.369845414231838e-06, "loss": 0.7273, "step": 3042 }, { "epoch": 0.1261138049649799, "grad_norm": 0.44044753909111023, "learning_rate": 4.369638194703469e-06, "loss": 0.7289, "step": 3043 }, { "epoch": 0.12615524887065357, "grad_norm": 0.43636149168014526, "learning_rate": 4.369430975175101e-06, "loss": 0.7742, "step": 3044 }, { "epoch": 0.12619669277632725, "grad_norm": 0.4411197602748871, "learning_rate": 4.369223755646733e-06, "loss": 0.748, "step": 3045 }, { "epoch": 0.1262381366820009, "grad_norm": 0.4383072853088379, "learning_rate": 4.369016536118364e-06, "loss": 0.7751, "step": 3046 }, { "epoch": 0.12627958058767458, "grad_norm": 0.4440144896507263, "learning_rate": 4.368809316589996e-06, "loss": 0.7419, "step": 3047 }, { "epoch": 
0.12632102449334826, "grad_norm": 0.4993993937969208, "learning_rate": 4.368602097061627e-06, "loss": 0.7664, "step": 3048 }, { "epoch": 0.12636246839902193, "grad_norm": 0.42858147621154785, "learning_rate": 4.368394877533259e-06, "loss": 0.7562, "step": 3049 }, { "epoch": 0.12640391230469558, "grad_norm": 0.4735046327114105, "learning_rate": 4.368187658004891e-06, "loss": 0.772, "step": 3050 }, { "epoch": 0.12644535621036926, "grad_norm": 0.48770636320114136, "learning_rate": 4.367980438476522e-06, "loss": 0.7937, "step": 3051 }, { "epoch": 0.12648680011604294, "grad_norm": 0.3792746365070343, "learning_rate": 4.367773218948154e-06, "loss": 0.6726, "step": 3052 }, { "epoch": 0.1265282440217166, "grad_norm": 0.46097612380981445, "learning_rate": 4.367565999419786e-06, "loss": 0.7332, "step": 3053 }, { "epoch": 0.1265696879273903, "grad_norm": 0.40957921743392944, "learning_rate": 4.367358779891417e-06, "loss": 0.6927, "step": 3054 }, { "epoch": 0.12661113183306394, "grad_norm": 0.45638734102249146, "learning_rate": 4.367151560363048e-06, "loss": 0.7284, "step": 3055 }, { "epoch": 0.12665257573873762, "grad_norm": 0.5011829733848572, "learning_rate": 4.366944340834681e-06, "loss": 0.7761, "step": 3056 }, { "epoch": 0.1266940196444113, "grad_norm": 0.387362539768219, "learning_rate": 4.366737121306312e-06, "loss": 0.7012, "step": 3057 }, { "epoch": 0.12673546355008497, "grad_norm": 0.4381357729434967, "learning_rate": 4.366529901777944e-06, "loss": 0.7415, "step": 3058 }, { "epoch": 0.12677690745575862, "grad_norm": 0.44601863622665405, "learning_rate": 4.366322682249576e-06, "loss": 0.7715, "step": 3059 }, { "epoch": 0.1268183513614323, "grad_norm": 0.43791520595550537, "learning_rate": 4.366115462721207e-06, "loss": 0.7739, "step": 3060 }, { "epoch": 0.12685979526710597, "grad_norm": 0.4443438947200775, "learning_rate": 4.365908243192839e-06, "loss": 0.771, "step": 3061 }, { "epoch": 0.12690123917277965, "grad_norm": 0.4737672507762909, "learning_rate": 
4.365701023664471e-06, "loss": 0.7205, "step": 3062 }, { "epoch": 0.1269426830784533, "grad_norm": 0.42101678252220154, "learning_rate": 4.365493804136102e-06, "loss": 0.7632, "step": 3063 }, { "epoch": 0.12698412698412698, "grad_norm": 0.43539363145828247, "learning_rate": 4.365286584607733e-06, "loss": 0.7179, "step": 3064 }, { "epoch": 0.12702557088980065, "grad_norm": 0.43787074089050293, "learning_rate": 4.365079365079366e-06, "loss": 0.7397, "step": 3065 }, { "epoch": 0.12706701479547433, "grad_norm": 0.4471369683742523, "learning_rate": 4.364872145550997e-06, "loss": 0.7703, "step": 3066 }, { "epoch": 0.127108458701148, "grad_norm": 0.43062305450439453, "learning_rate": 4.364664926022628e-06, "loss": 0.7574, "step": 3067 }, { "epoch": 0.12714990260682166, "grad_norm": 0.44166362285614014, "learning_rate": 4.364457706494261e-06, "loss": 0.7693, "step": 3068 }, { "epoch": 0.12719134651249533, "grad_norm": 0.43321236968040466, "learning_rate": 4.364250486965892e-06, "loss": 0.7594, "step": 3069 }, { "epoch": 0.127232790418169, "grad_norm": 0.4487900733947754, "learning_rate": 4.3640432674375234e-06, "loss": 0.7559, "step": 3070 }, { "epoch": 0.1272742343238427, "grad_norm": 0.4515455365180969, "learning_rate": 4.363836047909155e-06, "loss": 0.7415, "step": 3071 }, { "epoch": 0.12731567822951634, "grad_norm": 0.4024031460285187, "learning_rate": 4.363628828380787e-06, "loss": 0.7407, "step": 3072 }, { "epoch": 0.12735712213519002, "grad_norm": 0.528317391872406, "learning_rate": 4.3634216088524184e-06, "loss": 0.7402, "step": 3073 }, { "epoch": 0.1273985660408637, "grad_norm": 0.44311344623565674, "learning_rate": 4.363214389324051e-06, "loss": 0.781, "step": 3074 }, { "epoch": 0.12744000994653737, "grad_norm": 0.4543335735797882, "learning_rate": 4.363007169795682e-06, "loss": 0.8591, "step": 3075 }, { "epoch": 0.12748145385221102, "grad_norm": 0.39681488275527954, "learning_rate": 4.3627999502673134e-06, "loss": 0.7146, "step": 3076 }, { "epoch": 
0.1275228977578847, "grad_norm": 0.4308423101902008, "learning_rate": 4.362592730738946e-06, "loss": 0.7194, "step": 3077 }, { "epoch": 0.12756434166355837, "grad_norm": 0.3952022194862366, "learning_rate": 4.362385511210577e-06, "loss": 0.7471, "step": 3078 }, { "epoch": 0.12760578556923205, "grad_norm": 0.442682147026062, "learning_rate": 4.3621782916822084e-06, "loss": 0.717, "step": 3079 }, { "epoch": 0.12764722947490573, "grad_norm": 0.4454176127910614, "learning_rate": 4.36197107215384e-06, "loss": 0.7273, "step": 3080 }, { "epoch": 0.12768867338057938, "grad_norm": 0.39107218384742737, "learning_rate": 4.361763852625472e-06, "loss": 0.707, "step": 3081 }, { "epoch": 0.12773011728625305, "grad_norm": 0.44460615515708923, "learning_rate": 4.3615566330971034e-06, "loss": 0.7937, "step": 3082 }, { "epoch": 0.12777156119192673, "grad_norm": 0.44838109612464905, "learning_rate": 4.361349413568735e-06, "loss": 0.7693, "step": 3083 }, { "epoch": 0.1278130050976004, "grad_norm": 0.4448307156562805, "learning_rate": 4.361142194040367e-06, "loss": 0.801, "step": 3084 }, { "epoch": 0.12785444900327406, "grad_norm": 0.4295814633369446, "learning_rate": 4.3609349745119984e-06, "loss": 0.7084, "step": 3085 }, { "epoch": 0.12789589290894773, "grad_norm": 0.45000559091567993, "learning_rate": 4.36072775498363e-06, "loss": 0.7268, "step": 3086 }, { "epoch": 0.1279373368146214, "grad_norm": 0.41829347610473633, "learning_rate": 4.360520535455261e-06, "loss": 0.762, "step": 3087 }, { "epoch": 0.1279787807202951, "grad_norm": 0.4617316722869873, "learning_rate": 4.3603133159268935e-06, "loss": 0.7268, "step": 3088 }, { "epoch": 0.12802022462596876, "grad_norm": 0.4162595868110657, "learning_rate": 4.360106096398525e-06, "loss": 0.7449, "step": 3089 }, { "epoch": 0.12806166853164241, "grad_norm": 0.4488827884197235, "learning_rate": 4.359898876870157e-06, "loss": 0.7729, "step": 3090 }, { "epoch": 0.1281031124373161, "grad_norm": 0.4416542053222656, "learning_rate": 
4.359691657341788e-06, "loss": 0.7539, "step": 3091 }, { "epoch": 0.12814455634298977, "grad_norm": 0.40357670187950134, "learning_rate": 4.35948443781342e-06, "loss": 0.6993, "step": 3092 }, { "epoch": 0.12818600024866345, "grad_norm": 0.39695340394973755, "learning_rate": 4.359277218285052e-06, "loss": 0.6995, "step": 3093 }, { "epoch": 0.1282274441543371, "grad_norm": 0.40370526909828186, "learning_rate": 4.3590699987566835e-06, "loss": 0.7346, "step": 3094 }, { "epoch": 0.12826888806001077, "grad_norm": 0.44125548005104065, "learning_rate": 4.358862779228315e-06, "loss": 0.6948, "step": 3095 }, { "epoch": 0.12831033196568445, "grad_norm": 0.4347740411758423, "learning_rate": 4.358655559699946e-06, "loss": 0.7532, "step": 3096 }, { "epoch": 0.12835177587135813, "grad_norm": 0.4402252733707428, "learning_rate": 4.3584483401715785e-06, "loss": 0.7091, "step": 3097 }, { "epoch": 0.12839321977703178, "grad_norm": 0.4315100312232971, "learning_rate": 4.35824112064321e-06, "loss": 0.7283, "step": 3098 }, { "epoch": 0.12843466368270545, "grad_norm": 0.4290374517440796, "learning_rate": 4.358033901114841e-06, "loss": 0.734, "step": 3099 }, { "epoch": 0.12847610758837913, "grad_norm": 0.48330551385879517, "learning_rate": 4.357826681586473e-06, "loss": 0.7529, "step": 3100 }, { "epoch": 0.1285175514940528, "grad_norm": 0.41826382279396057, "learning_rate": 4.357619462058105e-06, "loss": 0.7124, "step": 3101 }, { "epoch": 0.12855899539972648, "grad_norm": 0.46399909257888794, "learning_rate": 4.357412242529736e-06, "loss": 0.7466, "step": 3102 }, { "epoch": 0.12860043930540013, "grad_norm": 0.4514673054218292, "learning_rate": 4.357205023001368e-06, "loss": 0.6809, "step": 3103 }, { "epoch": 0.1286418832110738, "grad_norm": 0.45701003074645996, "learning_rate": 4.356997803473e-06, "loss": 0.8027, "step": 3104 }, { "epoch": 0.1286833271167475, "grad_norm": 0.3961890637874603, "learning_rate": 4.356790583944631e-06, "loss": 0.7341, "step": 3105 }, { "epoch": 
0.12872477102242116, "grad_norm": 0.4148775041103363, "learning_rate": 4.3565833644162635e-06, "loss": 0.7786, "step": 3106 }, { "epoch": 0.1287662149280948, "grad_norm": 0.445646196603775, "learning_rate": 4.356376144887894e-06, "loss": 0.7068, "step": 3107 }, { "epoch": 0.1288076588337685, "grad_norm": 0.42004355788230896, "learning_rate": 4.356168925359526e-06, "loss": 0.7711, "step": 3108 }, { "epoch": 0.12884910273944217, "grad_norm": 0.41880160570144653, "learning_rate": 4.355961705831158e-06, "loss": 0.6787, "step": 3109 }, { "epoch": 0.12889054664511584, "grad_norm": 0.46185821294784546, "learning_rate": 4.35575448630279e-06, "loss": 0.7668, "step": 3110 }, { "epoch": 0.1289319905507895, "grad_norm": 0.43727779388427734, "learning_rate": 4.355547266774421e-06, "loss": 0.7405, "step": 3111 }, { "epoch": 0.12897343445646317, "grad_norm": 0.40684273838996887, "learning_rate": 4.355340047246053e-06, "loss": 0.731, "step": 3112 }, { "epoch": 0.12901487836213685, "grad_norm": 0.44705119729042053, "learning_rate": 4.355132827717685e-06, "loss": 0.6917, "step": 3113 }, { "epoch": 0.12905632226781052, "grad_norm": 0.4360480010509491, "learning_rate": 4.354925608189316e-06, "loss": 0.781, "step": 3114 }, { "epoch": 0.1290977661734842, "grad_norm": 0.4306013584136963, "learning_rate": 4.354718388660948e-06, "loss": 0.7815, "step": 3115 }, { "epoch": 0.12913921007915785, "grad_norm": 0.4243769347667694, "learning_rate": 4.354511169132579e-06, "loss": 0.7509, "step": 3116 }, { "epoch": 0.12918065398483153, "grad_norm": 0.4535301625728607, "learning_rate": 4.354303949604211e-06, "loss": 0.6791, "step": 3117 }, { "epoch": 0.1292220978905052, "grad_norm": 0.4242030680179596, "learning_rate": 4.354096730075843e-06, "loss": 0.7239, "step": 3118 }, { "epoch": 0.12926354179617888, "grad_norm": 0.4205408990383148, "learning_rate": 4.353889510547474e-06, "loss": 0.7393, "step": 3119 }, { "epoch": 0.12930498570185253, "grad_norm": 0.4527781307697296, "learning_rate": 
4.353682291019106e-06, "loss": 0.7505, "step": 3120 }, { "epoch": 0.1293464296075262, "grad_norm": 0.4405979514122009, "learning_rate": 4.353475071490738e-06, "loss": 0.74, "step": 3121 }, { "epoch": 0.12938787351319989, "grad_norm": 0.42699339985847473, "learning_rate": 4.353267851962369e-06, "loss": 0.7524, "step": 3122 }, { "epoch": 0.12942931741887356, "grad_norm": 0.43927860260009766, "learning_rate": 4.353060632434e-06, "loss": 0.6995, "step": 3123 }, { "epoch": 0.1294707613245472, "grad_norm": 0.4340886175632477, "learning_rate": 4.352853412905633e-06, "loss": 0.7976, "step": 3124 }, { "epoch": 0.1295122052302209, "grad_norm": 0.4235353171825409, "learning_rate": 4.352646193377264e-06, "loss": 0.7427, "step": 3125 }, { "epoch": 0.12955364913589457, "grad_norm": 0.4078538417816162, "learning_rate": 4.352438973848896e-06, "loss": 0.7251, "step": 3126 }, { "epoch": 0.12959509304156824, "grad_norm": 0.417101114988327, "learning_rate": 4.352231754320528e-06, "loss": 0.7047, "step": 3127 }, { "epoch": 0.12963653694724192, "grad_norm": 0.4423764944076538, "learning_rate": 4.352024534792159e-06, "loss": 0.7605, "step": 3128 }, { "epoch": 0.12967798085291557, "grad_norm": 0.41999706625938416, "learning_rate": 4.351817315263791e-06, "loss": 0.748, "step": 3129 }, { "epoch": 0.12971942475858925, "grad_norm": 0.3999955952167511, "learning_rate": 4.351610095735423e-06, "loss": 0.7222, "step": 3130 }, { "epoch": 0.12976086866426292, "grad_norm": 0.42091724276542664, "learning_rate": 4.351402876207054e-06, "loss": 0.7526, "step": 3131 }, { "epoch": 0.1298023125699366, "grad_norm": 0.44426479935646057, "learning_rate": 4.3511956566786854e-06, "loss": 0.7349, "step": 3132 }, { "epoch": 0.12984375647561025, "grad_norm": 0.45102280378341675, "learning_rate": 4.350988437150318e-06, "loss": 0.7507, "step": 3133 }, { "epoch": 0.12988520038128393, "grad_norm": 0.4707501232624054, "learning_rate": 4.350781217621949e-06, "loss": 0.7944, "step": 3134 }, { "epoch": 0.1299266442869576, 
"grad_norm": 0.4298677444458008, "learning_rate": 4.3505739980935804e-06, "loss": 0.7773, "step": 3135 }, { "epoch": 0.12996808819263128, "grad_norm": 0.47007015347480774, "learning_rate": 4.350366778565213e-06, "loss": 0.8071, "step": 3136 }, { "epoch": 0.13000953209830493, "grad_norm": 0.43346676230430603, "learning_rate": 4.350159559036844e-06, "loss": 0.771, "step": 3137 }, { "epoch": 0.1300509760039786, "grad_norm": 0.43266457319259644, "learning_rate": 4.3499523395084754e-06, "loss": 0.7354, "step": 3138 }, { "epoch": 0.13009241990965228, "grad_norm": 0.4574453830718994, "learning_rate": 4.349745119980107e-06, "loss": 0.8176, "step": 3139 }, { "epoch": 0.13013386381532596, "grad_norm": 0.4442731440067291, "learning_rate": 4.349537900451739e-06, "loss": 0.7595, "step": 3140 }, { "epoch": 0.13017530772099964, "grad_norm": 0.4240463376045227, "learning_rate": 4.3493306809233704e-06, "loss": 0.7268, "step": 3141 }, { "epoch": 0.1302167516266733, "grad_norm": 0.4719716012477875, "learning_rate": 4.349123461395003e-06, "loss": 0.7417, "step": 3142 }, { "epoch": 0.13025819553234697, "grad_norm": 0.43604332208633423, "learning_rate": 4.348916241866633e-06, "loss": 0.6914, "step": 3143 }, { "epoch": 0.13029963943802064, "grad_norm": 0.4241204857826233, "learning_rate": 4.3487090223382654e-06, "loss": 0.7007, "step": 3144 }, { "epoch": 0.13034108334369432, "grad_norm": 0.44318723678588867, "learning_rate": 4.348501802809898e-06, "loss": 0.7031, "step": 3145 }, { "epoch": 0.13038252724936797, "grad_norm": 0.40496712923049927, "learning_rate": 4.348294583281529e-06, "loss": 0.7048, "step": 3146 }, { "epoch": 0.13042397115504165, "grad_norm": 0.4204508662223816, "learning_rate": 4.3480873637531605e-06, "loss": 0.7161, "step": 3147 }, { "epoch": 0.13046541506071532, "grad_norm": 0.4179397225379944, "learning_rate": 4.347880144224792e-06, "loss": 0.7515, "step": 3148 }, { "epoch": 0.130506858966389, "grad_norm": 0.43040668964385986, "learning_rate": 4.347672924696424e-06, 
"loss": 0.6654, "step": 3149 }, { "epoch": 0.13054830287206268, "grad_norm": 0.4331599175930023, "learning_rate": 4.3474657051680555e-06, "loss": 0.8103, "step": 3150 }, { "epoch": 0.13058974677773633, "grad_norm": 0.4416988790035248, "learning_rate": 4.347258485639687e-06, "loss": 0.7239, "step": 3151 }, { "epoch": 0.13063119068341, "grad_norm": 0.4782413840293884, "learning_rate": 4.347051266111318e-06, "loss": 0.8135, "step": 3152 }, { "epoch": 0.13067263458908368, "grad_norm": 0.4335290193557739, "learning_rate": 4.3468440465829505e-06, "loss": 0.7173, "step": 3153 }, { "epoch": 0.13071407849475736, "grad_norm": 0.448612242937088, "learning_rate": 4.346636827054582e-06, "loss": 0.7466, "step": 3154 }, { "epoch": 0.130755522400431, "grad_norm": 0.43644189834594727, "learning_rate": 4.346429607526213e-06, "loss": 0.7297, "step": 3155 }, { "epoch": 0.13079696630610468, "grad_norm": 0.4221780300140381, "learning_rate": 4.3462223879978455e-06, "loss": 0.7546, "step": 3156 }, { "epoch": 0.13083841021177836, "grad_norm": 0.40728509426116943, "learning_rate": 4.346015168469477e-06, "loss": 0.7498, "step": 3157 }, { "epoch": 0.13087985411745204, "grad_norm": 0.42668360471725464, "learning_rate": 4.345807948941109e-06, "loss": 0.7466, "step": 3158 }, { "epoch": 0.1309212980231257, "grad_norm": 0.4262279272079468, "learning_rate": 4.34560072941274e-06, "loss": 0.7385, "step": 3159 }, { "epoch": 0.13096274192879936, "grad_norm": 0.4606529772281647, "learning_rate": 4.345393509884372e-06, "loss": 0.7588, "step": 3160 }, { "epoch": 0.13100418583447304, "grad_norm": 0.4493151605129242, "learning_rate": 4.345186290356003e-06, "loss": 0.7566, "step": 3161 }, { "epoch": 0.13104562974014672, "grad_norm": 0.4094814956188202, "learning_rate": 4.3449790708276355e-06, "loss": 0.7405, "step": 3162 }, { "epoch": 0.1310870736458204, "grad_norm": 0.4557473361492157, "learning_rate": 4.344771851299267e-06, "loss": 0.7952, "step": 3163 }, { "epoch": 0.13112851755149404, "grad_norm": 
0.45078229904174805, "learning_rate": 4.344564631770898e-06, "loss": 0.7612, "step": 3164 }, { "epoch": 0.13116996145716772, "grad_norm": 0.4167359471321106, "learning_rate": 4.3443574122425305e-06, "loss": 0.7173, "step": 3165 }, { "epoch": 0.1312114053628414, "grad_norm": 0.4306659996509552, "learning_rate": 4.344150192714162e-06, "loss": 0.7551, "step": 3166 }, { "epoch": 0.13125284926851508, "grad_norm": 0.4610150456428528, "learning_rate": 4.343942973185793e-06, "loss": 0.7682, "step": 3167 }, { "epoch": 0.13129429317418873, "grad_norm": 0.4115774929523468, "learning_rate": 4.343735753657425e-06, "loss": 0.7168, "step": 3168 }, { "epoch": 0.1313357370798624, "grad_norm": 0.43952569365501404, "learning_rate": 4.343528534129057e-06, "loss": 0.7122, "step": 3169 }, { "epoch": 0.13137718098553608, "grad_norm": 0.4395902454853058, "learning_rate": 4.343321314600688e-06, "loss": 0.7059, "step": 3170 }, { "epoch": 0.13141862489120976, "grad_norm": 0.444606214761734, "learning_rate": 4.34311409507232e-06, "loss": 0.7233, "step": 3171 }, { "epoch": 0.1314600687968834, "grad_norm": 0.48068445920944214, "learning_rate": 4.342906875543952e-06, "loss": 0.771, "step": 3172 }, { "epoch": 0.13150151270255708, "grad_norm": 0.39579591155052185, "learning_rate": 4.342699656015583e-06, "loss": 0.6877, "step": 3173 }, { "epoch": 0.13154295660823076, "grad_norm": 0.4405721426010132, "learning_rate": 4.3424924364872155e-06, "loss": 0.7402, "step": 3174 }, { "epoch": 0.13158440051390444, "grad_norm": 0.4287813901901245, "learning_rate": 4.342285216958846e-06, "loss": 0.7473, "step": 3175 }, { "epoch": 0.13162584441957811, "grad_norm": 0.444559782743454, "learning_rate": 4.342077997430478e-06, "loss": 0.7805, "step": 3176 }, { "epoch": 0.13166728832525176, "grad_norm": 0.40739330649375916, "learning_rate": 4.34187077790211e-06, "loss": 0.7112, "step": 3177 }, { "epoch": 0.13170873223092544, "grad_norm": 0.43769571185112, "learning_rate": 4.341663558373742e-06, "loss": 0.7341, "step": 
3178 }, { "epoch": 0.13175017613659912, "grad_norm": 0.4883594810962677, "learning_rate": 4.341456338845373e-06, "loss": 0.7534, "step": 3179 }, { "epoch": 0.1317916200422728, "grad_norm": 0.4317252039909363, "learning_rate": 4.341249119317005e-06, "loss": 0.7488, "step": 3180 }, { "epoch": 0.13183306394794644, "grad_norm": 0.4231094419956207, "learning_rate": 4.341041899788637e-06, "loss": 0.6772, "step": 3181 }, { "epoch": 0.13187450785362012, "grad_norm": 0.4293449819087982, "learning_rate": 4.340834680260268e-06, "loss": 0.7444, "step": 3182 }, { "epoch": 0.1319159517592938, "grad_norm": 0.42454656958580017, "learning_rate": 4.3406274607319e-06, "loss": 0.7776, "step": 3183 }, { "epoch": 0.13195739566496748, "grad_norm": 0.3830515742301941, "learning_rate": 4.340420241203531e-06, "loss": 0.7133, "step": 3184 }, { "epoch": 0.13199883957064112, "grad_norm": 0.4094764292240143, "learning_rate": 4.340213021675163e-06, "loss": 0.7407, "step": 3185 }, { "epoch": 0.1320402834763148, "grad_norm": 0.4515026807785034, "learning_rate": 4.340005802146795e-06, "loss": 0.7629, "step": 3186 }, { "epoch": 0.13208172738198848, "grad_norm": 0.40350958704948425, "learning_rate": 4.339798582618426e-06, "loss": 0.7378, "step": 3187 }, { "epoch": 0.13212317128766216, "grad_norm": 0.4185269773006439, "learning_rate": 4.3395913630900574e-06, "loss": 0.7222, "step": 3188 }, { "epoch": 0.13216461519333583, "grad_norm": 0.3971675634384155, "learning_rate": 4.33938414356169e-06, "loss": 0.7391, "step": 3189 }, { "epoch": 0.13220605909900948, "grad_norm": 0.4705488085746765, "learning_rate": 4.339176924033322e-06, "loss": 0.7688, "step": 3190 }, { "epoch": 0.13224750300468316, "grad_norm": 0.44235652685165405, "learning_rate": 4.3389697045049524e-06, "loss": 0.7261, "step": 3191 }, { "epoch": 0.13228894691035684, "grad_norm": 0.46159955859184265, "learning_rate": 4.338762484976585e-06, "loss": 0.7402, "step": 3192 }, { "epoch": 0.1323303908160305, "grad_norm": 0.40473857522010803, 
"learning_rate": 4.338555265448216e-06, "loss": 0.6825, "step": 3193 }, { "epoch": 0.13237183472170416, "grad_norm": 0.41825172305107117, "learning_rate": 4.338348045919848e-06, "loss": 0.7617, "step": 3194 }, { "epoch": 0.13241327862737784, "grad_norm": 0.41895949840545654, "learning_rate": 4.33814082639148e-06, "loss": 0.7319, "step": 3195 }, { "epoch": 0.13245472253305152, "grad_norm": 0.4375893771648407, "learning_rate": 4.337933606863111e-06, "loss": 0.7463, "step": 3196 }, { "epoch": 0.1324961664387252, "grad_norm": 0.47569039463996887, "learning_rate": 4.337726387334743e-06, "loss": 0.8157, "step": 3197 }, { "epoch": 0.13253761034439884, "grad_norm": 0.42760708928108215, "learning_rate": 4.337519167806375e-06, "loss": 0.6865, "step": 3198 }, { "epoch": 0.13257905425007252, "grad_norm": 0.4418492019176483, "learning_rate": 4.337311948278006e-06, "loss": 0.7866, "step": 3199 }, { "epoch": 0.1326204981557462, "grad_norm": 0.42479759454727173, "learning_rate": 4.3371047287496374e-06, "loss": 0.7052, "step": 3200 }, { "epoch": 0.13266194206141987, "grad_norm": 0.4728982448577881, "learning_rate": 4.33689750922127e-06, "loss": 0.7617, "step": 3201 }, { "epoch": 0.13270338596709355, "grad_norm": 0.44464796781539917, "learning_rate": 4.336690289692901e-06, "loss": 0.7169, "step": 3202 }, { "epoch": 0.1327448298727672, "grad_norm": 0.4362186789512634, "learning_rate": 4.3364830701645324e-06, "loss": 0.7261, "step": 3203 }, { "epoch": 0.13278627377844088, "grad_norm": 0.43370163440704346, "learning_rate": 4.336275850636164e-06, "loss": 0.74, "step": 3204 }, { "epoch": 0.13282771768411455, "grad_norm": 0.4373166859149933, "learning_rate": 4.336068631107796e-06, "loss": 0.7583, "step": 3205 }, { "epoch": 0.13286916158978823, "grad_norm": 0.43720272183418274, "learning_rate": 4.3358614115794275e-06, "loss": 0.7885, "step": 3206 }, { "epoch": 0.13291060549546188, "grad_norm": 0.44951850175857544, "learning_rate": 4.335654192051059e-06, "loss": 0.7463, "step": 3207 }, { 
"epoch": 0.13295204940113556, "grad_norm": 0.4723678231239319, "learning_rate": 4.335446972522691e-06, "loss": 0.7705, "step": 3208 }, { "epoch": 0.13299349330680924, "grad_norm": 0.44698601961135864, "learning_rate": 4.3352397529943225e-06, "loss": 0.7474, "step": 3209 }, { "epoch": 0.1330349372124829, "grad_norm": 0.45008814334869385, "learning_rate": 4.335032533465955e-06, "loss": 0.7432, "step": 3210 }, { "epoch": 0.13307638111815656, "grad_norm": 0.409915953874588, "learning_rate": 4.334825313937586e-06, "loss": 0.7231, "step": 3211 }, { "epoch": 0.13311782502383024, "grad_norm": 0.43293067812919617, "learning_rate": 4.3346180944092175e-06, "loss": 0.7415, "step": 3212 }, { "epoch": 0.13315926892950392, "grad_norm": 0.466900497674942, "learning_rate": 4.334410874880849e-06, "loss": 0.7241, "step": 3213 }, { "epoch": 0.1332007128351776, "grad_norm": 0.39762866497039795, "learning_rate": 4.334203655352481e-06, "loss": 0.7322, "step": 3214 }, { "epoch": 0.13324215674085127, "grad_norm": 0.45224279165267944, "learning_rate": 4.3339964358241125e-06, "loss": 0.7708, "step": 3215 }, { "epoch": 0.13328360064652492, "grad_norm": 0.45189782977104187, "learning_rate": 4.333789216295744e-06, "loss": 0.7986, "step": 3216 }, { "epoch": 0.1333250445521986, "grad_norm": 0.4780016243457794, "learning_rate": 4.333581996767376e-06, "loss": 0.7485, "step": 3217 }, { "epoch": 0.13336648845787227, "grad_norm": 0.4144827723503113, "learning_rate": 4.3333747772390075e-06, "loss": 0.772, "step": 3218 }, { "epoch": 0.13340793236354595, "grad_norm": 0.4675489366054535, "learning_rate": 4.333167557710639e-06, "loss": 0.7981, "step": 3219 }, { "epoch": 0.1334493762692196, "grad_norm": 0.43756312131881714, "learning_rate": 4.33296033818227e-06, "loss": 0.728, "step": 3220 }, { "epoch": 0.13349082017489328, "grad_norm": 0.4187451899051666, "learning_rate": 4.3327531186539025e-06, "loss": 0.6693, "step": 3221 }, { "epoch": 0.13353226408056695, "grad_norm": 0.44672414660453796, 
"learning_rate": 4.332545899125534e-06, "loss": 0.7822, "step": 3222 }, { "epoch": 0.13357370798624063, "grad_norm": 0.41613438725471497, "learning_rate": 4.332338679597165e-06, "loss": 0.7236, "step": 3223 }, { "epoch": 0.1336151518919143, "grad_norm": 0.44749435782432556, "learning_rate": 4.3321314600687975e-06, "loss": 0.7554, "step": 3224 }, { "epoch": 0.13365659579758796, "grad_norm": 0.4225656986236572, "learning_rate": 4.331924240540429e-06, "loss": 0.744, "step": 3225 }, { "epoch": 0.13369803970326163, "grad_norm": 0.45646581053733826, "learning_rate": 4.331717021012061e-06, "loss": 0.772, "step": 3226 }, { "epoch": 0.1337394836089353, "grad_norm": 0.4501378834247589, "learning_rate": 4.331509801483692e-06, "loss": 0.725, "step": 3227 }, { "epoch": 0.133780927514609, "grad_norm": 0.46129289269447327, "learning_rate": 4.331302581955324e-06, "loss": 0.7576, "step": 3228 }, { "epoch": 0.13382237142028264, "grad_norm": 0.44970420002937317, "learning_rate": 4.331095362426955e-06, "loss": 0.748, "step": 3229 }, { "epoch": 0.13386381532595631, "grad_norm": 0.44980454444885254, "learning_rate": 4.3308881428985875e-06, "loss": 0.7805, "step": 3230 }, { "epoch": 0.13390525923163, "grad_norm": 0.4057115316390991, "learning_rate": 4.330680923370219e-06, "loss": 0.7798, "step": 3231 }, { "epoch": 0.13394670313730367, "grad_norm": 0.4357262849807739, "learning_rate": 4.33047370384185e-06, "loss": 0.8035, "step": 3232 }, { "epoch": 0.13398814704297732, "grad_norm": 0.473522424697876, "learning_rate": 4.3302664843134825e-06, "loss": 0.788, "step": 3233 }, { "epoch": 0.134029590948651, "grad_norm": 0.40912574529647827, "learning_rate": 4.330059264785114e-06, "loss": 0.6713, "step": 3234 }, { "epoch": 0.13407103485432467, "grad_norm": 0.41532158851623535, "learning_rate": 4.329852045256745e-06, "loss": 0.7729, "step": 3235 }, { "epoch": 0.13411247875999835, "grad_norm": 0.48318198323249817, "learning_rate": 4.329644825728377e-06, "loss": 0.7312, "step": 3236 }, { "epoch": 
0.13415392266567203, "grad_norm": 0.5060474872589111, "learning_rate": 4.329437606200009e-06, "loss": 0.7581, "step": 3237 }, { "epoch": 0.13419536657134568, "grad_norm": 0.41201525926589966, "learning_rate": 4.32923038667164e-06, "loss": 0.749, "step": 3238 }, { "epoch": 0.13423681047701935, "grad_norm": 0.4343048930168152, "learning_rate": 4.329023167143272e-06, "loss": 0.7188, "step": 3239 }, { "epoch": 0.13427825438269303, "grad_norm": 0.428295373916626, "learning_rate": 4.328815947614903e-06, "loss": 0.73, "step": 3240 }, { "epoch": 0.1343196982883667, "grad_norm": 0.4618155062198639, "learning_rate": 4.328608728086535e-06, "loss": 0.7122, "step": 3241 }, { "epoch": 0.13436114219404036, "grad_norm": 0.4237076938152313, "learning_rate": 4.3284015085581675e-06, "loss": 0.6992, "step": 3242 }, { "epoch": 0.13440258609971403, "grad_norm": 0.4964372515678406, "learning_rate": 4.328194289029798e-06, "loss": 0.7593, "step": 3243 }, { "epoch": 0.1344440300053877, "grad_norm": 0.43819156289100647, "learning_rate": 4.32798706950143e-06, "loss": 0.697, "step": 3244 }, { "epoch": 0.1344854739110614, "grad_norm": 0.4704984426498413, "learning_rate": 4.327779849973062e-06, "loss": 0.7153, "step": 3245 }, { "epoch": 0.13452691781673504, "grad_norm": 0.5024917721748352, "learning_rate": 4.327572630444694e-06, "loss": 0.8037, "step": 3246 }, { "epoch": 0.1345683617224087, "grad_norm": 0.4122340679168701, "learning_rate": 4.327365410916325e-06, "loss": 0.7778, "step": 3247 }, { "epoch": 0.1346098056280824, "grad_norm": 0.4296201765537262, "learning_rate": 4.327158191387957e-06, "loss": 0.7239, "step": 3248 }, { "epoch": 0.13465124953375607, "grad_norm": 0.41790303587913513, "learning_rate": 4.326950971859589e-06, "loss": 0.7373, "step": 3249 }, { "epoch": 0.13469269343942974, "grad_norm": 0.43771839141845703, "learning_rate": 4.32674375233122e-06, "loss": 0.7749, "step": 3250 }, { "epoch": 0.1347341373451034, "grad_norm": 0.43307211995124817, "learning_rate": 
4.326536532802852e-06, "loss": 0.7705, "step": 3251 }, { "epoch": 0.13477558125077707, "grad_norm": 0.45304903388023376, "learning_rate": 4.326329313274483e-06, "loss": 0.719, "step": 3252 }, { "epoch": 0.13481702515645075, "grad_norm": 0.42284977436065674, "learning_rate": 4.326122093746115e-06, "loss": 0.6877, "step": 3253 }, { "epoch": 0.13485846906212443, "grad_norm": 0.41939017176628113, "learning_rate": 4.325914874217747e-06, "loss": 0.7179, "step": 3254 }, { "epoch": 0.13489991296779807, "grad_norm": 0.45863446593284607, "learning_rate": 4.325707654689378e-06, "loss": 0.7891, "step": 3255 }, { "epoch": 0.13494135687347175, "grad_norm": 0.45165812969207764, "learning_rate": 4.3255004351610094e-06, "loss": 0.7942, "step": 3256 }, { "epoch": 0.13498280077914543, "grad_norm": 0.40971502661705017, "learning_rate": 4.325293215632642e-06, "loss": 0.7324, "step": 3257 }, { "epoch": 0.1350242446848191, "grad_norm": 0.4014582335948944, "learning_rate": 4.325085996104274e-06, "loss": 0.7385, "step": 3258 }, { "epoch": 0.13506568859049276, "grad_norm": 0.41865915060043335, "learning_rate": 4.3248787765759044e-06, "loss": 0.7656, "step": 3259 }, { "epoch": 0.13510713249616643, "grad_norm": 0.4344530999660492, "learning_rate": 4.324671557047537e-06, "loss": 0.7493, "step": 3260 }, { "epoch": 0.1351485764018401, "grad_norm": 0.41098886728286743, "learning_rate": 4.324464337519168e-06, "loss": 0.7461, "step": 3261 }, { "epoch": 0.1351900203075138, "grad_norm": 0.4427797198295593, "learning_rate": 4.3242571179908e-06, "loss": 0.7266, "step": 3262 }, { "epoch": 0.13523146421318746, "grad_norm": 0.5158562660217285, "learning_rate": 4.324049898462432e-06, "loss": 0.7805, "step": 3263 }, { "epoch": 0.1352729081188611, "grad_norm": 0.4522770643234253, "learning_rate": 4.323842678934063e-06, "loss": 0.7476, "step": 3264 }, { "epoch": 0.1353143520245348, "grad_norm": 0.4183885455131531, "learning_rate": 4.3236354594056945e-06, "loss": 0.7151, "step": 3265 }, { "epoch": 
0.13535579593020847, "grad_norm": 0.42074689269065857, "learning_rate": 4.323428239877327e-06, "loss": 0.7244, "step": 3266 }, { "epoch": 0.13539723983588214, "grad_norm": 0.42470109462738037, "learning_rate": 4.323221020348958e-06, "loss": 0.735, "step": 3267 }, { "epoch": 0.1354386837415558, "grad_norm": 0.48983752727508545, "learning_rate": 4.3230138008205895e-06, "loss": 0.7788, "step": 3268 }, { "epoch": 0.13548012764722947, "grad_norm": 0.4303596615791321, "learning_rate": 4.322806581292222e-06, "loss": 0.6938, "step": 3269 }, { "epoch": 0.13552157155290315, "grad_norm": 0.4334631860256195, "learning_rate": 4.322599361763853e-06, "loss": 0.7323, "step": 3270 }, { "epoch": 0.13556301545857682, "grad_norm": 0.4621518552303314, "learning_rate": 4.3223921422354845e-06, "loss": 0.7722, "step": 3271 }, { "epoch": 0.13560445936425047, "grad_norm": 0.45699045062065125, "learning_rate": 4.322184922707116e-06, "loss": 0.7908, "step": 3272 }, { "epoch": 0.13564590326992415, "grad_norm": 0.4064429700374603, "learning_rate": 4.321977703178748e-06, "loss": 0.7195, "step": 3273 }, { "epoch": 0.13568734717559783, "grad_norm": 0.4567306935787201, "learning_rate": 4.3217704836503795e-06, "loss": 0.813, "step": 3274 }, { "epoch": 0.1357287910812715, "grad_norm": 0.42542627453804016, "learning_rate": 4.321563264122011e-06, "loss": 0.7336, "step": 3275 }, { "epoch": 0.13577023498694518, "grad_norm": 0.4275822937488556, "learning_rate": 4.321356044593643e-06, "loss": 0.7197, "step": 3276 }, { "epoch": 0.13581167889261883, "grad_norm": 0.4292237162590027, "learning_rate": 4.3211488250652745e-06, "loss": 0.7317, "step": 3277 }, { "epoch": 0.1358531227982925, "grad_norm": 0.4524635672569275, "learning_rate": 4.320941605536907e-06, "loss": 0.7356, "step": 3278 }, { "epoch": 0.13589456670396619, "grad_norm": 0.43820255994796753, "learning_rate": 4.320734386008538e-06, "loss": 0.7183, "step": 3279 }, { "epoch": 0.13593601060963986, "grad_norm": 0.4431719481945038, "learning_rate": 
4.3205271664801695e-06, "loss": 0.7849, "step": 3280 }, { "epoch": 0.1359774545153135, "grad_norm": 0.4130253195762634, "learning_rate": 4.320319946951801e-06, "loss": 0.7122, "step": 3281 }, { "epoch": 0.1360188984209872, "grad_norm": 0.4279428720474243, "learning_rate": 4.320112727423433e-06, "loss": 0.7747, "step": 3282 }, { "epoch": 0.13606034232666087, "grad_norm": 0.4873661696910858, "learning_rate": 4.3199055078950645e-06, "loss": 0.7856, "step": 3283 }, { "epoch": 0.13610178623233454, "grad_norm": 0.42564108967781067, "learning_rate": 4.319698288366696e-06, "loss": 0.7549, "step": 3284 }, { "epoch": 0.1361432301380082, "grad_norm": 0.4233907461166382, "learning_rate": 4.319491068838328e-06, "loss": 0.7986, "step": 3285 }, { "epoch": 0.13618467404368187, "grad_norm": 0.46017396450042725, "learning_rate": 4.3192838493099595e-06, "loss": 0.7651, "step": 3286 }, { "epoch": 0.13622611794935555, "grad_norm": 0.4139768183231354, "learning_rate": 4.319076629781591e-06, "loss": 0.7264, "step": 3287 }, { "epoch": 0.13626756185502922, "grad_norm": 0.4284610450267792, "learning_rate": 4.318869410253222e-06, "loss": 0.731, "step": 3288 }, { "epoch": 0.1363090057607029, "grad_norm": 0.3987548053264618, "learning_rate": 4.3186621907248545e-06, "loss": 0.7677, "step": 3289 }, { "epoch": 0.13635044966637655, "grad_norm": 0.4312137961387634, "learning_rate": 4.318454971196486e-06, "loss": 0.7651, "step": 3290 }, { "epoch": 0.13639189357205023, "grad_norm": 0.6239796280860901, "learning_rate": 4.318247751668117e-06, "loss": 0.7858, "step": 3291 }, { "epoch": 0.1364333374777239, "grad_norm": 0.4032297432422638, "learning_rate": 4.318040532139749e-06, "loss": 0.7095, "step": 3292 }, { "epoch": 0.13647478138339758, "grad_norm": 0.45367351174354553, "learning_rate": 4.317833312611381e-06, "loss": 0.7629, "step": 3293 }, { "epoch": 0.13651622528907123, "grad_norm": 0.47958412766456604, "learning_rate": 4.317626093083013e-06, "loss": 0.8145, "step": 3294 }, { "epoch": 
0.1365576691947449, "grad_norm": 0.42471709847450256, "learning_rate": 4.317418873554644e-06, "loss": 0.7554, "step": 3295 }, { "epoch": 0.13659911310041858, "grad_norm": 0.465071439743042, "learning_rate": 4.317211654026276e-06, "loss": 0.7935, "step": 3296 }, { "epoch": 0.13664055700609226, "grad_norm": 0.40812942385673523, "learning_rate": 4.317004434497907e-06, "loss": 0.7136, "step": 3297 }, { "epoch": 0.13668200091176594, "grad_norm": 0.4135816693305969, "learning_rate": 4.3167972149695395e-06, "loss": 0.7083, "step": 3298 }, { "epoch": 0.1367234448174396, "grad_norm": 0.40938976407051086, "learning_rate": 4.316589995441171e-06, "loss": 0.7122, "step": 3299 }, { "epoch": 0.13676488872311326, "grad_norm": 0.4250430166721344, "learning_rate": 4.316382775912802e-06, "loss": 0.7507, "step": 3300 }, { "epoch": 0.13680633262878694, "grad_norm": 0.4539840817451477, "learning_rate": 4.316175556384434e-06, "loss": 0.7373, "step": 3301 }, { "epoch": 0.13684777653446062, "grad_norm": 0.45435410737991333, "learning_rate": 4.315968336856066e-06, "loss": 0.7261, "step": 3302 }, { "epoch": 0.13688922044013427, "grad_norm": 0.49231502413749695, "learning_rate": 4.315761117327697e-06, "loss": 0.729, "step": 3303 }, { "epoch": 0.13693066434580795, "grad_norm": 0.433064341545105, "learning_rate": 4.315553897799329e-06, "loss": 0.7062, "step": 3304 }, { "epoch": 0.13697210825148162, "grad_norm": 0.4862290620803833, "learning_rate": 4.315346678270961e-06, "loss": 0.7634, "step": 3305 }, { "epoch": 0.1370135521571553, "grad_norm": 0.43565258383750916, "learning_rate": 4.315139458742592e-06, "loss": 0.7664, "step": 3306 }, { "epoch": 0.13705499606282895, "grad_norm": 0.42442816495895386, "learning_rate": 4.314932239214224e-06, "loss": 0.7771, "step": 3307 }, { "epoch": 0.13709643996850263, "grad_norm": 0.4279419183731079, "learning_rate": 4.314725019685855e-06, "loss": 0.7104, "step": 3308 }, { "epoch": 0.1371378838741763, "grad_norm": 0.4139658212661743, "learning_rate": 
4.314517800157487e-06, "loss": 0.7595, "step": 3309 }, { "epoch": 0.13717932777984998, "grad_norm": 0.4316956102848053, "learning_rate": 4.3143105806291195e-06, "loss": 0.7417, "step": 3310 }, { "epoch": 0.13722077168552366, "grad_norm": 0.5083264112472534, "learning_rate": 4.31410336110075e-06, "loss": 0.7698, "step": 3311 }, { "epoch": 0.1372622155911973, "grad_norm": 0.42437848448753357, "learning_rate": 4.313896141572382e-06, "loss": 0.7766, "step": 3312 }, { "epoch": 0.13730365949687098, "grad_norm": 0.42439892888069153, "learning_rate": 4.313688922044014e-06, "loss": 0.7124, "step": 3313 }, { "epoch": 0.13734510340254466, "grad_norm": 0.4494580924510956, "learning_rate": 4.313481702515646e-06, "loss": 0.7375, "step": 3314 }, { "epoch": 0.13738654730821834, "grad_norm": 0.41579461097717285, "learning_rate": 4.313274482987277e-06, "loss": 0.7512, "step": 3315 }, { "epoch": 0.137427991213892, "grad_norm": 0.46532508730888367, "learning_rate": 4.313067263458909e-06, "loss": 0.8091, "step": 3316 }, { "epoch": 0.13746943511956566, "grad_norm": 0.4343154728412628, "learning_rate": 4.31286004393054e-06, "loss": 0.7671, "step": 3317 }, { "epoch": 0.13751087902523934, "grad_norm": 0.46249207854270935, "learning_rate": 4.312652824402172e-06, "loss": 0.8242, "step": 3318 }, { "epoch": 0.13755232293091302, "grad_norm": 0.4311218559741974, "learning_rate": 4.312445604873804e-06, "loss": 0.7026, "step": 3319 }, { "epoch": 0.13759376683658667, "grad_norm": 0.4132808744907379, "learning_rate": 4.312238385345435e-06, "loss": 0.728, "step": 3320 }, { "epoch": 0.13763521074226034, "grad_norm": 0.4490644335746765, "learning_rate": 4.312031165817067e-06, "loss": 0.7307, "step": 3321 }, { "epoch": 0.13767665464793402, "grad_norm": 0.47734448313713074, "learning_rate": 4.311823946288699e-06, "loss": 0.7844, "step": 3322 }, { "epoch": 0.1377180985536077, "grad_norm": 1.2305147647857666, "learning_rate": 4.31161672676033e-06, "loss": 0.7303, "step": 3323 }, { "epoch": 
0.13775954245928138, "grad_norm": 0.40227973461151123, "learning_rate": 4.3114095072319615e-06, "loss": 0.6914, "step": 3324 }, { "epoch": 0.13780098636495502, "grad_norm": 0.44827958941459656, "learning_rate": 4.311202287703594e-06, "loss": 0.7725, "step": 3325 }, { "epoch": 0.1378424302706287, "grad_norm": 0.4013510048389435, "learning_rate": 4.310995068175225e-06, "loss": 0.7131, "step": 3326 }, { "epoch": 0.13788387417630238, "grad_norm": 0.4368317425251007, "learning_rate": 4.3107878486468565e-06, "loss": 0.7012, "step": 3327 }, { "epoch": 0.13792531808197606, "grad_norm": 0.4506840109825134, "learning_rate": 4.310580629118489e-06, "loss": 0.7493, "step": 3328 }, { "epoch": 0.1379667619876497, "grad_norm": 0.40915173292160034, "learning_rate": 4.31037340959012e-06, "loss": 0.7351, "step": 3329 }, { "epoch": 0.13800820589332338, "grad_norm": 0.44817087054252625, "learning_rate": 4.310166190061752e-06, "loss": 0.7555, "step": 3330 }, { "epoch": 0.13804964979899706, "grad_norm": 0.4021657407283783, "learning_rate": 4.309958970533384e-06, "loss": 0.7273, "step": 3331 }, { "epoch": 0.13809109370467074, "grad_norm": 0.40061238408088684, "learning_rate": 4.309751751005015e-06, "loss": 0.7651, "step": 3332 }, { "epoch": 0.13813253761034439, "grad_norm": 0.44201990962028503, "learning_rate": 4.3095445314766465e-06, "loss": 0.7546, "step": 3333 }, { "epoch": 0.13817398151601806, "grad_norm": 0.4271180331707001, "learning_rate": 4.309337311948279e-06, "loss": 0.7583, "step": 3334 }, { "epoch": 0.13821542542169174, "grad_norm": 0.43941429257392883, "learning_rate": 4.30913009241991e-06, "loss": 0.7485, "step": 3335 }, { "epoch": 0.13825686932736542, "grad_norm": 0.4266482889652252, "learning_rate": 4.3089228728915415e-06, "loss": 0.7192, "step": 3336 }, { "epoch": 0.1382983132330391, "grad_norm": 0.4823482930660248, "learning_rate": 4.308715653363174e-06, "loss": 0.7314, "step": 3337 }, { "epoch": 0.13833975713871274, "grad_norm": 0.4269290268421173, "learning_rate": 
4.308508433834805e-06, "loss": 0.7869, "step": 3338 }, { "epoch": 0.13838120104438642, "grad_norm": 0.42131197452545166, "learning_rate": 4.3083012143064365e-06, "loss": 0.7495, "step": 3339 }, { "epoch": 0.1384226449500601, "grad_norm": 0.45400270819664, "learning_rate": 4.308093994778068e-06, "loss": 0.7749, "step": 3340 }, { "epoch": 0.13846408885573377, "grad_norm": 0.4411889314651489, "learning_rate": 4.3078867752497e-06, "loss": 0.7817, "step": 3341 }, { "epoch": 0.13850553276140742, "grad_norm": 0.4161721169948578, "learning_rate": 4.3076795557213315e-06, "loss": 0.7742, "step": 3342 }, { "epoch": 0.1385469766670811, "grad_norm": 0.45293962955474854, "learning_rate": 4.307472336192963e-06, "loss": 0.7197, "step": 3343 }, { "epoch": 0.13858842057275478, "grad_norm": 0.46139729022979736, "learning_rate": 4.307265116664594e-06, "loss": 0.7737, "step": 3344 }, { "epoch": 0.13862986447842846, "grad_norm": 0.41932469606399536, "learning_rate": 4.3070578971362265e-06, "loss": 0.7228, "step": 3345 }, { "epoch": 0.1386713083841021, "grad_norm": 0.4702572226524353, "learning_rate": 4.306850677607859e-06, "loss": 0.7961, "step": 3346 }, { "epoch": 0.13871275228977578, "grad_norm": 0.4401925206184387, "learning_rate": 4.30664345807949e-06, "loss": 0.7523, "step": 3347 }, { "epoch": 0.13875419619544946, "grad_norm": 0.45952650904655457, "learning_rate": 4.3064362385511215e-06, "loss": 0.7715, "step": 3348 }, { "epoch": 0.13879564010112314, "grad_norm": 0.44228479266166687, "learning_rate": 4.306229019022753e-06, "loss": 0.7395, "step": 3349 }, { "epoch": 0.1388370840067968, "grad_norm": 0.4234450161457062, "learning_rate": 4.306021799494385e-06, "loss": 0.7694, "step": 3350 }, { "epoch": 0.13887852791247046, "grad_norm": 0.42631199955940247, "learning_rate": 4.3058145799660165e-06, "loss": 0.7473, "step": 3351 }, { "epoch": 0.13891997181814414, "grad_norm": 0.43943336606025696, "learning_rate": 4.305607360437648e-06, "loss": 0.7341, "step": 3352 }, { "epoch": 
0.13896141572381782, "grad_norm": 0.3992968499660492, "learning_rate": 4.305400140909279e-06, "loss": 0.7251, "step": 3353 }, { "epoch": 0.1390028596294915, "grad_norm": 0.46236473321914673, "learning_rate": 4.3051929213809115e-06, "loss": 0.7545, "step": 3354 }, { "epoch": 0.13904430353516514, "grad_norm": 0.40973255038261414, "learning_rate": 4.304985701852543e-06, "loss": 0.7483, "step": 3355 }, { "epoch": 0.13908574744083882, "grad_norm": 0.4474811255931854, "learning_rate": 4.304778482324174e-06, "loss": 0.6654, "step": 3356 }, { "epoch": 0.1391271913465125, "grad_norm": 0.42747166752815247, "learning_rate": 4.3045712627958065e-06, "loss": 0.7522, "step": 3357 }, { "epoch": 0.13916863525218617, "grad_norm": 0.44262632727622986, "learning_rate": 4.304364043267438e-06, "loss": 0.7087, "step": 3358 }, { "epoch": 0.13921007915785982, "grad_norm": 0.39586323499679565, "learning_rate": 4.304156823739069e-06, "loss": 0.6575, "step": 3359 }, { "epoch": 0.1392515230635335, "grad_norm": 0.4459928572177887, "learning_rate": 4.303949604210701e-06, "loss": 0.7324, "step": 3360 }, { "epoch": 0.13929296696920718, "grad_norm": 0.4681191146373749, "learning_rate": 4.303742384682333e-06, "loss": 0.7527, "step": 3361 }, { "epoch": 0.13933441087488085, "grad_norm": 0.4402056932449341, "learning_rate": 4.303535165153964e-06, "loss": 0.7529, "step": 3362 }, { "epoch": 0.13937585478055453, "grad_norm": 0.4247872233390808, "learning_rate": 4.3033279456255965e-06, "loss": 0.7188, "step": 3363 }, { "epoch": 0.13941729868622818, "grad_norm": 0.4483569860458374, "learning_rate": 4.303120726097228e-06, "loss": 0.7046, "step": 3364 }, { "epoch": 0.13945874259190186, "grad_norm": 0.40512576699256897, "learning_rate": 4.302913506568859e-06, "loss": 0.7341, "step": 3365 }, { "epoch": 0.13950018649757553, "grad_norm": 0.4109475016593933, "learning_rate": 4.3027062870404915e-06, "loss": 0.7424, "step": 3366 }, { "epoch": 0.1395416304032492, "grad_norm": 0.47797730565071106, "learning_rate": 
4.302499067512123e-06, "loss": 0.8013, "step": 3367 }, { "epoch": 0.13958307430892286, "grad_norm": 0.4332086145877838, "learning_rate": 4.302291847983754e-06, "loss": 0.6888, "step": 3368 }, { "epoch": 0.13962451821459654, "grad_norm": 0.42073312401771545, "learning_rate": 4.302084628455386e-06, "loss": 0.7136, "step": 3369 }, { "epoch": 0.13966596212027022, "grad_norm": 0.4236204922199249, "learning_rate": 4.301877408927018e-06, "loss": 0.7434, "step": 3370 }, { "epoch": 0.1397074060259439, "grad_norm": 0.41219261288642883, "learning_rate": 4.301670189398649e-06, "loss": 0.6863, "step": 3371 }, { "epoch": 0.13974884993161757, "grad_norm": 0.4015968441963196, "learning_rate": 4.301462969870281e-06, "loss": 0.7253, "step": 3372 }, { "epoch": 0.13979029383729122, "grad_norm": 0.43729519844055176, "learning_rate": 4.301255750341913e-06, "loss": 0.8142, "step": 3373 }, { "epoch": 0.1398317377429649, "grad_norm": 0.4236052334308624, "learning_rate": 4.301048530813544e-06, "loss": 0.761, "step": 3374 }, { "epoch": 0.13987318164863857, "grad_norm": 0.3898746371269226, "learning_rate": 4.300841311285176e-06, "loss": 0.6331, "step": 3375 }, { "epoch": 0.13991462555431225, "grad_norm": 0.4334551990032196, "learning_rate": 4.300634091756807e-06, "loss": 0.7014, "step": 3376 }, { "epoch": 0.1399560694599859, "grad_norm": 0.40040749311447144, "learning_rate": 4.300426872228439e-06, "loss": 0.6868, "step": 3377 }, { "epoch": 0.13999751336565958, "grad_norm": 0.4336819350719452, "learning_rate": 4.300219652700071e-06, "loss": 0.7603, "step": 3378 }, { "epoch": 0.14003895727133325, "grad_norm": 0.4108978509902954, "learning_rate": 4.300012433171702e-06, "loss": 0.7292, "step": 3379 }, { "epoch": 0.14008040117700693, "grad_norm": 0.4317830204963684, "learning_rate": 4.299805213643334e-06, "loss": 0.7084, "step": 3380 }, { "epoch": 0.14012184508268058, "grad_norm": 0.4439917802810669, "learning_rate": 4.299597994114966e-06, "loss": 0.734, "step": 3381 }, { "epoch": 
0.14016328898835426, "grad_norm": 0.4173215627670288, "learning_rate": 4.299390774586598e-06, "loss": 0.731, "step": 3382 }, { "epoch": 0.14020473289402793, "grad_norm": 0.460292786359787, "learning_rate": 4.299183555058229e-06, "loss": 0.7661, "step": 3383 }, { "epoch": 0.1402461767997016, "grad_norm": 0.4207804799079895, "learning_rate": 4.298976335529861e-06, "loss": 0.719, "step": 3384 }, { "epoch": 0.1402876207053753, "grad_norm": 0.428084135055542, "learning_rate": 4.298769116001492e-06, "loss": 0.6895, "step": 3385 }, { "epoch": 0.14032906461104894, "grad_norm": 0.447248637676239, "learning_rate": 4.298561896473124e-06, "loss": 0.7157, "step": 3386 }, { "epoch": 0.14037050851672261, "grad_norm": 0.44536519050598145, "learning_rate": 4.298354676944756e-06, "loss": 0.7671, "step": 3387 }, { "epoch": 0.1404119524223963, "grad_norm": 0.41328883171081543, "learning_rate": 4.298147457416387e-06, "loss": 0.7095, "step": 3388 }, { "epoch": 0.14045339632806997, "grad_norm": 0.43724849820137024, "learning_rate": 4.297940237888019e-06, "loss": 0.7336, "step": 3389 }, { "epoch": 0.14049484023374362, "grad_norm": 0.46767181158065796, "learning_rate": 4.297733018359651e-06, "loss": 0.783, "step": 3390 }, { "epoch": 0.1405362841394173, "grad_norm": 0.4089604616165161, "learning_rate": 4.297525798831282e-06, "loss": 0.7812, "step": 3391 }, { "epoch": 0.14057772804509097, "grad_norm": 0.4268758296966553, "learning_rate": 4.2973185793029135e-06, "loss": 0.7498, "step": 3392 }, { "epoch": 0.14061917195076465, "grad_norm": 0.4097006320953369, "learning_rate": 4.297111359774546e-06, "loss": 0.7845, "step": 3393 }, { "epoch": 0.1406606158564383, "grad_norm": 0.4642042815685272, "learning_rate": 4.296904140246177e-06, "loss": 0.7285, "step": 3394 }, { "epoch": 0.14070205976211198, "grad_norm": 0.46710604429244995, "learning_rate": 4.2966969207178085e-06, "loss": 0.7606, "step": 3395 }, { "epoch": 0.14074350366778565, "grad_norm": 0.4127083122730255, "learning_rate": 
4.29648970118944e-06, "loss": 0.6692, "step": 3396 }, { "epoch": 0.14078494757345933, "grad_norm": 0.4331100583076477, "learning_rate": 4.296282481661072e-06, "loss": 0.7859, "step": 3397 }, { "epoch": 0.140826391479133, "grad_norm": 0.43853187561035156, "learning_rate": 4.296075262132704e-06, "loss": 0.7822, "step": 3398 }, { "epoch": 0.14086783538480666, "grad_norm": 0.41086718440055847, "learning_rate": 4.295868042604336e-06, "loss": 0.7275, "step": 3399 }, { "epoch": 0.14090927929048033, "grad_norm": 0.41194015741348267, "learning_rate": 4.295660823075967e-06, "loss": 0.7148, "step": 3400 }, { "epoch": 0.140950723196154, "grad_norm": 0.4312911033630371, "learning_rate": 4.2954536035475985e-06, "loss": 0.7366, "step": 3401 }, { "epoch": 0.1409921671018277, "grad_norm": 0.38340893387794495, "learning_rate": 4.295246384019231e-06, "loss": 0.6613, "step": 3402 }, { "epoch": 0.14103361100750134, "grad_norm": 0.4471784830093384, "learning_rate": 4.295039164490862e-06, "loss": 0.7183, "step": 3403 }, { "epoch": 0.141075054913175, "grad_norm": 0.4324750602245331, "learning_rate": 4.2948319449624935e-06, "loss": 0.7515, "step": 3404 }, { "epoch": 0.1411164988188487, "grad_norm": 0.4392637610435486, "learning_rate": 4.294624725434125e-06, "loss": 0.7554, "step": 3405 }, { "epoch": 0.14115794272452237, "grad_norm": 0.4784635901451111, "learning_rate": 4.294417505905757e-06, "loss": 0.7439, "step": 3406 }, { "epoch": 0.14119938663019602, "grad_norm": 0.39625808596611023, "learning_rate": 4.2942102863773885e-06, "loss": 0.6875, "step": 3407 }, { "epoch": 0.1412408305358697, "grad_norm": 0.43132859468460083, "learning_rate": 4.29400306684902e-06, "loss": 0.78, "step": 3408 }, { "epoch": 0.14128227444154337, "grad_norm": 0.435649573802948, "learning_rate": 4.293795847320652e-06, "loss": 0.6616, "step": 3409 }, { "epoch": 0.14132371834721705, "grad_norm": 0.4050649404525757, "learning_rate": 4.2935886277922835e-06, "loss": 0.7496, "step": 3410 }, { "epoch": 
0.14136516225289072, "grad_norm": 0.4385804831981659, "learning_rate": 4.293381408263915e-06, "loss": 0.7773, "step": 3411 }, { "epoch": 0.14140660615856437, "grad_norm": 0.42110100388526917, "learning_rate": 4.293174188735546e-06, "loss": 0.7549, "step": 3412 }, { "epoch": 0.14144805006423805, "grad_norm": 0.4377802610397339, "learning_rate": 4.2929669692071785e-06, "loss": 0.7051, "step": 3413 }, { "epoch": 0.14148949396991173, "grad_norm": 0.43398773670196533, "learning_rate": 4.29275974967881e-06, "loss": 0.7561, "step": 3414 }, { "epoch": 0.1415309378755854, "grad_norm": 0.4242144525051117, "learning_rate": 4.292552530150442e-06, "loss": 0.7158, "step": 3415 }, { "epoch": 0.14157238178125905, "grad_norm": 0.49774014949798584, "learning_rate": 4.2923453106220735e-06, "loss": 0.7537, "step": 3416 }, { "epoch": 0.14161382568693273, "grad_norm": 0.4257213771343231, "learning_rate": 4.292138091093705e-06, "loss": 0.7463, "step": 3417 }, { "epoch": 0.1416552695926064, "grad_norm": 0.4529542028903961, "learning_rate": 4.291930871565337e-06, "loss": 0.7406, "step": 3418 }, { "epoch": 0.14169671349828009, "grad_norm": 0.40651723742485046, "learning_rate": 4.2917236520369685e-06, "loss": 0.7239, "step": 3419 }, { "epoch": 0.14173815740395374, "grad_norm": 0.4605000615119934, "learning_rate": 4.2915164325086e-06, "loss": 0.7917, "step": 3420 }, { "epoch": 0.1417796013096274, "grad_norm": 0.46843579411506653, "learning_rate": 4.291309212980231e-06, "loss": 0.7854, "step": 3421 }, { "epoch": 0.1418210452153011, "grad_norm": 0.42019957304000854, "learning_rate": 4.2911019934518635e-06, "loss": 0.7336, "step": 3422 }, { "epoch": 0.14186248912097477, "grad_norm": 0.45614132285118103, "learning_rate": 4.290894773923495e-06, "loss": 0.7771, "step": 3423 }, { "epoch": 0.14190393302664844, "grad_norm": 0.4390544593334198, "learning_rate": 4.290687554395126e-06, "loss": 0.7145, "step": 3424 }, { "epoch": 0.1419453769323221, "grad_norm": 0.46028438210487366, "learning_rate": 
4.2904803348667585e-06, "loss": 0.7913, "step": 3425 }, { "epoch": 0.14198682083799577, "grad_norm": 0.4318721294403076, "learning_rate": 4.29027311533839e-06, "loss": 0.7617, "step": 3426 }, { "epoch": 0.14202826474366945, "grad_norm": 0.44680488109588623, "learning_rate": 4.290065895810021e-06, "loss": 0.8103, "step": 3427 }, { "epoch": 0.14206970864934312, "grad_norm": 0.44753387570381165, "learning_rate": 4.289858676281653e-06, "loss": 0.749, "step": 3428 }, { "epoch": 0.14211115255501677, "grad_norm": 0.41088226437568665, "learning_rate": 4.289651456753285e-06, "loss": 0.7596, "step": 3429 }, { "epoch": 0.14215259646069045, "grad_norm": 0.4437929689884186, "learning_rate": 4.289444237224916e-06, "loss": 0.71, "step": 3430 }, { "epoch": 0.14219404036636413, "grad_norm": 0.43385204672813416, "learning_rate": 4.2892370176965485e-06, "loss": 0.735, "step": 3431 }, { "epoch": 0.1422354842720378, "grad_norm": 0.442533940076828, "learning_rate": 4.28902979816818e-06, "loss": 0.7471, "step": 3432 }, { "epoch": 0.14227692817771148, "grad_norm": 0.4379854202270508, "learning_rate": 4.288822578639811e-06, "loss": 0.7002, "step": 3433 }, { "epoch": 0.14231837208338513, "grad_norm": 0.4227604269981384, "learning_rate": 4.2886153591114435e-06, "loss": 0.7241, "step": 3434 }, { "epoch": 0.1423598159890588, "grad_norm": 0.4352363049983978, "learning_rate": 4.288408139583075e-06, "loss": 0.7261, "step": 3435 }, { "epoch": 0.14240125989473248, "grad_norm": 0.4293971359729767, "learning_rate": 4.288200920054706e-06, "loss": 0.6913, "step": 3436 }, { "epoch": 0.14244270380040616, "grad_norm": 0.54514080286026, "learning_rate": 4.287993700526338e-06, "loss": 0.7251, "step": 3437 }, { "epoch": 0.1424841477060798, "grad_norm": 0.42512738704681396, "learning_rate": 4.28778648099797e-06, "loss": 0.7371, "step": 3438 }, { "epoch": 0.1425255916117535, "grad_norm": 0.45656123757362366, "learning_rate": 4.287579261469601e-06, "loss": 0.772, "step": 3439 }, { "epoch": 0.14256703551742717, 
"grad_norm": 0.43020185828208923, "learning_rate": 4.287372041941233e-06, "loss": 0.7285, "step": 3440 }, { "epoch": 0.14260847942310084, "grad_norm": 0.43705663084983826, "learning_rate": 4.287164822412865e-06, "loss": 0.6764, "step": 3441 }, { "epoch": 0.1426499233287745, "grad_norm": 0.45319026708602905, "learning_rate": 4.286957602884496e-06, "loss": 0.7238, "step": 3442 }, { "epoch": 0.14269136723444817, "grad_norm": 0.40755826234817505, "learning_rate": 4.286750383356128e-06, "loss": 0.6689, "step": 3443 }, { "epoch": 0.14273281114012185, "grad_norm": 0.4272681176662445, "learning_rate": 4.286543163827759e-06, "loss": 0.7615, "step": 3444 }, { "epoch": 0.14277425504579552, "grad_norm": 0.4659278094768524, "learning_rate": 4.286335944299391e-06, "loss": 0.7437, "step": 3445 }, { "epoch": 0.1428156989514692, "grad_norm": 0.4307320713996887, "learning_rate": 4.286128724771023e-06, "loss": 0.744, "step": 3446 }, { "epoch": 0.14285714285714285, "grad_norm": 0.45658209919929504, "learning_rate": 4.285921505242654e-06, "loss": 0.7683, "step": 3447 }, { "epoch": 0.14289858676281653, "grad_norm": 0.4416615068912506, "learning_rate": 4.2857142857142855e-06, "loss": 0.7361, "step": 3448 }, { "epoch": 0.1429400306684902, "grad_norm": 0.4493853747844696, "learning_rate": 4.285507066185918e-06, "loss": 0.7104, "step": 3449 }, { "epoch": 0.14298147457416388, "grad_norm": 0.38244369626045227, "learning_rate": 4.28529984665755e-06, "loss": 0.7253, "step": 3450 }, { "epoch": 0.14302291847983753, "grad_norm": 0.4370732307434082, "learning_rate": 4.285092627129181e-06, "loss": 0.7568, "step": 3451 }, { "epoch": 0.1430643623855112, "grad_norm": 0.4673200845718384, "learning_rate": 4.284885407600813e-06, "loss": 0.7708, "step": 3452 }, { "epoch": 0.14310580629118488, "grad_norm": 0.461574524641037, "learning_rate": 4.284678188072444e-06, "loss": 0.7571, "step": 3453 }, { "epoch": 0.14314725019685856, "grad_norm": 0.4427507519721985, "learning_rate": 4.284470968544076e-06, "loss": 
0.7747, "step": 3454 }, { "epoch": 0.1431886941025322, "grad_norm": 0.42441776394844055, "learning_rate": 4.284263749015708e-06, "loss": 0.7646, "step": 3455 }, { "epoch": 0.1432301380082059, "grad_norm": 0.4430643320083618, "learning_rate": 4.284056529487339e-06, "loss": 0.7299, "step": 3456 }, { "epoch": 0.14327158191387956, "grad_norm": 0.4265633225440979, "learning_rate": 4.2838493099589705e-06, "loss": 0.6846, "step": 3457 }, { "epoch": 0.14331302581955324, "grad_norm": 0.4507368505001068, "learning_rate": 4.283642090430603e-06, "loss": 0.7822, "step": 3458 }, { "epoch": 0.14335446972522692, "grad_norm": 0.40229934453964233, "learning_rate": 4.283434870902234e-06, "loss": 0.7261, "step": 3459 }, { "epoch": 0.14339591363090057, "grad_norm": 0.4339674413204193, "learning_rate": 4.2832276513738655e-06, "loss": 0.6611, "step": 3460 }, { "epoch": 0.14343735753657424, "grad_norm": 0.424968421459198, "learning_rate": 4.283020431845498e-06, "loss": 0.7578, "step": 3461 }, { "epoch": 0.14347880144224792, "grad_norm": 0.4391248822212219, "learning_rate": 4.282813212317129e-06, "loss": 0.7832, "step": 3462 }, { "epoch": 0.1435202453479216, "grad_norm": 0.47979751229286194, "learning_rate": 4.2826059927887605e-06, "loss": 0.7716, "step": 3463 }, { "epoch": 0.14356168925359525, "grad_norm": 0.44123777747154236, "learning_rate": 4.282398773260392e-06, "loss": 0.7141, "step": 3464 }, { "epoch": 0.14360313315926893, "grad_norm": 0.46980735659599304, "learning_rate": 4.282191553732024e-06, "loss": 0.7388, "step": 3465 }, { "epoch": 0.1436445770649426, "grad_norm": 0.40594521164894104, "learning_rate": 4.2819843342036555e-06, "loss": 0.696, "step": 3466 }, { "epoch": 0.14368602097061628, "grad_norm": 0.4325354993343353, "learning_rate": 4.281777114675288e-06, "loss": 0.7344, "step": 3467 }, { "epoch": 0.14372746487628993, "grad_norm": 0.42500731348991394, "learning_rate": 4.281569895146919e-06, "loss": 0.6895, "step": 3468 }, { "epoch": 0.1437689087819636, "grad_norm": 
0.43492645025253296, "learning_rate": 4.2813626756185505e-06, "loss": 0.6851, "step": 3469 }, { "epoch": 0.14381035268763728, "grad_norm": 0.42430299520492554, "learning_rate": 4.281155456090183e-06, "loss": 0.6852, "step": 3470 }, { "epoch": 0.14385179659331096, "grad_norm": 0.4382709860801697, "learning_rate": 4.280948236561814e-06, "loss": 0.7198, "step": 3471 }, { "epoch": 0.14389324049898464, "grad_norm": 0.4364079535007477, "learning_rate": 4.2807410170334455e-06, "loss": 0.7444, "step": 3472 }, { "epoch": 0.1439346844046583, "grad_norm": 0.456495463848114, "learning_rate": 4.280533797505077e-06, "loss": 0.7461, "step": 3473 }, { "epoch": 0.14397612831033196, "grad_norm": 0.44718798995018005, "learning_rate": 4.280326577976709e-06, "loss": 0.7671, "step": 3474 }, { "epoch": 0.14401757221600564, "grad_norm": 0.4819701313972473, "learning_rate": 4.2801193584483405e-06, "loss": 0.7366, "step": 3475 }, { "epoch": 0.14405901612167932, "grad_norm": 0.4162538945674896, "learning_rate": 4.279912138919972e-06, "loss": 0.7332, "step": 3476 }, { "epoch": 0.14410046002735297, "grad_norm": 0.5408386588096619, "learning_rate": 4.279704919391604e-06, "loss": 0.7814, "step": 3477 }, { "epoch": 0.14414190393302664, "grad_norm": 0.43330806493759155, "learning_rate": 4.2794976998632355e-06, "loss": 0.7573, "step": 3478 }, { "epoch": 0.14418334783870032, "grad_norm": 0.4250098764896393, "learning_rate": 4.279290480334867e-06, "loss": 0.7593, "step": 3479 }, { "epoch": 0.144224791744374, "grad_norm": 0.4699741303920746, "learning_rate": 4.279083260806498e-06, "loss": 0.7786, "step": 3480 }, { "epoch": 0.14426623565004765, "grad_norm": 0.442393958568573, "learning_rate": 4.2788760412781305e-06, "loss": 0.7375, "step": 3481 }, { "epoch": 0.14430767955572132, "grad_norm": 0.4113013446331024, "learning_rate": 4.278668821749762e-06, "loss": 0.7412, "step": 3482 }, { "epoch": 0.144349123461395, "grad_norm": 0.46772781014442444, "learning_rate": 4.278461602221394e-06, "loss": 0.7527, 
"step": 3483 }, { "epoch": 0.14439056736706868, "grad_norm": 0.41785454750061035, "learning_rate": 4.278254382693025e-06, "loss": 0.7554, "step": 3484 }, { "epoch": 0.14443201127274236, "grad_norm": 0.4000510275363922, "learning_rate": 4.278047163164657e-06, "loss": 0.7734, "step": 3485 }, { "epoch": 0.144473455178416, "grad_norm": 0.4850148856639862, "learning_rate": 4.277839943636289e-06, "loss": 0.7705, "step": 3486 }, { "epoch": 0.14451489908408968, "grad_norm": 0.4162720739841461, "learning_rate": 4.2776327241079205e-06, "loss": 0.738, "step": 3487 }, { "epoch": 0.14455634298976336, "grad_norm": 0.4242589771747589, "learning_rate": 4.277425504579552e-06, "loss": 0.7737, "step": 3488 }, { "epoch": 0.14459778689543704, "grad_norm": 0.41584208607673645, "learning_rate": 4.277218285051183e-06, "loss": 0.7625, "step": 3489 }, { "epoch": 0.14463923080111069, "grad_norm": 0.4112046957015991, "learning_rate": 4.2770110655228155e-06, "loss": 0.7417, "step": 3490 }, { "epoch": 0.14468067470678436, "grad_norm": 0.3992477059364319, "learning_rate": 4.276803845994447e-06, "loss": 0.6951, "step": 3491 }, { "epoch": 0.14472211861245804, "grad_norm": 0.418252557516098, "learning_rate": 4.276596626466078e-06, "loss": 0.7751, "step": 3492 }, { "epoch": 0.14476356251813172, "grad_norm": 0.4534588158130646, "learning_rate": 4.2763894069377105e-06, "loss": 0.7522, "step": 3493 }, { "epoch": 0.14480500642380537, "grad_norm": 0.43014875054359436, "learning_rate": 4.276182187409342e-06, "loss": 0.7549, "step": 3494 }, { "epoch": 0.14484645032947904, "grad_norm": 0.437710702419281, "learning_rate": 4.275974967880973e-06, "loss": 0.7339, "step": 3495 }, { "epoch": 0.14488789423515272, "grad_norm": 0.39932766556739807, "learning_rate": 4.275767748352605e-06, "loss": 0.7107, "step": 3496 }, { "epoch": 0.1449293381408264, "grad_norm": 0.4366492033004761, "learning_rate": 4.275560528824237e-06, "loss": 0.803, "step": 3497 }, { "epoch": 0.14497078204650007, "grad_norm": 0.45354920625686646, 
"learning_rate": 4.275353309295868e-06, "loss": 0.7188, "step": 3498 }, { "epoch": 0.14501222595217372, "grad_norm": 0.4322458803653717, "learning_rate": 4.2751460897675005e-06, "loss": 0.7305, "step": 3499 }, { "epoch": 0.1450536698578474, "grad_norm": 0.3932023048400879, "learning_rate": 4.274938870239131e-06, "loss": 0.7068, "step": 3500 }, { "epoch": 0.14509511376352108, "grad_norm": 0.4517843723297119, "learning_rate": 4.274731650710763e-06, "loss": 0.7703, "step": 3501 }, { "epoch": 0.14513655766919475, "grad_norm": 0.4348665475845337, "learning_rate": 4.2745244311823955e-06, "loss": 0.7231, "step": 3502 }, { "epoch": 0.1451780015748684, "grad_norm": 0.4205609858036041, "learning_rate": 4.274317211654027e-06, "loss": 0.7456, "step": 3503 }, { "epoch": 0.14521944548054208, "grad_norm": 0.42239752411842346, "learning_rate": 4.274109992125658e-06, "loss": 0.7769, "step": 3504 }, { "epoch": 0.14526088938621576, "grad_norm": 0.41269490122795105, "learning_rate": 4.27390277259729e-06, "loss": 0.7185, "step": 3505 }, { "epoch": 0.14530233329188944, "grad_norm": 0.4312281906604767, "learning_rate": 4.273695553068922e-06, "loss": 0.7313, "step": 3506 }, { "epoch": 0.1453437771975631, "grad_norm": 0.4436001777648926, "learning_rate": 4.273488333540553e-06, "loss": 0.7188, "step": 3507 }, { "epoch": 0.14538522110323676, "grad_norm": 0.46300846338272095, "learning_rate": 4.273281114012185e-06, "loss": 0.7646, "step": 3508 }, { "epoch": 0.14542666500891044, "grad_norm": 0.42279136180877686, "learning_rate": 4.273073894483816e-06, "loss": 0.7061, "step": 3509 }, { "epoch": 0.14546810891458412, "grad_norm": 0.4688092768192291, "learning_rate": 4.272866674955448e-06, "loss": 0.7526, "step": 3510 }, { "epoch": 0.1455095528202578, "grad_norm": 0.6708021759986877, "learning_rate": 4.27265945542708e-06, "loss": 0.7153, "step": 3511 }, { "epoch": 0.14555099672593144, "grad_norm": 0.4083500802516937, "learning_rate": 4.272452235898711e-06, "loss": 0.7288, "step": 3512 }, { 
"epoch": 0.14559244063160512, "grad_norm": 0.43831339478492737, "learning_rate": 4.272245016370343e-06, "loss": 0.7676, "step": 3513 }, { "epoch": 0.1456338845372788, "grad_norm": 0.4630539119243622, "learning_rate": 4.272037796841975e-06, "loss": 0.7388, "step": 3514 }, { "epoch": 0.14567532844295247, "grad_norm": 0.4154176414012909, "learning_rate": 4.271830577313606e-06, "loss": 0.6853, "step": 3515 }, { "epoch": 0.14571677234862612, "grad_norm": 0.4656500816345215, "learning_rate": 4.2716233577852375e-06, "loss": 0.7324, "step": 3516 }, { "epoch": 0.1457582162542998, "grad_norm": 0.40697988867759705, "learning_rate": 4.27141613825687e-06, "loss": 0.7178, "step": 3517 }, { "epoch": 0.14579966015997348, "grad_norm": 0.44282448291778564, "learning_rate": 4.271208918728501e-06, "loss": 0.6907, "step": 3518 }, { "epoch": 0.14584110406564715, "grad_norm": 0.40109995007514954, "learning_rate": 4.271001699200133e-06, "loss": 0.6815, "step": 3519 }, { "epoch": 0.14588254797132083, "grad_norm": 0.4698004722595215, "learning_rate": 4.270794479671765e-06, "loss": 0.7817, "step": 3520 }, { "epoch": 0.14592399187699448, "grad_norm": 0.4324088990688324, "learning_rate": 4.270587260143396e-06, "loss": 0.7324, "step": 3521 }, { "epoch": 0.14596543578266816, "grad_norm": 0.4603823125362396, "learning_rate": 4.270380040615028e-06, "loss": 0.741, "step": 3522 }, { "epoch": 0.14600687968834183, "grad_norm": 0.41372841596603394, "learning_rate": 4.27017282108666e-06, "loss": 0.726, "step": 3523 }, { "epoch": 0.1460483235940155, "grad_norm": 0.42065271735191345, "learning_rate": 4.269965601558291e-06, "loss": 0.7506, "step": 3524 }, { "epoch": 0.14608976749968916, "grad_norm": 0.4407220482826233, "learning_rate": 4.2697583820299225e-06, "loss": 0.7522, "step": 3525 }, { "epoch": 0.14613121140536284, "grad_norm": 0.46513113379478455, "learning_rate": 4.269551162501555e-06, "loss": 0.7695, "step": 3526 }, { "epoch": 0.14617265531103651, "grad_norm": 0.4022018313407898, "learning_rate": 
4.269343942973186e-06, "loss": 0.7001, "step": 3527 }, { "epoch": 0.1462140992167102, "grad_norm": 0.4551241397857666, "learning_rate": 4.2691367234448175e-06, "loss": 0.7739, "step": 3528 }, { "epoch": 0.14625554312238384, "grad_norm": 0.4327596127986908, "learning_rate": 4.26892950391645e-06, "loss": 0.723, "step": 3529 }, { "epoch": 0.14629698702805752, "grad_norm": 0.41593167185783386, "learning_rate": 4.268722284388081e-06, "loss": 0.7646, "step": 3530 }, { "epoch": 0.1463384309337312, "grad_norm": 0.4278425872325897, "learning_rate": 4.2685150648597125e-06, "loss": 0.6973, "step": 3531 }, { "epoch": 0.14637987483940487, "grad_norm": 0.43814510107040405, "learning_rate": 4.268307845331344e-06, "loss": 0.7639, "step": 3532 }, { "epoch": 0.14642131874507855, "grad_norm": 0.43373367190361023, "learning_rate": 4.268100625802976e-06, "loss": 0.7402, "step": 3533 }, { "epoch": 0.1464627626507522, "grad_norm": 0.44189921021461487, "learning_rate": 4.2678934062746075e-06, "loss": 0.73, "step": 3534 }, { "epoch": 0.14650420655642588, "grad_norm": 0.4363210201263428, "learning_rate": 4.26768618674624e-06, "loss": 0.7893, "step": 3535 }, { "epoch": 0.14654565046209955, "grad_norm": 0.4836121201515198, "learning_rate": 4.267478967217871e-06, "loss": 0.8115, "step": 3536 }, { "epoch": 0.14658709436777323, "grad_norm": 0.45794326066970825, "learning_rate": 4.2672717476895025e-06, "loss": 0.7864, "step": 3537 }, { "epoch": 0.14662853827344688, "grad_norm": 0.4145175814628601, "learning_rate": 4.267064528161135e-06, "loss": 0.7749, "step": 3538 }, { "epoch": 0.14666998217912056, "grad_norm": 0.4149456024169922, "learning_rate": 4.266857308632766e-06, "loss": 0.6948, "step": 3539 }, { "epoch": 0.14671142608479423, "grad_norm": 0.4114832282066345, "learning_rate": 4.2666500891043975e-06, "loss": 0.7629, "step": 3540 }, { "epoch": 0.1467528699904679, "grad_norm": 0.4136492609977722, "learning_rate": 4.266442869576029e-06, "loss": 0.7, "step": 3541 }, { "epoch": 
0.14679431389614156, "grad_norm": 0.447996586561203, "learning_rate": 4.266235650047661e-06, "loss": 0.7268, "step": 3542 }, { "epoch": 0.14683575780181524, "grad_norm": 0.4522111117839813, "learning_rate": 4.2660284305192925e-06, "loss": 0.7478, "step": 3543 }, { "epoch": 0.1468772017074889, "grad_norm": 0.44529223442077637, "learning_rate": 4.265821210990924e-06, "loss": 0.7354, "step": 3544 }, { "epoch": 0.1469186456131626, "grad_norm": 0.45901861786842346, "learning_rate": 4.265613991462555e-06, "loss": 0.7432, "step": 3545 }, { "epoch": 0.14696008951883627, "grad_norm": 0.4476220905780792, "learning_rate": 4.2654067719341875e-06, "loss": 0.7363, "step": 3546 }, { "epoch": 0.14700153342450992, "grad_norm": 0.4382518231868744, "learning_rate": 4.265199552405819e-06, "loss": 0.7825, "step": 3547 }, { "epoch": 0.1470429773301836, "grad_norm": 0.42589089274406433, "learning_rate": 4.26499233287745e-06, "loss": 0.7344, "step": 3548 }, { "epoch": 0.14708442123585727, "grad_norm": 0.46473634243011475, "learning_rate": 4.2647851133490825e-06, "loss": 0.7041, "step": 3549 }, { "epoch": 0.14712586514153095, "grad_norm": 0.4355294704437256, "learning_rate": 4.264577893820714e-06, "loss": 0.7573, "step": 3550 }, { "epoch": 0.1471673090472046, "grad_norm": 0.429289847612381, "learning_rate": 4.264370674292346e-06, "loss": 0.7227, "step": 3551 }, { "epoch": 0.14720875295287827, "grad_norm": 0.43157416582107544, "learning_rate": 4.264163454763977e-06, "loss": 0.7251, "step": 3552 }, { "epoch": 0.14725019685855195, "grad_norm": 0.4183496832847595, "learning_rate": 4.263956235235609e-06, "loss": 0.7416, "step": 3553 }, { "epoch": 0.14729164076422563, "grad_norm": 0.44607776403427124, "learning_rate": 4.263749015707241e-06, "loss": 0.7109, "step": 3554 }, { "epoch": 0.14733308466989928, "grad_norm": 0.4660865366458893, "learning_rate": 4.2635417961788725e-06, "loss": 0.803, "step": 3555 }, { "epoch": 0.14737452857557296, "grad_norm": 0.42630720138549805, "learning_rate": 
4.263334576650504e-06, "loss": 0.7761, "step": 3556 }, { "epoch": 0.14741597248124663, "grad_norm": 0.41773396730422974, "learning_rate": 4.263127357122135e-06, "loss": 0.7043, "step": 3557 }, { "epoch": 0.1474574163869203, "grad_norm": 0.4292723536491394, "learning_rate": 4.2629201375937675e-06, "loss": 0.7739, "step": 3558 }, { "epoch": 0.147498860292594, "grad_norm": 0.4077301323413849, "learning_rate": 4.262712918065399e-06, "loss": 0.7283, "step": 3559 }, { "epoch": 0.14754030419826764, "grad_norm": 0.4134046137332916, "learning_rate": 4.26250569853703e-06, "loss": 0.7029, "step": 3560 }, { "epoch": 0.1475817481039413, "grad_norm": 0.438385546207428, "learning_rate": 4.262298479008662e-06, "loss": 0.7144, "step": 3561 }, { "epoch": 0.147623192009615, "grad_norm": 0.47375229001045227, "learning_rate": 4.262091259480294e-06, "loss": 0.7732, "step": 3562 }, { "epoch": 0.14766463591528867, "grad_norm": 0.4498146176338196, "learning_rate": 4.261884039951925e-06, "loss": 0.762, "step": 3563 }, { "epoch": 0.14770607982096232, "grad_norm": 0.4412408173084259, "learning_rate": 4.261676820423557e-06, "loss": 0.7686, "step": 3564 }, { "epoch": 0.147747523726636, "grad_norm": 0.39256995916366577, "learning_rate": 4.261469600895189e-06, "loss": 0.6788, "step": 3565 }, { "epoch": 0.14778896763230967, "grad_norm": 0.43233808875083923, "learning_rate": 4.26126238136682e-06, "loss": 0.7018, "step": 3566 }, { "epoch": 0.14783041153798335, "grad_norm": 0.41340142488479614, "learning_rate": 4.2610551618384525e-06, "loss": 0.7051, "step": 3567 }, { "epoch": 0.147871855443657, "grad_norm": 0.451794296503067, "learning_rate": 4.260847942310083e-06, "loss": 0.7329, "step": 3568 }, { "epoch": 0.14791329934933067, "grad_norm": 0.4397566318511963, "learning_rate": 4.260640722781715e-06, "loss": 0.6978, "step": 3569 }, { "epoch": 0.14795474325500435, "grad_norm": 0.41024884581565857, "learning_rate": 4.260433503253347e-06, "loss": 0.6921, "step": 3570 }, { "epoch": 0.14799618716067803, 
"grad_norm": 0.4070987403392792, "learning_rate": 4.260226283724979e-06, "loss": 0.7483, "step": 3571 }, { "epoch": 0.1480376310663517, "grad_norm": 0.4577427804470062, "learning_rate": 4.26001906419661e-06, "loss": 0.6964, "step": 3572 }, { "epoch": 0.14807907497202535, "grad_norm": 0.44319581985473633, "learning_rate": 4.259811844668242e-06, "loss": 0.7606, "step": 3573 }, { "epoch": 0.14812051887769903, "grad_norm": 0.4242476522922516, "learning_rate": 4.259604625139874e-06, "loss": 0.7629, "step": 3574 }, { "epoch": 0.1481619627833727, "grad_norm": 0.43759962916374207, "learning_rate": 4.259397405611505e-06, "loss": 0.7307, "step": 3575 }, { "epoch": 0.14820340668904639, "grad_norm": 0.41502565145492554, "learning_rate": 4.259190186083137e-06, "loss": 0.751, "step": 3576 }, { "epoch": 0.14824485059472003, "grad_norm": 0.4376647472381592, "learning_rate": 4.258982966554768e-06, "loss": 0.7952, "step": 3577 }, { "epoch": 0.1482862945003937, "grad_norm": 0.423888623714447, "learning_rate": 4.2587757470264e-06, "loss": 0.724, "step": 3578 }, { "epoch": 0.1483277384060674, "grad_norm": 0.4060913622379303, "learning_rate": 4.258568527498032e-06, "loss": 0.7397, "step": 3579 }, { "epoch": 0.14836918231174107, "grad_norm": 0.41397637128829956, "learning_rate": 4.258361307969663e-06, "loss": 0.7385, "step": 3580 }, { "epoch": 0.14841062621741474, "grad_norm": 0.42415210604667664, "learning_rate": 4.258154088441295e-06, "loss": 0.7397, "step": 3581 }, { "epoch": 0.1484520701230884, "grad_norm": 0.42841020226478577, "learning_rate": 4.257946868912927e-06, "loss": 0.7101, "step": 3582 }, { "epoch": 0.14849351402876207, "grad_norm": 0.3950115144252777, "learning_rate": 4.257739649384559e-06, "loss": 0.7822, "step": 3583 }, { "epoch": 0.14853495793443575, "grad_norm": 0.41325604915618896, "learning_rate": 4.2575324298561895e-06, "loss": 0.7448, "step": 3584 }, { "epoch": 0.14857640184010942, "grad_norm": 0.4179607331752777, "learning_rate": 4.257325210327822e-06, "loss": 
0.7587, "step": 3585 }, { "epoch": 0.14861784574578307, "grad_norm": 0.44397249817848206, "learning_rate": 4.257117990799453e-06, "loss": 0.7317, "step": 3586 }, { "epoch": 0.14865928965145675, "grad_norm": 0.423122763633728, "learning_rate": 4.256910771271085e-06, "loss": 0.6818, "step": 3587 }, { "epoch": 0.14870073355713043, "grad_norm": 0.43470704555511475, "learning_rate": 4.256703551742717e-06, "loss": 0.702, "step": 3588 }, { "epoch": 0.1487421774628041, "grad_norm": 0.4402065873146057, "learning_rate": 4.256496332214348e-06, "loss": 0.7617, "step": 3589 }, { "epoch": 0.14878362136847775, "grad_norm": 0.44065746665000916, "learning_rate": 4.25628911268598e-06, "loss": 0.8018, "step": 3590 }, { "epoch": 0.14882506527415143, "grad_norm": 0.4382827579975128, "learning_rate": 4.256081893157612e-06, "loss": 0.6772, "step": 3591 }, { "epoch": 0.1488665091798251, "grad_norm": 0.4135058522224426, "learning_rate": 4.255874673629243e-06, "loss": 0.7045, "step": 3592 }, { "epoch": 0.14890795308549878, "grad_norm": 0.40508443117141724, "learning_rate": 4.2556674541008745e-06, "loss": 0.7466, "step": 3593 }, { "epoch": 0.14894939699117246, "grad_norm": 0.3963937759399414, "learning_rate": 4.255460234572507e-06, "loss": 0.7258, "step": 3594 }, { "epoch": 0.1489908408968461, "grad_norm": 0.4512398838996887, "learning_rate": 4.255253015044138e-06, "loss": 0.811, "step": 3595 }, { "epoch": 0.1490322848025198, "grad_norm": 0.3936716318130493, "learning_rate": 4.2550457955157695e-06, "loss": 0.7217, "step": 3596 }, { "epoch": 0.14907372870819346, "grad_norm": 0.43926966190338135, "learning_rate": 4.254838575987401e-06, "loss": 0.77, "step": 3597 }, { "epoch": 0.14911517261386714, "grad_norm": 0.46051591634750366, "learning_rate": 4.254631356459033e-06, "loss": 0.7808, "step": 3598 }, { "epoch": 0.1491566165195408, "grad_norm": 0.40303388237953186, "learning_rate": 4.2544241369306645e-06, "loss": 0.7302, "step": 3599 }, { "epoch": 0.14919806042521447, "grad_norm": 
0.4292672872543335, "learning_rate": 4.254216917402296e-06, "loss": 0.7432, "step": 3600 }, { "epoch": 0.14923950433088815, "grad_norm": 0.439406156539917, "learning_rate": 4.254009697873928e-06, "loss": 0.76, "step": 3601 }, { "epoch": 0.14928094823656182, "grad_norm": 0.44368961453437805, "learning_rate": 4.2538024783455595e-06, "loss": 0.7229, "step": 3602 }, { "epoch": 0.14932239214223547, "grad_norm": 0.3987877666950226, "learning_rate": 4.253595258817192e-06, "loss": 0.7163, "step": 3603 }, { "epoch": 0.14936383604790915, "grad_norm": 0.430250346660614, "learning_rate": 4.253388039288823e-06, "loss": 0.7388, "step": 3604 }, { "epoch": 0.14940527995358283, "grad_norm": 0.46438106894493103, "learning_rate": 4.2531808197604545e-06, "loss": 0.7856, "step": 3605 }, { "epoch": 0.1494467238592565, "grad_norm": 0.46950626373291016, "learning_rate": 4.252973600232087e-06, "loss": 0.6853, "step": 3606 }, { "epoch": 0.14948816776493018, "grad_norm": 0.4444265365600586, "learning_rate": 4.252766380703718e-06, "loss": 0.7473, "step": 3607 }, { "epoch": 0.14952961167060383, "grad_norm": 0.40145230293273926, "learning_rate": 4.2525591611753495e-06, "loss": 0.7161, "step": 3608 }, { "epoch": 0.1495710555762775, "grad_norm": 0.4515431821346283, "learning_rate": 4.252351941646981e-06, "loss": 0.73, "step": 3609 }, { "epoch": 0.14961249948195118, "grad_norm": 0.4358542263507843, "learning_rate": 4.252144722118613e-06, "loss": 0.6992, "step": 3610 }, { "epoch": 0.14965394338762486, "grad_norm": 0.4030960500240326, "learning_rate": 4.2519375025902445e-06, "loss": 0.6978, "step": 3611 }, { "epoch": 0.1496953872932985, "grad_norm": 0.4064294099807739, "learning_rate": 4.251730283061876e-06, "loss": 0.7324, "step": 3612 }, { "epoch": 0.1497368311989722, "grad_norm": 0.39400628209114075, "learning_rate": 4.251523063533507e-06, "loss": 0.7573, "step": 3613 }, { "epoch": 0.14977827510464586, "grad_norm": 0.399495005607605, "learning_rate": 4.2513158440051395e-06, "loss": 0.7009, 
"step": 3614 }, { "epoch": 0.14981971901031954, "grad_norm": 0.4368586242198944, "learning_rate": 4.251108624476771e-06, "loss": 0.7644, "step": 3615 }, { "epoch": 0.1498611629159932, "grad_norm": 0.3966556489467621, "learning_rate": 4.250901404948402e-06, "loss": 0.6858, "step": 3616 }, { "epoch": 0.14990260682166687, "grad_norm": 0.43613311648368835, "learning_rate": 4.2506941854200345e-06, "loss": 0.7029, "step": 3617 }, { "epoch": 0.14994405072734054, "grad_norm": 0.43202292919158936, "learning_rate": 4.250486965891666e-06, "loss": 0.7192, "step": 3618 }, { "epoch": 0.14998549463301422, "grad_norm": 0.44344326853752136, "learning_rate": 4.250279746363298e-06, "loss": 0.7344, "step": 3619 }, { "epoch": 0.1500269385386879, "grad_norm": 0.39629971981048584, "learning_rate": 4.250072526834929e-06, "loss": 0.6735, "step": 3620 }, { "epoch": 0.15006838244436155, "grad_norm": 0.40750643610954285, "learning_rate": 4.249865307306561e-06, "loss": 0.7263, "step": 3621 }, { "epoch": 0.15010982635003522, "grad_norm": 0.48804667592048645, "learning_rate": 4.249658087778192e-06, "loss": 0.7789, "step": 3622 }, { "epoch": 0.1501512702557089, "grad_norm": 0.5001088976860046, "learning_rate": 4.2494508682498245e-06, "loss": 0.7737, "step": 3623 }, { "epoch": 0.15019271416138258, "grad_norm": 0.39449793100357056, "learning_rate": 4.249243648721456e-06, "loss": 0.7244, "step": 3624 }, { "epoch": 0.15023415806705623, "grad_norm": 0.4193180501461029, "learning_rate": 4.249036429193087e-06, "loss": 0.7214, "step": 3625 }, { "epoch": 0.1502756019727299, "grad_norm": 0.3914564549922943, "learning_rate": 4.2488292096647195e-06, "loss": 0.6865, "step": 3626 }, { "epoch": 0.15031704587840358, "grad_norm": 0.3871206045150757, "learning_rate": 4.248621990136351e-06, "loss": 0.6923, "step": 3627 }, { "epoch": 0.15035848978407726, "grad_norm": 0.4146389663219452, "learning_rate": 4.248414770607982e-06, "loss": 0.7463, "step": 3628 }, { "epoch": 0.1503999336897509, "grad_norm": 
0.38069748878479004, "learning_rate": 4.248207551079614e-06, "loss": 0.6357, "step": 3629 }, { "epoch": 0.15044137759542459, "grad_norm": 0.4219701886177063, "learning_rate": 4.248000331551246e-06, "loss": 0.7177, "step": 3630 }, { "epoch": 0.15048282150109826, "grad_norm": 0.4784638583660126, "learning_rate": 4.247793112022877e-06, "loss": 0.7349, "step": 3631 }, { "epoch": 0.15052426540677194, "grad_norm": 0.44536247849464417, "learning_rate": 4.247585892494509e-06, "loss": 0.7607, "step": 3632 }, { "epoch": 0.15056570931244562, "grad_norm": 0.4355635941028595, "learning_rate": 4.247378672966141e-06, "loss": 0.7251, "step": 3633 }, { "epoch": 0.15060715321811927, "grad_norm": 0.44101041555404663, "learning_rate": 4.247171453437772e-06, "loss": 0.7249, "step": 3634 }, { "epoch": 0.15064859712379294, "grad_norm": 0.40255945920944214, "learning_rate": 4.2469642339094046e-06, "loss": 0.7219, "step": 3635 }, { "epoch": 0.15069004102946662, "grad_norm": 0.4074898362159729, "learning_rate": 4.246757014381035e-06, "loss": 0.7041, "step": 3636 }, { "epoch": 0.1507314849351403, "grad_norm": 0.4288147985935211, "learning_rate": 4.246549794852667e-06, "loss": 0.7053, "step": 3637 }, { "epoch": 0.15077292884081395, "grad_norm": 0.4557374119758606, "learning_rate": 4.246342575324299e-06, "loss": 0.7749, "step": 3638 }, { "epoch": 0.15081437274648762, "grad_norm": 0.43249452114105225, "learning_rate": 4.246135355795931e-06, "loss": 0.7347, "step": 3639 }, { "epoch": 0.1508558166521613, "grad_norm": 0.4268703758716583, "learning_rate": 4.245928136267562e-06, "loss": 0.7239, "step": 3640 }, { "epoch": 0.15089726055783498, "grad_norm": 0.44752857089042664, "learning_rate": 4.245720916739194e-06, "loss": 0.7336, "step": 3641 }, { "epoch": 0.15093870446350863, "grad_norm": 0.4321291148662567, "learning_rate": 4.245513697210826e-06, "loss": 0.7771, "step": 3642 }, { "epoch": 0.1509801483691823, "grad_norm": 0.4716063141822815, "learning_rate": 4.245306477682457e-06, "loss": 0.7715, 
"step": 3643 }, { "epoch": 0.15102159227485598, "grad_norm": 0.458738774061203, "learning_rate": 4.245099258154089e-06, "loss": 0.7217, "step": 3644 }, { "epoch": 0.15106303618052966, "grad_norm": 0.4909980297088623, "learning_rate": 4.24489203862572e-06, "loss": 0.7834, "step": 3645 }, { "epoch": 0.15110448008620334, "grad_norm": 0.43763333559036255, "learning_rate": 4.244684819097352e-06, "loss": 0.7527, "step": 3646 }, { "epoch": 0.15114592399187698, "grad_norm": 0.42772412300109863, "learning_rate": 4.244477599568984e-06, "loss": 0.7329, "step": 3647 }, { "epoch": 0.15118736789755066, "grad_norm": 0.40519919991493225, "learning_rate": 4.244270380040615e-06, "loss": 0.6915, "step": 3648 }, { "epoch": 0.15122881180322434, "grad_norm": 0.4585369825363159, "learning_rate": 4.2440631605122465e-06, "loss": 0.7924, "step": 3649 }, { "epoch": 0.15127025570889802, "grad_norm": 0.43139317631721497, "learning_rate": 4.243855940983879e-06, "loss": 0.7683, "step": 3650 }, { "epoch": 0.15131169961457167, "grad_norm": 0.41319629549980164, "learning_rate": 4.243648721455511e-06, "loss": 0.7083, "step": 3651 }, { "epoch": 0.15135314352024534, "grad_norm": 0.3986436128616333, "learning_rate": 4.2434415019271415e-06, "loss": 0.6721, "step": 3652 }, { "epoch": 0.15139458742591902, "grad_norm": 0.47634583711624146, "learning_rate": 4.243234282398774e-06, "loss": 0.7754, "step": 3653 }, { "epoch": 0.1514360313315927, "grad_norm": 0.46902167797088623, "learning_rate": 4.243027062870405e-06, "loss": 0.7502, "step": 3654 }, { "epoch": 0.15147747523726637, "grad_norm": 0.4037449061870575, "learning_rate": 4.242819843342037e-06, "loss": 0.7, "step": 3655 }, { "epoch": 0.15151891914294002, "grad_norm": 0.43108341097831726, "learning_rate": 4.242612623813669e-06, "loss": 0.7468, "step": 3656 }, { "epoch": 0.1515603630486137, "grad_norm": 0.45734941959381104, "learning_rate": 4.2424054042853e-06, "loss": 0.7212, "step": 3657 }, { "epoch": 0.15160180695428738, "grad_norm": 
0.47673550248146057, "learning_rate": 4.2421981847569315e-06, "loss": 0.7717, "step": 3658 }, { "epoch": 0.15164325085996105, "grad_norm": 0.4326879382133484, "learning_rate": 4.241990965228564e-06, "loss": 0.7538, "step": 3659 }, { "epoch": 0.1516846947656347, "grad_norm": 0.4235236942768097, "learning_rate": 4.241783745700195e-06, "loss": 0.7549, "step": 3660 }, { "epoch": 0.15172613867130838, "grad_norm": 0.41055363416671753, "learning_rate": 4.2415765261718265e-06, "loss": 0.7318, "step": 3661 }, { "epoch": 0.15176758257698206, "grad_norm": 0.46266499161720276, "learning_rate": 4.241369306643459e-06, "loss": 0.7629, "step": 3662 }, { "epoch": 0.15180902648265573, "grad_norm": 0.42659202218055725, "learning_rate": 4.24116208711509e-06, "loss": 0.7251, "step": 3663 }, { "epoch": 0.15185047038832938, "grad_norm": 0.427820086479187, "learning_rate": 4.2409548675867215e-06, "loss": 0.7002, "step": 3664 }, { "epoch": 0.15189191429400306, "grad_norm": 0.4459049105644226, "learning_rate": 4.240747648058353e-06, "loss": 0.7202, "step": 3665 }, { "epoch": 0.15193335819967674, "grad_norm": 0.440362811088562, "learning_rate": 4.240540428529985e-06, "loss": 0.7164, "step": 3666 }, { "epoch": 0.15197480210535042, "grad_norm": 0.4040077030658722, "learning_rate": 4.2403332090016165e-06, "loss": 0.7285, "step": 3667 }, { "epoch": 0.1520162460110241, "grad_norm": 0.4361995458602905, "learning_rate": 4.240125989473248e-06, "loss": 0.7588, "step": 3668 }, { "epoch": 0.15205768991669774, "grad_norm": 0.41343316435813904, "learning_rate": 4.23991876994488e-06, "loss": 0.7262, "step": 3669 }, { "epoch": 0.15209913382237142, "grad_norm": 0.4115023612976074, "learning_rate": 4.2397115504165115e-06, "loss": 0.6897, "step": 3670 }, { "epoch": 0.1521405777280451, "grad_norm": 0.42587971687316895, "learning_rate": 4.239504330888144e-06, "loss": 0.7446, "step": 3671 }, { "epoch": 0.15218202163371877, "grad_norm": 0.44701018929481506, "learning_rate": 4.239297111359775e-06, "loss": 0.7031, 
"step": 3672 }, { "epoch": 0.15222346553939242, "grad_norm": 0.44883471727371216, "learning_rate": 4.2390898918314065e-06, "loss": 0.6941, "step": 3673 }, { "epoch": 0.1522649094450661, "grad_norm": 0.44280728697776794, "learning_rate": 4.238882672303038e-06, "loss": 0.7102, "step": 3674 }, { "epoch": 0.15230635335073978, "grad_norm": 0.4050421416759491, "learning_rate": 4.23867545277467e-06, "loss": 0.7335, "step": 3675 }, { "epoch": 0.15234779725641345, "grad_norm": 0.40624454617500305, "learning_rate": 4.2384682332463015e-06, "loss": 0.7367, "step": 3676 }, { "epoch": 0.1523892411620871, "grad_norm": 0.4146789312362671, "learning_rate": 4.238261013717933e-06, "loss": 0.707, "step": 3677 }, { "epoch": 0.15243068506776078, "grad_norm": 0.44670915603637695, "learning_rate": 4.238053794189565e-06, "loss": 0.7158, "step": 3678 }, { "epoch": 0.15247212897343446, "grad_norm": 0.46492907404899597, "learning_rate": 4.2378465746611965e-06, "loss": 0.7979, "step": 3679 }, { "epoch": 0.15251357287910813, "grad_norm": 0.40393638610839844, "learning_rate": 4.237639355132828e-06, "loss": 0.6559, "step": 3680 }, { "epoch": 0.1525550167847818, "grad_norm": 0.46904534101486206, "learning_rate": 4.237432135604459e-06, "loss": 0.7677, "step": 3681 }, { "epoch": 0.15259646069045546, "grad_norm": 0.41780877113342285, "learning_rate": 4.2372249160760915e-06, "loss": 0.7354, "step": 3682 }, { "epoch": 0.15263790459612914, "grad_norm": 0.42746788263320923, "learning_rate": 4.237017696547723e-06, "loss": 0.7036, "step": 3683 }, { "epoch": 0.15267934850180281, "grad_norm": 0.45559629797935486, "learning_rate": 4.236810477019354e-06, "loss": 0.7527, "step": 3684 }, { "epoch": 0.1527207924074765, "grad_norm": 0.5866517424583435, "learning_rate": 4.2366032574909865e-06, "loss": 0.7939, "step": 3685 }, { "epoch": 0.15276223631315014, "grad_norm": 0.42547816038131714, "learning_rate": 4.236396037962618e-06, "loss": 0.7096, "step": 3686 }, { "epoch": 0.15280368021882382, "grad_norm": 
0.5292379260063171, "learning_rate": 4.23618881843425e-06, "loss": 0.8098, "step": 3687 }, { "epoch": 0.1528451241244975, "grad_norm": 0.45485520362854004, "learning_rate": 4.235981598905881e-06, "loss": 0.7356, "step": 3688 }, { "epoch": 0.15288656803017117, "grad_norm": 0.4807022213935852, "learning_rate": 4.235774379377513e-06, "loss": 0.728, "step": 3689 }, { "epoch": 0.15292801193584482, "grad_norm": 0.44598907232284546, "learning_rate": 4.235567159849144e-06, "loss": 0.8257, "step": 3690 }, { "epoch": 0.1529694558415185, "grad_norm": 0.43914106488227844, "learning_rate": 4.2353599403207765e-06, "loss": 0.7654, "step": 3691 }, { "epoch": 0.15301089974719218, "grad_norm": 0.4122999906539917, "learning_rate": 4.235152720792408e-06, "loss": 0.7502, "step": 3692 }, { "epoch": 0.15305234365286585, "grad_norm": 0.4320218563079834, "learning_rate": 4.234945501264039e-06, "loss": 0.7546, "step": 3693 }, { "epoch": 0.15309378755853953, "grad_norm": 0.4195154309272766, "learning_rate": 4.2347382817356716e-06, "loss": 0.7177, "step": 3694 }, { "epoch": 0.15313523146421318, "grad_norm": 0.4758714735507965, "learning_rate": 4.234531062207303e-06, "loss": 0.7284, "step": 3695 }, { "epoch": 0.15317667536988686, "grad_norm": 0.4443349242210388, "learning_rate": 4.234323842678934e-06, "loss": 0.7363, "step": 3696 }, { "epoch": 0.15321811927556053, "grad_norm": 0.4219074249267578, "learning_rate": 4.234116623150566e-06, "loss": 0.7231, "step": 3697 }, { "epoch": 0.1532595631812342, "grad_norm": 0.4744405746459961, "learning_rate": 4.233909403622198e-06, "loss": 0.8752, "step": 3698 }, { "epoch": 0.15330100708690786, "grad_norm": 0.45451921224594116, "learning_rate": 4.233702184093829e-06, "loss": 0.811, "step": 3699 }, { "epoch": 0.15334245099258154, "grad_norm": 0.4308885633945465, "learning_rate": 4.233494964565461e-06, "loss": 0.7266, "step": 3700 }, { "epoch": 0.1533838948982552, "grad_norm": 0.4299415647983551, "learning_rate": 4.233287745037092e-06, "loss": 0.7097, 
"step": 3701 }, { "epoch": 0.1534253388039289, "grad_norm": 0.43409693241119385, "learning_rate": 4.233080525508724e-06, "loss": 0.707, "step": 3702 }, { "epoch": 0.15346678270960254, "grad_norm": 0.4016571640968323, "learning_rate": 4.2328733059803566e-06, "loss": 0.7205, "step": 3703 }, { "epoch": 0.15350822661527622, "grad_norm": 0.43497949838638306, "learning_rate": 4.232666086451987e-06, "loss": 0.7119, "step": 3704 }, { "epoch": 0.1535496705209499, "grad_norm": 0.45771169662475586, "learning_rate": 4.232458866923619e-06, "loss": 0.7278, "step": 3705 }, { "epoch": 0.15359111442662357, "grad_norm": 0.4203149080276489, "learning_rate": 4.232251647395251e-06, "loss": 0.668, "step": 3706 }, { "epoch": 0.15363255833229725, "grad_norm": 0.4154300093650818, "learning_rate": 4.232044427866883e-06, "loss": 0.741, "step": 3707 }, { "epoch": 0.1536740022379709, "grad_norm": 0.4568198025226593, "learning_rate": 4.231837208338514e-06, "loss": 0.7621, "step": 3708 }, { "epoch": 0.15371544614364457, "grad_norm": 0.4174901247024536, "learning_rate": 4.231629988810146e-06, "loss": 0.7434, "step": 3709 }, { "epoch": 0.15375689004931825, "grad_norm": 0.40741077065467834, "learning_rate": 4.231422769281777e-06, "loss": 0.715, "step": 3710 }, { "epoch": 0.15379833395499193, "grad_norm": 0.43890616297721863, "learning_rate": 4.231215549753409e-06, "loss": 0.7468, "step": 3711 }, { "epoch": 0.15383977786066558, "grad_norm": 0.5174891352653503, "learning_rate": 4.231008330225041e-06, "loss": 0.7142, "step": 3712 }, { "epoch": 0.15388122176633925, "grad_norm": 0.41212040185928345, "learning_rate": 4.230801110696672e-06, "loss": 0.6943, "step": 3713 }, { "epoch": 0.15392266567201293, "grad_norm": 0.43226858973503113, "learning_rate": 4.230593891168304e-06, "loss": 0.7426, "step": 3714 }, { "epoch": 0.1539641095776866, "grad_norm": 0.4193629324436188, "learning_rate": 4.230386671639936e-06, "loss": 0.6771, "step": 3715 }, { "epoch": 0.15400555348336029, "grad_norm": 0.4190411865711212, 
"learning_rate": 4.230179452111567e-06, "loss": 0.6957, "step": 3716 }, { "epoch": 0.15404699738903394, "grad_norm": 0.41139307618141174, "learning_rate": 4.2299722325831985e-06, "loss": 0.7343, "step": 3717 }, { "epoch": 0.1540884412947076, "grad_norm": 0.42168131470680237, "learning_rate": 4.229765013054831e-06, "loss": 0.7883, "step": 3718 }, { "epoch": 0.1541298852003813, "grad_norm": 0.4320341646671295, "learning_rate": 4.229557793526462e-06, "loss": 0.7852, "step": 3719 }, { "epoch": 0.15417132910605497, "grad_norm": 0.42037323117256165, "learning_rate": 4.2293505739980935e-06, "loss": 0.7559, "step": 3720 }, { "epoch": 0.15421277301172862, "grad_norm": 0.42893674969673157, "learning_rate": 4.229143354469726e-06, "loss": 0.7651, "step": 3721 }, { "epoch": 0.1542542169174023, "grad_norm": 0.45412829518318176, "learning_rate": 4.228936134941357e-06, "loss": 0.7717, "step": 3722 }, { "epoch": 0.15429566082307597, "grad_norm": 0.43226689100265503, "learning_rate": 4.228728915412989e-06, "loss": 0.7659, "step": 3723 }, { "epoch": 0.15433710472874965, "grad_norm": 0.4245906472206116, "learning_rate": 4.228521695884621e-06, "loss": 0.7463, "step": 3724 }, { "epoch": 0.1543785486344233, "grad_norm": 0.43555760383605957, "learning_rate": 4.228314476356252e-06, "loss": 0.7593, "step": 3725 }, { "epoch": 0.15441999254009697, "grad_norm": 0.42883971333503723, "learning_rate": 4.2281072568278835e-06, "loss": 0.6471, "step": 3726 }, { "epoch": 0.15446143644577065, "grad_norm": 0.4438154697418213, "learning_rate": 4.227900037299516e-06, "loss": 0.7188, "step": 3727 }, { "epoch": 0.15450288035144433, "grad_norm": 0.4603244960308075, "learning_rate": 4.227692817771147e-06, "loss": 0.7139, "step": 3728 }, { "epoch": 0.154544324257118, "grad_norm": 0.4458857476711273, "learning_rate": 4.2274855982427785e-06, "loss": 0.7078, "step": 3729 }, { "epoch": 0.15458576816279165, "grad_norm": 0.4062007665634155, "learning_rate": 4.227278378714411e-06, "loss": 0.6869, "step": 3730 }, { 
"epoch": 0.15462721206846533, "grad_norm": 0.47022193670272827, "learning_rate": 4.227071159186042e-06, "loss": 0.7849, "step": 3731 }, { "epoch": 0.154668655974139, "grad_norm": 0.4200233221054077, "learning_rate": 4.2268639396576735e-06, "loss": 0.739, "step": 3732 }, { "epoch": 0.15471009987981268, "grad_norm": 0.4448118805885315, "learning_rate": 4.226656720129305e-06, "loss": 0.7271, "step": 3733 }, { "epoch": 0.15475154378548633, "grad_norm": 0.41340065002441406, "learning_rate": 4.226449500600937e-06, "loss": 0.7073, "step": 3734 }, { "epoch": 0.15479298769116, "grad_norm": 0.4488571882247925, "learning_rate": 4.2262422810725685e-06, "loss": 0.748, "step": 3735 }, { "epoch": 0.1548344315968337, "grad_norm": 0.4039192199707031, "learning_rate": 4.2260350615442e-06, "loss": 0.6909, "step": 3736 }, { "epoch": 0.15487587550250737, "grad_norm": 0.4350754916667938, "learning_rate": 4.225827842015832e-06, "loss": 0.7705, "step": 3737 }, { "epoch": 0.15491731940818101, "grad_norm": 0.44145551323890686, "learning_rate": 4.2256206224874635e-06, "loss": 0.7205, "step": 3738 }, { "epoch": 0.1549587633138547, "grad_norm": 0.39144453406333923, "learning_rate": 4.225413402959096e-06, "loss": 0.731, "step": 3739 }, { "epoch": 0.15500020721952837, "grad_norm": 0.43198150396347046, "learning_rate": 4.225206183430727e-06, "loss": 0.7961, "step": 3740 }, { "epoch": 0.15504165112520205, "grad_norm": 0.4303108751773834, "learning_rate": 4.2249989639023585e-06, "loss": 0.7178, "step": 3741 }, { "epoch": 0.15508309503087572, "grad_norm": 0.42614153027534485, "learning_rate": 4.22479174437399e-06, "loss": 0.7305, "step": 3742 }, { "epoch": 0.15512453893654937, "grad_norm": 0.4599325656890869, "learning_rate": 4.224584524845622e-06, "loss": 0.728, "step": 3743 }, { "epoch": 0.15516598284222305, "grad_norm": 0.4178580343723297, "learning_rate": 4.2243773053172535e-06, "loss": 0.7483, "step": 3744 }, { "epoch": 0.15520742674789673, "grad_norm": 0.43739956617355347, "learning_rate": 
4.224170085788885e-06, "loss": 0.731, "step": 3745 }, { "epoch": 0.1552488706535704, "grad_norm": 0.424846887588501, "learning_rate": 4.223962866260517e-06, "loss": 0.7227, "step": 3746 }, { "epoch": 0.15529031455924405, "grad_norm": 0.42290574312210083, "learning_rate": 4.2237556467321485e-06, "loss": 0.7251, "step": 3747 }, { "epoch": 0.15533175846491773, "grad_norm": 0.4364088177680969, "learning_rate": 4.22354842720378e-06, "loss": 0.7498, "step": 3748 }, { "epoch": 0.1553732023705914, "grad_norm": 0.39693793654441833, "learning_rate": 4.223341207675411e-06, "loss": 0.6687, "step": 3749 }, { "epoch": 0.15541464627626508, "grad_norm": 0.46199992299079895, "learning_rate": 4.2231339881470435e-06, "loss": 0.761, "step": 3750 }, { "epoch": 0.15545609018193873, "grad_norm": 0.43053698539733887, "learning_rate": 4.222926768618675e-06, "loss": 0.7136, "step": 3751 }, { "epoch": 0.1554975340876124, "grad_norm": 0.4421553909778595, "learning_rate": 4.222719549090306e-06, "loss": 0.7683, "step": 3752 }, { "epoch": 0.1555389779932861, "grad_norm": 0.45947518944740295, "learning_rate": 4.222512329561938e-06, "loss": 0.7603, "step": 3753 }, { "epoch": 0.15558042189895976, "grad_norm": 0.4375973343849182, "learning_rate": 4.22230511003357e-06, "loss": 0.756, "step": 3754 }, { "epoch": 0.15562186580463344, "grad_norm": 0.4328485131263733, "learning_rate": 4.222097890505202e-06, "loss": 0.7197, "step": 3755 }, { "epoch": 0.1556633097103071, "grad_norm": 0.4104398787021637, "learning_rate": 4.2218906709768336e-06, "loss": 0.707, "step": 3756 }, { "epoch": 0.15570475361598077, "grad_norm": 0.439996600151062, "learning_rate": 4.221683451448465e-06, "loss": 0.7365, "step": 3757 }, { "epoch": 0.15574619752165444, "grad_norm": 0.42545443773269653, "learning_rate": 4.221476231920096e-06, "loss": 0.7856, "step": 3758 }, { "epoch": 0.15578764142732812, "grad_norm": 0.43424174189567566, "learning_rate": 4.2212690123917286e-06, "loss": 0.7655, "step": 3759 }, { "epoch": 
0.15582908533300177, "grad_norm": 0.4118076264858246, "learning_rate": 4.22106179286336e-06, "loss": 0.7544, "step": 3760 }, { "epoch": 0.15587052923867545, "grad_norm": 0.42083072662353516, "learning_rate": 4.220854573334991e-06, "loss": 0.7202, "step": 3761 }, { "epoch": 0.15591197314434913, "grad_norm": 0.44824084639549255, "learning_rate": 4.220647353806623e-06, "loss": 0.6895, "step": 3762 }, { "epoch": 0.1559534170500228, "grad_norm": 0.4726134240627289, "learning_rate": 4.220440134278255e-06, "loss": 0.7805, "step": 3763 }, { "epoch": 0.15599486095569645, "grad_norm": 0.43774133920669556, "learning_rate": 4.220232914749886e-06, "loss": 0.7761, "step": 3764 }, { "epoch": 0.15603630486137013, "grad_norm": 0.46876615285873413, "learning_rate": 4.220025695221518e-06, "loss": 0.7654, "step": 3765 }, { "epoch": 0.1560777487670438, "grad_norm": 0.4385366141796112, "learning_rate": 4.21981847569315e-06, "loss": 0.6725, "step": 3766 }, { "epoch": 0.15611919267271748, "grad_norm": 0.45882877707481384, "learning_rate": 4.219611256164781e-06, "loss": 0.7383, "step": 3767 }, { "epoch": 0.15616063657839116, "grad_norm": 0.4636717438697815, "learning_rate": 4.219404036636413e-06, "loss": 0.7627, "step": 3768 }, { "epoch": 0.1562020804840648, "grad_norm": 0.41499027609825134, "learning_rate": 4.219196817108044e-06, "loss": 0.7092, "step": 3769 }, { "epoch": 0.1562435243897385, "grad_norm": 0.4373124837875366, "learning_rate": 4.218989597579676e-06, "loss": 0.7122, "step": 3770 }, { "epoch": 0.15628496829541216, "grad_norm": 0.4432714879512787, "learning_rate": 4.218782378051308e-06, "loss": 0.7781, "step": 3771 }, { "epoch": 0.15632641220108584, "grad_norm": 0.4590091407299042, "learning_rate": 4.218575158522939e-06, "loss": 0.7834, "step": 3772 }, { "epoch": 0.1563678561067595, "grad_norm": 0.43703097105026245, "learning_rate": 4.218367938994571e-06, "loss": 0.7715, "step": 3773 }, { "epoch": 0.15640930001243317, "grad_norm": 0.45799267292022705, "learning_rate": 
4.218160719466203e-06, "loss": 0.7039, "step": 3774 }, { "epoch": 0.15645074391810684, "grad_norm": 0.40787288546562195, "learning_rate": 4.217953499937835e-06, "loss": 0.7441, "step": 3775 }, { "epoch": 0.15649218782378052, "grad_norm": 0.4136776924133301, "learning_rate": 4.217746280409466e-06, "loss": 0.7788, "step": 3776 }, { "epoch": 0.15653363172945417, "grad_norm": 0.4273936450481415, "learning_rate": 4.217539060881098e-06, "loss": 0.7148, "step": 3777 }, { "epoch": 0.15657507563512785, "grad_norm": 0.4586282968521118, "learning_rate": 4.217331841352729e-06, "loss": 0.8235, "step": 3778 }, { "epoch": 0.15661651954080152, "grad_norm": 0.40617039799690247, "learning_rate": 4.217124621824361e-06, "loss": 0.7317, "step": 3779 }, { "epoch": 0.1566579634464752, "grad_norm": 0.45033735036849976, "learning_rate": 4.216917402295993e-06, "loss": 0.7651, "step": 3780 }, { "epoch": 0.15669940735214888, "grad_norm": 0.42508837580680847, "learning_rate": 4.216710182767624e-06, "loss": 0.7024, "step": 3781 }, { "epoch": 0.15674085125782253, "grad_norm": 0.41419878602027893, "learning_rate": 4.216502963239256e-06, "loss": 0.7351, "step": 3782 }, { "epoch": 0.1567822951634962, "grad_norm": 0.46937480568885803, "learning_rate": 4.216295743710888e-06, "loss": 0.741, "step": 3783 }, { "epoch": 0.15682373906916988, "grad_norm": 0.5076144933700562, "learning_rate": 4.216088524182519e-06, "loss": 0.8005, "step": 3784 }, { "epoch": 0.15686518297484356, "grad_norm": 0.43180370330810547, "learning_rate": 4.2158813046541505e-06, "loss": 0.6908, "step": 3785 }, { "epoch": 0.1569066268805172, "grad_norm": 0.43027907609939575, "learning_rate": 4.215674085125783e-06, "loss": 0.7168, "step": 3786 }, { "epoch": 0.15694807078619089, "grad_norm": 0.4288286864757538, "learning_rate": 4.215466865597414e-06, "loss": 0.756, "step": 3787 }, { "epoch": 0.15698951469186456, "grad_norm": 0.43886905908584595, "learning_rate": 4.2152596460690455e-06, "loss": 0.7681, "step": 3788 }, { "epoch": 
0.15703095859753824, "grad_norm": 0.45099616050720215, "learning_rate": 4.215052426540678e-06, "loss": 0.7822, "step": 3789 }, { "epoch": 0.15707240250321192, "grad_norm": 0.4101656675338745, "learning_rate": 4.214845207012309e-06, "loss": 0.7307, "step": 3790 }, { "epoch": 0.15711384640888557, "grad_norm": 0.460316926240921, "learning_rate": 4.214637987483941e-06, "loss": 0.7715, "step": 3791 }, { "epoch": 0.15715529031455924, "grad_norm": 0.42900216579437256, "learning_rate": 4.214430767955573e-06, "loss": 0.7971, "step": 3792 }, { "epoch": 0.15719673422023292, "grad_norm": 0.4359113574028015, "learning_rate": 4.214223548427204e-06, "loss": 0.7041, "step": 3793 }, { "epoch": 0.1572381781259066, "grad_norm": 0.4067431092262268, "learning_rate": 4.2140163288988355e-06, "loss": 0.738, "step": 3794 }, { "epoch": 0.15727962203158025, "grad_norm": 0.4477531611919403, "learning_rate": 4.213809109370468e-06, "loss": 0.7571, "step": 3795 }, { "epoch": 0.15732106593725392, "grad_norm": 0.39705902338027954, "learning_rate": 4.213601889842099e-06, "loss": 0.7002, "step": 3796 }, { "epoch": 0.1573625098429276, "grad_norm": 0.47838208079338074, "learning_rate": 4.2133946703137305e-06, "loss": 0.7654, "step": 3797 }, { "epoch": 0.15740395374860128, "grad_norm": 0.41748538613319397, "learning_rate": 4.213187450785363e-06, "loss": 0.72, "step": 3798 }, { "epoch": 0.15744539765427493, "grad_norm": 0.40485039353370667, "learning_rate": 4.212980231256994e-06, "loss": 0.7607, "step": 3799 }, { "epoch": 0.1574868415599486, "grad_norm": 0.4300915002822876, "learning_rate": 4.2127730117286255e-06, "loss": 0.7488, "step": 3800 }, { "epoch": 0.15752828546562228, "grad_norm": 0.38883349299430847, "learning_rate": 4.212565792200257e-06, "loss": 0.714, "step": 3801 }, { "epoch": 0.15756972937129596, "grad_norm": 0.406230092048645, "learning_rate": 4.212358572671889e-06, "loss": 0.7583, "step": 3802 }, { "epoch": 0.15761117327696963, "grad_norm": 0.4403274655342102, "learning_rate": 
4.2121513531435205e-06, "loss": 0.7842, "step": 3803 }, { "epoch": 0.15765261718264328, "grad_norm": 0.42502692341804504, "learning_rate": 4.211944133615152e-06, "loss": 0.7688, "step": 3804 }, { "epoch": 0.15769406108831696, "grad_norm": 0.43706291913986206, "learning_rate": 4.211736914086783e-06, "loss": 0.8064, "step": 3805 }, { "epoch": 0.15773550499399064, "grad_norm": 0.4406483769416809, "learning_rate": 4.2115296945584155e-06, "loss": 0.7446, "step": 3806 }, { "epoch": 0.15777694889966432, "grad_norm": 0.4406915605068207, "learning_rate": 4.211322475030048e-06, "loss": 0.7148, "step": 3807 }, { "epoch": 0.15781839280533796, "grad_norm": 0.40346699953079224, "learning_rate": 4.211115255501679e-06, "loss": 0.7522, "step": 3808 }, { "epoch": 0.15785983671101164, "grad_norm": 0.4076678454875946, "learning_rate": 4.2109080359733105e-06, "loss": 0.7419, "step": 3809 }, { "epoch": 0.15790128061668532, "grad_norm": 0.4552779495716095, "learning_rate": 4.210700816444942e-06, "loss": 0.7433, "step": 3810 }, { "epoch": 0.157942724522359, "grad_norm": 0.4086398184299469, "learning_rate": 4.210493596916574e-06, "loss": 0.7224, "step": 3811 }, { "epoch": 0.15798416842803265, "grad_norm": 0.43320825695991516, "learning_rate": 4.2102863773882056e-06, "loss": 0.7432, "step": 3812 }, { "epoch": 0.15802561233370632, "grad_norm": 0.4551692008972168, "learning_rate": 4.210079157859837e-06, "loss": 0.7246, "step": 3813 }, { "epoch": 0.15806705623938, "grad_norm": 0.4776467978954315, "learning_rate": 4.209871938331468e-06, "loss": 0.7891, "step": 3814 }, { "epoch": 0.15810850014505368, "grad_norm": 0.41093844175338745, "learning_rate": 4.2096647188031006e-06, "loss": 0.7476, "step": 3815 }, { "epoch": 0.15814994405072735, "grad_norm": 0.4701308608055115, "learning_rate": 4.209457499274732e-06, "loss": 0.7942, "step": 3816 }, { "epoch": 0.158191387956401, "grad_norm": 0.44050124287605286, "learning_rate": 4.209250279746363e-06, "loss": 0.7574, "step": 3817 }, { "epoch": 
0.15823283186207468, "grad_norm": 0.45372331142425537, "learning_rate": 4.2090430602179956e-06, "loss": 0.7195, "step": 3818 }, { "epoch": 0.15827427576774836, "grad_norm": 0.4233514368534088, "learning_rate": 4.208835840689627e-06, "loss": 0.6733, "step": 3819 }, { "epoch": 0.15831571967342203, "grad_norm": 0.47585606575012207, "learning_rate": 4.208628621161258e-06, "loss": 0.7422, "step": 3820 }, { "epoch": 0.15835716357909568, "grad_norm": 0.4351378083229065, "learning_rate": 4.20842140163289e-06, "loss": 0.7527, "step": 3821 }, { "epoch": 0.15839860748476936, "grad_norm": 0.44203707575798035, "learning_rate": 4.208214182104522e-06, "loss": 0.7351, "step": 3822 }, { "epoch": 0.15844005139044304, "grad_norm": 0.4211277365684509, "learning_rate": 4.208006962576153e-06, "loss": 0.7217, "step": 3823 }, { "epoch": 0.15848149529611671, "grad_norm": 0.3870598077774048, "learning_rate": 4.2077997430477856e-06, "loss": 0.7488, "step": 3824 }, { "epoch": 0.15852293920179036, "grad_norm": 0.5050339698791504, "learning_rate": 4.207592523519417e-06, "loss": 0.8223, "step": 3825 }, { "epoch": 0.15856438310746404, "grad_norm": 0.4232695400714874, "learning_rate": 4.207385303991048e-06, "loss": 0.7443, "step": 3826 }, { "epoch": 0.15860582701313772, "grad_norm": 0.429105281829834, "learning_rate": 4.2071780844626806e-06, "loss": 0.7402, "step": 3827 }, { "epoch": 0.1586472709188114, "grad_norm": 0.4061126708984375, "learning_rate": 4.206970864934312e-06, "loss": 0.7383, "step": 3828 }, { "epoch": 0.15868871482448507, "grad_norm": 0.41521525382995605, "learning_rate": 4.206763645405943e-06, "loss": 0.792, "step": 3829 }, { "epoch": 0.15873015873015872, "grad_norm": 0.422641396522522, "learning_rate": 4.206556425877575e-06, "loss": 0.715, "step": 3830 }, { "epoch": 0.1587716026358324, "grad_norm": 0.4782736897468567, "learning_rate": 4.206349206349207e-06, "loss": 0.761, "step": 3831 }, { "epoch": 0.15881304654150608, "grad_norm": 0.42144641280174255, "learning_rate": 
4.206141986820838e-06, "loss": 0.7354, "step": 3832 }, { "epoch": 0.15885449044717975, "grad_norm": 0.41282618045806885, "learning_rate": 4.20593476729247e-06, "loss": 0.7241, "step": 3833 }, { "epoch": 0.1588959343528534, "grad_norm": 0.41399601101875305, "learning_rate": 4.205727547764102e-06, "loss": 0.7102, "step": 3834 }, { "epoch": 0.15893737825852708, "grad_norm": 0.4068455696105957, "learning_rate": 4.205520328235733e-06, "loss": 0.7114, "step": 3835 }, { "epoch": 0.15897882216420076, "grad_norm": 0.4130920171737671, "learning_rate": 4.205313108707365e-06, "loss": 0.7224, "step": 3836 }, { "epoch": 0.15902026606987443, "grad_norm": 0.4225328862667084, "learning_rate": 4.205105889178996e-06, "loss": 0.6973, "step": 3837 }, { "epoch": 0.15906170997554808, "grad_norm": 0.47452792525291443, "learning_rate": 4.204898669650628e-06, "loss": 0.7434, "step": 3838 }, { "epoch": 0.15910315388122176, "grad_norm": 0.42832037806510925, "learning_rate": 4.20469145012226e-06, "loss": 0.8097, "step": 3839 }, { "epoch": 0.15914459778689544, "grad_norm": 0.6010823845863342, "learning_rate": 4.204484230593891e-06, "loss": 0.7976, "step": 3840 }, { "epoch": 0.1591860416925691, "grad_norm": 0.40898996591567993, "learning_rate": 4.2042770110655225e-06, "loss": 0.7407, "step": 3841 }, { "epoch": 0.1592274855982428, "grad_norm": 0.4171966016292572, "learning_rate": 4.204069791537155e-06, "loss": 0.6851, "step": 3842 }, { "epoch": 0.15926892950391644, "grad_norm": 0.4605291187763214, "learning_rate": 4.203862572008787e-06, "loss": 0.7437, "step": 3843 }, { "epoch": 0.15931037340959012, "grad_norm": 0.4282947778701782, "learning_rate": 4.203655352480418e-06, "loss": 0.6973, "step": 3844 }, { "epoch": 0.1593518173152638, "grad_norm": 0.4084917902946472, "learning_rate": 4.20344813295205e-06, "loss": 0.6914, "step": 3845 }, { "epoch": 0.15939326122093747, "grad_norm": 0.42546719312667847, "learning_rate": 4.203240913423681e-06, "loss": 0.7482, "step": 3846 }, { "epoch": 
0.15943470512661112, "grad_norm": 0.4307466745376587, "learning_rate": 4.203033693895313e-06, "loss": 0.7617, "step": 3847 }, { "epoch": 0.1594761490322848, "grad_norm": 0.46401798725128174, "learning_rate": 4.202826474366945e-06, "loss": 0.7565, "step": 3848 }, { "epoch": 0.15951759293795847, "grad_norm": 0.4168911874294281, "learning_rate": 4.202619254838576e-06, "loss": 0.7178, "step": 3849 }, { "epoch": 0.15955903684363215, "grad_norm": 0.4438225030899048, "learning_rate": 4.202412035310208e-06, "loss": 0.7593, "step": 3850 }, { "epoch": 0.1596004807493058, "grad_norm": 0.3990876078605652, "learning_rate": 4.20220481578184e-06, "loss": 0.7274, "step": 3851 }, { "epoch": 0.15964192465497948, "grad_norm": 0.40294134616851807, "learning_rate": 4.201997596253471e-06, "loss": 0.7137, "step": 3852 }, { "epoch": 0.15968336856065315, "grad_norm": 0.3944995105266571, "learning_rate": 4.2017903767251025e-06, "loss": 0.698, "step": 3853 }, { "epoch": 0.15972481246632683, "grad_norm": 0.40305161476135254, "learning_rate": 4.201583157196735e-06, "loss": 0.7211, "step": 3854 }, { "epoch": 0.1597662563720005, "grad_norm": 0.45723584294319153, "learning_rate": 4.201375937668366e-06, "loss": 0.7339, "step": 3855 }, { "epoch": 0.15980770027767416, "grad_norm": 0.4147433042526245, "learning_rate": 4.2011687181399975e-06, "loss": 0.7229, "step": 3856 }, { "epoch": 0.15984914418334784, "grad_norm": 0.4272827208042145, "learning_rate": 4.200961498611629e-06, "loss": 0.7533, "step": 3857 }, { "epoch": 0.1598905880890215, "grad_norm": 0.4190048575401306, "learning_rate": 4.200754279083261e-06, "loss": 0.6923, "step": 3858 }, { "epoch": 0.1599320319946952, "grad_norm": 0.41579949855804443, "learning_rate": 4.200547059554893e-06, "loss": 0.7451, "step": 3859 }, { "epoch": 0.15997347590036884, "grad_norm": 0.48038020730018616, "learning_rate": 4.200339840026525e-06, "loss": 0.8193, "step": 3860 }, { "epoch": 0.16001491980604252, "grad_norm": 0.409093976020813, "learning_rate": 
4.200132620498156e-06, "loss": 0.7903, "step": 3861 }, { "epoch": 0.1600563637117162, "grad_norm": 0.4334113597869873, "learning_rate": 4.1999254009697875e-06, "loss": 0.7181, "step": 3862 }, { "epoch": 0.16009780761738987, "grad_norm": 0.4502509534358978, "learning_rate": 4.19971818144142e-06, "loss": 0.7964, "step": 3863 }, { "epoch": 0.16013925152306355, "grad_norm": 0.484048992395401, "learning_rate": 4.199510961913051e-06, "loss": 0.7456, "step": 3864 }, { "epoch": 0.1601806954287372, "grad_norm": 0.4655064642429352, "learning_rate": 4.1993037423846825e-06, "loss": 0.8149, "step": 3865 }, { "epoch": 0.16022213933441087, "grad_norm": 0.39345523715019226, "learning_rate": 4.199096522856314e-06, "loss": 0.7698, "step": 3866 }, { "epoch": 0.16026358324008455, "grad_norm": 0.4604015350341797, "learning_rate": 4.198889303327946e-06, "loss": 0.7786, "step": 3867 }, { "epoch": 0.16030502714575823, "grad_norm": 0.42139995098114014, "learning_rate": 4.1986820837995775e-06, "loss": 0.7173, "step": 3868 }, { "epoch": 0.16034647105143188, "grad_norm": 0.4157603681087494, "learning_rate": 4.198474864271209e-06, "loss": 0.7422, "step": 3869 }, { "epoch": 0.16038791495710555, "grad_norm": 0.43068990111351013, "learning_rate": 4.198267644742841e-06, "loss": 0.7275, "step": 3870 }, { "epoch": 0.16042935886277923, "grad_norm": 0.4326135516166687, "learning_rate": 4.1980604252144726e-06, "loss": 0.7882, "step": 3871 }, { "epoch": 0.1604708027684529, "grad_norm": 0.47131362557411194, "learning_rate": 4.197853205686104e-06, "loss": 0.7666, "step": 3872 }, { "epoch": 0.16051224667412656, "grad_norm": 0.40668225288391113, "learning_rate": 4.197645986157735e-06, "loss": 0.6924, "step": 3873 }, { "epoch": 0.16055369057980023, "grad_norm": 0.42884138226509094, "learning_rate": 4.1974387666293676e-06, "loss": 0.7778, "step": 3874 }, { "epoch": 0.1605951344854739, "grad_norm": 0.4121173024177551, "learning_rate": 4.197231547100999e-06, "loss": 0.7334, "step": 3875 }, { "epoch": 
0.1606365783911476, "grad_norm": 0.4066050052642822, "learning_rate": 4.197024327572631e-06, "loss": 0.7073, "step": 3876 }, { "epoch": 0.16067802229682127, "grad_norm": 0.4591585099697113, "learning_rate": 4.1968171080442626e-06, "loss": 0.7766, "step": 3877 }, { "epoch": 0.16071946620249491, "grad_norm": 0.3993017077445984, "learning_rate": 4.196609888515894e-06, "loss": 0.7126, "step": 3878 }, { "epoch": 0.1607609101081686, "grad_norm": 0.49140071868896484, "learning_rate": 4.196402668987526e-06, "loss": 0.7837, "step": 3879 }, { "epoch": 0.16080235401384227, "grad_norm": 0.44633105397224426, "learning_rate": 4.1961954494591576e-06, "loss": 0.7556, "step": 3880 }, { "epoch": 0.16084379791951595, "grad_norm": 0.41756877303123474, "learning_rate": 4.195988229930789e-06, "loss": 0.6919, "step": 3881 }, { "epoch": 0.1608852418251896, "grad_norm": 0.4424387812614441, "learning_rate": 4.19578101040242e-06, "loss": 0.6985, "step": 3882 }, { "epoch": 0.16092668573086327, "grad_norm": 0.4155738055706024, "learning_rate": 4.1955737908740526e-06, "loss": 0.7263, "step": 3883 }, { "epoch": 0.16096812963653695, "grad_norm": 0.44334840774536133, "learning_rate": 4.195366571345684e-06, "loss": 0.7375, "step": 3884 }, { "epoch": 0.16100957354221063, "grad_norm": 0.45792505145072937, "learning_rate": 4.195159351817315e-06, "loss": 0.7279, "step": 3885 }, { "epoch": 0.16105101744788428, "grad_norm": 0.40332263708114624, "learning_rate": 4.1949521322889476e-06, "loss": 0.7096, "step": 3886 }, { "epoch": 0.16109246135355795, "grad_norm": 0.41727304458618164, "learning_rate": 4.194744912760579e-06, "loss": 0.7327, "step": 3887 }, { "epoch": 0.16113390525923163, "grad_norm": 0.4250168204307556, "learning_rate": 4.19453769323221e-06, "loss": 0.7893, "step": 3888 }, { "epoch": 0.1611753491649053, "grad_norm": 0.44715920090675354, "learning_rate": 4.194330473703842e-06, "loss": 0.7297, "step": 3889 }, { "epoch": 0.16121679307057898, "grad_norm": 0.4113903045654297, "learning_rate": 
4.194123254175474e-06, "loss": 0.7454, "step": 3890 }, { "epoch": 0.16125823697625263, "grad_norm": 0.39251089096069336, "learning_rate": 4.193916034647105e-06, "loss": 0.6851, "step": 3891 }, { "epoch": 0.1612996808819263, "grad_norm": 0.426839679479599, "learning_rate": 4.193708815118738e-06, "loss": 0.7734, "step": 3892 }, { "epoch": 0.1613411247876, "grad_norm": 0.45610150694847107, "learning_rate": 4.193501595590368e-06, "loss": 0.7053, "step": 3893 }, { "epoch": 0.16138256869327366, "grad_norm": 0.4198380410671234, "learning_rate": 4.193294376062e-06, "loss": 0.7378, "step": 3894 }, { "epoch": 0.16142401259894731, "grad_norm": 0.4052564799785614, "learning_rate": 4.193087156533633e-06, "loss": 0.7124, "step": 3895 }, { "epoch": 0.161465456504621, "grad_norm": 0.4669317305088043, "learning_rate": 4.192879937005264e-06, "loss": 0.7631, "step": 3896 }, { "epoch": 0.16150690041029467, "grad_norm": 0.43615469336509705, "learning_rate": 4.192672717476895e-06, "loss": 0.73, "step": 3897 }, { "epoch": 0.16154834431596835, "grad_norm": 0.43209102749824524, "learning_rate": 4.192465497948527e-06, "loss": 0.702, "step": 3898 }, { "epoch": 0.161589788221642, "grad_norm": 0.43262773752212524, "learning_rate": 4.192258278420159e-06, "loss": 0.7737, "step": 3899 }, { "epoch": 0.16163123212731567, "grad_norm": 0.4481184780597687, "learning_rate": 4.19205105889179e-06, "loss": 0.7319, "step": 3900 }, { "epoch": 0.16167267603298935, "grad_norm": 0.43672117590904236, "learning_rate": 4.191843839363422e-06, "loss": 0.7717, "step": 3901 }, { "epoch": 0.16171411993866303, "grad_norm": 0.41126543283462524, "learning_rate": 4.191636619835053e-06, "loss": 0.7668, "step": 3902 }, { "epoch": 0.1617555638443367, "grad_norm": 0.4169687032699585, "learning_rate": 4.191429400306685e-06, "loss": 0.6929, "step": 3903 }, { "epoch": 0.16179700775001035, "grad_norm": 0.450638085603714, "learning_rate": 4.191222180778317e-06, "loss": 0.752, "step": 3904 }, { "epoch": 0.16183845165568403, 
"grad_norm": 0.43379783630371094, "learning_rate": 4.191014961249948e-06, "loss": 0.7185, "step": 3905 }, { "epoch": 0.1618798955613577, "grad_norm": 0.42769724130630493, "learning_rate": 4.19080774172158e-06, "loss": 0.6768, "step": 3906 }, { "epoch": 0.16192133946703138, "grad_norm": 0.45519348978996277, "learning_rate": 4.190600522193212e-06, "loss": 0.7336, "step": 3907 }, { "epoch": 0.16196278337270503, "grad_norm": 0.40760353207588196, "learning_rate": 4.190393302664844e-06, "loss": 0.6626, "step": 3908 }, { "epoch": 0.1620042272783787, "grad_norm": 0.40230047702789307, "learning_rate": 4.1901860831364745e-06, "loss": 0.7207, "step": 3909 }, { "epoch": 0.1620456711840524, "grad_norm": 0.4131404161453247, "learning_rate": 4.189978863608107e-06, "loss": 0.717, "step": 3910 }, { "epoch": 0.16208711508972606, "grad_norm": 0.5082923173904419, "learning_rate": 4.189771644079739e-06, "loss": 0.8196, "step": 3911 }, { "epoch": 0.1621285589953997, "grad_norm": 0.40603211522102356, "learning_rate": 4.18956442455137e-06, "loss": 0.7483, "step": 3912 }, { "epoch": 0.1621700029010734, "grad_norm": 0.43264731764793396, "learning_rate": 4.189357205023002e-06, "loss": 0.7549, "step": 3913 }, { "epoch": 0.16221144680674707, "grad_norm": 0.4516741931438446, "learning_rate": 4.189149985494633e-06, "loss": 0.7024, "step": 3914 }, { "epoch": 0.16225289071242074, "grad_norm": 0.41795071959495544, "learning_rate": 4.188942765966265e-06, "loss": 0.7427, "step": 3915 }, { "epoch": 0.16229433461809442, "grad_norm": 0.4450035095214844, "learning_rate": 4.188735546437897e-06, "loss": 0.8152, "step": 3916 }, { "epoch": 0.16233577852376807, "grad_norm": 0.4148249924182892, "learning_rate": 4.188528326909528e-06, "loss": 0.7222, "step": 3917 }, { "epoch": 0.16237722242944175, "grad_norm": 0.4537728726863861, "learning_rate": 4.1883211073811595e-06, "loss": 0.7715, "step": 3918 }, { "epoch": 0.16241866633511542, "grad_norm": 0.43246373534202576, "learning_rate": 4.188113887852792e-06, 
"loss": 0.7498, "step": 3919 }, { "epoch": 0.1624601102407891, "grad_norm": 0.41484004259109497, "learning_rate": 4.187906668324423e-06, "loss": 0.6858, "step": 3920 }, { "epoch": 0.16250155414646275, "grad_norm": 0.4074244797229767, "learning_rate": 4.1876994487960545e-06, "loss": 0.7598, "step": 3921 }, { "epoch": 0.16254299805213643, "grad_norm": 0.42424190044403076, "learning_rate": 4.187492229267687e-06, "loss": 0.7505, "step": 3922 }, { "epoch": 0.1625844419578101, "grad_norm": 0.4053076207637787, "learning_rate": 4.187285009739318e-06, "loss": 0.7412, "step": 3923 }, { "epoch": 0.16262588586348378, "grad_norm": 0.48449522256851196, "learning_rate": 4.1870777902109495e-06, "loss": 0.7935, "step": 3924 }, { "epoch": 0.16266732976915743, "grad_norm": 0.41274046897888184, "learning_rate": 4.186870570682581e-06, "loss": 0.7275, "step": 3925 }, { "epoch": 0.1627087736748311, "grad_norm": 0.43400174379348755, "learning_rate": 4.186663351154213e-06, "loss": 0.77, "step": 3926 }, { "epoch": 0.16275021758050479, "grad_norm": 0.43948426842689514, "learning_rate": 4.1864561316258445e-06, "loss": 0.7354, "step": 3927 }, { "epoch": 0.16279166148617846, "grad_norm": 0.4341945946216583, "learning_rate": 4.186248912097477e-06, "loss": 0.7493, "step": 3928 }, { "epoch": 0.16283310539185214, "grad_norm": 0.4131866693496704, "learning_rate": 4.186041692569108e-06, "loss": 0.7366, "step": 3929 }, { "epoch": 0.1628745492975258, "grad_norm": 0.43092653155326843, "learning_rate": 4.1858344730407396e-06, "loss": 0.6958, "step": 3930 }, { "epoch": 0.16291599320319947, "grad_norm": 0.43224841356277466, "learning_rate": 4.185627253512372e-06, "loss": 0.6899, "step": 3931 }, { "epoch": 0.16295743710887314, "grad_norm": 0.43330833315849304, "learning_rate": 4.185420033984003e-06, "loss": 0.7175, "step": 3932 }, { "epoch": 0.16299888101454682, "grad_norm": 0.4147273302078247, "learning_rate": 4.1852128144556346e-06, "loss": 0.7378, "step": 3933 }, { "epoch": 0.16304032492022047, 
"grad_norm": 0.5594637393951416, "learning_rate": 4.185005594927266e-06, "loss": 0.8049, "step": 3934 }, { "epoch": 0.16308176882589415, "grad_norm": 0.448483943939209, "learning_rate": 4.184798375398898e-06, "loss": 0.7856, "step": 3935 }, { "epoch": 0.16312321273156782, "grad_norm": 0.40614721179008484, "learning_rate": 4.1845911558705296e-06, "loss": 0.6211, "step": 3936 }, { "epoch": 0.1631646566372415, "grad_norm": 0.4241173565387726, "learning_rate": 4.184383936342161e-06, "loss": 0.7198, "step": 3937 }, { "epoch": 0.16320610054291518, "grad_norm": 0.42847543954849243, "learning_rate": 4.184176716813793e-06, "loss": 0.7295, "step": 3938 }, { "epoch": 0.16324754444858883, "grad_norm": 0.41963788866996765, "learning_rate": 4.1839694972854246e-06, "loss": 0.7625, "step": 3939 }, { "epoch": 0.1632889883542625, "grad_norm": 0.4731791019439697, "learning_rate": 4.183762277757056e-06, "loss": 0.7205, "step": 3940 }, { "epoch": 0.16333043225993618, "grad_norm": 0.4068329632282257, "learning_rate": 4.183555058228687e-06, "loss": 0.6885, "step": 3941 }, { "epoch": 0.16337187616560986, "grad_norm": 0.4217120110988617, "learning_rate": 4.1833478387003196e-06, "loss": 0.7158, "step": 3942 }, { "epoch": 0.1634133200712835, "grad_norm": 0.42139700055122375, "learning_rate": 4.183140619171951e-06, "loss": 0.7356, "step": 3943 }, { "epoch": 0.16345476397695718, "grad_norm": 0.4465329051017761, "learning_rate": 4.182933399643583e-06, "loss": 0.7734, "step": 3944 }, { "epoch": 0.16349620788263086, "grad_norm": 0.4422605335712433, "learning_rate": 4.182726180115214e-06, "loss": 0.7888, "step": 3945 }, { "epoch": 0.16353765178830454, "grad_norm": 0.4786761999130249, "learning_rate": 4.182518960586846e-06, "loss": 0.7808, "step": 3946 }, { "epoch": 0.1635790956939782, "grad_norm": 0.41335394978523254, "learning_rate": 4.182311741058478e-06, "loss": 0.7201, "step": 3947 }, { "epoch": 0.16362053959965187, "grad_norm": 0.42506518959999084, "learning_rate": 4.18210452153011e-06, 
"loss": 0.7446, "step": 3948 }, { "epoch": 0.16366198350532554, "grad_norm": 0.423186331987381, "learning_rate": 4.181897302001741e-06, "loss": 0.7307, "step": 3949 }, { "epoch": 0.16370342741099922, "grad_norm": 0.4302513599395752, "learning_rate": 4.181690082473372e-06, "loss": 0.666, "step": 3950 }, { "epoch": 0.1637448713166729, "grad_norm": 0.387676477432251, "learning_rate": 4.181482862945005e-06, "loss": 0.7036, "step": 3951 }, { "epoch": 0.16378631522234655, "grad_norm": 0.4358347952365875, "learning_rate": 4.181275643416636e-06, "loss": 0.7473, "step": 3952 }, { "epoch": 0.16382775912802022, "grad_norm": 0.5031672716140747, "learning_rate": 4.181068423888267e-06, "loss": 0.8708, "step": 3953 }, { "epoch": 0.1638692030336939, "grad_norm": 0.3876796066761017, "learning_rate": 4.180861204359899e-06, "loss": 0.6792, "step": 3954 }, { "epoch": 0.16391064693936758, "grad_norm": 0.4248257577419281, "learning_rate": 4.180653984831531e-06, "loss": 0.7629, "step": 3955 }, { "epoch": 0.16395209084504123, "grad_norm": 0.45140397548675537, "learning_rate": 4.180446765303162e-06, "loss": 0.7388, "step": 3956 }, { "epoch": 0.1639935347507149, "grad_norm": 0.4637148678302765, "learning_rate": 4.180239545774794e-06, "loss": 0.6992, "step": 3957 }, { "epoch": 0.16403497865638858, "grad_norm": 0.4246406555175781, "learning_rate": 4.180032326246426e-06, "loss": 0.7332, "step": 3958 }, { "epoch": 0.16407642256206226, "grad_norm": 0.39203453063964844, "learning_rate": 4.179825106718057e-06, "loss": 0.7146, "step": 3959 }, { "epoch": 0.1641178664677359, "grad_norm": 0.41353005170822144, "learning_rate": 4.17961788718969e-06, "loss": 0.7155, "step": 3960 }, { "epoch": 0.16415931037340958, "grad_norm": 0.41028448939323425, "learning_rate": 4.17941066766132e-06, "loss": 0.6794, "step": 3961 }, { "epoch": 0.16420075427908326, "grad_norm": 0.44359683990478516, "learning_rate": 4.179203448132952e-06, "loss": 0.7217, "step": 3962 }, { "epoch": 0.16424219818475694, "grad_norm": 
0.452467679977417, "learning_rate": 4.178996228604585e-06, "loss": 0.7759, "step": 3963 }, { "epoch": 0.16428364209043061, "grad_norm": 0.4533520042896271, "learning_rate": 4.178789009076216e-06, "loss": 0.6893, "step": 3964 }, { "epoch": 0.16432508599610426, "grad_norm": 0.4402236342430115, "learning_rate": 4.178581789547847e-06, "loss": 0.6709, "step": 3965 }, { "epoch": 0.16436652990177794, "grad_norm": 0.432903915643692, "learning_rate": 4.178374570019479e-06, "loss": 0.8, "step": 3966 }, { "epoch": 0.16440797380745162, "grad_norm": 0.4020920693874359, "learning_rate": 4.178167350491111e-06, "loss": 0.6924, "step": 3967 }, { "epoch": 0.1644494177131253, "grad_norm": 0.42549023032188416, "learning_rate": 4.177960130962742e-06, "loss": 0.7205, "step": 3968 }, { "epoch": 0.16449086161879894, "grad_norm": 0.4136500954627991, "learning_rate": 4.177752911434374e-06, "loss": 0.7224, "step": 3969 }, { "epoch": 0.16453230552447262, "grad_norm": 0.43125662207603455, "learning_rate": 4.177545691906005e-06, "loss": 0.6881, "step": 3970 }, { "epoch": 0.1645737494301463, "grad_norm": 0.4146919548511505, "learning_rate": 4.177338472377637e-06, "loss": 0.7551, "step": 3971 }, { "epoch": 0.16461519333581998, "grad_norm": 0.4323260486125946, "learning_rate": 4.177131252849269e-06, "loss": 0.7119, "step": 3972 }, { "epoch": 0.16465663724149363, "grad_norm": 0.41846591234207153, "learning_rate": 4.1769240333209e-06, "loss": 0.7036, "step": 3973 }, { "epoch": 0.1646980811471673, "grad_norm": 0.40757814049720764, "learning_rate": 4.176716813792532e-06, "loss": 0.7043, "step": 3974 }, { "epoch": 0.16473952505284098, "grad_norm": 0.4579413831233978, "learning_rate": 4.176509594264164e-06, "loss": 0.8242, "step": 3975 }, { "epoch": 0.16478096895851466, "grad_norm": 0.38648974895477295, "learning_rate": 4.176302374735796e-06, "loss": 0.7195, "step": 3976 }, { "epoch": 0.16482241286418833, "grad_norm": 0.4380299150943756, "learning_rate": 4.1760951552074265e-06, "loss": 0.7676, "step": 
3977 }, { "epoch": 0.16486385676986198, "grad_norm": 0.4183562099933624, "learning_rate": 4.175887935679059e-06, "loss": 0.686, "step": 3978 }, { "epoch": 0.16490530067553566, "grad_norm": 0.4199469983577728, "learning_rate": 4.17568071615069e-06, "loss": 0.6858, "step": 3979 }, { "epoch": 0.16494674458120934, "grad_norm": 0.4656279981136322, "learning_rate": 4.175473496622322e-06, "loss": 0.7596, "step": 3980 }, { "epoch": 0.16498818848688301, "grad_norm": 0.41313594579696655, "learning_rate": 4.175266277093954e-06, "loss": 0.7543, "step": 3981 }, { "epoch": 0.16502963239255666, "grad_norm": 0.42932602763175964, "learning_rate": 4.175059057565585e-06, "loss": 0.7078, "step": 3982 }, { "epoch": 0.16507107629823034, "grad_norm": 0.44365110993385315, "learning_rate": 4.174851838037217e-06, "loss": 0.7307, "step": 3983 }, { "epoch": 0.16511252020390402, "grad_norm": 0.4406120479106903, "learning_rate": 4.174644618508849e-06, "loss": 0.6836, "step": 3984 }, { "epoch": 0.1651539641095777, "grad_norm": 0.4153088927268982, "learning_rate": 4.17443739898048e-06, "loss": 0.6853, "step": 3985 }, { "epoch": 0.16519540801525134, "grad_norm": 0.42502832412719727, "learning_rate": 4.1742301794521115e-06, "loss": 0.7, "step": 3986 }, { "epoch": 0.16523685192092502, "grad_norm": 0.3973308801651001, "learning_rate": 4.174022959923744e-06, "loss": 0.7063, "step": 3987 }, { "epoch": 0.1652782958265987, "grad_norm": 0.448110431432724, "learning_rate": 4.173815740395375e-06, "loss": 0.7849, "step": 3988 }, { "epoch": 0.16531973973227237, "grad_norm": 0.40824928879737854, "learning_rate": 4.1736085208670066e-06, "loss": 0.7045, "step": 3989 }, { "epoch": 0.16536118363794605, "grad_norm": 0.42675846815109253, "learning_rate": 4.173401301338639e-06, "loss": 0.7175, "step": 3990 }, { "epoch": 0.1654026275436197, "grad_norm": 0.44289180636405945, "learning_rate": 4.17319408181027e-06, "loss": 0.6904, "step": 3991 }, { "epoch": 0.16544407144929338, "grad_norm": 0.45638832449913025, 
"learning_rate": 4.1729868622819016e-06, "loss": 0.7351, "step": 3992 }, { "epoch": 0.16548551535496706, "grad_norm": 0.4534793198108673, "learning_rate": 4.172779642753533e-06, "loss": 0.7827, "step": 3993 }, { "epoch": 0.16552695926064073, "grad_norm": 0.4408583641052246, "learning_rate": 4.172572423225165e-06, "loss": 0.7581, "step": 3994 }, { "epoch": 0.16556840316631438, "grad_norm": 0.3889797031879425, "learning_rate": 4.1723652036967966e-06, "loss": 0.6903, "step": 3995 }, { "epoch": 0.16560984707198806, "grad_norm": 0.4430372714996338, "learning_rate": 4.172157984168429e-06, "loss": 0.7732, "step": 3996 }, { "epoch": 0.16565129097766174, "grad_norm": 0.44308802485466003, "learning_rate": 4.17195076464006e-06, "loss": 0.7124, "step": 3997 }, { "epoch": 0.1656927348833354, "grad_norm": 0.41470906138420105, "learning_rate": 4.1717435451116916e-06, "loss": 0.7473, "step": 3998 }, { "epoch": 0.1657341787890091, "grad_norm": 0.44298768043518066, "learning_rate": 4.171536325583324e-06, "loss": 0.7802, "step": 3999 }, { "epoch": 0.16577562269468274, "grad_norm": 0.4316277801990509, "learning_rate": 4.171329106054955e-06, "loss": 0.6786, "step": 4000 }, { "epoch": 0.16581706660035642, "grad_norm": 0.42960840463638306, "learning_rate": 4.1711218865265866e-06, "loss": 0.7012, "step": 4001 }, { "epoch": 0.1658585105060301, "grad_norm": 0.40676987171173096, "learning_rate": 4.170914666998218e-06, "loss": 0.7371, "step": 4002 }, { "epoch": 0.16589995441170377, "grad_norm": 0.4225432872772217, "learning_rate": 4.17070744746985e-06, "loss": 0.7908, "step": 4003 }, { "epoch": 0.16594139831737742, "grad_norm": 0.4528430700302124, "learning_rate": 4.1705002279414816e-06, "loss": 0.7756, "step": 4004 }, { "epoch": 0.1659828422230511, "grad_norm": 0.43588078022003174, "learning_rate": 4.170293008413113e-06, "loss": 0.738, "step": 4005 }, { "epoch": 0.16602428612872477, "grad_norm": 0.4559016525745392, "learning_rate": 4.170085788884744e-06, "loss": 0.7429, "step": 4006 }, { 
"epoch": 0.16606573003439845, "grad_norm": 0.43830540776252747, "learning_rate": 4.169878569356377e-06, "loss": 0.7094, "step": 4007 }, { "epoch": 0.1661071739400721, "grad_norm": 0.41823479533195496, "learning_rate": 4.169671349828008e-06, "loss": 0.7025, "step": 4008 }, { "epoch": 0.16614861784574578, "grad_norm": 0.4519365131855011, "learning_rate": 4.169464130299639e-06, "loss": 0.7263, "step": 4009 }, { "epoch": 0.16619006175141945, "grad_norm": 0.45159727334976196, "learning_rate": 4.169256910771272e-06, "loss": 0.7444, "step": 4010 }, { "epoch": 0.16623150565709313, "grad_norm": 0.422893226146698, "learning_rate": 4.169049691242903e-06, "loss": 0.6951, "step": 4011 }, { "epoch": 0.1662729495627668, "grad_norm": 0.516808032989502, "learning_rate": 4.168842471714535e-06, "loss": 0.7462, "step": 4012 }, { "epoch": 0.16631439346844046, "grad_norm": 0.40142881870269775, "learning_rate": 4.168635252186166e-06, "loss": 0.7177, "step": 4013 }, { "epoch": 0.16635583737411413, "grad_norm": 0.45885249972343445, "learning_rate": 4.168428032657798e-06, "loss": 0.6953, "step": 4014 }, { "epoch": 0.1663972812797878, "grad_norm": 0.4287058711051941, "learning_rate": 4.168220813129429e-06, "loss": 0.7703, "step": 4015 }, { "epoch": 0.1664387251854615, "grad_norm": 0.4489665627479553, "learning_rate": 4.168013593601062e-06, "loss": 0.7551, "step": 4016 }, { "epoch": 0.16648016909113514, "grad_norm": 0.4240235686302185, "learning_rate": 4.167806374072693e-06, "loss": 0.6899, "step": 4017 }, { "epoch": 0.16652161299680882, "grad_norm": 0.41933944821357727, "learning_rate": 4.167599154544324e-06, "loss": 0.7435, "step": 4018 }, { "epoch": 0.1665630569024825, "grad_norm": 0.42885735630989075, "learning_rate": 4.167391935015957e-06, "loss": 0.7434, "step": 4019 }, { "epoch": 0.16660450080815617, "grad_norm": 0.40203186869621277, "learning_rate": 4.167184715487588e-06, "loss": 0.7095, "step": 4020 }, { "epoch": 0.16664594471382982, "grad_norm": 0.41601675748825073, "learning_rate": 
4.166977495959219e-06, "loss": 0.7109, "step": 4021 }, { "epoch": 0.1666873886195035, "grad_norm": 0.40761032700538635, "learning_rate": 4.166770276430851e-06, "loss": 0.7393, "step": 4022 }, { "epoch": 0.16672883252517717, "grad_norm": 0.42859432101249695, "learning_rate": 4.166563056902483e-06, "loss": 0.7266, "step": 4023 }, { "epoch": 0.16677027643085085, "grad_norm": 0.4399731159210205, "learning_rate": 4.166355837374114e-06, "loss": 0.7351, "step": 4024 }, { "epoch": 0.16681172033652453, "grad_norm": 0.44113704562187195, "learning_rate": 4.166148617845746e-06, "loss": 0.7808, "step": 4025 }, { "epoch": 0.16685316424219818, "grad_norm": 0.42984268069267273, "learning_rate": 4.165941398317378e-06, "loss": 0.7239, "step": 4026 }, { "epoch": 0.16689460814787185, "grad_norm": 0.42799112200737, "learning_rate": 4.165734178789009e-06, "loss": 0.7688, "step": 4027 }, { "epoch": 0.16693605205354553, "grad_norm": 0.40136244893074036, "learning_rate": 4.165526959260642e-06, "loss": 0.7172, "step": 4028 }, { "epoch": 0.1669774959592192, "grad_norm": 0.42733749747276306, "learning_rate": 4.165319739732272e-06, "loss": 0.6819, "step": 4029 }, { "epoch": 0.16701893986489286, "grad_norm": 0.441489577293396, "learning_rate": 4.165112520203904e-06, "loss": 0.7354, "step": 4030 }, { "epoch": 0.16706038377056653, "grad_norm": 0.3815517723560333, "learning_rate": 4.164905300675536e-06, "loss": 0.6633, "step": 4031 }, { "epoch": 0.1671018276762402, "grad_norm": 0.4398477375507355, "learning_rate": 4.164698081147168e-06, "loss": 0.8042, "step": 4032 }, { "epoch": 0.1671432715819139, "grad_norm": 0.43279215693473816, "learning_rate": 4.164490861618799e-06, "loss": 0.7329, "step": 4033 }, { "epoch": 0.16718471548758754, "grad_norm": 0.4133869707584381, "learning_rate": 4.164283642090431e-06, "loss": 0.6512, "step": 4034 }, { "epoch": 0.16722615939326121, "grad_norm": 0.41181620955467224, "learning_rate": 4.164076422562063e-06, "loss": 0.6775, "step": 4035 }, { "epoch": 
0.1672676032989349, "grad_norm": 0.48185110092163086, "learning_rate": 4.163869203033694e-06, "loss": 0.7527, "step": 4036 }, { "epoch": 0.16730904720460857, "grad_norm": 0.44024205207824707, "learning_rate": 4.163661983505326e-06, "loss": 0.76, "step": 4037 }, { "epoch": 0.16735049111028225, "grad_norm": 0.4023451507091522, "learning_rate": 4.163454763976957e-06, "loss": 0.7004, "step": 4038 }, { "epoch": 0.1673919350159559, "grad_norm": 0.44601884484291077, "learning_rate": 4.163247544448589e-06, "loss": 0.7803, "step": 4039 }, { "epoch": 0.16743337892162957, "grad_norm": 0.4677041471004486, "learning_rate": 4.163040324920221e-06, "loss": 0.7546, "step": 4040 }, { "epoch": 0.16747482282730325, "grad_norm": 0.44302669167518616, "learning_rate": 4.162833105391852e-06, "loss": 0.7373, "step": 4041 }, { "epoch": 0.16751626673297693, "grad_norm": 0.42496371269226074, "learning_rate": 4.162625885863484e-06, "loss": 0.7747, "step": 4042 }, { "epoch": 0.16755771063865058, "grad_norm": 0.446289986371994, "learning_rate": 4.162418666335116e-06, "loss": 0.822, "step": 4043 }, { "epoch": 0.16759915454432425, "grad_norm": 0.40851324796676636, "learning_rate": 4.162211446806748e-06, "loss": 0.6687, "step": 4044 }, { "epoch": 0.16764059844999793, "grad_norm": 0.4266902506351471, "learning_rate": 4.1620042272783785e-06, "loss": 0.6819, "step": 4045 }, { "epoch": 0.1676820423556716, "grad_norm": 0.39563530683517456, "learning_rate": 4.161797007750011e-06, "loss": 0.71, "step": 4046 }, { "epoch": 0.16772348626134526, "grad_norm": 0.43665483593940735, "learning_rate": 4.161589788221642e-06, "loss": 0.7131, "step": 4047 }, { "epoch": 0.16776493016701893, "grad_norm": 0.4138428568840027, "learning_rate": 4.161382568693274e-06, "loss": 0.7512, "step": 4048 }, { "epoch": 0.1678063740726926, "grad_norm": 0.4195769429206848, "learning_rate": 4.161175349164906e-06, "loss": 0.79, "step": 4049 }, { "epoch": 0.1678478179783663, "grad_norm": 0.40827223658561707, "learning_rate": 
4.160968129636537e-06, "loss": 0.7583, "step": 4050 }, { "epoch": 0.16788926188403996, "grad_norm": 0.44803479313850403, "learning_rate": 4.160760910108169e-06, "loss": 0.6868, "step": 4051 }, { "epoch": 0.1679307057897136, "grad_norm": 0.44756048917770386, "learning_rate": 4.160553690579801e-06, "loss": 0.7944, "step": 4052 }, { "epoch": 0.1679721496953873, "grad_norm": 0.4231491684913635, "learning_rate": 4.160346471051432e-06, "loss": 0.676, "step": 4053 }, { "epoch": 0.16801359360106097, "grad_norm": 0.4277089536190033, "learning_rate": 4.1601392515230636e-06, "loss": 0.7444, "step": 4054 }, { "epoch": 0.16805503750673464, "grad_norm": 0.4218698740005493, "learning_rate": 4.159932031994696e-06, "loss": 0.7844, "step": 4055 }, { "epoch": 0.1680964814124083, "grad_norm": 0.43698549270629883, "learning_rate": 4.159724812466327e-06, "loss": 0.6996, "step": 4056 }, { "epoch": 0.16813792531808197, "grad_norm": 0.46601346135139465, "learning_rate": 4.1595175929379586e-06, "loss": 0.8347, "step": 4057 }, { "epoch": 0.16817936922375565, "grad_norm": 0.4093257486820221, "learning_rate": 4.15931037340959e-06, "loss": 0.7715, "step": 4058 }, { "epoch": 0.16822081312942933, "grad_norm": 0.4418031573295593, "learning_rate": 4.159103153881222e-06, "loss": 0.7483, "step": 4059 }, { "epoch": 0.16826225703510297, "grad_norm": 0.4325525164604187, "learning_rate": 4.1588959343528536e-06, "loss": 0.7891, "step": 4060 }, { "epoch": 0.16830370094077665, "grad_norm": 0.4223649799823761, "learning_rate": 4.158688714824485e-06, "loss": 0.754, "step": 4061 }, { "epoch": 0.16834514484645033, "grad_norm": 0.46665868163108826, "learning_rate": 4.158481495296117e-06, "loss": 0.7937, "step": 4062 }, { "epoch": 0.168386588752124, "grad_norm": 0.42945510149002075, "learning_rate": 4.1582742757677486e-06, "loss": 0.783, "step": 4063 }, { "epoch": 0.16842803265779768, "grad_norm": 0.4173888862133026, "learning_rate": 4.158067056239381e-06, "loss": 0.8145, "step": 4064 }, { "epoch": 
0.16846947656347133, "grad_norm": 0.45253029465675354, "learning_rate": 4.157859836711012e-06, "loss": 0.7291, "step": 4065 }, { "epoch": 0.168510920469145, "grad_norm": 0.4167127013206482, "learning_rate": 4.157652617182644e-06, "loss": 0.7217, "step": 4066 }, { "epoch": 0.1685523643748187, "grad_norm": 0.43157029151916504, "learning_rate": 4.157445397654275e-06, "loss": 0.6862, "step": 4067 }, { "epoch": 0.16859380828049236, "grad_norm": 0.4422221779823303, "learning_rate": 4.157238178125907e-06, "loss": 0.7424, "step": 4068 }, { "epoch": 0.168635252186166, "grad_norm": 0.42759624123573303, "learning_rate": 4.157030958597539e-06, "loss": 0.7703, "step": 4069 }, { "epoch": 0.1686766960918397, "grad_norm": 0.4230738878250122, "learning_rate": 4.15682373906917e-06, "loss": 0.7111, "step": 4070 }, { "epoch": 0.16871813999751337, "grad_norm": 0.39961332082748413, "learning_rate": 4.156616519540802e-06, "loss": 0.7043, "step": 4071 }, { "epoch": 0.16875958390318704, "grad_norm": 0.4062801003456116, "learning_rate": 4.156409300012434e-06, "loss": 0.7009, "step": 4072 }, { "epoch": 0.16880102780886072, "grad_norm": 0.44138258695602417, "learning_rate": 4.156202080484065e-06, "loss": 0.7405, "step": 4073 }, { "epoch": 0.16884247171453437, "grad_norm": 0.44288939237594604, "learning_rate": 4.155994860955696e-06, "loss": 0.693, "step": 4074 }, { "epoch": 0.16888391562020805, "grad_norm": 0.4357079267501831, "learning_rate": 4.155787641427329e-06, "loss": 0.7783, "step": 4075 }, { "epoch": 0.16892535952588172, "grad_norm": 0.4279991388320923, "learning_rate": 4.15558042189896e-06, "loss": 0.688, "step": 4076 }, { "epoch": 0.1689668034315554, "grad_norm": 0.4231995642185211, "learning_rate": 4.155373202370591e-06, "loss": 0.6996, "step": 4077 }, { "epoch": 0.16900824733722905, "grad_norm": 0.4232848584651947, "learning_rate": 4.155165982842224e-06, "loss": 0.7244, "step": 4078 }, { "epoch": 0.16904969124290273, "grad_norm": 0.4019148647785187, "learning_rate": 
4.154958763313855e-06, "loss": 0.73, "step": 4079 }, { "epoch": 0.1690911351485764, "grad_norm": 0.42240452766418457, "learning_rate": 4.154751543785487e-06, "loss": 0.7595, "step": 4080 }, { "epoch": 0.16913257905425008, "grad_norm": 0.4243178367614746, "learning_rate": 4.154544324257119e-06, "loss": 0.7229, "step": 4081 }, { "epoch": 0.16917402295992373, "grad_norm": 0.41425445675849915, "learning_rate": 4.15433710472875e-06, "loss": 0.7991, "step": 4082 }, { "epoch": 0.1692154668655974, "grad_norm": 0.41091328859329224, "learning_rate": 4.154129885200381e-06, "loss": 0.6925, "step": 4083 }, { "epoch": 0.16925691077127109, "grad_norm": 0.4051581025123596, "learning_rate": 4.153922665672014e-06, "loss": 0.7988, "step": 4084 }, { "epoch": 0.16929835467694476, "grad_norm": 0.4197920262813568, "learning_rate": 4.153715446143645e-06, "loss": 0.72, "step": 4085 }, { "epoch": 0.16933979858261844, "grad_norm": 0.41297194361686707, "learning_rate": 4.153508226615276e-06, "loss": 0.7186, "step": 4086 }, { "epoch": 0.1693812424882921, "grad_norm": 0.4065501391887665, "learning_rate": 4.153301007086909e-06, "loss": 0.7136, "step": 4087 }, { "epoch": 0.16942268639396577, "grad_norm": 0.4138219356536865, "learning_rate": 4.15309378755854e-06, "loss": 0.6766, "step": 4088 }, { "epoch": 0.16946413029963944, "grad_norm": 0.439310759305954, "learning_rate": 4.152886568030171e-06, "loss": 0.7302, "step": 4089 }, { "epoch": 0.16950557420531312, "grad_norm": 0.46391627192497253, "learning_rate": 4.152679348501803e-06, "loss": 0.7849, "step": 4090 }, { "epoch": 0.16954701811098677, "grad_norm": 0.4477780759334564, "learning_rate": 4.152472128973435e-06, "loss": 0.7234, "step": 4091 }, { "epoch": 0.16958846201666045, "grad_norm": 0.4495815634727478, "learning_rate": 4.152264909445066e-06, "loss": 0.7729, "step": 4092 }, { "epoch": 0.16962990592233412, "grad_norm": 0.48693814873695374, "learning_rate": 4.152057689916698e-06, "loss": 0.8271, "step": 4093 }, { "epoch": 0.1696713498280078, 
"grad_norm": 0.446256548166275, "learning_rate": 4.15185047038833e-06, "loss": 0.761, "step": 4094 }, { "epoch": 0.16971279373368145, "grad_norm": 0.42985719442367554, "learning_rate": 4.151643250859961e-06, "loss": 0.7383, "step": 4095 }, { "epoch": 0.16975423763935513, "grad_norm": 0.4384533166885376, "learning_rate": 4.151436031331594e-06, "loss": 0.7612, "step": 4096 }, { "epoch": 0.1697956815450288, "grad_norm": 0.42223790287971497, "learning_rate": 4.151228811803224e-06, "loss": 0.7559, "step": 4097 }, { "epoch": 0.16983712545070248, "grad_norm": 0.4279736280441284, "learning_rate": 4.151021592274856e-06, "loss": 0.7524, "step": 4098 }, { "epoch": 0.16987856935637616, "grad_norm": 0.44225189089775085, "learning_rate": 4.150814372746488e-06, "loss": 0.7634, "step": 4099 }, { "epoch": 0.1699200132620498, "grad_norm": 0.4234447777271271, "learning_rate": 4.15060715321812e-06, "loss": 0.7036, "step": 4100 }, { "epoch": 0.16996145716772348, "grad_norm": 0.4547146260738373, "learning_rate": 4.150399933689751e-06, "loss": 0.7412, "step": 4101 }, { "epoch": 0.17000290107339716, "grad_norm": 0.45162782073020935, "learning_rate": 4.150192714161383e-06, "loss": 0.7188, "step": 4102 }, { "epoch": 0.17004434497907084, "grad_norm": 0.4342907965183258, "learning_rate": 4.149985494633015e-06, "loss": 0.777, "step": 4103 }, { "epoch": 0.1700857888847445, "grad_norm": 0.4333324134349823, "learning_rate": 4.149778275104646e-06, "loss": 0.7083, "step": 4104 }, { "epoch": 0.17012723279041816, "grad_norm": 0.4206410050392151, "learning_rate": 4.149571055576278e-06, "loss": 0.7418, "step": 4105 }, { "epoch": 0.17016867669609184, "grad_norm": 0.3920663297176361, "learning_rate": 4.149363836047909e-06, "loss": 0.7363, "step": 4106 }, { "epoch": 0.17021012060176552, "grad_norm": 0.4368460178375244, "learning_rate": 4.149156616519541e-06, "loss": 0.7361, "step": 4107 }, { "epoch": 0.17025156450743917, "grad_norm": 0.4273522198200226, "learning_rate": 4.148949396991173e-06, "loss": 
0.7595, "step": 4108 }, { "epoch": 0.17029300841311285, "grad_norm": 0.4365537762641907, "learning_rate": 4.148742177462804e-06, "loss": 0.7723, "step": 4109 }, { "epoch": 0.17033445231878652, "grad_norm": 0.4658549726009369, "learning_rate": 4.1485349579344356e-06, "loss": 0.7766, "step": 4110 }, { "epoch": 0.1703758962244602, "grad_norm": 0.44773876667022705, "learning_rate": 4.148327738406068e-06, "loss": 0.7271, "step": 4111 }, { "epoch": 0.17041734013013388, "grad_norm": 0.4418567717075348, "learning_rate": 4.1481205188777e-06, "loss": 0.7227, "step": 4112 }, { "epoch": 0.17045878403580753, "grad_norm": 0.4278064966201782, "learning_rate": 4.1479132993493306e-06, "loss": 0.7418, "step": 4113 }, { "epoch": 0.1705002279414812, "grad_norm": 0.41931578516960144, "learning_rate": 4.147706079820963e-06, "loss": 0.7349, "step": 4114 }, { "epoch": 0.17054167184715488, "grad_norm": 0.4626260995864868, "learning_rate": 4.147498860292594e-06, "loss": 0.7537, "step": 4115 }, { "epoch": 0.17058311575282856, "grad_norm": 0.4503745138645172, "learning_rate": 4.147291640764226e-06, "loss": 0.7825, "step": 4116 }, { "epoch": 0.1706245596585022, "grad_norm": 0.48199525475502014, "learning_rate": 4.147084421235858e-06, "loss": 0.814, "step": 4117 }, { "epoch": 0.17066600356417588, "grad_norm": 0.4059840440750122, "learning_rate": 4.146877201707489e-06, "loss": 0.7341, "step": 4118 }, { "epoch": 0.17070744746984956, "grad_norm": 0.4386116564273834, "learning_rate": 4.1466699821791206e-06, "loss": 0.7388, "step": 4119 }, { "epoch": 0.17074889137552324, "grad_norm": 0.48894020915031433, "learning_rate": 4.146462762650753e-06, "loss": 0.8145, "step": 4120 }, { "epoch": 0.1707903352811969, "grad_norm": 0.45389366149902344, "learning_rate": 4.146255543122384e-06, "loss": 0.72, "step": 4121 }, { "epoch": 0.17083177918687056, "grad_norm": 0.4295516312122345, "learning_rate": 4.1460483235940156e-06, "loss": 0.7039, "step": 4122 }, { "epoch": 0.17087322309254424, "grad_norm": 
0.44105562567710876, "learning_rate": 4.145841104065648e-06, "loss": 0.6743, "step": 4123 }, { "epoch": 0.17091466699821792, "grad_norm": 0.44636979699134827, "learning_rate": 4.145633884537279e-06, "loss": 0.7712, "step": 4124 }, { "epoch": 0.1709561109038916, "grad_norm": 0.47286689281463623, "learning_rate": 4.145426665008911e-06, "loss": 0.7695, "step": 4125 }, { "epoch": 0.17099755480956524, "grad_norm": 0.4634702205657959, "learning_rate": 4.145219445480542e-06, "loss": 0.7666, "step": 4126 }, { "epoch": 0.17103899871523892, "grad_norm": 0.4256576597690582, "learning_rate": 4.145012225952174e-06, "loss": 0.7419, "step": 4127 }, { "epoch": 0.1710804426209126, "grad_norm": 0.4284919202327728, "learning_rate": 4.144805006423806e-06, "loss": 0.7633, "step": 4128 }, { "epoch": 0.17112188652658628, "grad_norm": 0.47939515113830566, "learning_rate": 4.144597786895437e-06, "loss": 0.7878, "step": 4129 }, { "epoch": 0.17116333043225992, "grad_norm": 0.40596550703048706, "learning_rate": 4.144390567367069e-06, "loss": 0.7238, "step": 4130 }, { "epoch": 0.1712047743379336, "grad_norm": 0.439281165599823, "learning_rate": 4.144183347838701e-06, "loss": 0.7467, "step": 4131 }, { "epoch": 0.17124621824360728, "grad_norm": 0.4507501721382141, "learning_rate": 4.143976128310333e-06, "loss": 0.7004, "step": 4132 }, { "epoch": 0.17128766214928096, "grad_norm": 0.43110886216163635, "learning_rate": 4.143768908781964e-06, "loss": 0.7115, "step": 4133 }, { "epoch": 0.1713291060549546, "grad_norm": 0.40868330001831055, "learning_rate": 4.143561689253596e-06, "loss": 0.7634, "step": 4134 }, { "epoch": 0.17137054996062828, "grad_norm": 0.4266672730445862, "learning_rate": 4.143354469725227e-06, "loss": 0.7334, "step": 4135 }, { "epoch": 0.17141199386630196, "grad_norm": 0.39296823740005493, "learning_rate": 4.143147250196859e-06, "loss": 0.7358, "step": 4136 }, { "epoch": 0.17145343777197564, "grad_norm": 0.45111462473869324, "learning_rate": 4.142940030668491e-06, "loss": 0.7788, 
"step": 4137 }, { "epoch": 0.1714948816776493, "grad_norm": 0.3998972475528717, "learning_rate": 4.142732811140122e-06, "loss": 0.7017, "step": 4138 }, { "epoch": 0.17153632558332296, "grad_norm": 0.41973164677619934, "learning_rate": 4.142525591611754e-06, "loss": 0.7468, "step": 4139 }, { "epoch": 0.17157776948899664, "grad_norm": 0.404287189245224, "learning_rate": 4.142318372083386e-06, "loss": 0.6727, "step": 4140 }, { "epoch": 0.17161921339467032, "grad_norm": 0.4475458264350891, "learning_rate": 4.142111152555017e-06, "loss": 0.8062, "step": 4141 }, { "epoch": 0.171660657300344, "grad_norm": 0.3942428231239319, "learning_rate": 4.141903933026648e-06, "loss": 0.7148, "step": 4142 }, { "epoch": 0.17170210120601764, "grad_norm": 0.4355577528476715, "learning_rate": 4.141696713498281e-06, "loss": 0.7273, "step": 4143 }, { "epoch": 0.17174354511169132, "grad_norm": 0.40482786297798157, "learning_rate": 4.141489493969912e-06, "loss": 0.7554, "step": 4144 }, { "epoch": 0.171784989017365, "grad_norm": 0.4200670123100281, "learning_rate": 4.141282274441543e-06, "loss": 0.6975, "step": 4145 }, { "epoch": 0.17182643292303867, "grad_norm": 0.3948560953140259, "learning_rate": 4.141075054913176e-06, "loss": 0.7036, "step": 4146 }, { "epoch": 0.17186787682871235, "grad_norm": 0.43616798520088196, "learning_rate": 4.140867835384807e-06, "loss": 0.741, "step": 4147 }, { "epoch": 0.171909320734386, "grad_norm": 0.39573100209236145, "learning_rate": 4.140660615856439e-06, "loss": 0.7046, "step": 4148 }, { "epoch": 0.17195076464005968, "grad_norm": 0.8043258190155029, "learning_rate": 4.140453396328071e-06, "loss": 0.7205, "step": 4149 }, { "epoch": 0.17199220854573335, "grad_norm": 0.44154685735702515, "learning_rate": 4.140246176799702e-06, "loss": 0.804, "step": 4150 }, { "epoch": 0.17203365245140703, "grad_norm": 0.4319092035293579, "learning_rate": 4.140038957271333e-06, "loss": 0.7446, "step": 4151 }, { "epoch": 0.17207509635708068, "grad_norm": 0.4138278663158417, 
"learning_rate": 4.139831737742966e-06, "loss": 0.718, "step": 4152 }, { "epoch": 0.17211654026275436, "grad_norm": 0.4278290271759033, "learning_rate": 4.139624518214597e-06, "loss": 0.7494, "step": 4153 }, { "epoch": 0.17215798416842804, "grad_norm": 0.452711284160614, "learning_rate": 4.139417298686228e-06, "loss": 0.6721, "step": 4154 }, { "epoch": 0.1721994280741017, "grad_norm": 0.41718626022338867, "learning_rate": 4.139210079157861e-06, "loss": 0.7144, "step": 4155 }, { "epoch": 0.17224087197977536, "grad_norm": 0.4729371964931488, "learning_rate": 4.139002859629492e-06, "loss": 0.8325, "step": 4156 }, { "epoch": 0.17228231588544904, "grad_norm": 0.4808465242385864, "learning_rate": 4.138795640101123e-06, "loss": 0.7349, "step": 4157 }, { "epoch": 0.17232375979112272, "grad_norm": 0.4174172580242157, "learning_rate": 4.138588420572755e-06, "loss": 0.6959, "step": 4158 }, { "epoch": 0.1723652036967964, "grad_norm": 0.45987755060195923, "learning_rate": 4.138381201044387e-06, "loss": 0.7039, "step": 4159 }, { "epoch": 0.17240664760247007, "grad_norm": 0.4068875312805176, "learning_rate": 4.138173981516018e-06, "loss": 0.7463, "step": 4160 }, { "epoch": 0.17244809150814372, "grad_norm": 0.4682177007198334, "learning_rate": 4.13796676198765e-06, "loss": 0.7634, "step": 4161 }, { "epoch": 0.1724895354138174, "grad_norm": 0.4065361022949219, "learning_rate": 4.137759542459281e-06, "loss": 0.6597, "step": 4162 }, { "epoch": 0.17253097931949107, "grad_norm": 0.41984304785728455, "learning_rate": 4.137552322930913e-06, "loss": 0.7788, "step": 4163 }, { "epoch": 0.17257242322516475, "grad_norm": 0.4202154874801636, "learning_rate": 4.137345103402546e-06, "loss": 0.719, "step": 4164 }, { "epoch": 0.1726138671308384, "grad_norm": 0.4359605014324188, "learning_rate": 4.137137883874176e-06, "loss": 0.6997, "step": 4165 }, { "epoch": 0.17265531103651208, "grad_norm": 0.45010313391685486, "learning_rate": 4.136930664345808e-06, "loss": 0.7617, "step": 4166 }, { "epoch": 
0.17269675494218575, "grad_norm": 0.433231383562088, "learning_rate": 4.13672344481744e-06, "loss": 0.7437, "step": 4167 }, { "epoch": 0.17273819884785943, "grad_norm": 0.42225828766822815, "learning_rate": 4.136516225289072e-06, "loss": 0.6831, "step": 4168 }, { "epoch": 0.17277964275353308, "grad_norm": 0.4486711621284485, "learning_rate": 4.136309005760703e-06, "loss": 0.7192, "step": 4169 }, { "epoch": 0.17282108665920676, "grad_norm": 0.39400067925453186, "learning_rate": 4.136101786232335e-06, "loss": 0.676, "step": 4170 }, { "epoch": 0.17286253056488043, "grad_norm": 0.43347644805908203, "learning_rate": 4.135894566703966e-06, "loss": 0.7278, "step": 4171 }, { "epoch": 0.1729039744705541, "grad_norm": 0.4101540744304657, "learning_rate": 4.135687347175598e-06, "loss": 0.7332, "step": 4172 }, { "epoch": 0.1729454183762278, "grad_norm": 0.4142490327358246, "learning_rate": 4.13548012764723e-06, "loss": 0.6735, "step": 4173 }, { "epoch": 0.17298686228190144, "grad_norm": 0.4112522304058075, "learning_rate": 4.135272908118861e-06, "loss": 0.7722, "step": 4174 }, { "epoch": 0.17302830618757511, "grad_norm": 0.43039146065711975, "learning_rate": 4.135065688590493e-06, "loss": 0.7041, "step": 4175 }, { "epoch": 0.1730697500932488, "grad_norm": 0.44352975487709045, "learning_rate": 4.134858469062125e-06, "loss": 0.7698, "step": 4176 }, { "epoch": 0.17311119399892247, "grad_norm": 0.39544183015823364, "learning_rate": 4.134651249533756e-06, "loss": 0.6682, "step": 4177 }, { "epoch": 0.17315263790459612, "grad_norm": 0.47532305121421814, "learning_rate": 4.1344440300053876e-06, "loss": 0.7827, "step": 4178 }, { "epoch": 0.1731940818102698, "grad_norm": 0.4396321773529053, "learning_rate": 4.13423681047702e-06, "loss": 0.7385, "step": 4179 }, { "epoch": 0.17323552571594347, "grad_norm": 0.43444523215293884, "learning_rate": 4.134029590948651e-06, "loss": 0.7607, "step": 4180 }, { "epoch": 0.17327696962161715, "grad_norm": 0.42582258582115173, "learning_rate": 
4.1338223714202826e-06, "loss": 0.7576, "step": 4181 }, { "epoch": 0.1733184135272908, "grad_norm": 0.4471827745437622, "learning_rate": 4.133615151891915e-06, "loss": 0.7507, "step": 4182 }, { "epoch": 0.17335985743296448, "grad_norm": 0.47482526302337646, "learning_rate": 4.133407932363546e-06, "loss": 0.7964, "step": 4183 }, { "epoch": 0.17340130133863815, "grad_norm": 0.4040158987045288, "learning_rate": 4.1332007128351784e-06, "loss": 0.6931, "step": 4184 }, { "epoch": 0.17344274524431183, "grad_norm": 0.4479696452617645, "learning_rate": 4.13299349330681e-06, "loss": 0.7115, "step": 4185 }, { "epoch": 0.1734841891499855, "grad_norm": 0.41967323422431946, "learning_rate": 4.132786273778441e-06, "loss": 0.7207, "step": 4186 }, { "epoch": 0.17352563305565916, "grad_norm": 0.4370481073856354, "learning_rate": 4.132579054250073e-06, "loss": 0.7883, "step": 4187 }, { "epoch": 0.17356707696133283, "grad_norm": 0.42650651931762695, "learning_rate": 4.132371834721705e-06, "loss": 0.7749, "step": 4188 }, { "epoch": 0.1736085208670065, "grad_norm": 0.4119715094566345, "learning_rate": 4.132164615193336e-06, "loss": 0.7407, "step": 4189 }, { "epoch": 0.1736499647726802, "grad_norm": 0.4390302002429962, "learning_rate": 4.131957395664968e-06, "loss": 0.701, "step": 4190 }, { "epoch": 0.17369140867835384, "grad_norm": 0.40100395679473877, "learning_rate": 4.1317501761366e-06, "loss": 0.7158, "step": 4191 }, { "epoch": 0.17373285258402751, "grad_norm": 0.39124998450279236, "learning_rate": 4.131542956608231e-06, "loss": 0.7228, "step": 4192 }, { "epoch": 0.1737742964897012, "grad_norm": 0.4262610673904419, "learning_rate": 4.131335737079863e-06, "loss": 0.7166, "step": 4193 }, { "epoch": 0.17381574039537487, "grad_norm": 0.43274012207984924, "learning_rate": 4.131128517551494e-06, "loss": 0.7268, "step": 4194 }, { "epoch": 0.17385718430104852, "grad_norm": 0.38935357332229614, "learning_rate": 4.130921298023126e-06, "loss": 0.7111, "step": 4195 }, { "epoch": 
0.1738986282067222, "grad_norm": 0.41760388016700745, "learning_rate": 4.130714078494758e-06, "loss": 0.6892, "step": 4196 }, { "epoch": 0.17394007211239587, "grad_norm": 0.41746559739112854, "learning_rate": 4.130506858966389e-06, "loss": 0.7036, "step": 4197 }, { "epoch": 0.17398151601806955, "grad_norm": 0.4128584861755371, "learning_rate": 4.13029963943802e-06, "loss": 0.7644, "step": 4198 }, { "epoch": 0.17402295992374323, "grad_norm": 0.3830883204936981, "learning_rate": 4.130092419909653e-06, "loss": 0.6658, "step": 4199 }, { "epoch": 0.17406440382941687, "grad_norm": 0.4201417863368988, "learning_rate": 4.129885200381285e-06, "loss": 0.7802, "step": 4200 }, { "epoch": 0.17410584773509055, "grad_norm": 0.4346417784690857, "learning_rate": 4.129677980852916e-06, "loss": 0.7305, "step": 4201 }, { "epoch": 0.17414729164076423, "grad_norm": 0.4209913909435272, "learning_rate": 4.129470761324548e-06, "loss": 0.708, "step": 4202 }, { "epoch": 0.1741887355464379, "grad_norm": 0.42489689588546753, "learning_rate": 4.129263541796179e-06, "loss": 0.7178, "step": 4203 }, { "epoch": 0.17423017945211156, "grad_norm": 0.44613325595855713, "learning_rate": 4.129056322267811e-06, "loss": 0.7522, "step": 4204 }, { "epoch": 0.17427162335778523, "grad_norm": 0.4278450608253479, "learning_rate": 4.128849102739443e-06, "loss": 0.7473, "step": 4205 }, { "epoch": 0.1743130672634589, "grad_norm": 0.40253785252571106, "learning_rate": 4.128641883211074e-06, "loss": 0.772, "step": 4206 }, { "epoch": 0.1743545111691326, "grad_norm": 0.41442564129829407, "learning_rate": 4.128434663682706e-06, "loss": 0.7129, "step": 4207 }, { "epoch": 0.17439595507480624, "grad_norm": 0.4364762306213379, "learning_rate": 4.128227444154338e-06, "loss": 0.6649, "step": 4208 }, { "epoch": 0.1744373989804799, "grad_norm": 0.44691237807273865, "learning_rate": 4.128020224625969e-06, "loss": 0.7659, "step": 4209 }, { "epoch": 0.1744788428861536, "grad_norm": 0.4337712228298187, "learning_rate": 
4.1278130050976e-06, "loss": 0.7446, "step": 4210 }, { "epoch": 0.17452028679182727, "grad_norm": 0.5062041878700256, "learning_rate": 4.127605785569233e-06, "loss": 0.7197, "step": 4211 }, { "epoch": 0.17456173069750094, "grad_norm": 0.44443896412849426, "learning_rate": 4.127398566040864e-06, "loss": 0.7415, "step": 4212 }, { "epoch": 0.1746031746031746, "grad_norm": 0.417375773191452, "learning_rate": 4.127191346512495e-06, "loss": 0.7375, "step": 4213 }, { "epoch": 0.17464461850884827, "grad_norm": 0.4277600944042206, "learning_rate": 4.126984126984127e-06, "loss": 0.7286, "step": 4214 }, { "epoch": 0.17468606241452195, "grad_norm": 0.4206206500530243, "learning_rate": 4.126776907455759e-06, "loss": 0.6997, "step": 4215 }, { "epoch": 0.17472750632019562, "grad_norm": 0.427682489156723, "learning_rate": 4.126569687927391e-06, "loss": 0.6738, "step": 4216 }, { "epoch": 0.17476895022586927, "grad_norm": 0.39393875002861023, "learning_rate": 4.126362468399023e-06, "loss": 0.6687, "step": 4217 }, { "epoch": 0.17481039413154295, "grad_norm": 0.47567206621170044, "learning_rate": 4.126155248870654e-06, "loss": 0.7629, "step": 4218 }, { "epoch": 0.17485183803721663, "grad_norm": 0.4112507998943329, "learning_rate": 4.125948029342285e-06, "loss": 0.7551, "step": 4219 }, { "epoch": 0.1748932819428903, "grad_norm": 0.49653053283691406, "learning_rate": 4.125740809813918e-06, "loss": 0.6866, "step": 4220 }, { "epoch": 0.17493472584856398, "grad_norm": 0.4260661005973816, "learning_rate": 4.125533590285549e-06, "loss": 0.7341, "step": 4221 }, { "epoch": 0.17497616975423763, "grad_norm": 0.4743230938911438, "learning_rate": 4.12532637075718e-06, "loss": 0.8005, "step": 4222 }, { "epoch": 0.1750176136599113, "grad_norm": 0.49983835220336914, "learning_rate": 4.125119151228812e-06, "loss": 0.7412, "step": 4223 }, { "epoch": 0.17505905756558499, "grad_norm": 0.42489340901374817, "learning_rate": 4.124911931700444e-06, "loss": 0.7681, "step": 4224 }, { "epoch": 
0.17510050147125866, "grad_norm": 0.43316906690597534, "learning_rate": 4.124704712172075e-06, "loss": 0.7222, "step": 4225 }, { "epoch": 0.1751419453769323, "grad_norm": 0.42767786979675293, "learning_rate": 4.124497492643707e-06, "loss": 0.6965, "step": 4226 }, { "epoch": 0.175183389282606, "grad_norm": 0.4111917316913605, "learning_rate": 4.124290273115339e-06, "loss": 0.6895, "step": 4227 }, { "epoch": 0.17522483318827967, "grad_norm": 0.4314042329788208, "learning_rate": 4.12408305358697e-06, "loss": 0.739, "step": 4228 }, { "epoch": 0.17526627709395334, "grad_norm": 0.4350607097148895, "learning_rate": 4.123875834058602e-06, "loss": 0.7375, "step": 4229 }, { "epoch": 0.175307720999627, "grad_norm": 0.45220619440078735, "learning_rate": 4.123668614530233e-06, "loss": 0.7595, "step": 4230 }, { "epoch": 0.17534916490530067, "grad_norm": 0.45952433347702026, "learning_rate": 4.123461395001865e-06, "loss": 0.7957, "step": 4231 }, { "epoch": 0.17539060881097435, "grad_norm": 0.4205242991447449, "learning_rate": 4.123254175473497e-06, "loss": 0.7061, "step": 4232 }, { "epoch": 0.17543205271664802, "grad_norm": 0.45333150029182434, "learning_rate": 4.123046955945129e-06, "loss": 0.6853, "step": 4233 }, { "epoch": 0.1754734966223217, "grad_norm": 0.42033901810646057, "learning_rate": 4.12283973641676e-06, "loss": 0.672, "step": 4234 }, { "epoch": 0.17551494052799535, "grad_norm": 0.46275144815444946, "learning_rate": 4.122632516888392e-06, "loss": 0.7386, "step": 4235 }, { "epoch": 0.17555638443366903, "grad_norm": 0.45021653175354004, "learning_rate": 4.122425297360024e-06, "loss": 0.7385, "step": 4236 }, { "epoch": 0.1755978283393427, "grad_norm": 0.4110069274902344, "learning_rate": 4.122218077831655e-06, "loss": 0.6906, "step": 4237 }, { "epoch": 0.17563927224501638, "grad_norm": 0.4427078068256378, "learning_rate": 4.122010858303287e-06, "loss": 0.7339, "step": 4238 }, { "epoch": 0.17568071615069003, "grad_norm": 0.46116575598716736, "learning_rate": 
4.121803638774918e-06, "loss": 0.7412, "step": 4239 }, { "epoch": 0.1757221600563637, "grad_norm": 0.39947497844696045, "learning_rate": 4.12159641924655e-06, "loss": 0.7489, "step": 4240 }, { "epoch": 0.17576360396203738, "grad_norm": 0.43869084119796753, "learning_rate": 4.121389199718182e-06, "loss": 0.7412, "step": 4241 }, { "epoch": 0.17580504786771106, "grad_norm": 0.42985743284225464, "learning_rate": 4.121181980189813e-06, "loss": 0.7114, "step": 4242 }, { "epoch": 0.1758464917733847, "grad_norm": 0.41117119789123535, "learning_rate": 4.1209747606614454e-06, "loss": 0.7871, "step": 4243 }, { "epoch": 0.1758879356790584, "grad_norm": 0.41140472888946533, "learning_rate": 4.120767541133077e-06, "loss": 0.7231, "step": 4244 }, { "epoch": 0.17592937958473207, "grad_norm": 0.457061231136322, "learning_rate": 4.120560321604708e-06, "loss": 0.75, "step": 4245 }, { "epoch": 0.17597082349040574, "grad_norm": 0.46602439880371094, "learning_rate": 4.12035310207634e-06, "loss": 0.7336, "step": 4246 }, { "epoch": 0.17601226739607942, "grad_norm": 0.4160914421081543, "learning_rate": 4.120145882547972e-06, "loss": 0.689, "step": 4247 }, { "epoch": 0.17605371130175307, "grad_norm": 0.43183350563049316, "learning_rate": 4.119938663019603e-06, "loss": 0.7625, "step": 4248 }, { "epoch": 0.17609515520742675, "grad_norm": 0.3859313130378723, "learning_rate": 4.119731443491235e-06, "loss": 0.77, "step": 4249 }, { "epoch": 0.17613659911310042, "grad_norm": 0.492988646030426, "learning_rate": 4.119524223962866e-06, "loss": 0.7579, "step": 4250 }, { "epoch": 0.1761780430187741, "grad_norm": 0.39864224195480347, "learning_rate": 4.119317004434498e-06, "loss": 0.7153, "step": 4251 }, { "epoch": 0.17621948692444775, "grad_norm": 0.40799373388290405, "learning_rate": 4.1191097849061304e-06, "loss": 0.7589, "step": 4252 }, { "epoch": 0.17626093083012143, "grad_norm": 0.4360237121582031, "learning_rate": 4.118902565377762e-06, "loss": 0.729, "step": 4253 }, { "epoch": 
0.1763023747357951, "grad_norm": 0.41178247332572937, "learning_rate": 4.118695345849393e-06, "loss": 0.7241, "step": 4254 }, { "epoch": 0.17634381864146878, "grad_norm": 0.42210111021995544, "learning_rate": 4.118488126321025e-06, "loss": 0.7601, "step": 4255 }, { "epoch": 0.17638526254714243, "grad_norm": 0.4160026013851166, "learning_rate": 4.118280906792657e-06, "loss": 0.6973, "step": 4256 }, { "epoch": 0.1764267064528161, "grad_norm": 0.4246573746204376, "learning_rate": 4.118073687264288e-06, "loss": 0.7444, "step": 4257 }, { "epoch": 0.17646815035848978, "grad_norm": 0.41488221287727356, "learning_rate": 4.11786646773592e-06, "loss": 0.7316, "step": 4258 }, { "epoch": 0.17650959426416346, "grad_norm": 0.4662647247314453, "learning_rate": 4.117659248207551e-06, "loss": 0.7405, "step": 4259 }, { "epoch": 0.17655103816983714, "grad_norm": 0.3996306359767914, "learning_rate": 4.117452028679183e-06, "loss": 0.6599, "step": 4260 }, { "epoch": 0.1765924820755108, "grad_norm": 0.42388278245925903, "learning_rate": 4.117244809150815e-06, "loss": 0.6536, "step": 4261 }, { "epoch": 0.17663392598118446, "grad_norm": 0.4344223141670227, "learning_rate": 4.117037589622446e-06, "loss": 0.7434, "step": 4262 }, { "epoch": 0.17667536988685814, "grad_norm": 0.3850594758987427, "learning_rate": 4.116830370094078e-06, "loss": 0.6919, "step": 4263 }, { "epoch": 0.17671681379253182, "grad_norm": 0.45665624737739563, "learning_rate": 4.11662315056571e-06, "loss": 0.7454, "step": 4264 }, { "epoch": 0.17675825769820547, "grad_norm": 0.44247543811798096, "learning_rate": 4.116415931037341e-06, "loss": 0.7869, "step": 4265 }, { "epoch": 0.17679970160387914, "grad_norm": 0.4546608328819275, "learning_rate": 4.116208711508972e-06, "loss": 0.7522, "step": 4266 }, { "epoch": 0.17684114550955282, "grad_norm": 0.41410019993782043, "learning_rate": 4.116001491980605e-06, "loss": 0.7443, "step": 4267 }, { "epoch": 0.1768825894152265, "grad_norm": 0.4559858739376068, "learning_rate": 
4.115794272452237e-06, "loss": 0.741, "step": 4268 }, { "epoch": 0.17692403332090015, "grad_norm": 0.42087429761886597, "learning_rate": 4.115587052923868e-06, "loss": 0.7236, "step": 4269 }, { "epoch": 0.17696547722657383, "grad_norm": 0.4106200635433197, "learning_rate": 4.1153798333955e-06, "loss": 0.6926, "step": 4270 }, { "epoch": 0.1770069211322475, "grad_norm": 0.45233482122421265, "learning_rate": 4.115172613867131e-06, "loss": 0.7683, "step": 4271 }, { "epoch": 0.17704836503792118, "grad_norm": 0.43223050236701965, "learning_rate": 4.114965394338763e-06, "loss": 0.6833, "step": 4272 }, { "epoch": 0.17708980894359486, "grad_norm": 0.4224109351634979, "learning_rate": 4.114758174810395e-06, "loss": 0.712, "step": 4273 }, { "epoch": 0.1771312528492685, "grad_norm": 0.4079419672489166, "learning_rate": 4.114550955282026e-06, "loss": 0.6975, "step": 4274 }, { "epoch": 0.17717269675494218, "grad_norm": 0.41453585028648376, "learning_rate": 4.114343735753657e-06, "loss": 0.6721, "step": 4275 }, { "epoch": 0.17721414066061586, "grad_norm": 0.46978724002838135, "learning_rate": 4.11413651622529e-06, "loss": 0.7283, "step": 4276 }, { "epoch": 0.17725558456628954, "grad_norm": 0.4823302924633026, "learning_rate": 4.113929296696921e-06, "loss": 0.7764, "step": 4277 }, { "epoch": 0.1772970284719632, "grad_norm": 0.4234614968299866, "learning_rate": 4.113722077168552e-06, "loss": 0.7303, "step": 4278 }, { "epoch": 0.17733847237763686, "grad_norm": 0.44675666093826294, "learning_rate": 4.113514857640185e-06, "loss": 0.7837, "step": 4279 }, { "epoch": 0.17737991628331054, "grad_norm": 0.47405174374580383, "learning_rate": 4.113307638111816e-06, "loss": 0.7217, "step": 4280 }, { "epoch": 0.17742136018898422, "grad_norm": 0.4084571599960327, "learning_rate": 4.113100418583447e-06, "loss": 0.7075, "step": 4281 }, { "epoch": 0.1774628040946579, "grad_norm": 0.4558860957622528, "learning_rate": 4.112893199055079e-06, "loss": 0.7074, "step": 4282 }, { "epoch": 
0.17750424800033154, "grad_norm": 0.42903947830200195, "learning_rate": 4.112685979526711e-06, "loss": 0.7527, "step": 4283 }, { "epoch": 0.17754569190600522, "grad_norm": 0.40029436349868774, "learning_rate": 4.112478759998342e-06, "loss": 0.6987, "step": 4284 }, { "epoch": 0.1775871358116789, "grad_norm": 0.42030277848243713, "learning_rate": 4.112271540469975e-06, "loss": 0.6965, "step": 4285 }, { "epoch": 0.17762857971735257, "grad_norm": 0.40933334827423096, "learning_rate": 4.112064320941606e-06, "loss": 0.7434, "step": 4286 }, { "epoch": 0.17767002362302622, "grad_norm": 0.4166834056377411, "learning_rate": 4.111857101413237e-06, "loss": 0.7035, "step": 4287 }, { "epoch": 0.1777114675286999, "grad_norm": 0.43815746903419495, "learning_rate": 4.11164988188487e-06, "loss": 0.731, "step": 4288 }, { "epoch": 0.17775291143437358, "grad_norm": 0.4044334292411804, "learning_rate": 4.111442662356501e-06, "loss": 0.7351, "step": 4289 }, { "epoch": 0.17779435534004726, "grad_norm": 0.4471001923084259, "learning_rate": 4.111235442828132e-06, "loss": 0.7148, "step": 4290 }, { "epoch": 0.1778357992457209, "grad_norm": 0.4518488347530365, "learning_rate": 4.111028223299764e-06, "loss": 0.7686, "step": 4291 }, { "epoch": 0.17787724315139458, "grad_norm": 0.4025476276874542, "learning_rate": 4.110821003771396e-06, "loss": 0.6982, "step": 4292 }, { "epoch": 0.17791868705706826, "grad_norm": 0.4361026883125305, "learning_rate": 4.110613784243027e-06, "loss": 0.8044, "step": 4293 }, { "epoch": 0.17796013096274194, "grad_norm": 0.4315468966960907, "learning_rate": 4.110406564714659e-06, "loss": 0.762, "step": 4294 }, { "epoch": 0.1780015748684156, "grad_norm": 0.4462752342224121, "learning_rate": 4.110199345186291e-06, "loss": 0.7991, "step": 4295 }, { "epoch": 0.17804301877408926, "grad_norm": 0.41034141182899475, "learning_rate": 4.109992125657922e-06, "loss": 0.7325, "step": 4296 }, { "epoch": 0.17808446267976294, "grad_norm": 0.4040284752845764, "learning_rate": 
4.109784906129554e-06, "loss": 0.679, "step": 4297 }, { "epoch": 0.17812590658543662, "grad_norm": 0.46674495935440063, "learning_rate": 4.109577686601185e-06, "loss": 0.7847, "step": 4298 }, { "epoch": 0.1781673504911103, "grad_norm": 0.40785902738571167, "learning_rate": 4.109370467072817e-06, "loss": 0.7009, "step": 4299 }, { "epoch": 0.17820879439678394, "grad_norm": 0.4218190908432007, "learning_rate": 4.109163247544449e-06, "loss": 0.7023, "step": 4300 }, { "epoch": 0.17825023830245762, "grad_norm": 0.4493870437145233, "learning_rate": 4.108956028016081e-06, "loss": 0.6936, "step": 4301 }, { "epoch": 0.1782916822081313, "grad_norm": 0.5042098760604858, "learning_rate": 4.108748808487712e-06, "loss": 0.7284, "step": 4302 }, { "epoch": 0.17833312611380497, "grad_norm": 0.416984498500824, "learning_rate": 4.108541588959344e-06, "loss": 0.7144, "step": 4303 }, { "epoch": 0.17837457001947862, "grad_norm": 0.4273073077201843, "learning_rate": 4.108334369430976e-06, "loss": 0.7437, "step": 4304 }, { "epoch": 0.1784160139251523, "grad_norm": 0.41094937920570374, "learning_rate": 4.1081271499026074e-06, "loss": 0.7384, "step": 4305 }, { "epoch": 0.17845745783082598, "grad_norm": 0.5612376928329468, "learning_rate": 4.107919930374239e-06, "loss": 0.8157, "step": 4306 }, { "epoch": 0.17849890173649965, "grad_norm": 0.46674543619155884, "learning_rate": 4.10771271084587e-06, "loss": 0.7106, "step": 4307 }, { "epoch": 0.17854034564217333, "grad_norm": 0.4352622330188751, "learning_rate": 4.1075054913175024e-06, "loss": 0.7216, "step": 4308 }, { "epoch": 0.17858178954784698, "grad_norm": 0.4645962715148926, "learning_rate": 4.107298271789134e-06, "loss": 0.7854, "step": 4309 }, { "epoch": 0.17862323345352066, "grad_norm": 0.4313540756702423, "learning_rate": 4.107091052260765e-06, "loss": 0.7395, "step": 4310 }, { "epoch": 0.17866467735919433, "grad_norm": 0.42626938223838806, "learning_rate": 4.106883832732397e-06, "loss": 0.7283, "step": 4311 }, { "epoch": 
0.178706121264868, "grad_norm": 0.4383326768875122, "learning_rate": 4.106676613204029e-06, "loss": 0.7449, "step": 4312 }, { "epoch": 0.17874756517054166, "grad_norm": 0.39216348528862, "learning_rate": 4.10646939367566e-06, "loss": 0.6851, "step": 4313 }, { "epoch": 0.17878900907621534, "grad_norm": 0.42828142642974854, "learning_rate": 4.106262174147292e-06, "loss": 0.7074, "step": 4314 }, { "epoch": 0.17883045298188902, "grad_norm": 0.42427563667297363, "learning_rate": 4.106054954618924e-06, "loss": 0.6914, "step": 4315 }, { "epoch": 0.1788718968875627, "grad_norm": 0.42758116126060486, "learning_rate": 4.105847735090555e-06, "loss": 0.752, "step": 4316 }, { "epoch": 0.17891334079323634, "grad_norm": 0.41149428486824036, "learning_rate": 4.105640515562187e-06, "loss": 0.7261, "step": 4317 }, { "epoch": 0.17895478469891002, "grad_norm": 0.4862017035484314, "learning_rate": 4.105433296033818e-06, "loss": 0.7585, "step": 4318 }, { "epoch": 0.1789962286045837, "grad_norm": 0.4164459705352783, "learning_rate": 4.10522607650545e-06, "loss": 0.7014, "step": 4319 }, { "epoch": 0.17903767251025737, "grad_norm": 0.4301208257675171, "learning_rate": 4.105018856977082e-06, "loss": 0.749, "step": 4320 }, { "epoch": 0.17907911641593105, "grad_norm": 0.44435566663742065, "learning_rate": 4.104811637448714e-06, "loss": 0.7686, "step": 4321 }, { "epoch": 0.1791205603216047, "grad_norm": 0.4533887207508087, "learning_rate": 4.104604417920345e-06, "loss": 0.752, "step": 4322 }, { "epoch": 0.17916200422727838, "grad_norm": 0.4279111623764038, "learning_rate": 4.104397198391977e-06, "loss": 0.7498, "step": 4323 }, { "epoch": 0.17920344813295205, "grad_norm": 0.43398571014404297, "learning_rate": 4.104189978863609e-06, "loss": 0.7939, "step": 4324 }, { "epoch": 0.17924489203862573, "grad_norm": 0.4085802733898163, "learning_rate": 4.10398275933524e-06, "loss": 0.718, "step": 4325 }, { "epoch": 0.17928633594429938, "grad_norm": 0.4387429356575012, "learning_rate": 
4.103775539806872e-06, "loss": 0.7324, "step": 4326 }, { "epoch": 0.17932777984997306, "grad_norm": 0.5018410086631775, "learning_rate": 4.103568320278503e-06, "loss": 0.7593, "step": 4327 }, { "epoch": 0.17936922375564673, "grad_norm": 0.4165845513343811, "learning_rate": 4.103361100750135e-06, "loss": 0.7273, "step": 4328 }, { "epoch": 0.1794106676613204, "grad_norm": 0.4184613525867462, "learning_rate": 4.103153881221767e-06, "loss": 0.7676, "step": 4329 }, { "epoch": 0.17945211156699406, "grad_norm": 0.4521818161010742, "learning_rate": 4.102946661693398e-06, "loss": 0.7544, "step": 4330 }, { "epoch": 0.17949355547266774, "grad_norm": 0.41748806834220886, "learning_rate": 4.10273944216503e-06, "loss": 0.7218, "step": 4331 }, { "epoch": 0.17953499937834141, "grad_norm": 0.4290112853050232, "learning_rate": 4.102532222636662e-06, "loss": 0.701, "step": 4332 }, { "epoch": 0.1795764432840151, "grad_norm": 0.43585166335105896, "learning_rate": 4.102325003108293e-06, "loss": 0.6853, "step": 4333 }, { "epoch": 0.17961788718968877, "grad_norm": 0.41494202613830566, "learning_rate": 4.102117783579924e-06, "loss": 0.6816, "step": 4334 }, { "epoch": 0.17965933109536242, "grad_norm": 0.4312579333782196, "learning_rate": 4.101910564051557e-06, "loss": 0.7366, "step": 4335 }, { "epoch": 0.1797007750010361, "grad_norm": 0.3995453715324402, "learning_rate": 4.101703344523188e-06, "loss": 0.6433, "step": 4336 }, { "epoch": 0.17974221890670977, "grad_norm": 0.5056896805763245, "learning_rate": 4.10149612499482e-06, "loss": 0.7495, "step": 4337 }, { "epoch": 0.17978366281238345, "grad_norm": 0.4615245461463928, "learning_rate": 4.101288905466452e-06, "loss": 0.7788, "step": 4338 }, { "epoch": 0.1798251067180571, "grad_norm": 0.46029555797576904, "learning_rate": 4.101081685938083e-06, "loss": 0.7499, "step": 4339 }, { "epoch": 0.17986655062373078, "grad_norm": 0.4137091040611267, "learning_rate": 4.100874466409715e-06, "loss": 0.7512, "step": 4340 }, { "epoch": 
0.17990799452940445, "grad_norm": 0.41946813464164734, "learning_rate": 4.100667246881347e-06, "loss": 0.7297, "step": 4341 }, { "epoch": 0.17994943843507813, "grad_norm": 0.4331010580062866, "learning_rate": 4.100460027352978e-06, "loss": 0.7334, "step": 4342 }, { "epoch": 0.17999088234075178, "grad_norm": 0.435464471578598, "learning_rate": 4.100252807824609e-06, "loss": 0.7371, "step": 4343 }, { "epoch": 0.18003232624642546, "grad_norm": 0.44494694471359253, "learning_rate": 4.100045588296242e-06, "loss": 0.7443, "step": 4344 }, { "epoch": 0.18007377015209913, "grad_norm": 0.42720088362693787, "learning_rate": 4.099838368767873e-06, "loss": 0.7146, "step": 4345 }, { "epoch": 0.1801152140577728, "grad_norm": 0.45642855763435364, "learning_rate": 4.099631149239504e-06, "loss": 0.729, "step": 4346 }, { "epoch": 0.1801566579634465, "grad_norm": 0.44834575057029724, "learning_rate": 4.099423929711137e-06, "loss": 0.7793, "step": 4347 }, { "epoch": 0.18019810186912014, "grad_norm": 0.45838481187820435, "learning_rate": 4.099216710182768e-06, "loss": 0.7378, "step": 4348 }, { "epoch": 0.1802395457747938, "grad_norm": 0.40973523259162903, "learning_rate": 4.099009490654399e-06, "loss": 0.7196, "step": 4349 }, { "epoch": 0.1802809896804675, "grad_norm": 0.4404100775718689, "learning_rate": 4.098802271126031e-06, "loss": 0.7258, "step": 4350 }, { "epoch": 0.18032243358614117, "grad_norm": 0.4101592004299164, "learning_rate": 4.098595051597663e-06, "loss": 0.7319, "step": 4351 }, { "epoch": 0.18036387749181482, "grad_norm": 0.4168979525566101, "learning_rate": 4.098387832069294e-06, "loss": 0.7074, "step": 4352 }, { "epoch": 0.1804053213974885, "grad_norm": 0.41221973299980164, "learning_rate": 4.098180612540927e-06, "loss": 0.7434, "step": 4353 }, { "epoch": 0.18044676530316217, "grad_norm": 0.41986793279647827, "learning_rate": 4.097973393012557e-06, "loss": 0.7124, "step": 4354 }, { "epoch": 0.18048820920883585, "grad_norm": 0.476686954498291, "learning_rate": 
4.097766173484189e-06, "loss": 0.7437, "step": 4355 }, { "epoch": 0.18052965311450953, "grad_norm": 0.4245510399341583, "learning_rate": 4.097558953955822e-06, "loss": 0.739, "step": 4356 }, { "epoch": 0.18057109702018317, "grad_norm": 0.3986087739467621, "learning_rate": 4.097351734427453e-06, "loss": 0.7214, "step": 4357 }, { "epoch": 0.18061254092585685, "grad_norm": 0.40530362725257874, "learning_rate": 4.097144514899084e-06, "loss": 0.6965, "step": 4358 }, { "epoch": 0.18065398483153053, "grad_norm": 0.425922691822052, "learning_rate": 4.096937295370716e-06, "loss": 0.7234, "step": 4359 }, { "epoch": 0.1806954287372042, "grad_norm": 0.4088808000087738, "learning_rate": 4.096730075842348e-06, "loss": 0.7268, "step": 4360 }, { "epoch": 0.18073687264287785, "grad_norm": 0.4442114233970642, "learning_rate": 4.0965228563139794e-06, "loss": 0.7271, "step": 4361 }, { "epoch": 0.18077831654855153, "grad_norm": 0.4285662770271301, "learning_rate": 4.096315636785611e-06, "loss": 0.751, "step": 4362 }, { "epoch": 0.1808197604542252, "grad_norm": 0.39213985204696655, "learning_rate": 4.096108417257242e-06, "loss": 0.7224, "step": 4363 }, { "epoch": 0.18086120435989889, "grad_norm": 0.4457820653915405, "learning_rate": 4.0959011977288744e-06, "loss": 0.7184, "step": 4364 }, { "epoch": 0.18090264826557254, "grad_norm": 0.4317987263202667, "learning_rate": 4.095693978200506e-06, "loss": 0.6943, "step": 4365 }, { "epoch": 0.1809440921712462, "grad_norm": 0.45262524485588074, "learning_rate": 4.095486758672137e-06, "loss": 0.7598, "step": 4366 }, { "epoch": 0.1809855360769199, "grad_norm": 0.4363974928855896, "learning_rate": 4.0952795391437694e-06, "loss": 0.7041, "step": 4367 }, { "epoch": 0.18102697998259357, "grad_norm": 0.40521758794784546, "learning_rate": 4.095072319615401e-06, "loss": 0.7156, "step": 4368 }, { "epoch": 0.18106842388826724, "grad_norm": 0.4111277163028717, "learning_rate": 4.094865100087033e-06, "loss": 0.7568, "step": 4369 }, { "epoch": 
0.1811098677939409, "grad_norm": 0.4023473560810089, "learning_rate": 4.094657880558664e-06, "loss": 0.6863, "step": 4370 }, { "epoch": 0.18115131169961457, "grad_norm": 0.4092337191104889, "learning_rate": 4.094450661030296e-06, "loss": 0.6956, "step": 4371 }, { "epoch": 0.18119275560528825, "grad_norm": 0.40245145559310913, "learning_rate": 4.094243441501927e-06, "loss": 0.6514, "step": 4372 }, { "epoch": 0.18123419951096192, "grad_norm": 0.4169035255908966, "learning_rate": 4.0940362219735594e-06, "loss": 0.7634, "step": 4373 }, { "epoch": 0.18127564341663557, "grad_norm": 0.42160505056381226, "learning_rate": 4.093829002445191e-06, "loss": 0.6617, "step": 4374 }, { "epoch": 0.18131708732230925, "grad_norm": 0.4221780300140381, "learning_rate": 4.093621782916822e-06, "loss": 0.6989, "step": 4375 }, { "epoch": 0.18135853122798293, "grad_norm": 0.4273332357406616, "learning_rate": 4.0934145633884544e-06, "loss": 0.7268, "step": 4376 }, { "epoch": 0.1813999751336566, "grad_norm": 0.42516419291496277, "learning_rate": 4.093207343860086e-06, "loss": 0.7375, "step": 4377 }, { "epoch": 0.18144141903933025, "grad_norm": 0.4265301823616028, "learning_rate": 4.093000124331717e-06, "loss": 0.7668, "step": 4378 }, { "epoch": 0.18148286294500393, "grad_norm": 0.4175337851047516, "learning_rate": 4.092792904803349e-06, "loss": 0.7224, "step": 4379 }, { "epoch": 0.1815243068506776, "grad_norm": 0.4374377727508545, "learning_rate": 4.092585685274981e-06, "loss": 0.7087, "step": 4380 }, { "epoch": 0.18156575075635129, "grad_norm": 0.4657685160636902, "learning_rate": 4.092378465746612e-06, "loss": 0.7585, "step": 4381 }, { "epoch": 0.18160719466202496, "grad_norm": 0.42439979314804077, "learning_rate": 4.092171246218244e-06, "loss": 0.7021, "step": 4382 }, { "epoch": 0.1816486385676986, "grad_norm": 0.4083669185638428, "learning_rate": 4.091964026689876e-06, "loss": 0.7361, "step": 4383 }, { "epoch": 0.1816900824733723, "grad_norm": 0.425096720457077, "learning_rate": 
4.091756807161507e-06, "loss": 0.703, "step": 4384 }, { "epoch": 0.18173152637904597, "grad_norm": 0.43057724833488464, "learning_rate": 4.091549587633139e-06, "loss": 0.6553, "step": 4385 }, { "epoch": 0.18177297028471964, "grad_norm": 0.4205147624015808, "learning_rate": 4.09134236810477e-06, "loss": 0.6995, "step": 4386 }, { "epoch": 0.1818144141903933, "grad_norm": 0.43575456738471985, "learning_rate": 4.091135148576402e-06, "loss": 0.7371, "step": 4387 }, { "epoch": 0.18185585809606697, "grad_norm": 0.41343721747398376, "learning_rate": 4.090927929048034e-06, "loss": 0.6698, "step": 4388 }, { "epoch": 0.18189730200174065, "grad_norm": 0.45800450444221497, "learning_rate": 4.090720709519666e-06, "loss": 0.7058, "step": 4389 }, { "epoch": 0.18193874590741432, "grad_norm": 0.4350242614746094, "learning_rate": 4.090513489991297e-06, "loss": 0.7446, "step": 4390 }, { "epoch": 0.18198018981308797, "grad_norm": 0.4259223937988281, "learning_rate": 4.090306270462929e-06, "loss": 0.7288, "step": 4391 }, { "epoch": 0.18202163371876165, "grad_norm": 0.40063032507896423, "learning_rate": 4.090099050934561e-06, "loss": 0.7249, "step": 4392 }, { "epoch": 0.18206307762443533, "grad_norm": 0.40788915753364563, "learning_rate": 4.089891831406192e-06, "loss": 0.7175, "step": 4393 }, { "epoch": 0.182104521530109, "grad_norm": 0.43516749143600464, "learning_rate": 4.089684611877824e-06, "loss": 0.729, "step": 4394 }, { "epoch": 0.18214596543578268, "grad_norm": 0.42761242389678955, "learning_rate": 4.089477392349455e-06, "loss": 0.7712, "step": 4395 }, { "epoch": 0.18218740934145633, "grad_norm": 0.4414919912815094, "learning_rate": 4.089270172821087e-06, "loss": 0.7437, "step": 4396 }, { "epoch": 0.18222885324713, "grad_norm": 0.4128112196922302, "learning_rate": 4.089062953292719e-06, "loss": 0.7532, "step": 4397 }, { "epoch": 0.18227029715280368, "grad_norm": 0.48242437839508057, "learning_rate": 4.08885573376435e-06, "loss": 0.7078, "step": 4398 }, { "epoch": 
0.18231174105847736, "grad_norm": 0.5164169073104858, "learning_rate": 4.088648514235982e-06, "loss": 0.811, "step": 4399 }, { "epoch": 0.182353184964151, "grad_norm": 0.4361169934272766, "learning_rate": 4.088441294707614e-06, "loss": 0.7205, "step": 4400 }, { "epoch": 0.1823946288698247, "grad_norm": 0.41624781489372253, "learning_rate": 4.088234075179245e-06, "loss": 0.6914, "step": 4401 }, { "epoch": 0.18243607277549836, "grad_norm": 0.4548285901546478, "learning_rate": 4.088026855650876e-06, "loss": 0.7029, "step": 4402 }, { "epoch": 0.18247751668117204, "grad_norm": 0.4837510585784912, "learning_rate": 4.087819636122509e-06, "loss": 0.7388, "step": 4403 }, { "epoch": 0.1825189605868457, "grad_norm": 0.43088245391845703, "learning_rate": 4.08761241659414e-06, "loss": 0.7451, "step": 4404 }, { "epoch": 0.18256040449251937, "grad_norm": 0.40221038460731506, "learning_rate": 4.087405197065772e-06, "loss": 0.6858, "step": 4405 }, { "epoch": 0.18260184839819305, "grad_norm": 0.38958480954170227, "learning_rate": 4.087197977537404e-06, "loss": 0.7008, "step": 4406 }, { "epoch": 0.18264329230386672, "grad_norm": 0.43535566329956055, "learning_rate": 4.086990758009035e-06, "loss": 0.7241, "step": 4407 }, { "epoch": 0.1826847362095404, "grad_norm": 0.4632434546947479, "learning_rate": 4.086783538480667e-06, "loss": 0.75, "step": 4408 }, { "epoch": 0.18272618011521405, "grad_norm": 0.390055775642395, "learning_rate": 4.086576318952299e-06, "loss": 0.6946, "step": 4409 }, { "epoch": 0.18276762402088773, "grad_norm": 0.38322508335113525, "learning_rate": 4.08636909942393e-06, "loss": 0.715, "step": 4410 }, { "epoch": 0.1828090679265614, "grad_norm": 0.41152116656303406, "learning_rate": 4.086161879895561e-06, "loss": 0.6768, "step": 4411 }, { "epoch": 0.18285051183223508, "grad_norm": 0.4003947973251343, "learning_rate": 4.085954660367194e-06, "loss": 0.7375, "step": 4412 }, { "epoch": 0.18289195573790873, "grad_norm": 0.4073386490345001, "learning_rate": 
4.085747440838825e-06, "loss": 0.697, "step": 4413 }, { "epoch": 0.1829333996435824, "grad_norm": 0.43427029252052307, "learning_rate": 4.085540221310456e-06, "loss": 0.7385, "step": 4414 }, { "epoch": 0.18297484354925608, "grad_norm": 0.4059550166130066, "learning_rate": 4.085333001782088e-06, "loss": 0.7085, "step": 4415 }, { "epoch": 0.18301628745492976, "grad_norm": 0.40467342734336853, "learning_rate": 4.08512578225372e-06, "loss": 0.7189, "step": 4416 }, { "epoch": 0.1830577313606034, "grad_norm": 0.48845353722572327, "learning_rate": 4.084918562725351e-06, "loss": 0.7295, "step": 4417 }, { "epoch": 0.1830991752662771, "grad_norm": 0.46005767583847046, "learning_rate": 4.084711343196983e-06, "loss": 0.7432, "step": 4418 }, { "epoch": 0.18314061917195076, "grad_norm": 0.41732802987098694, "learning_rate": 4.084504123668615e-06, "loss": 0.7646, "step": 4419 }, { "epoch": 0.18318206307762444, "grad_norm": 0.45906782150268555, "learning_rate": 4.0842969041402464e-06, "loss": 0.7654, "step": 4420 }, { "epoch": 0.18322350698329812, "grad_norm": 0.42317450046539307, "learning_rate": 4.084089684611879e-06, "loss": 0.7029, "step": 4421 }, { "epoch": 0.18326495088897177, "grad_norm": 0.4046235978603363, "learning_rate": 4.083882465083509e-06, "loss": 0.6926, "step": 4422 }, { "epoch": 0.18330639479464544, "grad_norm": 0.4445026218891144, "learning_rate": 4.0836752455551414e-06, "loss": 0.7227, "step": 4423 }, { "epoch": 0.18334783870031912, "grad_norm": 0.4224022924900055, "learning_rate": 4.083468026026773e-06, "loss": 0.6873, "step": 4424 }, { "epoch": 0.1833892826059928, "grad_norm": 0.4156215488910675, "learning_rate": 4.083260806498405e-06, "loss": 0.7388, "step": 4425 }, { "epoch": 0.18343072651166645, "grad_norm": 0.400493860244751, "learning_rate": 4.0830535869700364e-06, "loss": 0.7314, "step": 4426 }, { "epoch": 0.18347217041734012, "grad_norm": 0.4409414231777191, "learning_rate": 4.082846367441668e-06, "loss": 0.7916, "step": 4427 }, { "epoch": 
0.1835136143230138, "grad_norm": 0.4662216305732727, "learning_rate": 4.0826391479133e-06, "loss": 0.7144, "step": 4428 }, { "epoch": 0.18355505822868748, "grad_norm": 0.40235865116119385, "learning_rate": 4.0824319283849314e-06, "loss": 0.6978, "step": 4429 }, { "epoch": 0.18359650213436116, "grad_norm": 0.4039965569972992, "learning_rate": 4.082224708856563e-06, "loss": 0.6951, "step": 4430 }, { "epoch": 0.1836379460400348, "grad_norm": 0.416631817817688, "learning_rate": 4.082017489328194e-06, "loss": 0.6821, "step": 4431 }, { "epoch": 0.18367938994570848, "grad_norm": 0.43682849407196045, "learning_rate": 4.0818102697998264e-06, "loss": 0.7312, "step": 4432 }, { "epoch": 0.18372083385138216, "grad_norm": 0.4431561827659607, "learning_rate": 4.081603050271458e-06, "loss": 0.7601, "step": 4433 }, { "epoch": 0.18376227775705584, "grad_norm": 0.42798924446105957, "learning_rate": 4.081395830743089e-06, "loss": 0.719, "step": 4434 }, { "epoch": 0.18380372166272949, "grad_norm": 0.48576799035072327, "learning_rate": 4.0811886112147214e-06, "loss": 0.8704, "step": 4435 }, { "epoch": 0.18384516556840316, "grad_norm": 0.4031914174556732, "learning_rate": 4.080981391686353e-06, "loss": 0.7087, "step": 4436 }, { "epoch": 0.18388660947407684, "grad_norm": 0.46489468216896057, "learning_rate": 4.080774172157985e-06, "loss": 0.7272, "step": 4437 }, { "epoch": 0.18392805337975052, "grad_norm": 0.39807406067848206, "learning_rate": 4.080566952629616e-06, "loss": 0.7043, "step": 4438 }, { "epoch": 0.18396949728542417, "grad_norm": 0.44383782148361206, "learning_rate": 4.080359733101248e-06, "loss": 0.6855, "step": 4439 }, { "epoch": 0.18401094119109784, "grad_norm": 0.4440809190273285, "learning_rate": 4.080152513572879e-06, "loss": 0.8196, "step": 4440 }, { "epoch": 0.18405238509677152, "grad_norm": 0.4540383815765381, "learning_rate": 4.0799452940445115e-06, "loss": 0.739, "step": 4441 }, { "epoch": 0.1840938290024452, "grad_norm": 0.44314637780189514, "learning_rate": 
4.079738074516143e-06, "loss": 0.7979, "step": 4442 }, { "epoch": 0.18413527290811887, "grad_norm": 0.434135764837265, "learning_rate": 4.079530854987774e-06, "loss": 0.7267, "step": 4443 }, { "epoch": 0.18417671681379252, "grad_norm": 0.4147430956363678, "learning_rate": 4.0793236354594065e-06, "loss": 0.7275, "step": 4444 }, { "epoch": 0.1842181607194662, "grad_norm": 0.47405385971069336, "learning_rate": 4.079116415931038e-06, "loss": 0.7388, "step": 4445 }, { "epoch": 0.18425960462513988, "grad_norm": 0.4174647927284241, "learning_rate": 4.078909196402669e-06, "loss": 0.7234, "step": 4446 }, { "epoch": 0.18430104853081355, "grad_norm": 0.4337713420391083, "learning_rate": 4.078701976874301e-06, "loss": 0.7205, "step": 4447 }, { "epoch": 0.1843424924364872, "grad_norm": 0.44968995451927185, "learning_rate": 4.078494757345933e-06, "loss": 0.7454, "step": 4448 }, { "epoch": 0.18438393634216088, "grad_norm": 0.47263431549072266, "learning_rate": 4.078287537817564e-06, "loss": 0.7214, "step": 4449 }, { "epoch": 0.18442538024783456, "grad_norm": 0.4364013373851776, "learning_rate": 4.078080318289196e-06, "loss": 0.7451, "step": 4450 }, { "epoch": 0.18446682415350824, "grad_norm": 0.42869269847869873, "learning_rate": 4.077873098760828e-06, "loss": 0.7695, "step": 4451 }, { "epoch": 0.18450826805918188, "grad_norm": 0.4293075203895569, "learning_rate": 4.077665879232459e-06, "loss": 0.7273, "step": 4452 }, { "epoch": 0.18454971196485556, "grad_norm": 0.4507058560848236, "learning_rate": 4.0774586597040915e-06, "loss": 0.7593, "step": 4453 }, { "epoch": 0.18459115587052924, "grad_norm": 0.4253017008304596, "learning_rate": 4.077251440175722e-06, "loss": 0.7192, "step": 4454 }, { "epoch": 0.18463259977620292, "grad_norm": 0.41147664189338684, "learning_rate": 4.077044220647354e-06, "loss": 0.7231, "step": 4455 }, { "epoch": 0.1846740436818766, "grad_norm": 0.4458121061325073, "learning_rate": 4.076837001118986e-06, "loss": 0.7437, "step": 4456 }, { "epoch": 
0.18471548758755024, "grad_norm": 0.46288084983825684, "learning_rate": 4.076629781590618e-06, "loss": 0.7725, "step": 4457 }, { "epoch": 0.18475693149322392, "grad_norm": 0.39144647121429443, "learning_rate": 4.076422562062249e-06, "loss": 0.6869, "step": 4458 }, { "epoch": 0.1847983753988976, "grad_norm": 0.4227301776409149, "learning_rate": 4.076215342533881e-06, "loss": 0.7253, "step": 4459 }, { "epoch": 0.18483981930457127, "grad_norm": 0.45118051767349243, "learning_rate": 4.076008123005513e-06, "loss": 0.7749, "step": 4460 }, { "epoch": 0.18488126321024492, "grad_norm": 0.43180012702941895, "learning_rate": 4.075800903477144e-06, "loss": 0.7208, "step": 4461 }, { "epoch": 0.1849227071159186, "grad_norm": 0.4133701026439667, "learning_rate": 4.075593683948776e-06, "loss": 0.7098, "step": 4462 }, { "epoch": 0.18496415102159228, "grad_norm": 0.3951307535171509, "learning_rate": 4.075386464420407e-06, "loss": 0.6824, "step": 4463 }, { "epoch": 0.18500559492726595, "grad_norm": 0.4145299196243286, "learning_rate": 4.075179244892039e-06, "loss": 0.7207, "step": 4464 }, { "epoch": 0.1850470388329396, "grad_norm": 0.43935656547546387, "learning_rate": 4.074972025363671e-06, "loss": 0.7515, "step": 4465 }, { "epoch": 0.18508848273861328, "grad_norm": 0.3960407078266144, "learning_rate": 4.074764805835302e-06, "loss": 0.7119, "step": 4466 }, { "epoch": 0.18512992664428696, "grad_norm": 0.44292742013931274, "learning_rate": 4.074557586306933e-06, "loss": 0.7581, "step": 4467 }, { "epoch": 0.18517137054996063, "grad_norm": 0.4133780598640442, "learning_rate": 4.074350366778566e-06, "loss": 0.728, "step": 4468 }, { "epoch": 0.1852128144556343, "grad_norm": 0.4245140254497528, "learning_rate": 4.074143147250197e-06, "loss": 0.6985, "step": 4469 }, { "epoch": 0.18525425836130796, "grad_norm": 0.37724199891090393, "learning_rate": 4.073935927721828e-06, "loss": 0.7209, "step": 4470 }, { "epoch": 0.18529570226698164, "grad_norm": 0.42624831199645996, "learning_rate": 
4.073728708193461e-06, "loss": 0.71, "step": 4471 }, { "epoch": 0.18533714617265531, "grad_norm": 0.43911442160606384, "learning_rate": 4.073521488665092e-06, "loss": 0.7061, "step": 4472 }, { "epoch": 0.185378590078329, "grad_norm": 0.4500071406364441, "learning_rate": 4.073314269136724e-06, "loss": 0.7402, "step": 4473 }, { "epoch": 0.18542003398400264, "grad_norm": 0.4119059443473816, "learning_rate": 4.073107049608356e-06, "loss": 0.7153, "step": 4474 }, { "epoch": 0.18546147788967632, "grad_norm": 0.3883725106716156, "learning_rate": 4.072899830079987e-06, "loss": 0.6755, "step": 4475 }, { "epoch": 0.18550292179535, "grad_norm": 0.44351139664649963, "learning_rate": 4.072692610551618e-06, "loss": 0.6979, "step": 4476 }, { "epoch": 0.18554436570102367, "grad_norm": 0.41512972116470337, "learning_rate": 4.072485391023251e-06, "loss": 0.7676, "step": 4477 }, { "epoch": 0.18558580960669732, "grad_norm": 0.4742549657821655, "learning_rate": 4.072278171494882e-06, "loss": 0.751, "step": 4478 }, { "epoch": 0.185627253512371, "grad_norm": 0.4293517768383026, "learning_rate": 4.0720709519665134e-06, "loss": 0.7659, "step": 4479 }, { "epoch": 0.18566869741804468, "grad_norm": 0.4198879599571228, "learning_rate": 4.071863732438146e-06, "loss": 0.7866, "step": 4480 }, { "epoch": 0.18571014132371835, "grad_norm": 0.4431789219379425, "learning_rate": 4.071656512909777e-06, "loss": 0.7429, "step": 4481 }, { "epoch": 0.18575158522939203, "grad_norm": 0.3953503966331482, "learning_rate": 4.0714492933814084e-06, "loss": 0.6848, "step": 4482 }, { "epoch": 0.18579302913506568, "grad_norm": 0.40179479122161865, "learning_rate": 4.07124207385304e-06, "loss": 0.7356, "step": 4483 }, { "epoch": 0.18583447304073936, "grad_norm": 0.4447646141052246, "learning_rate": 4.071034854324672e-06, "loss": 0.7722, "step": 4484 }, { "epoch": 0.18587591694641303, "grad_norm": 0.4205664396286011, "learning_rate": 4.0708276347963034e-06, "loss": 0.7019, "step": 4485 }, { "epoch": 0.1859173608520867, 
"grad_norm": 0.3871537148952484, "learning_rate": 4.070620415267935e-06, "loss": 0.6826, "step": 4486 }, { "epoch": 0.18595880475776036, "grad_norm": 0.42107969522476196, "learning_rate": 4.070413195739567e-06, "loss": 0.6982, "step": 4487 }, { "epoch": 0.18600024866343404, "grad_norm": 0.394418865442276, "learning_rate": 4.0702059762111984e-06, "loss": 0.6499, "step": 4488 }, { "epoch": 0.18604169256910771, "grad_norm": 0.4322035014629364, "learning_rate": 4.069998756682831e-06, "loss": 0.6816, "step": 4489 }, { "epoch": 0.1860831364747814, "grad_norm": 0.41145429015159607, "learning_rate": 4.069791537154461e-06, "loss": 0.7397, "step": 4490 }, { "epoch": 0.18612458038045504, "grad_norm": 0.45068588852882385, "learning_rate": 4.0695843176260934e-06, "loss": 0.7671, "step": 4491 }, { "epoch": 0.18616602428612872, "grad_norm": 0.4302988648414612, "learning_rate": 4.069377098097725e-06, "loss": 0.7092, "step": 4492 }, { "epoch": 0.1862074681918024, "grad_norm": 0.43004539608955383, "learning_rate": 4.069169878569357e-06, "loss": 0.7545, "step": 4493 }, { "epoch": 0.18624891209747607, "grad_norm": 0.41236555576324463, "learning_rate": 4.0689626590409884e-06, "loss": 0.7395, "step": 4494 }, { "epoch": 0.18629035600314975, "grad_norm": 0.4554157555103302, "learning_rate": 4.06875543951262e-06, "loss": 0.7711, "step": 4495 }, { "epoch": 0.1863317999088234, "grad_norm": 0.39537763595581055, "learning_rate": 4.068548219984252e-06, "loss": 0.7258, "step": 4496 }, { "epoch": 0.18637324381449707, "grad_norm": 0.4299403131008148, "learning_rate": 4.0683410004558834e-06, "loss": 0.7246, "step": 4497 }, { "epoch": 0.18641468772017075, "grad_norm": 0.4268067181110382, "learning_rate": 4.068133780927515e-06, "loss": 0.7306, "step": 4498 }, { "epoch": 0.18645613162584443, "grad_norm": 0.3723105788230896, "learning_rate": 4.067926561399146e-06, "loss": 0.6775, "step": 4499 }, { "epoch": 0.18649757553151808, "grad_norm": 0.4244597256183624, "learning_rate": 4.0677193418707785e-06, 
"loss": 0.7826, "step": 4500 }, { "epoch": 0.18653901943719176, "grad_norm": 0.38987529277801514, "learning_rate": 4.06751212234241e-06, "loss": 0.7034, "step": 4501 }, { "epoch": 0.18658046334286543, "grad_norm": 0.4161781668663025, "learning_rate": 4.067304902814041e-06, "loss": 0.7283, "step": 4502 }, { "epoch": 0.1866219072485391, "grad_norm": 0.39756909012794495, "learning_rate": 4.0670976832856735e-06, "loss": 0.7031, "step": 4503 }, { "epoch": 0.1866633511542128, "grad_norm": 0.43187960982322693, "learning_rate": 4.066890463757305e-06, "loss": 0.7335, "step": 4504 }, { "epoch": 0.18670479505988644, "grad_norm": 0.44504618644714355, "learning_rate": 4.066683244228937e-06, "loss": 0.7786, "step": 4505 }, { "epoch": 0.1867462389655601, "grad_norm": 0.3973124325275421, "learning_rate": 4.066476024700568e-06, "loss": 0.743, "step": 4506 }, { "epoch": 0.1867876828712338, "grad_norm": 0.40122586488723755, "learning_rate": 4.0662688051722e-06, "loss": 0.7004, "step": 4507 }, { "epoch": 0.18682912677690747, "grad_norm": 0.4111984968185425, "learning_rate": 4.066061585643831e-06, "loss": 0.7212, "step": 4508 }, { "epoch": 0.18687057068258112, "grad_norm": 0.3946007490158081, "learning_rate": 4.0658543661154635e-06, "loss": 0.6963, "step": 4509 }, { "epoch": 0.1869120145882548, "grad_norm": 0.4154515862464905, "learning_rate": 4.065647146587095e-06, "loss": 0.7365, "step": 4510 }, { "epoch": 0.18695345849392847, "grad_norm": 0.44412219524383545, "learning_rate": 4.065439927058726e-06, "loss": 0.7659, "step": 4511 }, { "epoch": 0.18699490239960215, "grad_norm": 0.4414145350456238, "learning_rate": 4.0652327075303585e-06, "loss": 0.7441, "step": 4512 }, { "epoch": 0.1870363463052758, "grad_norm": 0.4308534264564514, "learning_rate": 4.06502548800199e-06, "loss": 0.7639, "step": 4513 }, { "epoch": 0.18707779021094947, "grad_norm": 0.42125049233436584, "learning_rate": 4.064818268473621e-06, "loss": 0.6951, "step": 4514 }, { "epoch": 0.18711923411662315, "grad_norm": 
0.461679607629776, "learning_rate": 4.064611048945253e-06, "loss": 0.7249, "step": 4515 }, { "epoch": 0.18716067802229683, "grad_norm": 0.4059103727340698, "learning_rate": 4.064403829416885e-06, "loss": 0.7175, "step": 4516 }, { "epoch": 0.1872021219279705, "grad_norm": 0.45587003231048584, "learning_rate": 4.064196609888516e-06, "loss": 0.7754, "step": 4517 }, { "epoch": 0.18724356583364415, "grad_norm": 0.4460197389125824, "learning_rate": 4.063989390360148e-06, "loss": 0.7114, "step": 4518 }, { "epoch": 0.18728500973931783, "grad_norm": 0.44447067379951477, "learning_rate": 4.063782170831779e-06, "loss": 0.7465, "step": 4519 }, { "epoch": 0.1873264536449915, "grad_norm": 0.4353641867637634, "learning_rate": 4.063574951303411e-06, "loss": 0.7134, "step": 4520 }, { "epoch": 0.18736789755066519, "grad_norm": 0.4644107222557068, "learning_rate": 4.0633677317750435e-06, "loss": 0.7518, "step": 4521 }, { "epoch": 0.18740934145633883, "grad_norm": 0.4185137450695038, "learning_rate": 4.063160512246674e-06, "loss": 0.7175, "step": 4522 }, { "epoch": 0.1874507853620125, "grad_norm": 0.4142124652862549, "learning_rate": 4.062953292718306e-06, "loss": 0.7031, "step": 4523 }, { "epoch": 0.1874922292676862, "grad_norm": 0.4611794352531433, "learning_rate": 4.062746073189938e-06, "loss": 0.728, "step": 4524 }, { "epoch": 0.18753367317335987, "grad_norm": 0.44782915711402893, "learning_rate": 4.06253885366157e-06, "loss": 0.7666, "step": 4525 }, { "epoch": 0.18757511707903352, "grad_norm": 0.39586141705513, "learning_rate": 4.062331634133201e-06, "loss": 0.6879, "step": 4526 }, { "epoch": 0.1876165609847072, "grad_norm": 0.415486603975296, "learning_rate": 4.062124414604833e-06, "loss": 0.7319, "step": 4527 }, { "epoch": 0.18765800489038087, "grad_norm": 0.46490809321403503, "learning_rate": 4.061917195076464e-06, "loss": 0.7817, "step": 4528 }, { "epoch": 0.18769944879605455, "grad_norm": 0.4135245978832245, "learning_rate": 4.061709975548096e-06, "loss": 0.73, "step": 4529 
}, { "epoch": 0.18774089270172822, "grad_norm": 0.421370267868042, "learning_rate": 4.061502756019728e-06, "loss": 0.7412, "step": 4530 }, { "epoch": 0.18778233660740187, "grad_norm": 0.39247825741767883, "learning_rate": 4.061295536491359e-06, "loss": 0.6741, "step": 4531 }, { "epoch": 0.18782378051307555, "grad_norm": 0.41833725571632385, "learning_rate": 4.061088316962991e-06, "loss": 0.7236, "step": 4532 }, { "epoch": 0.18786522441874923, "grad_norm": 0.45110923051834106, "learning_rate": 4.060881097434623e-06, "loss": 0.7419, "step": 4533 }, { "epoch": 0.1879066683244229, "grad_norm": 0.45057857036590576, "learning_rate": 4.060673877906254e-06, "loss": 0.7288, "step": 4534 }, { "epoch": 0.18794811223009655, "grad_norm": 0.4229746162891388, "learning_rate": 4.060466658377885e-06, "loss": 0.7893, "step": 4535 }, { "epoch": 0.18798955613577023, "grad_norm": 0.4103749692440033, "learning_rate": 4.060259438849518e-06, "loss": 0.7485, "step": 4536 }, { "epoch": 0.1880310000414439, "grad_norm": 0.4778692424297333, "learning_rate": 4.060052219321149e-06, "loss": 0.7432, "step": 4537 }, { "epoch": 0.18807244394711758, "grad_norm": 0.4249318540096283, "learning_rate": 4.0598449997927804e-06, "loss": 0.7505, "step": 4538 }, { "epoch": 0.18811388785279123, "grad_norm": 0.39245545864105225, "learning_rate": 4.059637780264413e-06, "loss": 0.7092, "step": 4539 }, { "epoch": 0.1881553317584649, "grad_norm": 0.42722639441490173, "learning_rate": 4.059430560736044e-06, "loss": 0.7949, "step": 4540 }, { "epoch": 0.1881967756641386, "grad_norm": 0.42837244272232056, "learning_rate": 4.059223341207676e-06, "loss": 0.71, "step": 4541 }, { "epoch": 0.18823821956981227, "grad_norm": 0.3988135755062103, "learning_rate": 4.059016121679308e-06, "loss": 0.6587, "step": 4542 }, { "epoch": 0.18827966347548594, "grad_norm": 0.43268293142318726, "learning_rate": 4.058808902150939e-06, "loss": 0.7151, "step": 4543 }, { "epoch": 0.1883211073811596, "grad_norm": 0.39718854427337646, 
"learning_rate": 4.0586016826225704e-06, "loss": 0.6899, "step": 4544 }, { "epoch": 0.18836255128683327, "grad_norm": 0.38562101125717163, "learning_rate": 4.058394463094203e-06, "loss": 0.6707, "step": 4545 }, { "epoch": 0.18840399519250695, "grad_norm": 0.4162629246711731, "learning_rate": 4.058187243565834e-06, "loss": 0.7649, "step": 4546 }, { "epoch": 0.18844543909818062, "grad_norm": 0.42889317870140076, "learning_rate": 4.0579800240374654e-06, "loss": 0.6963, "step": 4547 }, { "epoch": 0.18848688300385427, "grad_norm": 0.39304643869400024, "learning_rate": 4.057772804509098e-06, "loss": 0.6926, "step": 4548 }, { "epoch": 0.18852832690952795, "grad_norm": 0.4116537272930145, "learning_rate": 4.057565584980729e-06, "loss": 0.7185, "step": 4549 }, { "epoch": 0.18856977081520163, "grad_norm": 0.43976545333862305, "learning_rate": 4.0573583654523604e-06, "loss": 0.7808, "step": 4550 }, { "epoch": 0.1886112147208753, "grad_norm": 0.4314555823802948, "learning_rate": 4.057151145923992e-06, "loss": 0.7341, "step": 4551 }, { "epoch": 0.18865265862654895, "grad_norm": 0.4224686622619629, "learning_rate": 4.056943926395624e-06, "loss": 0.7239, "step": 4552 }, { "epoch": 0.18869410253222263, "grad_norm": 0.4143499732017517, "learning_rate": 4.0567367068672554e-06, "loss": 0.731, "step": 4553 }, { "epoch": 0.1887355464378963, "grad_norm": 0.46924149990081787, "learning_rate": 4.056529487338887e-06, "loss": 0.752, "step": 4554 }, { "epoch": 0.18877699034356998, "grad_norm": 0.43394437432289124, "learning_rate": 4.056322267810518e-06, "loss": 0.6842, "step": 4555 }, { "epoch": 0.18881843424924366, "grad_norm": 0.44339719414711, "learning_rate": 4.0561150482821504e-06, "loss": 0.7294, "step": 4556 }, { "epoch": 0.1888598781549173, "grad_norm": 0.44915178418159485, "learning_rate": 4.055907828753783e-06, "loss": 0.7786, "step": 4557 }, { "epoch": 0.188901322060591, "grad_norm": 0.43381553888320923, "learning_rate": 4.055700609225413e-06, "loss": 0.7251, "step": 4558 }, { 
"epoch": 0.18894276596626466, "grad_norm": 0.42380794882774353, "learning_rate": 4.0554933896970455e-06, "loss": 0.6536, "step": 4559 }, { "epoch": 0.18898420987193834, "grad_norm": 0.4210614562034607, "learning_rate": 4.055286170168677e-06, "loss": 0.7212, "step": 4560 }, { "epoch": 0.189025653777612, "grad_norm": 0.43417122960090637, "learning_rate": 4.055078950640309e-06, "loss": 0.7566, "step": 4561 }, { "epoch": 0.18906709768328567, "grad_norm": 0.4526064991950989, "learning_rate": 4.0548717311119405e-06, "loss": 0.7253, "step": 4562 }, { "epoch": 0.18910854158895934, "grad_norm": 0.39087745547294617, "learning_rate": 4.054664511583572e-06, "loss": 0.7168, "step": 4563 }, { "epoch": 0.18914998549463302, "grad_norm": 0.44654592871665955, "learning_rate": 4.054457292055204e-06, "loss": 0.7649, "step": 4564 }, { "epoch": 0.1891914294003067, "grad_norm": 0.4870911240577698, "learning_rate": 4.0542500725268355e-06, "loss": 0.7771, "step": 4565 }, { "epoch": 0.18923287330598035, "grad_norm": 0.3836575448513031, "learning_rate": 4.054042852998467e-06, "loss": 0.6902, "step": 4566 }, { "epoch": 0.18927431721165403, "grad_norm": 0.42108556628227234, "learning_rate": 4.053835633470098e-06, "loss": 0.7312, "step": 4567 }, { "epoch": 0.1893157611173277, "grad_norm": 0.4212074875831604, "learning_rate": 4.0536284139417305e-06, "loss": 0.7681, "step": 4568 }, { "epoch": 0.18935720502300138, "grad_norm": 0.4120270311832428, "learning_rate": 4.053421194413362e-06, "loss": 0.7211, "step": 4569 }, { "epoch": 0.18939864892867503, "grad_norm": 0.4326016306877136, "learning_rate": 4.053213974884993e-06, "loss": 0.7134, "step": 4570 }, { "epoch": 0.1894400928343487, "grad_norm": 0.4769447147846222, "learning_rate": 4.053006755356625e-06, "loss": 0.7971, "step": 4571 }, { "epoch": 0.18948153674002238, "grad_norm": 0.4197107255458832, "learning_rate": 4.052799535828257e-06, "loss": 0.7734, "step": 4572 }, { "epoch": 0.18952298064569606, "grad_norm": 0.4613288640975952, 
"learning_rate": 4.052592316299889e-06, "loss": 0.7039, "step": 4573 }, { "epoch": 0.1895644245513697, "grad_norm": 0.46716392040252686, "learning_rate": 4.05238509677152e-06, "loss": 0.7341, "step": 4574 }, { "epoch": 0.18960586845704339, "grad_norm": 0.4393904507160187, "learning_rate": 4.052177877243152e-06, "loss": 0.7639, "step": 4575 }, { "epoch": 0.18964731236271706, "grad_norm": 0.4309116005897522, "learning_rate": 4.051970657714783e-06, "loss": 0.7261, "step": 4576 }, { "epoch": 0.18968875626839074, "grad_norm": 0.4547387659549713, "learning_rate": 4.0517634381864155e-06, "loss": 0.7209, "step": 4577 }, { "epoch": 0.18973020017406442, "grad_norm": 0.4115113615989685, "learning_rate": 4.051556218658047e-06, "loss": 0.7495, "step": 4578 }, { "epoch": 0.18977164407973807, "grad_norm": 0.4658704996109009, "learning_rate": 4.051348999129678e-06, "loss": 0.7639, "step": 4579 }, { "epoch": 0.18981308798541174, "grad_norm": 0.4295717477798462, "learning_rate": 4.05114177960131e-06, "loss": 0.7456, "step": 4580 }, { "epoch": 0.18985453189108542, "grad_norm": 0.4501297175884247, "learning_rate": 4.050934560072942e-06, "loss": 0.7111, "step": 4581 }, { "epoch": 0.1898959757967591, "grad_norm": 0.4124656617641449, "learning_rate": 4.050727340544573e-06, "loss": 0.7607, "step": 4582 }, { "epoch": 0.18993741970243275, "grad_norm": 0.3898340165615082, "learning_rate": 4.050520121016205e-06, "loss": 0.7148, "step": 4583 }, { "epoch": 0.18997886360810642, "grad_norm": 0.39090073108673096, "learning_rate": 4.050312901487837e-06, "loss": 0.7058, "step": 4584 }, { "epoch": 0.1900203075137801, "grad_norm": 0.46643778681755066, "learning_rate": 4.050105681959468e-06, "loss": 0.7339, "step": 4585 }, { "epoch": 0.19006175141945378, "grad_norm": 0.39459338784217834, "learning_rate": 4.0498984624311e-06, "loss": 0.7043, "step": 4586 }, { "epoch": 0.19010319532512743, "grad_norm": 0.43162456154823303, "learning_rate": 4.049691242902731e-06, "loss": 0.77, "step": 4587 }, { "epoch": 
0.1901446392308011, "grad_norm": 0.40933850407600403, "learning_rate": 4.049484023374363e-06, "loss": 0.6736, "step": 4588 }, { "epoch": 0.19018608313647478, "grad_norm": 0.40110868215560913, "learning_rate": 4.049276803845995e-06, "loss": 0.6628, "step": 4589 }, { "epoch": 0.19022752704214846, "grad_norm": 0.43051692843437195, "learning_rate": 4.049069584317626e-06, "loss": 0.7307, "step": 4590 }, { "epoch": 0.19026897094782214, "grad_norm": 0.4257526993751526, "learning_rate": 4.048862364789258e-06, "loss": 0.687, "step": 4591 }, { "epoch": 0.19031041485349579, "grad_norm": 0.423849880695343, "learning_rate": 4.04865514526089e-06, "loss": 0.7463, "step": 4592 }, { "epoch": 0.19035185875916946, "grad_norm": 0.44114941358566284, "learning_rate": 4.048447925732522e-06, "loss": 0.749, "step": 4593 }, { "epoch": 0.19039330266484314, "grad_norm": 0.4252367913722992, "learning_rate": 4.048240706204153e-06, "loss": 0.7301, "step": 4594 }, { "epoch": 0.19043474657051682, "grad_norm": 0.40352678298950195, "learning_rate": 4.048033486675785e-06, "loss": 0.6644, "step": 4595 }, { "epoch": 0.19047619047619047, "grad_norm": 0.41463467478752136, "learning_rate": 4.047826267147416e-06, "loss": 0.7327, "step": 4596 }, { "epoch": 0.19051763438186414, "grad_norm": 0.4293050169944763, "learning_rate": 4.047619047619048e-06, "loss": 0.6785, "step": 4597 }, { "epoch": 0.19055907828753782, "grad_norm": 0.3792731463909149, "learning_rate": 4.04741182809068e-06, "loss": 0.7295, "step": 4598 }, { "epoch": 0.1906005221932115, "grad_norm": 0.4735502600669861, "learning_rate": 4.047204608562311e-06, "loss": 0.7788, "step": 4599 }, { "epoch": 0.19064196609888515, "grad_norm": 0.4165715277194977, "learning_rate": 4.046997389033943e-06, "loss": 0.6671, "step": 4600 }, { "epoch": 0.19068341000455882, "grad_norm": 0.461496502161026, "learning_rate": 4.046790169505575e-06, "loss": 0.6561, "step": 4601 }, { "epoch": 0.1907248539102325, "grad_norm": 0.411302387714386, "learning_rate": 
4.046582949977206e-06, "loss": 0.6951, "step": 4602 }, { "epoch": 0.19076629781590618, "grad_norm": 0.4746898412704468, "learning_rate": 4.0463757304488374e-06, "loss": 0.7471, "step": 4603 }, { "epoch": 0.19080774172157985, "grad_norm": 0.44204774498939514, "learning_rate": 4.04616851092047e-06, "loss": 0.7278, "step": 4604 }, { "epoch": 0.1908491856272535, "grad_norm": 0.42630013823509216, "learning_rate": 4.045961291392101e-06, "loss": 0.707, "step": 4605 }, { "epoch": 0.19089062953292718, "grad_norm": 0.40937915444374084, "learning_rate": 4.0457540718637324e-06, "loss": 0.6929, "step": 4606 }, { "epoch": 0.19093207343860086, "grad_norm": 0.455097496509552, "learning_rate": 4.045546852335364e-06, "loss": 0.7732, "step": 4607 }, { "epoch": 0.19097351734427453, "grad_norm": 0.4573996067047119, "learning_rate": 4.045339632806996e-06, "loss": 0.7803, "step": 4608 }, { "epoch": 0.19101496124994818, "grad_norm": 0.41557538509368896, "learning_rate": 4.045132413278628e-06, "loss": 0.6805, "step": 4609 }, { "epoch": 0.19105640515562186, "grad_norm": 0.4120057225227356, "learning_rate": 4.04492519375026e-06, "loss": 0.6747, "step": 4610 }, { "epoch": 0.19109784906129554, "grad_norm": 0.420397013425827, "learning_rate": 4.044717974221891e-06, "loss": 0.7373, "step": 4611 }, { "epoch": 0.19113929296696922, "grad_norm": 0.45325493812561035, "learning_rate": 4.0445107546935224e-06, "loss": 0.7023, "step": 4612 }, { "epoch": 0.19118073687264286, "grad_norm": 0.5066434741020203, "learning_rate": 4.044303535165155e-06, "loss": 0.767, "step": 4613 }, { "epoch": 0.19122218077831654, "grad_norm": 0.4181893765926361, "learning_rate": 4.044096315636786e-06, "loss": 0.7271, "step": 4614 }, { "epoch": 0.19126362468399022, "grad_norm": 0.39929765462875366, "learning_rate": 4.0438890961084174e-06, "loss": 0.6302, "step": 4615 }, { "epoch": 0.1913050685896639, "grad_norm": 0.45301181077957153, "learning_rate": 4.043681876580049e-06, "loss": 0.698, "step": 4616 }, { "epoch": 
0.19134651249533757, "grad_norm": 0.4343165457248688, "learning_rate": 4.043474657051681e-06, "loss": 0.7102, "step": 4617 }, { "epoch": 0.19138795640101122, "grad_norm": 0.42214369773864746, "learning_rate": 4.0432674375233125e-06, "loss": 0.7515, "step": 4618 }, { "epoch": 0.1914294003066849, "grad_norm": 0.409945011138916, "learning_rate": 4.043060217994944e-06, "loss": 0.7046, "step": 4619 }, { "epoch": 0.19147084421235858, "grad_norm": 0.41377487778663635, "learning_rate": 4.042852998466576e-06, "loss": 0.7241, "step": 4620 }, { "epoch": 0.19151228811803225, "grad_norm": 0.5082868933677673, "learning_rate": 4.0426457789382075e-06, "loss": 0.7485, "step": 4621 }, { "epoch": 0.1915537320237059, "grad_norm": 0.4290807843208313, "learning_rate": 4.042438559409839e-06, "loss": 0.6943, "step": 4622 }, { "epoch": 0.19159517592937958, "grad_norm": 0.4289205074310303, "learning_rate": 4.04223133988147e-06, "loss": 0.7024, "step": 4623 }, { "epoch": 0.19163661983505326, "grad_norm": 0.4343908727169037, "learning_rate": 4.0420241203531025e-06, "loss": 0.7393, "step": 4624 }, { "epoch": 0.19167806374072693, "grad_norm": 0.41442152857780457, "learning_rate": 4.041816900824735e-06, "loss": 0.7085, "step": 4625 }, { "epoch": 0.19171950764640058, "grad_norm": 0.47668692469596863, "learning_rate": 4.041609681296366e-06, "loss": 0.7617, "step": 4626 }, { "epoch": 0.19176095155207426, "grad_norm": 0.4013442099094391, "learning_rate": 4.0414024617679975e-06, "loss": 0.6982, "step": 4627 }, { "epoch": 0.19180239545774794, "grad_norm": 0.38636621832847595, "learning_rate": 4.041195242239629e-06, "loss": 0.6858, "step": 4628 }, { "epoch": 0.19184383936342161, "grad_norm": 0.42310792207717896, "learning_rate": 4.040988022711261e-06, "loss": 0.7351, "step": 4629 }, { "epoch": 0.1918852832690953, "grad_norm": 0.4351435899734497, "learning_rate": 4.0407808031828925e-06, "loss": 0.7478, "step": 4630 }, { "epoch": 0.19192672717476894, "grad_norm": 0.42125818133354187, "learning_rate": 
4.040573583654524e-06, "loss": 0.7399, "step": 4631 }, { "epoch": 0.19196817108044262, "grad_norm": 0.4257926344871521, "learning_rate": 4.040366364126155e-06, "loss": 0.7258, "step": 4632 }, { "epoch": 0.1920096149861163, "grad_norm": 0.4051901400089264, "learning_rate": 4.0401591445977875e-06, "loss": 0.6868, "step": 4633 }, { "epoch": 0.19205105889178997, "grad_norm": 0.43173763155937195, "learning_rate": 4.039951925069419e-06, "loss": 0.7466, "step": 4634 }, { "epoch": 0.19209250279746362, "grad_norm": 0.379215806722641, "learning_rate": 4.03974470554105e-06, "loss": 0.6306, "step": 4635 }, { "epoch": 0.1921339467031373, "grad_norm": 0.4459201693534851, "learning_rate": 4.0395374860126825e-06, "loss": 0.814, "step": 4636 }, { "epoch": 0.19217539060881098, "grad_norm": 0.39524680376052856, "learning_rate": 4.039330266484314e-06, "loss": 0.704, "step": 4637 }, { "epoch": 0.19221683451448465, "grad_norm": 0.4332173466682434, "learning_rate": 4.039123046955945e-06, "loss": 0.7601, "step": 4638 }, { "epoch": 0.19225827842015833, "grad_norm": 0.4003659188747406, "learning_rate": 4.038915827427577e-06, "loss": 0.7054, "step": 4639 }, { "epoch": 0.19229972232583198, "grad_norm": 0.3922554850578308, "learning_rate": 4.038708607899209e-06, "loss": 0.6956, "step": 4640 }, { "epoch": 0.19234116623150566, "grad_norm": 0.41231662034988403, "learning_rate": 4.03850138837084e-06, "loss": 0.708, "step": 4641 }, { "epoch": 0.19238261013717933, "grad_norm": 0.4207746088504791, "learning_rate": 4.038294168842472e-06, "loss": 0.751, "step": 4642 }, { "epoch": 0.192424054042853, "grad_norm": 0.43675875663757324, "learning_rate": 4.038086949314104e-06, "loss": 0.7655, "step": 4643 }, { "epoch": 0.19246549794852666, "grad_norm": 0.44737759232521057, "learning_rate": 4.037879729785735e-06, "loss": 0.7456, "step": 4644 }, { "epoch": 0.19250694185420034, "grad_norm": 0.41867637634277344, "learning_rate": 4.0376725102573675e-06, "loss": 0.6938, "step": 4645 }, { "epoch": 
0.192548385759874, "grad_norm": 0.4108772575855255, "learning_rate": 4.037465290728999e-06, "loss": 0.6995, "step": 4646 }, { "epoch": 0.1925898296655477, "grad_norm": 0.4237038195133209, "learning_rate": 4.03725807120063e-06, "loss": 0.688, "step": 4647 }, { "epoch": 0.19263127357122134, "grad_norm": 0.4350278675556183, "learning_rate": 4.037050851672262e-06, "loss": 0.7729, "step": 4648 }, { "epoch": 0.19267271747689502, "grad_norm": 0.46770337224006653, "learning_rate": 4.036843632143894e-06, "loss": 0.7792, "step": 4649 }, { "epoch": 0.1927141613825687, "grad_norm": 0.4315145015716553, "learning_rate": 4.036636412615525e-06, "loss": 0.7316, "step": 4650 }, { "epoch": 0.19275560528824237, "grad_norm": 0.4256931245326996, "learning_rate": 4.036429193087157e-06, "loss": 0.698, "step": 4651 }, { "epoch": 0.19279704919391605, "grad_norm": 0.44189783930778503, "learning_rate": 4.036221973558789e-06, "loss": 0.7883, "step": 4652 }, { "epoch": 0.1928384930995897, "grad_norm": 0.4143000543117523, "learning_rate": 4.03601475403042e-06, "loss": 0.749, "step": 4653 }, { "epoch": 0.19287993700526337, "grad_norm": 0.42647290229797363, "learning_rate": 4.035807534502052e-06, "loss": 0.7305, "step": 4654 }, { "epoch": 0.19292138091093705, "grad_norm": 0.3948799967765808, "learning_rate": 4.035600314973683e-06, "loss": 0.7607, "step": 4655 }, { "epoch": 0.19296282481661073, "grad_norm": 0.4015995264053345, "learning_rate": 4.035393095445315e-06, "loss": 0.6929, "step": 4656 }, { "epoch": 0.19300426872228438, "grad_norm": 0.4328002333641052, "learning_rate": 4.035185875916947e-06, "loss": 0.7261, "step": 4657 }, { "epoch": 0.19304571262795805, "grad_norm": 0.4168425500392914, "learning_rate": 4.034978656388578e-06, "loss": 0.6941, "step": 4658 }, { "epoch": 0.19308715653363173, "grad_norm": 0.4482492208480835, "learning_rate": 4.0347714368602094e-06, "loss": 0.7507, "step": 4659 }, { "epoch": 0.1931286004393054, "grad_norm": 0.4020955264568329, "learning_rate": 
4.034564217331842e-06, "loss": 0.7126, "step": 4660 }, { "epoch": 0.19317004434497906, "grad_norm": 0.42129167914390564, "learning_rate": 4.034356997803474e-06, "loss": 0.698, "step": 4661 }, { "epoch": 0.19321148825065274, "grad_norm": 0.4337470531463623, "learning_rate": 4.034149778275105e-06, "loss": 0.7654, "step": 4662 }, { "epoch": 0.1932529321563264, "grad_norm": 0.4724188446998596, "learning_rate": 4.033942558746737e-06, "loss": 0.7325, "step": 4663 }, { "epoch": 0.1932943760620001, "grad_norm": 0.4335235357284546, "learning_rate": 4.033735339218368e-06, "loss": 0.7449, "step": 4664 }, { "epoch": 0.19333581996767377, "grad_norm": 0.3979817032814026, "learning_rate": 4.03352811969e-06, "loss": 0.6597, "step": 4665 }, { "epoch": 0.19337726387334742, "grad_norm": 0.4097374379634857, "learning_rate": 4.033320900161632e-06, "loss": 0.7461, "step": 4666 }, { "epoch": 0.1934187077790211, "grad_norm": 0.4207673668861389, "learning_rate": 4.033113680633263e-06, "loss": 0.739, "step": 4667 }, { "epoch": 0.19346015168469477, "grad_norm": 0.4265035092830658, "learning_rate": 4.0329064611048944e-06, "loss": 0.7495, "step": 4668 }, { "epoch": 0.19350159559036845, "grad_norm": 0.4405616819858551, "learning_rate": 4.032699241576527e-06, "loss": 0.7598, "step": 4669 }, { "epoch": 0.1935430394960421, "grad_norm": 0.4505399167537689, "learning_rate": 4.032492022048158e-06, "loss": 0.7483, "step": 4670 }, { "epoch": 0.19358448340171577, "grad_norm": 0.46396902203559875, "learning_rate": 4.0322848025197894e-06, "loss": 0.7576, "step": 4671 }, { "epoch": 0.19362592730738945, "grad_norm": 0.4049902558326721, "learning_rate": 4.032077582991422e-06, "loss": 0.7776, "step": 4672 }, { "epoch": 0.19366737121306313, "grad_norm": 0.4100184440612793, "learning_rate": 4.031870363463053e-06, "loss": 0.7155, "step": 4673 }, { "epoch": 0.19370881511873678, "grad_norm": 0.46287715435028076, "learning_rate": 4.0316631439346844e-06, "loss": 0.7488, "step": 4674 }, { "epoch": 
0.19375025902441045, "grad_norm": 0.42693856358528137, "learning_rate": 4.031455924406316e-06, "loss": 0.7178, "step": 4675 }, { "epoch": 0.19379170293008413, "grad_norm": 0.4630483090877533, "learning_rate": 4.031248704877948e-06, "loss": 0.7424, "step": 4676 }, { "epoch": 0.1938331468357578, "grad_norm": 0.44017019867897034, "learning_rate": 4.0310414853495795e-06, "loss": 0.7343, "step": 4677 }, { "epoch": 0.19387459074143149, "grad_norm": 0.4545411765575409, "learning_rate": 4.030834265821212e-06, "loss": 0.7278, "step": 4678 }, { "epoch": 0.19391603464710513, "grad_norm": 0.4146547019481659, "learning_rate": 4.030627046292843e-06, "loss": 0.7363, "step": 4679 }, { "epoch": 0.1939574785527788, "grad_norm": 0.4015137851238251, "learning_rate": 4.0304198267644745e-06, "loss": 0.709, "step": 4680 }, { "epoch": 0.1939989224584525, "grad_norm": 0.45078110694885254, "learning_rate": 4.030212607236107e-06, "loss": 0.7137, "step": 4681 }, { "epoch": 0.19404036636412617, "grad_norm": 0.4350622892379761, "learning_rate": 4.030005387707738e-06, "loss": 0.7303, "step": 4682 }, { "epoch": 0.19408181026979981, "grad_norm": 0.4460679292678833, "learning_rate": 4.0297981681793695e-06, "loss": 0.718, "step": 4683 }, { "epoch": 0.1941232541754735, "grad_norm": 0.45762166380882263, "learning_rate": 4.029590948651001e-06, "loss": 0.7769, "step": 4684 }, { "epoch": 0.19416469808114717, "grad_norm": 0.41030752658843994, "learning_rate": 4.029383729122633e-06, "loss": 0.7402, "step": 4685 }, { "epoch": 0.19420614198682085, "grad_norm": 0.41783419251441956, "learning_rate": 4.0291765095942645e-06, "loss": 0.7539, "step": 4686 }, { "epoch": 0.1942475858924945, "grad_norm": 0.42504847049713135, "learning_rate": 4.028969290065896e-06, "loss": 0.7483, "step": 4687 }, { "epoch": 0.19428902979816817, "grad_norm": 0.46193593740463257, "learning_rate": 4.028762070537528e-06, "loss": 0.7795, "step": 4688 }, { "epoch": 0.19433047370384185, "grad_norm": 0.4177502989768982, "learning_rate": 
4.0285548510091595e-06, "loss": 0.7109, "step": 4689 }, { "epoch": 0.19437191760951553, "grad_norm": 0.43712031841278076, "learning_rate": 4.028347631480791e-06, "loss": 0.7317, "step": 4690 }, { "epoch": 0.1944133615151892, "grad_norm": 0.40311703085899353, "learning_rate": 4.028140411952422e-06, "loss": 0.6644, "step": 4691 }, { "epoch": 0.19445480542086285, "grad_norm": 0.42702972888946533, "learning_rate": 4.0279331924240545e-06, "loss": 0.7402, "step": 4692 }, { "epoch": 0.19449624932653653, "grad_norm": 0.42521578073501587, "learning_rate": 4.027725972895686e-06, "loss": 0.7621, "step": 4693 }, { "epoch": 0.1945376932322102, "grad_norm": 0.4639471173286438, "learning_rate": 4.027518753367318e-06, "loss": 0.7231, "step": 4694 }, { "epoch": 0.19457913713788388, "grad_norm": 0.7300617098808289, "learning_rate": 4.0273115338389495e-06, "loss": 0.8115, "step": 4695 }, { "epoch": 0.19462058104355753, "grad_norm": 0.42894530296325684, "learning_rate": 4.027104314310581e-06, "loss": 0.7463, "step": 4696 }, { "epoch": 0.1946620249492312, "grad_norm": 0.4382060766220093, "learning_rate": 4.026897094782213e-06, "loss": 0.7388, "step": 4697 }, { "epoch": 0.1947034688549049, "grad_norm": 0.4475901126861572, "learning_rate": 4.0266898752538445e-06, "loss": 0.7185, "step": 4698 }, { "epoch": 0.19474491276057856, "grad_norm": 0.4147927165031433, "learning_rate": 4.026482655725476e-06, "loss": 0.7539, "step": 4699 }, { "epoch": 0.19478635666625221, "grad_norm": 0.4532407522201538, "learning_rate": 4.026275436197107e-06, "loss": 0.7031, "step": 4700 }, { "epoch": 0.1948278005719259, "grad_norm": 0.4391915500164032, "learning_rate": 4.0260682166687395e-06, "loss": 0.7428, "step": 4701 }, { "epoch": 0.19486924447759957, "grad_norm": 0.4146566689014435, "learning_rate": 4.025860997140371e-06, "loss": 0.7, "step": 4702 }, { "epoch": 0.19491068838327325, "grad_norm": 0.3942202031612396, "learning_rate": 4.025653777612002e-06, "loss": 0.7435, "step": 4703 }, { "epoch": 
0.19495213228894692, "grad_norm": 0.4221835732460022, "learning_rate": 4.0254465580836345e-06, "loss": 0.7161, "step": 4704 }, { "epoch": 0.19499357619462057, "grad_norm": 0.4317571222782135, "learning_rate": 4.025239338555266e-06, "loss": 0.7295, "step": 4705 }, { "epoch": 0.19503502010029425, "grad_norm": 0.4274677634239197, "learning_rate": 4.025032119026897e-06, "loss": 0.6987, "step": 4706 }, { "epoch": 0.19507646400596793, "grad_norm": 0.4098416268825531, "learning_rate": 4.024824899498529e-06, "loss": 0.6824, "step": 4707 }, { "epoch": 0.1951179079116416, "grad_norm": 0.4584222733974457, "learning_rate": 4.024617679970161e-06, "loss": 0.802, "step": 4708 }, { "epoch": 0.19515935181731525, "grad_norm": 0.44911476969718933, "learning_rate": 4.024410460441792e-06, "loss": 0.6997, "step": 4709 }, { "epoch": 0.19520079572298893, "grad_norm": 0.41559773683547974, "learning_rate": 4.024203240913424e-06, "loss": 0.6874, "step": 4710 }, { "epoch": 0.1952422396286626, "grad_norm": 0.4233519434928894, "learning_rate": 4.023996021385055e-06, "loss": 0.7418, "step": 4711 }, { "epoch": 0.19528368353433628, "grad_norm": 0.4086543619632721, "learning_rate": 4.023788801856687e-06, "loss": 0.739, "step": 4712 }, { "epoch": 0.19532512744000996, "grad_norm": 0.41571271419525146, "learning_rate": 4.0235815823283195e-06, "loss": 0.74, "step": 4713 }, { "epoch": 0.1953665713456836, "grad_norm": 0.416396826505661, "learning_rate": 4.023374362799951e-06, "loss": 0.7745, "step": 4714 }, { "epoch": 0.1954080152513573, "grad_norm": 0.394703209400177, "learning_rate": 4.023167143271582e-06, "loss": 0.7347, "step": 4715 }, { "epoch": 0.19544945915703096, "grad_norm": 0.4165135622024536, "learning_rate": 4.022959923743214e-06, "loss": 0.6516, "step": 4716 }, { "epoch": 0.19549090306270464, "grad_norm": 0.4322117865085602, "learning_rate": 4.022752704214846e-06, "loss": 0.7524, "step": 4717 }, { "epoch": 0.1955323469683783, "grad_norm": 0.4325159788131714, "learning_rate": 
4.022545484686477e-06, "loss": 0.7544, "step": 4718 }, { "epoch": 0.19557379087405197, "grad_norm": 0.4852556586265564, "learning_rate": 4.022338265158109e-06, "loss": 0.7518, "step": 4719 }, { "epoch": 0.19561523477972564, "grad_norm": 0.4567812979221344, "learning_rate": 4.02213104562974e-06, "loss": 0.7661, "step": 4720 }, { "epoch": 0.19565667868539932, "grad_norm": 0.43893447518348694, "learning_rate": 4.021923826101372e-06, "loss": 0.7236, "step": 4721 }, { "epoch": 0.19569812259107297, "grad_norm": 0.4195935130119324, "learning_rate": 4.021716606573004e-06, "loss": 0.7136, "step": 4722 }, { "epoch": 0.19573956649674665, "grad_norm": 0.4108867645263672, "learning_rate": 4.021509387044635e-06, "loss": 0.7021, "step": 4723 }, { "epoch": 0.19578101040242032, "grad_norm": 0.4351408779621124, "learning_rate": 4.021302167516267e-06, "loss": 0.7207, "step": 4724 }, { "epoch": 0.195822454308094, "grad_norm": 0.4044736325740814, "learning_rate": 4.021094947987899e-06, "loss": 0.7556, "step": 4725 }, { "epoch": 0.19586389821376768, "grad_norm": 0.40096017718315125, "learning_rate": 4.02088772845953e-06, "loss": 0.7661, "step": 4726 }, { "epoch": 0.19590534211944133, "grad_norm": 0.4301908016204834, "learning_rate": 4.0206805089311614e-06, "loss": 0.7642, "step": 4727 }, { "epoch": 0.195946786025115, "grad_norm": 0.4242943227291107, "learning_rate": 4.020473289402794e-06, "loss": 0.7063, "step": 4728 }, { "epoch": 0.19598822993078868, "grad_norm": 0.4021899402141571, "learning_rate": 4.020266069874425e-06, "loss": 0.7346, "step": 4729 }, { "epoch": 0.19602967383646236, "grad_norm": 0.39232689142227173, "learning_rate": 4.020058850346057e-06, "loss": 0.708, "step": 4730 }, { "epoch": 0.196071117742136, "grad_norm": 0.41039562225341797, "learning_rate": 4.019851630817689e-06, "loss": 0.705, "step": 4731 }, { "epoch": 0.19611256164780969, "grad_norm": 0.42049160599708557, "learning_rate": 4.01964441128932e-06, "loss": 0.7432, "step": 4732 }, { "epoch": 0.19615400555348336, 
"grad_norm": 0.45206204056739807, "learning_rate": 4.019437191760952e-06, "loss": 0.769, "step": 4733 }, { "epoch": 0.19619544945915704, "grad_norm": 0.4173351228237152, "learning_rate": 4.019229972232584e-06, "loss": 0.7271, "step": 4734 }, { "epoch": 0.1962368933648307, "grad_norm": 0.45424026250839233, "learning_rate": 4.019022752704215e-06, "loss": 0.7788, "step": 4735 }, { "epoch": 0.19627833727050437, "grad_norm": 0.4203905165195465, "learning_rate": 4.0188155331758465e-06, "loss": 0.7263, "step": 4736 }, { "epoch": 0.19631978117617804, "grad_norm": 0.42331433296203613, "learning_rate": 4.018608313647479e-06, "loss": 0.7061, "step": 4737 }, { "epoch": 0.19636122508185172, "grad_norm": 0.43471938371658325, "learning_rate": 4.01840109411911e-06, "loss": 0.7249, "step": 4738 }, { "epoch": 0.1964026689875254, "grad_norm": 0.407564252614975, "learning_rate": 4.0181938745907415e-06, "loss": 0.7015, "step": 4739 }, { "epoch": 0.19644411289319905, "grad_norm": 0.4292104244232178, "learning_rate": 4.017986655062374e-06, "loss": 0.7625, "step": 4740 }, { "epoch": 0.19648555679887272, "grad_norm": 0.40588265657424927, "learning_rate": 4.017779435534005e-06, "loss": 0.7732, "step": 4741 }, { "epoch": 0.1965270007045464, "grad_norm": 0.4564080834388733, "learning_rate": 4.0175722160056365e-06, "loss": 0.6998, "step": 4742 }, { "epoch": 0.19656844461022008, "grad_norm": 0.41946300864219666, "learning_rate": 4.017364996477268e-06, "loss": 0.7659, "step": 4743 }, { "epoch": 0.19660988851589373, "grad_norm": 0.40659552812576294, "learning_rate": 4.0171577769489e-06, "loss": 0.6931, "step": 4744 }, { "epoch": 0.1966513324215674, "grad_norm": 0.4200018346309662, "learning_rate": 4.0169505574205315e-06, "loss": 0.719, "step": 4745 }, { "epoch": 0.19669277632724108, "grad_norm": 0.39950311183929443, "learning_rate": 4.016743337892164e-06, "loss": 0.7173, "step": 4746 }, { "epoch": 0.19673422023291476, "grad_norm": 0.465248703956604, "learning_rate": 4.016536118363795e-06, "loss": 
0.7844, "step": 4747 }, { "epoch": 0.1967756641385884, "grad_norm": 0.42291945219039917, "learning_rate": 4.0163288988354265e-06, "loss": 0.7068, "step": 4748 }, { "epoch": 0.19681710804426208, "grad_norm": 0.46166113018989563, "learning_rate": 4.016121679307059e-06, "loss": 0.7686, "step": 4749 }, { "epoch": 0.19685855194993576, "grad_norm": 0.4403153955936432, "learning_rate": 4.01591445977869e-06, "loss": 0.71, "step": 4750 }, { "epoch": 0.19689999585560944, "grad_norm": 0.42193281650543213, "learning_rate": 4.0157072402503215e-06, "loss": 0.7202, "step": 4751 }, { "epoch": 0.19694143976128312, "grad_norm": 0.41574758291244507, "learning_rate": 4.015500020721953e-06, "loss": 0.7223, "step": 4752 }, { "epoch": 0.19698288366695677, "grad_norm": 0.42333516478538513, "learning_rate": 4.015292801193585e-06, "loss": 0.7599, "step": 4753 }, { "epoch": 0.19702432757263044, "grad_norm": 0.4132175147533417, "learning_rate": 4.0150855816652165e-06, "loss": 0.7576, "step": 4754 }, { "epoch": 0.19706577147830412, "grad_norm": 0.4266379475593567, "learning_rate": 4.014878362136848e-06, "loss": 0.7629, "step": 4755 }, { "epoch": 0.1971072153839778, "grad_norm": 0.4091418385505676, "learning_rate": 4.01467114260848e-06, "loss": 0.7119, "step": 4756 }, { "epoch": 0.19714865928965145, "grad_norm": 0.4157925844192505, "learning_rate": 4.0144639230801115e-06, "loss": 0.709, "step": 4757 }, { "epoch": 0.19719010319532512, "grad_norm": 0.46335315704345703, "learning_rate": 4.014256703551743e-06, "loss": 0.7345, "step": 4758 }, { "epoch": 0.1972315471009988, "grad_norm": 0.43674609065055847, "learning_rate": 4.014049484023374e-06, "loss": 0.7651, "step": 4759 }, { "epoch": 0.19727299100667248, "grad_norm": 0.4438663721084595, "learning_rate": 4.0138422644950065e-06, "loss": 0.866, "step": 4760 }, { "epoch": 0.19731443491234613, "grad_norm": 0.4615185558795929, "learning_rate": 4.013635044966638e-06, "loss": 0.7827, "step": 4761 }, { "epoch": 0.1973558788180198, "grad_norm": 
0.41016241908073425, "learning_rate": 4.01342782543827e-06, "loss": 0.7964, "step": 4762 }, { "epoch": 0.19739732272369348, "grad_norm": 0.39544183015823364, "learning_rate": 4.013220605909901e-06, "loss": 0.6904, "step": 4763 }, { "epoch": 0.19743876662936716, "grad_norm": 0.4233435392379761, "learning_rate": 4.013013386381533e-06, "loss": 0.7327, "step": 4764 }, { "epoch": 0.19748021053504083, "grad_norm": 0.44317662715911865, "learning_rate": 4.012806166853165e-06, "loss": 0.7527, "step": 4765 }, { "epoch": 0.19752165444071448, "grad_norm": 0.4219374656677246, "learning_rate": 4.0125989473247965e-06, "loss": 0.7488, "step": 4766 }, { "epoch": 0.19756309834638816, "grad_norm": 0.43344035744667053, "learning_rate": 4.012391727796428e-06, "loss": 0.7246, "step": 4767 }, { "epoch": 0.19760454225206184, "grad_norm": 0.4006750285625458, "learning_rate": 4.012184508268059e-06, "loss": 0.7444, "step": 4768 }, { "epoch": 0.19764598615773551, "grad_norm": 0.4036215543746948, "learning_rate": 4.0119772887396915e-06, "loss": 0.6097, "step": 4769 }, { "epoch": 0.19768743006340916, "grad_norm": 0.44010189175605774, "learning_rate": 4.011770069211323e-06, "loss": 0.7466, "step": 4770 }, { "epoch": 0.19772887396908284, "grad_norm": 0.4092191755771637, "learning_rate": 4.011562849682954e-06, "loss": 0.6782, "step": 4771 }, { "epoch": 0.19777031787475652, "grad_norm": 0.41537895798683167, "learning_rate": 4.011355630154586e-06, "loss": 0.6919, "step": 4772 }, { "epoch": 0.1978117617804302, "grad_norm": 0.4215163588523865, "learning_rate": 4.011148410626218e-06, "loss": 0.7454, "step": 4773 }, { "epoch": 0.19785320568610384, "grad_norm": 0.4559418857097626, "learning_rate": 4.010941191097849e-06, "loss": 0.8035, "step": 4774 }, { "epoch": 0.19789464959177752, "grad_norm": 0.40059658885002136, "learning_rate": 4.010733971569481e-06, "loss": 0.74, "step": 4775 }, { "epoch": 0.1979360934974512, "grad_norm": 0.4364556074142456, "learning_rate": 4.010526752041113e-06, "loss": 0.7571, 
"step": 4776 }, { "epoch": 0.19797753740312488, "grad_norm": 0.43708986043930054, "learning_rate": 4.010319532512744e-06, "loss": 0.7646, "step": 4777 }, { "epoch": 0.19801898130879855, "grad_norm": 0.40500515699386597, "learning_rate": 4.0101123129843765e-06, "loss": 0.7162, "step": 4778 }, { "epoch": 0.1980604252144722, "grad_norm": 0.4781336486339569, "learning_rate": 4.009905093456007e-06, "loss": 0.7686, "step": 4779 }, { "epoch": 0.19810186912014588, "grad_norm": 0.40493327379226685, "learning_rate": 4.009697873927639e-06, "loss": 0.7681, "step": 4780 }, { "epoch": 0.19814331302581956, "grad_norm": 0.41445615887641907, "learning_rate": 4.009490654399271e-06, "loss": 0.7227, "step": 4781 }, { "epoch": 0.19818475693149323, "grad_norm": 0.4168505370616913, "learning_rate": 4.009283434870903e-06, "loss": 0.7578, "step": 4782 }, { "epoch": 0.19822620083716688, "grad_norm": 0.4146203398704529, "learning_rate": 4.009076215342534e-06, "loss": 0.7356, "step": 4783 }, { "epoch": 0.19826764474284056, "grad_norm": 0.4667985737323761, "learning_rate": 4.008868995814166e-06, "loss": 0.7854, "step": 4784 }, { "epoch": 0.19830908864851424, "grad_norm": 0.4219963550567627, "learning_rate": 4.008661776285798e-06, "loss": 0.729, "step": 4785 }, { "epoch": 0.1983505325541879, "grad_norm": 0.4178311824798584, "learning_rate": 4.008454556757429e-06, "loss": 0.6351, "step": 4786 }, { "epoch": 0.1983919764598616, "grad_norm": 0.41092249751091003, "learning_rate": 4.008247337229061e-06, "loss": 0.7224, "step": 4787 }, { "epoch": 0.19843342036553524, "grad_norm": 0.40301552414894104, "learning_rate": 4.008040117700692e-06, "loss": 0.6832, "step": 4788 }, { "epoch": 0.19847486427120892, "grad_norm": 0.38286882638931274, "learning_rate": 4.007832898172324e-06, "loss": 0.6846, "step": 4789 }, { "epoch": 0.1985163081768826, "grad_norm": 0.4484480917453766, "learning_rate": 4.007625678643956e-06, "loss": 0.7231, "step": 4790 }, { "epoch": 0.19855775208255627, "grad_norm": 
0.3929670453071594, "learning_rate": 4.007418459115587e-06, "loss": 0.7084, "step": 4791 }, { "epoch": 0.19859919598822992, "grad_norm": 0.4283328950405121, "learning_rate": 4.007211239587219e-06, "loss": 0.7183, "step": 4792 }, { "epoch": 0.1986406398939036, "grad_norm": 0.4432357847690582, "learning_rate": 4.007004020058851e-06, "loss": 0.7385, "step": 4793 }, { "epoch": 0.19868208379957727, "grad_norm": 0.42369091510772705, "learning_rate": 4.006796800530482e-06, "loss": 0.7517, "step": 4794 }, { "epoch": 0.19872352770525095, "grad_norm": 0.3779566287994385, "learning_rate": 4.0065895810021135e-06, "loss": 0.7324, "step": 4795 }, { "epoch": 0.1987649716109246, "grad_norm": 0.4781123697757721, "learning_rate": 4.006382361473746e-06, "loss": 0.7375, "step": 4796 }, { "epoch": 0.19880641551659828, "grad_norm": 0.39827635884284973, "learning_rate": 4.006175141945377e-06, "loss": 0.6938, "step": 4797 }, { "epoch": 0.19884785942227196, "grad_norm": 0.4233674705028534, "learning_rate": 4.005967922417009e-06, "loss": 0.7234, "step": 4798 }, { "epoch": 0.19888930332794563, "grad_norm": 0.424721360206604, "learning_rate": 4.005760702888641e-06, "loss": 0.7705, "step": 4799 }, { "epoch": 0.1989307472336193, "grad_norm": 0.4109826982021332, "learning_rate": 4.005553483360272e-06, "loss": 0.744, "step": 4800 }, { "epoch": 0.19897219113929296, "grad_norm": 0.4068213701248169, "learning_rate": 4.005346263831904e-06, "loss": 0.7263, "step": 4801 }, { "epoch": 0.19901363504496664, "grad_norm": 0.44508999586105347, "learning_rate": 4.005139044303536e-06, "loss": 0.7422, "step": 4802 }, { "epoch": 0.1990550789506403, "grad_norm": 0.45127061009407043, "learning_rate": 4.004931824775167e-06, "loss": 0.7576, "step": 4803 }, { "epoch": 0.199096522856314, "grad_norm": 0.4296184778213501, "learning_rate": 4.0047246052467985e-06, "loss": 0.7273, "step": 4804 }, { "epoch": 0.19913796676198764, "grad_norm": 0.41938453912734985, "learning_rate": 4.004517385718431e-06, "loss": 0.6823, 
"step": 4805 }, { "epoch": 0.19917941066766132, "grad_norm": 0.4013782739639282, "learning_rate": 4.004310166190062e-06, "loss": 0.7251, "step": 4806 }, { "epoch": 0.199220854573335, "grad_norm": 0.45763158798217773, "learning_rate": 4.0041029466616935e-06, "loss": 0.702, "step": 4807 }, { "epoch": 0.19926229847900867, "grad_norm": 0.4125504195690155, "learning_rate": 4.003895727133326e-06, "loss": 0.7383, "step": 4808 }, { "epoch": 0.19930374238468232, "grad_norm": 0.40301206707954407, "learning_rate": 4.003688507604957e-06, "loss": 0.7234, "step": 4809 }, { "epoch": 0.199345186290356, "grad_norm": 0.4162966012954712, "learning_rate": 4.0034812880765885e-06, "loss": 0.7351, "step": 4810 }, { "epoch": 0.19938663019602967, "grad_norm": 0.42247599363327026, "learning_rate": 4.00327406854822e-06, "loss": 0.7363, "step": 4811 }, { "epoch": 0.19942807410170335, "grad_norm": 0.40589475631713867, "learning_rate": 4.003066849019852e-06, "loss": 0.7085, "step": 4812 }, { "epoch": 0.19946951800737703, "grad_norm": 0.43162259459495544, "learning_rate": 4.0028596294914835e-06, "loss": 0.759, "step": 4813 }, { "epoch": 0.19951096191305068, "grad_norm": 0.4187929630279541, "learning_rate": 4.002652409963116e-06, "loss": 0.682, "step": 4814 }, { "epoch": 0.19955240581872435, "grad_norm": 0.4097151756286621, "learning_rate": 4.002445190434746e-06, "loss": 0.6958, "step": 4815 }, { "epoch": 0.19959384972439803, "grad_norm": 0.4315282702445984, "learning_rate": 4.0022379709063785e-06, "loss": 0.6919, "step": 4816 }, { "epoch": 0.1996352936300717, "grad_norm": 0.4187025725841522, "learning_rate": 4.002030751378011e-06, "loss": 0.7581, "step": 4817 }, { "epoch": 0.19967673753574536, "grad_norm": 0.40198373794555664, "learning_rate": 4.001823531849642e-06, "loss": 0.7001, "step": 4818 }, { "epoch": 0.19971818144141903, "grad_norm": 0.42783963680267334, "learning_rate": 4.0016163123212735e-06, "loss": 0.7449, "step": 4819 }, { "epoch": 0.1997596253470927, "grad_norm": 
0.4018191397190094, "learning_rate": 4.001409092792905e-06, "loss": 0.7181, "step": 4820 }, { "epoch": 0.1998010692527664, "grad_norm": 0.42642685770988464, "learning_rate": 4.001201873264537e-06, "loss": 0.6744, "step": 4821 }, { "epoch": 0.19984251315844004, "grad_norm": 0.40416115522384644, "learning_rate": 4.0009946537361685e-06, "loss": 0.7126, "step": 4822 }, { "epoch": 0.19988395706411372, "grad_norm": 0.4481668174266815, "learning_rate": 4.0007874342078e-06, "loss": 0.7468, "step": 4823 }, { "epoch": 0.1999254009697874, "grad_norm": 0.4191342890262604, "learning_rate": 4.000580214679431e-06, "loss": 0.6472, "step": 4824 }, { "epoch": 0.19996684487546107, "grad_norm": 0.3984641432762146, "learning_rate": 4.0003729951510635e-06, "loss": 0.6853, "step": 4825 }, { "epoch": 0.20000828878113475, "grad_norm": 0.4206525981426239, "learning_rate": 4.000165775622695e-06, "loss": 0.6992, "step": 4826 }, { "epoch": 0.2000497326868084, "grad_norm": 0.4518857002258301, "learning_rate": 3.999958556094326e-06, "loss": 0.7422, "step": 4827 }, { "epoch": 0.20009117659248207, "grad_norm": 0.4590130150318146, "learning_rate": 3.9997513365659585e-06, "loss": 0.7649, "step": 4828 }, { "epoch": 0.20013262049815575, "grad_norm": 0.42221808433532715, "learning_rate": 3.99954411703759e-06, "loss": 0.7515, "step": 4829 }, { "epoch": 0.20017406440382943, "grad_norm": 0.4404647946357727, "learning_rate": 3.999336897509222e-06, "loss": 0.7876, "step": 4830 }, { "epoch": 0.20021550830950308, "grad_norm": 0.46112871170043945, "learning_rate": 3.999129677980853e-06, "loss": 0.7463, "step": 4831 }, { "epoch": 0.20025695221517675, "grad_norm": 0.42525625228881836, "learning_rate": 3.998922458452485e-06, "loss": 0.7258, "step": 4832 }, { "epoch": 0.20029839612085043, "grad_norm": 0.4054553508758545, "learning_rate": 3.998715238924116e-06, "loss": 0.6709, "step": 4833 }, { "epoch": 0.2003398400265241, "grad_norm": 0.4212915897369385, "learning_rate": 3.9985080193957485e-06, "loss": 0.7405, 
"step": 4834 }, { "epoch": 0.20038128393219776, "grad_norm": 0.4358137249946594, "learning_rate": 3.99830079986738e-06, "loss": 0.7234, "step": 4835 }, { "epoch": 0.20042272783787143, "grad_norm": 0.42056816816329956, "learning_rate": 3.998093580339011e-06, "loss": 0.7671, "step": 4836 }, { "epoch": 0.2004641717435451, "grad_norm": 0.41150349378585815, "learning_rate": 3.9978863608106435e-06, "loss": 0.7502, "step": 4837 }, { "epoch": 0.2005056156492188, "grad_norm": 0.4460347592830658, "learning_rate": 3.997679141282275e-06, "loss": 0.6982, "step": 4838 }, { "epoch": 0.20054705955489247, "grad_norm": 0.4188644289970398, "learning_rate": 3.997471921753906e-06, "loss": 0.6716, "step": 4839 }, { "epoch": 0.20058850346056611, "grad_norm": 0.40209364891052246, "learning_rate": 3.997264702225538e-06, "loss": 0.7612, "step": 4840 }, { "epoch": 0.2006299473662398, "grad_norm": 0.4218508005142212, "learning_rate": 3.99705748269717e-06, "loss": 0.7032, "step": 4841 }, { "epoch": 0.20067139127191347, "grad_norm": 0.4284106194972992, "learning_rate": 3.996850263168801e-06, "loss": 0.6982, "step": 4842 }, { "epoch": 0.20071283517758715, "grad_norm": 0.3973173499107361, "learning_rate": 3.996643043640433e-06, "loss": 0.6504, "step": 4843 }, { "epoch": 0.2007542790832608, "grad_norm": 0.3873210847377777, "learning_rate": 3.996435824112065e-06, "loss": 0.6855, "step": 4844 }, { "epoch": 0.20079572298893447, "grad_norm": 0.41400542855262756, "learning_rate": 3.996228604583696e-06, "loss": 0.7173, "step": 4845 }, { "epoch": 0.20083716689460815, "grad_norm": 0.38667428493499756, "learning_rate": 3.9960213850553285e-06, "loss": 0.6799, "step": 4846 }, { "epoch": 0.20087861080028183, "grad_norm": 0.422187864780426, "learning_rate": 3.995814165526959e-06, "loss": 0.7368, "step": 4847 }, { "epoch": 0.2009200547059555, "grad_norm": 0.4072923958301544, "learning_rate": 3.995606945998591e-06, "loss": 0.7122, "step": 4848 }, { "epoch": 0.20096149861162915, "grad_norm": 0.43887796998023987, 
"learning_rate": 3.995399726470223e-06, "loss": 0.7402, "step": 4849 }, { "epoch": 0.20100294251730283, "grad_norm": 0.3875519335269928, "learning_rate": 3.995192506941855e-06, "loss": 0.6552, "step": 4850 }, { "epoch": 0.2010443864229765, "grad_norm": 0.4179123640060425, "learning_rate": 3.994985287413486e-06, "loss": 0.7043, "step": 4851 }, { "epoch": 0.20108583032865018, "grad_norm": 0.46962517499923706, "learning_rate": 3.994778067885118e-06, "loss": 0.7632, "step": 4852 }, { "epoch": 0.20112727423432383, "grad_norm": 0.4453068971633911, "learning_rate": 3.99457084835675e-06, "loss": 0.6798, "step": 4853 }, { "epoch": 0.2011687181399975, "grad_norm": 0.4193930923938751, "learning_rate": 3.994363628828381e-06, "loss": 0.6865, "step": 4854 }, { "epoch": 0.2012101620456712, "grad_norm": 0.4669552445411682, "learning_rate": 3.994156409300013e-06, "loss": 0.7146, "step": 4855 }, { "epoch": 0.20125160595134486, "grad_norm": 0.4321751594543457, "learning_rate": 3.993949189771644e-06, "loss": 0.7151, "step": 4856 }, { "epoch": 0.2012930498570185, "grad_norm": 0.44281330704689026, "learning_rate": 3.993741970243276e-06, "loss": 0.7314, "step": 4857 }, { "epoch": 0.2013344937626922, "grad_norm": 0.47185227274894714, "learning_rate": 3.993534750714908e-06, "loss": 0.7406, "step": 4858 }, { "epoch": 0.20137593766836587, "grad_norm": 0.48666802048683167, "learning_rate": 3.993327531186539e-06, "loss": 0.7275, "step": 4859 }, { "epoch": 0.20141738157403954, "grad_norm": 0.39160531759262085, "learning_rate": 3.993120311658171e-06, "loss": 0.7263, "step": 4860 }, { "epoch": 0.20145882547971322, "grad_norm": 0.43907567858695984, "learning_rate": 3.992913092129803e-06, "loss": 0.7865, "step": 4861 }, { "epoch": 0.20150026938538687, "grad_norm": 0.4136238992214203, "learning_rate": 3.992705872601434e-06, "loss": 0.6722, "step": 4862 }, { "epoch": 0.20154171329106055, "grad_norm": 0.4332010746002197, "learning_rate": 3.9924986530730655e-06, "loss": 0.719, "step": 4863 }, { 
"epoch": 0.20158315719673423, "grad_norm": 0.43985840678215027, "learning_rate": 3.992291433544698e-06, "loss": 0.7703, "step": 4864 }, { "epoch": 0.2016246011024079, "grad_norm": 0.401887983083725, "learning_rate": 3.992084214016329e-06, "loss": 0.7217, "step": 4865 }, { "epoch": 0.20166604500808155, "grad_norm": 0.4438116252422333, "learning_rate": 3.991876994487961e-06, "loss": 0.7651, "step": 4866 }, { "epoch": 0.20170748891375523, "grad_norm": 0.4155856966972351, "learning_rate": 3.991669774959593e-06, "loss": 0.7063, "step": 4867 }, { "epoch": 0.2017489328194289, "grad_norm": 0.42813897132873535, "learning_rate": 3.991462555431224e-06, "loss": 0.6705, "step": 4868 }, { "epoch": 0.20179037672510258, "grad_norm": 0.4280458986759186, "learning_rate": 3.991255335902856e-06, "loss": 0.7317, "step": 4869 }, { "epoch": 0.20183182063077623, "grad_norm": 0.4266197383403778, "learning_rate": 3.991048116374488e-06, "loss": 0.7061, "step": 4870 }, { "epoch": 0.2018732645364499, "grad_norm": 0.4213167726993561, "learning_rate": 3.990840896846119e-06, "loss": 0.7058, "step": 4871 }, { "epoch": 0.20191470844212359, "grad_norm": 0.4270082712173462, "learning_rate": 3.9906336773177505e-06, "loss": 0.7188, "step": 4872 }, { "epoch": 0.20195615234779726, "grad_norm": 0.4213247001171112, "learning_rate": 3.990426457789383e-06, "loss": 0.7118, "step": 4873 }, { "epoch": 0.20199759625347094, "grad_norm": 0.4341457784175873, "learning_rate": 3.990219238261014e-06, "loss": 0.7256, "step": 4874 }, { "epoch": 0.2020390401591446, "grad_norm": 0.40950748324394226, "learning_rate": 3.9900120187326455e-06, "loss": 0.7239, "step": 4875 }, { "epoch": 0.20208048406481827, "grad_norm": 0.4047373831272125, "learning_rate": 3.989804799204277e-06, "loss": 0.7344, "step": 4876 }, { "epoch": 0.20212192797049194, "grad_norm": 0.4294915497303009, "learning_rate": 3.989597579675909e-06, "loss": 0.7649, "step": 4877 }, { "epoch": 0.20216337187616562, "grad_norm": 0.4314100444316864, "learning_rate": 
3.9893903601475405e-06, "loss": 0.7839, "step": 4878 }, { "epoch": 0.20220481578183927, "grad_norm": 0.41535383462905884, "learning_rate": 3.989183140619172e-06, "loss": 0.7385, "step": 4879 }, { "epoch": 0.20224625968751295, "grad_norm": 0.4173435866832733, "learning_rate": 3.988975921090804e-06, "loss": 0.7635, "step": 4880 }, { "epoch": 0.20228770359318662, "grad_norm": 0.4282142221927643, "learning_rate": 3.9887687015624355e-06, "loss": 0.7429, "step": 4881 }, { "epoch": 0.2023291474988603, "grad_norm": 0.45594847202301025, "learning_rate": 3.988561482034068e-06, "loss": 0.6826, "step": 4882 }, { "epoch": 0.20237059140453395, "grad_norm": 0.395620733499527, "learning_rate": 3.988354262505698e-06, "loss": 0.6853, "step": 4883 }, { "epoch": 0.20241203531020763, "grad_norm": 0.4690057039260864, "learning_rate": 3.9881470429773305e-06, "loss": 0.7943, "step": 4884 }, { "epoch": 0.2024534792158813, "grad_norm": 0.463143527507782, "learning_rate": 3.987939823448962e-06, "loss": 0.7324, "step": 4885 }, { "epoch": 0.20249492312155498, "grad_norm": 0.43740883469581604, "learning_rate": 3.987732603920594e-06, "loss": 0.6914, "step": 4886 }, { "epoch": 0.20253636702722866, "grad_norm": 0.4480329751968384, "learning_rate": 3.9875253843922255e-06, "loss": 0.7478, "step": 4887 }, { "epoch": 0.2025778109329023, "grad_norm": 0.4253040850162506, "learning_rate": 3.987318164863857e-06, "loss": 0.7322, "step": 4888 }, { "epoch": 0.20261925483857599, "grad_norm": 0.42684316635131836, "learning_rate": 3.987110945335489e-06, "loss": 0.7192, "step": 4889 }, { "epoch": 0.20266069874424966, "grad_norm": 0.450831800699234, "learning_rate": 3.9869037258071205e-06, "loss": 0.7317, "step": 4890 }, { "epoch": 0.20270214264992334, "grad_norm": 0.44185078144073486, "learning_rate": 3.986696506278752e-06, "loss": 0.7506, "step": 4891 }, { "epoch": 0.202743586555597, "grad_norm": 0.40829259157180786, "learning_rate": 3.986489286750383e-06, "loss": 0.7305, "step": 4892 }, { "epoch": 
0.20278503046127067, "grad_norm": 0.4085010886192322, "learning_rate": 3.9862820672220155e-06, "loss": 0.7202, "step": 4893 }, { "epoch": 0.20282647436694434, "grad_norm": 0.4650880992412567, "learning_rate": 3.986074847693647e-06, "loss": 0.8198, "step": 4894 }, { "epoch": 0.20286791827261802, "grad_norm": 0.40678155422210693, "learning_rate": 3.985867628165278e-06, "loss": 0.6752, "step": 4895 }, { "epoch": 0.20290936217829167, "grad_norm": 0.4065786898136139, "learning_rate": 3.9856604086369105e-06, "loss": 0.6946, "step": 4896 }, { "epoch": 0.20295080608396535, "grad_norm": 0.4177252948284149, "learning_rate": 3.985453189108542e-06, "loss": 0.7271, "step": 4897 }, { "epoch": 0.20299224998963902, "grad_norm": 0.39959901571273804, "learning_rate": 3.985245969580174e-06, "loss": 0.6813, "step": 4898 }, { "epoch": 0.2030336938953127, "grad_norm": 0.466714084148407, "learning_rate": 3.985038750051805e-06, "loss": 0.7603, "step": 4899 }, { "epoch": 0.20307513780098638, "grad_norm": 0.40733465552330017, "learning_rate": 3.984831530523437e-06, "loss": 0.7189, "step": 4900 }, { "epoch": 0.20311658170666003, "grad_norm": 0.43598905205726624, "learning_rate": 3.984624310995068e-06, "loss": 0.7666, "step": 4901 }, { "epoch": 0.2031580256123337, "grad_norm": 0.4384497106075287, "learning_rate": 3.9844170914667005e-06, "loss": 0.7441, "step": 4902 }, { "epoch": 0.20319946951800738, "grad_norm": 0.39899253845214844, "learning_rate": 3.984209871938332e-06, "loss": 0.7247, "step": 4903 }, { "epoch": 0.20324091342368106, "grad_norm": 0.429982990026474, "learning_rate": 3.984002652409963e-06, "loss": 0.6615, "step": 4904 }, { "epoch": 0.2032823573293547, "grad_norm": 0.41308340430259705, "learning_rate": 3.9837954328815955e-06, "loss": 0.7949, "step": 4905 }, { "epoch": 0.20332380123502838, "grad_norm": 0.4428352415561676, "learning_rate": 3.983588213353227e-06, "loss": 0.7732, "step": 4906 }, { "epoch": 0.20336524514070206, "grad_norm": 0.45400720834732056, "learning_rate": 
3.983380993824858e-06, "loss": 0.7271, "step": 4907 }, { "epoch": 0.20340668904637574, "grad_norm": 0.41853067278862, "learning_rate": 3.98317377429649e-06, "loss": 0.7192, "step": 4908 }, { "epoch": 0.2034481329520494, "grad_norm": 0.42906585335731506, "learning_rate": 3.982966554768122e-06, "loss": 0.761, "step": 4909 }, { "epoch": 0.20348957685772306, "grad_norm": 0.40976741909980774, "learning_rate": 3.982759335239753e-06, "loss": 0.7561, "step": 4910 }, { "epoch": 0.20353102076339674, "grad_norm": 0.4233849346637726, "learning_rate": 3.982552115711385e-06, "loss": 0.7461, "step": 4911 }, { "epoch": 0.20357246466907042, "grad_norm": 0.4079173505306244, "learning_rate": 3.982344896183016e-06, "loss": 0.6841, "step": 4912 }, { "epoch": 0.2036139085747441, "grad_norm": 0.3959016799926758, "learning_rate": 3.982137676654648e-06, "loss": 0.7144, "step": 4913 }, { "epoch": 0.20365535248041775, "grad_norm": 0.41608691215515137, "learning_rate": 3.9819304571262805e-06, "loss": 0.7065, "step": 4914 }, { "epoch": 0.20369679638609142, "grad_norm": 0.42271432280540466, "learning_rate": 3.981723237597911e-06, "loss": 0.7539, "step": 4915 }, { "epoch": 0.2037382402917651, "grad_norm": 0.43617281317710876, "learning_rate": 3.981516018069543e-06, "loss": 0.7297, "step": 4916 }, { "epoch": 0.20377968419743878, "grad_norm": 0.4270938038825989, "learning_rate": 3.981308798541175e-06, "loss": 0.7177, "step": 4917 }, { "epoch": 0.20382112810311243, "grad_norm": 0.407021701335907, "learning_rate": 3.981101579012807e-06, "loss": 0.7134, "step": 4918 }, { "epoch": 0.2038625720087861, "grad_norm": 0.41759172081947327, "learning_rate": 3.980894359484438e-06, "loss": 0.7058, "step": 4919 }, { "epoch": 0.20390401591445978, "grad_norm": 0.43455517292022705, "learning_rate": 3.98068713995607e-06, "loss": 0.7368, "step": 4920 }, { "epoch": 0.20394545982013346, "grad_norm": 0.4410875141620636, "learning_rate": 3.980479920427702e-06, "loss": 0.7384, "step": 4921 }, { "epoch": 
0.20398690372580713, "grad_norm": 0.4648277163505554, "learning_rate": 3.980272700899333e-06, "loss": 0.6777, "step": 4922 }, { "epoch": 0.20402834763148078, "grad_norm": 0.43786296248435974, "learning_rate": 3.980065481370965e-06, "loss": 0.7219, "step": 4923 }, { "epoch": 0.20406979153715446, "grad_norm": 0.42219990491867065, "learning_rate": 3.979858261842596e-06, "loss": 0.7212, "step": 4924 }, { "epoch": 0.20411123544282814, "grad_norm": 0.42502540349960327, "learning_rate": 3.979651042314228e-06, "loss": 0.7229, "step": 4925 }, { "epoch": 0.20415267934850181, "grad_norm": 0.4341442286968231, "learning_rate": 3.97944382278586e-06, "loss": 0.7133, "step": 4926 }, { "epoch": 0.20419412325417546, "grad_norm": 0.43252792954444885, "learning_rate": 3.979236603257491e-06, "loss": 0.7561, "step": 4927 }, { "epoch": 0.20423556715984914, "grad_norm": 0.41907423734664917, "learning_rate": 3.9790293837291225e-06, "loss": 0.6711, "step": 4928 }, { "epoch": 0.20427701106552282, "grad_norm": 0.4227280914783478, "learning_rate": 3.978822164200755e-06, "loss": 0.6853, "step": 4929 }, { "epoch": 0.2043184549711965, "grad_norm": 0.4302346110343933, "learning_rate": 3.978614944672386e-06, "loss": 0.6901, "step": 4930 }, { "epoch": 0.20435989887687014, "grad_norm": 0.42137134075164795, "learning_rate": 3.9784077251440175e-06, "loss": 0.6948, "step": 4931 }, { "epoch": 0.20440134278254382, "grad_norm": 0.4396488666534424, "learning_rate": 3.97820050561565e-06, "loss": 0.7751, "step": 4932 }, { "epoch": 0.2044427866882175, "grad_norm": 0.4327951967716217, "learning_rate": 3.977993286087281e-06, "loss": 0.8035, "step": 4933 }, { "epoch": 0.20448423059389118, "grad_norm": 0.42528650164604187, "learning_rate": 3.977786066558913e-06, "loss": 0.7483, "step": 4934 }, { "epoch": 0.20452567449956485, "grad_norm": 0.39143651723861694, "learning_rate": 3.977578847030545e-06, "loss": 0.6656, "step": 4935 }, { "epoch": 0.2045671184052385, "grad_norm": 0.46127450466156006, "learning_rate": 
3.977371627502176e-06, "loss": 0.7507, "step": 4936 }, { "epoch": 0.20460856231091218, "grad_norm": 0.4289734363555908, "learning_rate": 3.9771644079738075e-06, "loss": 0.7341, "step": 4937 }, { "epoch": 0.20465000621658586, "grad_norm": 0.45152151584625244, "learning_rate": 3.97695718844544e-06, "loss": 0.7502, "step": 4938 }, { "epoch": 0.20469145012225953, "grad_norm": 0.421102374792099, "learning_rate": 3.976749968917071e-06, "loss": 0.7339, "step": 4939 }, { "epoch": 0.20473289402793318, "grad_norm": 0.4111858308315277, "learning_rate": 3.9765427493887025e-06, "loss": 0.6887, "step": 4940 }, { "epoch": 0.20477433793360686, "grad_norm": 0.4935225248336792, "learning_rate": 3.976335529860335e-06, "loss": 0.7375, "step": 4941 }, { "epoch": 0.20481578183928054, "grad_norm": 0.4377748370170593, "learning_rate": 3.976128310331966e-06, "loss": 0.762, "step": 4942 }, { "epoch": 0.2048572257449542, "grad_norm": 0.3995705842971802, "learning_rate": 3.9759210908035975e-06, "loss": 0.7666, "step": 4943 }, { "epoch": 0.20489866965062786, "grad_norm": 0.3820174038410187, "learning_rate": 3.975713871275229e-06, "loss": 0.7216, "step": 4944 }, { "epoch": 0.20494011355630154, "grad_norm": 0.43117663264274597, "learning_rate": 3.975506651746861e-06, "loss": 0.6843, "step": 4945 }, { "epoch": 0.20498155746197522, "grad_norm": 0.445446640253067, "learning_rate": 3.9752994322184925e-06, "loss": 0.7305, "step": 4946 }, { "epoch": 0.2050230013676489, "grad_norm": 0.47389575839042664, "learning_rate": 3.975092212690124e-06, "loss": 0.7622, "step": 4947 }, { "epoch": 0.20506444527332257, "grad_norm": 0.42647477984428406, "learning_rate": 3.974884993161756e-06, "loss": 0.7051, "step": 4948 }, { "epoch": 0.20510588917899622, "grad_norm": 0.4340187907218933, "learning_rate": 3.9746777736333875e-06, "loss": 0.6813, "step": 4949 }, { "epoch": 0.2051473330846699, "grad_norm": 0.3840288817882538, "learning_rate": 3.97447055410502e-06, "loss": 0.7229, "step": 4950 }, { "epoch": 
0.20518877699034357, "grad_norm": 0.4030836820602417, "learning_rate": 3.974263334576651e-06, "loss": 0.7122, "step": 4951 }, { "epoch": 0.20523022089601725, "grad_norm": 0.4452937841415405, "learning_rate": 3.9740561150482825e-06, "loss": 0.74, "step": 4952 }, { "epoch": 0.2052716648016909, "grad_norm": 0.448479026556015, "learning_rate": 3.973848895519914e-06, "loss": 0.813, "step": 4953 }, { "epoch": 0.20531310870736458, "grad_norm": 0.4458146095275879, "learning_rate": 3.973641675991546e-06, "loss": 0.7532, "step": 4954 }, { "epoch": 0.20535455261303825, "grad_norm": 0.4272207021713257, "learning_rate": 3.9734344564631775e-06, "loss": 0.7322, "step": 4955 }, { "epoch": 0.20539599651871193, "grad_norm": 0.4285518527030945, "learning_rate": 3.973227236934809e-06, "loss": 0.7532, "step": 4956 }, { "epoch": 0.20543744042438558, "grad_norm": 0.40414348244667053, "learning_rate": 3.973020017406441e-06, "loss": 0.7859, "step": 4957 }, { "epoch": 0.20547888433005926, "grad_norm": 0.4786165654659271, "learning_rate": 3.9728127978780725e-06, "loss": 0.7493, "step": 4958 }, { "epoch": 0.20552032823573294, "grad_norm": 0.42078104615211487, "learning_rate": 3.972605578349704e-06, "loss": 0.7366, "step": 4959 }, { "epoch": 0.2055617721414066, "grad_norm": 0.43938061594963074, "learning_rate": 3.972398358821335e-06, "loss": 0.7202, "step": 4960 }, { "epoch": 0.2056032160470803, "grad_norm": 0.45234769582748413, "learning_rate": 3.9721911392929675e-06, "loss": 0.7113, "step": 4961 }, { "epoch": 0.20564465995275394, "grad_norm": 0.4003274738788605, "learning_rate": 3.971983919764599e-06, "loss": 0.6785, "step": 4962 }, { "epoch": 0.20568610385842762, "grad_norm": 0.4167240858078003, "learning_rate": 3.97177670023623e-06, "loss": 0.762, "step": 4963 }, { "epoch": 0.2057275477641013, "grad_norm": 0.40504613518714905, "learning_rate": 3.971569480707862e-06, "loss": 0.6776, "step": 4964 }, { "epoch": 0.20576899166977497, "grad_norm": 0.43072494864463806, "learning_rate": 
3.971362261179494e-06, "loss": 0.7222, "step": 4965 }, { "epoch": 0.20581043557544862, "grad_norm": 0.44922390580177307, "learning_rate": 3.971155041651126e-06, "loss": 0.7678, "step": 4966 }, { "epoch": 0.2058518794811223, "grad_norm": 0.4145321846008301, "learning_rate": 3.970947822122757e-06, "loss": 0.7422, "step": 4967 }, { "epoch": 0.20589332338679597, "grad_norm": 0.3864027261734009, "learning_rate": 3.970740602594389e-06, "loss": 0.6833, "step": 4968 }, { "epoch": 0.20593476729246965, "grad_norm": 0.40929099917411804, "learning_rate": 3.97053338306602e-06, "loss": 0.7029, "step": 4969 }, { "epoch": 0.2059762111981433, "grad_norm": 0.44474220275878906, "learning_rate": 3.9703261635376525e-06, "loss": 0.729, "step": 4970 }, { "epoch": 0.20601765510381698, "grad_norm": 0.41776105761528015, "learning_rate": 3.970118944009284e-06, "loss": 0.7463, "step": 4971 }, { "epoch": 0.20605909900949065, "grad_norm": 0.443928986787796, "learning_rate": 3.969911724480915e-06, "loss": 0.7371, "step": 4972 }, { "epoch": 0.20610054291516433, "grad_norm": 0.42484357953071594, "learning_rate": 3.969704504952547e-06, "loss": 0.6926, "step": 4973 }, { "epoch": 0.206141986820838, "grad_norm": 0.4135028123855591, "learning_rate": 3.969497285424179e-06, "loss": 0.6887, "step": 4974 }, { "epoch": 0.20618343072651166, "grad_norm": 0.4275168478488922, "learning_rate": 3.96929006589581e-06, "loss": 0.7393, "step": 4975 }, { "epoch": 0.20622487463218533, "grad_norm": 0.423520028591156, "learning_rate": 3.969082846367442e-06, "loss": 0.7344, "step": 4976 }, { "epoch": 0.206266318537859, "grad_norm": 0.3947014808654785, "learning_rate": 3.968875626839074e-06, "loss": 0.8062, "step": 4977 }, { "epoch": 0.2063077624435327, "grad_norm": 0.4072122871875763, "learning_rate": 3.968668407310705e-06, "loss": 0.7012, "step": 4978 }, { "epoch": 0.20634920634920634, "grad_norm": 0.4207821786403656, "learning_rate": 3.968461187782337e-06, "loss": 0.7273, "step": 4979 }, { "epoch": 0.20639065025488001, 
"grad_norm": 0.43246567249298096, "learning_rate": 3.968253968253968e-06, "loss": 0.7186, "step": 4980 }, { "epoch": 0.2064320941605537, "grad_norm": 0.4550793766975403, "learning_rate": 3.9680467487256e-06, "loss": 0.7119, "step": 4981 }, { "epoch": 0.20647353806622737, "grad_norm": 0.4283657670021057, "learning_rate": 3.9678395291972325e-06, "loss": 0.7336, "step": 4982 }, { "epoch": 0.20651498197190102, "grad_norm": 0.4309161901473999, "learning_rate": 3.967632309668863e-06, "loss": 0.7582, "step": 4983 }, { "epoch": 0.2065564258775747, "grad_norm": 0.4237719178199768, "learning_rate": 3.967425090140495e-06, "loss": 0.6893, "step": 4984 }, { "epoch": 0.20659786978324837, "grad_norm": 0.4184347689151764, "learning_rate": 3.967217870612127e-06, "loss": 0.6606, "step": 4985 }, { "epoch": 0.20663931368892205, "grad_norm": 0.4322426915168762, "learning_rate": 3.967010651083759e-06, "loss": 0.7224, "step": 4986 }, { "epoch": 0.20668075759459573, "grad_norm": 0.42716529965400696, "learning_rate": 3.96680343155539e-06, "loss": 0.7131, "step": 4987 }, { "epoch": 0.20672220150026938, "grad_norm": 0.4076707363128662, "learning_rate": 3.966596212027022e-06, "loss": 0.7673, "step": 4988 }, { "epoch": 0.20676364540594305, "grad_norm": 0.40031224489212036, "learning_rate": 3.966388992498653e-06, "loss": 0.7062, "step": 4989 }, { "epoch": 0.20680508931161673, "grad_norm": 0.41259315609931946, "learning_rate": 3.966181772970285e-06, "loss": 0.655, "step": 4990 }, { "epoch": 0.2068465332172904, "grad_norm": 0.40943002700805664, "learning_rate": 3.965974553441917e-06, "loss": 0.7766, "step": 4991 }, { "epoch": 0.20688797712296406, "grad_norm": 0.38084378838539124, "learning_rate": 3.965767333913548e-06, "loss": 0.7024, "step": 4992 }, { "epoch": 0.20692942102863773, "grad_norm": 0.45761755108833313, "learning_rate": 3.96556011438518e-06, "loss": 0.7097, "step": 4993 }, { "epoch": 0.2069708649343114, "grad_norm": 0.4409826993942261, "learning_rate": 3.965352894856812e-06, "loss": 
0.7443, "step": 4994 }, { "epoch": 0.2070123088399851, "grad_norm": 0.41890180110931396, "learning_rate": 3.965145675328443e-06, "loss": 0.6526, "step": 4995 }, { "epoch": 0.20705375274565876, "grad_norm": 0.4082462787628174, "learning_rate": 3.9649384558000745e-06, "loss": 0.7402, "step": 4996 }, { "epoch": 0.2070951966513324, "grad_norm": 0.4276891052722931, "learning_rate": 3.964731236271707e-06, "loss": 0.7399, "step": 4997 }, { "epoch": 0.2071366405570061, "grad_norm": 0.420146107673645, "learning_rate": 3.964524016743338e-06, "loss": 0.6967, "step": 4998 }, { "epoch": 0.20717808446267977, "grad_norm": 0.47646698355674744, "learning_rate": 3.9643167972149695e-06, "loss": 0.7947, "step": 4999 }, { "epoch": 0.20721952836835345, "grad_norm": 0.4139586389064789, "learning_rate": 3.964109577686602e-06, "loss": 0.6824, "step": 5000 }, { "epoch": 0.2072609722740271, "grad_norm": 0.3895176947116852, "learning_rate": 3.963902358158233e-06, "loss": 0.6851, "step": 5001 }, { "epoch": 0.20730241617970077, "grad_norm": 0.4015359878540039, "learning_rate": 3.963695138629865e-06, "loss": 0.7354, "step": 5002 }, { "epoch": 0.20734386008537445, "grad_norm": 0.3724995255470276, "learning_rate": 3.963487919101497e-06, "loss": 0.672, "step": 5003 }, { "epoch": 0.20738530399104813, "grad_norm": 0.4264490306377411, "learning_rate": 3.963280699573128e-06, "loss": 0.7178, "step": 5004 }, { "epoch": 0.20742674789672177, "grad_norm": 0.4242999255657196, "learning_rate": 3.9630734800447595e-06, "loss": 0.686, "step": 5005 }, { "epoch": 0.20746819180239545, "grad_norm": 0.4459003806114197, "learning_rate": 3.962866260516392e-06, "loss": 0.7822, "step": 5006 }, { "epoch": 0.20750963570806913, "grad_norm": 0.4635668992996216, "learning_rate": 3.962659040988023e-06, "loss": 0.6753, "step": 5007 }, { "epoch": 0.2075510796137428, "grad_norm": 0.3952350616455078, "learning_rate": 3.9624518214596545e-06, "loss": 0.6838, "step": 5008 }, { "epoch": 0.20759252351941648, "grad_norm": 
0.44736242294311523, "learning_rate": 3.962244601931287e-06, "loss": 0.7402, "step": 5009 }, { "epoch": 0.20763396742509013, "grad_norm": 0.45444953441619873, "learning_rate": 3.962037382402918e-06, "loss": 0.7052, "step": 5010 }, { "epoch": 0.2076754113307638, "grad_norm": 0.41435879468917847, "learning_rate": 3.9618301628745495e-06, "loss": 0.7153, "step": 5011 }, { "epoch": 0.2077168552364375, "grad_norm": 0.3875054121017456, "learning_rate": 3.961622943346181e-06, "loss": 0.6581, "step": 5012 }, { "epoch": 0.20775829914211116, "grad_norm": 0.45671743154525757, "learning_rate": 3.961415723817813e-06, "loss": 0.7356, "step": 5013 }, { "epoch": 0.2077997430477848, "grad_norm": 0.4382782578468323, "learning_rate": 3.9612085042894445e-06, "loss": 0.6953, "step": 5014 }, { "epoch": 0.2078411869534585, "grad_norm": 0.4314959943294525, "learning_rate": 3.961001284761076e-06, "loss": 0.6853, "step": 5015 }, { "epoch": 0.20788263085913217, "grad_norm": 0.40663015842437744, "learning_rate": 3.960794065232707e-06, "loss": 0.7003, "step": 5016 }, { "epoch": 0.20792407476480584, "grad_norm": 0.4372977614402771, "learning_rate": 3.9605868457043395e-06, "loss": 0.7933, "step": 5017 }, { "epoch": 0.2079655186704795, "grad_norm": 0.42495372891426086, "learning_rate": 3.960379626175972e-06, "loss": 0.6821, "step": 5018 }, { "epoch": 0.20800696257615317, "grad_norm": 0.4128490388393402, "learning_rate": 3.960172406647603e-06, "loss": 0.7156, "step": 5019 }, { "epoch": 0.20804840648182685, "grad_norm": 0.43258345127105713, "learning_rate": 3.9599651871192345e-06, "loss": 0.6819, "step": 5020 }, { "epoch": 0.20808985038750052, "grad_norm": 0.43986499309539795, "learning_rate": 3.959757967590866e-06, "loss": 0.7612, "step": 5021 }, { "epoch": 0.2081312942931742, "grad_norm": 0.4230652451515198, "learning_rate": 3.959550748062498e-06, "loss": 0.7427, "step": 5022 }, { "epoch": 0.20817273819884785, "grad_norm": 0.420604944229126, "learning_rate": 3.9593435285341295e-06, "loss": 0.7271, 
"step": 5023 }, { "epoch": 0.20821418210452153, "grad_norm": 0.4139954149723053, "learning_rate": 3.959136309005761e-06, "loss": 0.6941, "step": 5024 }, { "epoch": 0.2082556260101952, "grad_norm": 0.42601391673088074, "learning_rate": 3.958929089477392e-06, "loss": 0.7332, "step": 5025 }, { "epoch": 0.20829706991586888, "grad_norm": 0.44510379433631897, "learning_rate": 3.9587218699490245e-06, "loss": 0.7406, "step": 5026 }, { "epoch": 0.20833851382154253, "grad_norm": 0.4313295781612396, "learning_rate": 3.958514650420656e-06, "loss": 0.6915, "step": 5027 }, { "epoch": 0.2083799577272162, "grad_norm": 0.4395928680896759, "learning_rate": 3.958307430892287e-06, "loss": 0.7025, "step": 5028 }, { "epoch": 0.20842140163288989, "grad_norm": 0.40598583221435547, "learning_rate": 3.9581002113639195e-06, "loss": 0.7339, "step": 5029 }, { "epoch": 0.20846284553856356, "grad_norm": 0.404128760099411, "learning_rate": 3.957892991835551e-06, "loss": 0.6766, "step": 5030 }, { "epoch": 0.2085042894442372, "grad_norm": 0.3969388008117676, "learning_rate": 3.957685772307182e-06, "loss": 0.6925, "step": 5031 }, { "epoch": 0.2085457333499109, "grad_norm": 0.40514540672302246, "learning_rate": 3.957478552778814e-06, "loss": 0.6978, "step": 5032 }, { "epoch": 0.20858717725558457, "grad_norm": 0.38450825214385986, "learning_rate": 3.957271333250446e-06, "loss": 0.7083, "step": 5033 }, { "epoch": 0.20862862116125824, "grad_norm": 0.4431573152542114, "learning_rate": 3.957064113722077e-06, "loss": 0.73, "step": 5034 }, { "epoch": 0.20867006506693192, "grad_norm": 0.44064074754714966, "learning_rate": 3.956856894193709e-06, "loss": 0.7795, "step": 5035 }, { "epoch": 0.20871150897260557, "grad_norm": 0.4039342403411865, "learning_rate": 3.956649674665341e-06, "loss": 0.728, "step": 5036 }, { "epoch": 0.20875295287827925, "grad_norm": 0.3996719717979431, "learning_rate": 3.956442455136972e-06, "loss": 0.7205, "step": 5037 }, { "epoch": 0.20879439678395292, "grad_norm": 0.44030120968818665, 
"learning_rate": 3.9562352356086045e-06, "loss": 0.728, "step": 5038 }, { "epoch": 0.2088358406896266, "grad_norm": 0.4313356280326843, "learning_rate": 3.956028016080236e-06, "loss": 0.7285, "step": 5039 }, { "epoch": 0.20887728459530025, "grad_norm": 0.435509592294693, "learning_rate": 3.955820796551867e-06, "loss": 0.7515, "step": 5040 }, { "epoch": 0.20891872850097393, "grad_norm": 0.4092146158218384, "learning_rate": 3.955613577023499e-06, "loss": 0.725, "step": 5041 }, { "epoch": 0.2089601724066476, "grad_norm": 0.43713656067848206, "learning_rate": 3.955406357495131e-06, "loss": 0.7039, "step": 5042 }, { "epoch": 0.20900161631232128, "grad_norm": 0.4088458716869354, "learning_rate": 3.955199137966762e-06, "loss": 0.691, "step": 5043 }, { "epoch": 0.20904306021799493, "grad_norm": 0.455867201089859, "learning_rate": 3.954991918438394e-06, "loss": 0.7429, "step": 5044 }, { "epoch": 0.2090845041236686, "grad_norm": 0.4370984435081482, "learning_rate": 3.954784698910026e-06, "loss": 0.7611, "step": 5045 }, { "epoch": 0.20912594802934228, "grad_norm": 0.4162183105945587, "learning_rate": 3.954577479381657e-06, "loss": 0.7268, "step": 5046 }, { "epoch": 0.20916739193501596, "grad_norm": 0.43249839544296265, "learning_rate": 3.954370259853289e-06, "loss": 0.7042, "step": 5047 }, { "epoch": 0.20920883584068964, "grad_norm": 0.3995538353919983, "learning_rate": 3.95416304032492e-06, "loss": 0.6987, "step": 5048 }, { "epoch": 0.2092502797463633, "grad_norm": 0.40230026841163635, "learning_rate": 3.953955820796552e-06, "loss": 0.6929, "step": 5049 }, { "epoch": 0.20929172365203697, "grad_norm": 0.41531893610954285, "learning_rate": 3.953748601268184e-06, "loss": 0.7115, "step": 5050 }, { "epoch": 0.20933316755771064, "grad_norm": 0.3995685279369354, "learning_rate": 3.953541381739815e-06, "loss": 0.7405, "step": 5051 }, { "epoch": 0.20937461146338432, "grad_norm": 0.4462067484855652, "learning_rate": 3.953334162211447e-06, "loss": 0.7222, "step": 5052 }, { "epoch": 
0.20941605536905797, "grad_norm": 0.4951309859752655, "learning_rate": 3.953126942683079e-06, "loss": 0.8076, "step": 5053 }, { "epoch": 0.20945749927473165, "grad_norm": 0.45819154381752014, "learning_rate": 3.952919723154711e-06, "loss": 0.7302, "step": 5054 }, { "epoch": 0.20949894318040532, "grad_norm": 0.4239514470100403, "learning_rate": 3.952712503626342e-06, "loss": 0.7209, "step": 5055 }, { "epoch": 0.209540387086079, "grad_norm": 0.41919928789138794, "learning_rate": 3.952505284097974e-06, "loss": 0.739, "step": 5056 }, { "epoch": 0.20958183099175265, "grad_norm": 0.4425763487815857, "learning_rate": 3.952298064569605e-06, "loss": 0.7305, "step": 5057 }, { "epoch": 0.20962327489742633, "grad_norm": 0.42079198360443115, "learning_rate": 3.952090845041237e-06, "loss": 0.7084, "step": 5058 }, { "epoch": 0.2096647188031, "grad_norm": 0.4223988652229309, "learning_rate": 3.951883625512869e-06, "loss": 0.7566, "step": 5059 }, { "epoch": 0.20970616270877368, "grad_norm": 0.42822688817977905, "learning_rate": 3.9516764059845e-06, "loss": 0.7522, "step": 5060 }, { "epoch": 0.20974760661444736, "grad_norm": 0.41007164120674133, "learning_rate": 3.951469186456132e-06, "loss": 0.7178, "step": 5061 }, { "epoch": 0.209789050520121, "grad_norm": 0.39228421449661255, "learning_rate": 3.951261966927764e-06, "loss": 0.7, "step": 5062 }, { "epoch": 0.20983049442579468, "grad_norm": 0.4414163827896118, "learning_rate": 3.951054747399395e-06, "loss": 0.7214, "step": 5063 }, { "epoch": 0.20987193833146836, "grad_norm": 0.4180108308792114, "learning_rate": 3.9508475278710265e-06, "loss": 0.7729, "step": 5064 }, { "epoch": 0.20991338223714204, "grad_norm": 0.4564184546470642, "learning_rate": 3.950640308342659e-06, "loss": 0.7627, "step": 5065 }, { "epoch": 0.2099548261428157, "grad_norm": 0.4167024493217468, "learning_rate": 3.95043308881429e-06, "loss": 0.7095, "step": 5066 }, { "epoch": 0.20999627004848936, "grad_norm": 0.44754770398139954, "learning_rate": 
3.9502258692859215e-06, "loss": 0.7255, "step": 5067 }, { "epoch": 0.21003771395416304, "grad_norm": 0.41250523924827576, "learning_rate": 3.950018649757553e-06, "loss": 0.6854, "step": 5068 }, { "epoch": 0.21007915785983672, "grad_norm": 0.43757668137550354, "learning_rate": 3.949811430229185e-06, "loss": 0.7227, "step": 5069 }, { "epoch": 0.2101206017655104, "grad_norm": 0.408383309841156, "learning_rate": 3.949604210700817e-06, "loss": 0.7249, "step": 5070 }, { "epoch": 0.21016204567118404, "grad_norm": 0.40836358070373535, "learning_rate": 3.949396991172449e-06, "loss": 0.7515, "step": 5071 }, { "epoch": 0.21020348957685772, "grad_norm": 0.44014766812324524, "learning_rate": 3.94918977164408e-06, "loss": 0.7651, "step": 5072 }, { "epoch": 0.2102449334825314, "grad_norm": 0.40353161096572876, "learning_rate": 3.9489825521157115e-06, "loss": 0.7284, "step": 5073 }, { "epoch": 0.21028637738820508, "grad_norm": 0.44718509912490845, "learning_rate": 3.948775332587344e-06, "loss": 0.731, "step": 5074 }, { "epoch": 0.21032782129387873, "grad_norm": 0.45299291610717773, "learning_rate": 3.948568113058975e-06, "loss": 0.7078, "step": 5075 }, { "epoch": 0.2103692651995524, "grad_norm": 0.446534126996994, "learning_rate": 3.9483608935306065e-06, "loss": 0.7344, "step": 5076 }, { "epoch": 0.21041070910522608, "grad_norm": 0.3768323063850403, "learning_rate": 3.948153674002238e-06, "loss": 0.689, "step": 5077 }, { "epoch": 0.21045215301089976, "grad_norm": 0.4404606223106384, "learning_rate": 3.94794645447387e-06, "loss": 0.7244, "step": 5078 }, { "epoch": 0.2104935969165734, "grad_norm": 0.4412422478199005, "learning_rate": 3.9477392349455015e-06, "loss": 0.6986, "step": 5079 }, { "epoch": 0.21053504082224708, "grad_norm": 0.42820578813552856, "learning_rate": 3.947532015417133e-06, "loss": 0.7676, "step": 5080 }, { "epoch": 0.21057648472792076, "grad_norm": 0.44106268882751465, "learning_rate": 3.947324795888765e-06, "loss": 0.7633, "step": 5081 }, { "epoch": 
0.21061792863359444, "grad_norm": 0.43526729941368103, "learning_rate": 3.9471175763603965e-06, "loss": 0.7362, "step": 5082 }, { "epoch": 0.2106593725392681, "grad_norm": 0.4474290609359741, "learning_rate": 3.946910356832028e-06, "loss": 0.688, "step": 5083 }, { "epoch": 0.21070081644494176, "grad_norm": 0.45499250292778015, "learning_rate": 3.946703137303659e-06, "loss": 0.7366, "step": 5084 }, { "epoch": 0.21074226035061544, "grad_norm": 0.4522298574447632, "learning_rate": 3.9464959177752915e-06, "loss": 0.7322, "step": 5085 }, { "epoch": 0.21078370425628912, "grad_norm": 0.43330124020576477, "learning_rate": 3.946288698246923e-06, "loss": 0.7166, "step": 5086 }, { "epoch": 0.2108251481619628, "grad_norm": 0.3907892107963562, "learning_rate": 3.946081478718555e-06, "loss": 0.713, "step": 5087 }, { "epoch": 0.21086659206763644, "grad_norm": 0.4124689996242523, "learning_rate": 3.9458742591901865e-06, "loss": 0.709, "step": 5088 }, { "epoch": 0.21090803597331012, "grad_norm": 0.4523718059062958, "learning_rate": 3.945667039661818e-06, "loss": 0.6847, "step": 5089 }, { "epoch": 0.2109494798789838, "grad_norm": 0.4406546652317047, "learning_rate": 3.94545982013345e-06, "loss": 0.7771, "step": 5090 }, { "epoch": 0.21099092378465747, "grad_norm": 0.4222888648509979, "learning_rate": 3.9452526006050815e-06, "loss": 0.6774, "step": 5091 }, { "epoch": 0.21103236769033112, "grad_norm": 0.4384972155094147, "learning_rate": 3.945045381076713e-06, "loss": 0.6938, "step": 5092 }, { "epoch": 0.2110738115960048, "grad_norm": 0.4407550096511841, "learning_rate": 3.944838161548344e-06, "loss": 0.7139, "step": 5093 }, { "epoch": 0.21111525550167848, "grad_norm": 0.42434924840927124, "learning_rate": 3.9446309420199765e-06, "loss": 0.7142, "step": 5094 }, { "epoch": 0.21115669940735216, "grad_norm": 0.4632549583911896, "learning_rate": 3.944423722491608e-06, "loss": 0.7534, "step": 5095 }, { "epoch": 0.21119814331302583, "grad_norm": 0.395626962184906, "learning_rate": 
3.944216502963239e-06, "loss": 0.6675, "step": 5096 }, { "epoch": 0.21123958721869948, "grad_norm": 0.38264575600624084, "learning_rate": 3.9440092834348715e-06, "loss": 0.698, "step": 5097 }, { "epoch": 0.21128103112437316, "grad_norm": 0.4366777241230011, "learning_rate": 3.943802063906503e-06, "loss": 0.7549, "step": 5098 }, { "epoch": 0.21132247503004684, "grad_norm": 0.399330198764801, "learning_rate": 3.943594844378134e-06, "loss": 0.6646, "step": 5099 }, { "epoch": 0.2113639189357205, "grad_norm": 0.44805723428726196, "learning_rate": 3.943387624849766e-06, "loss": 0.7253, "step": 5100 }, { "epoch": 0.21140536284139416, "grad_norm": 0.3996202349662781, "learning_rate": 3.943180405321398e-06, "loss": 0.7134, "step": 5101 }, { "epoch": 0.21144680674706784, "grad_norm": 0.4322737753391266, "learning_rate": 3.942973185793029e-06, "loss": 0.7317, "step": 5102 }, { "epoch": 0.21148825065274152, "grad_norm": 0.38949868083000183, "learning_rate": 3.942765966264661e-06, "loss": 0.6892, "step": 5103 }, { "epoch": 0.2115296945584152, "grad_norm": 0.48289552330970764, "learning_rate": 3.942558746736293e-06, "loss": 0.7013, "step": 5104 }, { "epoch": 0.21157113846408884, "grad_norm": 0.4455186426639557, "learning_rate": 3.942351527207924e-06, "loss": 0.6898, "step": 5105 }, { "epoch": 0.21161258236976252, "grad_norm": 0.4251181185245514, "learning_rate": 3.9421443076795566e-06, "loss": 0.7366, "step": 5106 }, { "epoch": 0.2116540262754362, "grad_norm": 0.41208216547966003, "learning_rate": 3.941937088151188e-06, "loss": 0.7183, "step": 5107 }, { "epoch": 0.21169547018110987, "grad_norm": 0.41952913999557495, "learning_rate": 3.941729868622819e-06, "loss": 0.7275, "step": 5108 }, { "epoch": 0.21173691408678355, "grad_norm": 0.40845420956611633, "learning_rate": 3.941522649094451e-06, "loss": 0.7336, "step": 5109 }, { "epoch": 0.2117783579924572, "grad_norm": 0.41922375559806824, "learning_rate": 3.941315429566083e-06, "loss": 0.717, "step": 5110 }, { "epoch": 
0.21181980189813088, "grad_norm": 0.45428892970085144, "learning_rate": 3.941108210037714e-06, "loss": 0.7583, "step": 5111 }, { "epoch": 0.21186124580380455, "grad_norm": 0.43839678168296814, "learning_rate": 3.940900990509346e-06, "loss": 0.7095, "step": 5112 }, { "epoch": 0.21190268970947823, "grad_norm": 0.45987460017204285, "learning_rate": 3.940693770980978e-06, "loss": 0.749, "step": 5113 }, { "epoch": 0.21194413361515188, "grad_norm": 0.3921250104904175, "learning_rate": 3.940486551452609e-06, "loss": 0.7686, "step": 5114 }, { "epoch": 0.21198557752082556, "grad_norm": 0.44409358501434326, "learning_rate": 3.940279331924241e-06, "loss": 0.6809, "step": 5115 }, { "epoch": 0.21202702142649923, "grad_norm": 0.44515255093574524, "learning_rate": 3.940072112395872e-06, "loss": 0.7356, "step": 5116 }, { "epoch": 0.2120684653321729, "grad_norm": 0.41947421431541443, "learning_rate": 3.939864892867504e-06, "loss": 0.7096, "step": 5117 }, { "epoch": 0.21210990923784656, "grad_norm": 0.3819780647754669, "learning_rate": 3.939657673339136e-06, "loss": 0.6553, "step": 5118 }, { "epoch": 0.21215135314352024, "grad_norm": 0.4444463551044464, "learning_rate": 3.939450453810767e-06, "loss": 0.7119, "step": 5119 }, { "epoch": 0.21219279704919392, "grad_norm": 0.42417439818382263, "learning_rate": 3.9392432342823985e-06, "loss": 0.6917, "step": 5120 }, { "epoch": 0.2122342409548676, "grad_norm": 0.442605584859848, "learning_rate": 3.939036014754031e-06, "loss": 0.7161, "step": 5121 }, { "epoch": 0.21227568486054127, "grad_norm": 0.3828595280647278, "learning_rate": 3.938828795225663e-06, "loss": 0.6624, "step": 5122 }, { "epoch": 0.21231712876621492, "grad_norm": 0.40411755442619324, "learning_rate": 3.938621575697294e-06, "loss": 0.6982, "step": 5123 }, { "epoch": 0.2123585726718886, "grad_norm": 0.4094061255455017, "learning_rate": 3.938414356168926e-06, "loss": 0.7162, "step": 5124 }, { "epoch": 0.21240001657756227, "grad_norm": 0.40393975377082825, "learning_rate": 
3.938207136640557e-06, "loss": 0.7522, "step": 5125 }, { "epoch": 0.21244146048323595, "grad_norm": 0.4101594090461731, "learning_rate": 3.937999917112189e-06, "loss": 0.73, "step": 5126 }, { "epoch": 0.2124829043889096, "grad_norm": 0.48825618624687195, "learning_rate": 3.937792697583821e-06, "loss": 0.7175, "step": 5127 }, { "epoch": 0.21252434829458328, "grad_norm": 0.4631562829017639, "learning_rate": 3.937585478055452e-06, "loss": 0.7239, "step": 5128 }, { "epoch": 0.21256579220025695, "grad_norm": 0.41530945897102356, "learning_rate": 3.9373782585270835e-06, "loss": 0.6995, "step": 5129 }, { "epoch": 0.21260723610593063, "grad_norm": 0.42156508564949036, "learning_rate": 3.937171038998716e-06, "loss": 0.7169, "step": 5130 }, { "epoch": 0.2126486800116043, "grad_norm": 0.4403069019317627, "learning_rate": 3.936963819470347e-06, "loss": 0.7493, "step": 5131 }, { "epoch": 0.21269012391727796, "grad_norm": 0.44364216923713684, "learning_rate": 3.9367565999419785e-06, "loss": 0.7576, "step": 5132 }, { "epoch": 0.21273156782295163, "grad_norm": 0.4258248507976532, "learning_rate": 3.936549380413611e-06, "loss": 0.702, "step": 5133 }, { "epoch": 0.2127730117286253, "grad_norm": 0.4212954640388489, "learning_rate": 3.936342160885242e-06, "loss": 0.7583, "step": 5134 }, { "epoch": 0.212814455634299, "grad_norm": 0.43298667669296265, "learning_rate": 3.9361349413568735e-06, "loss": 0.7878, "step": 5135 }, { "epoch": 0.21285589953997264, "grad_norm": 0.43907710909843445, "learning_rate": 3.935927721828505e-06, "loss": 0.7444, "step": 5136 }, { "epoch": 0.21289734344564631, "grad_norm": 0.4509316682815552, "learning_rate": 3.935720502300137e-06, "loss": 0.72, "step": 5137 }, { "epoch": 0.21293878735132, "grad_norm": 0.4226623773574829, "learning_rate": 3.9355132827717685e-06, "loss": 0.74, "step": 5138 }, { "epoch": 0.21298023125699367, "grad_norm": 0.4085758924484253, "learning_rate": 3.935306063243401e-06, "loss": 0.6952, "step": 5139 }, { "epoch": 0.21302167516266732, 
"grad_norm": 0.4268183410167694, "learning_rate": 3.935098843715032e-06, "loss": 0.718, "step": 5140 }, { "epoch": 0.213063119068341, "grad_norm": 0.43286436796188354, "learning_rate": 3.9348916241866635e-06, "loss": 0.6699, "step": 5141 }, { "epoch": 0.21310456297401467, "grad_norm": 0.4486483335494995, "learning_rate": 3.934684404658296e-06, "loss": 0.7191, "step": 5142 }, { "epoch": 0.21314600687968835, "grad_norm": 0.4399454593658447, "learning_rate": 3.934477185129927e-06, "loss": 0.7476, "step": 5143 }, { "epoch": 0.21318745078536203, "grad_norm": 0.45652467012405396, "learning_rate": 3.9342699656015585e-06, "loss": 0.7393, "step": 5144 }, { "epoch": 0.21322889469103568, "grad_norm": 0.3898095488548279, "learning_rate": 3.93406274607319e-06, "loss": 0.6449, "step": 5145 }, { "epoch": 0.21327033859670935, "grad_norm": 0.4764745235443115, "learning_rate": 3.933855526544822e-06, "loss": 0.7598, "step": 5146 }, { "epoch": 0.21331178250238303, "grad_norm": 0.40731653571128845, "learning_rate": 3.9336483070164535e-06, "loss": 0.7012, "step": 5147 }, { "epoch": 0.2133532264080567, "grad_norm": 0.4394112527370453, "learning_rate": 3.933441087488085e-06, "loss": 0.7021, "step": 5148 }, { "epoch": 0.21339467031373036, "grad_norm": 0.4351750612258911, "learning_rate": 3.933233867959717e-06, "loss": 0.7197, "step": 5149 }, { "epoch": 0.21343611421940403, "grad_norm": 0.4212436378002167, "learning_rate": 3.9330266484313485e-06, "loss": 0.6809, "step": 5150 }, { "epoch": 0.2134775581250777, "grad_norm": 0.40793144702911377, "learning_rate": 3.93281942890298e-06, "loss": 0.6821, "step": 5151 }, { "epoch": 0.2135190020307514, "grad_norm": 0.40093353390693665, "learning_rate": 3.932612209374611e-06, "loss": 0.6926, "step": 5152 }, { "epoch": 0.21356044593642504, "grad_norm": 0.4245559275150299, "learning_rate": 3.9324049898462435e-06, "loss": 0.7231, "step": 5153 }, { "epoch": 0.2136018898420987, "grad_norm": 0.42561954259872437, "learning_rate": 3.932197770317875e-06, 
"loss": 0.7131, "step": 5154 }, { "epoch": 0.2136433337477724, "grad_norm": 0.4299514889717102, "learning_rate": 3.931990550789507e-06, "loss": 0.738, "step": 5155 }, { "epoch": 0.21368477765344607, "grad_norm": 0.444667249917984, "learning_rate": 3.9317833312611385e-06, "loss": 0.7251, "step": 5156 }, { "epoch": 0.21372622155911974, "grad_norm": 0.4218398630619049, "learning_rate": 3.93157611173277e-06, "loss": 0.7035, "step": 5157 }, { "epoch": 0.2137676654647934, "grad_norm": 0.41172507405281067, "learning_rate": 3.931368892204402e-06, "loss": 0.7197, "step": 5158 }, { "epoch": 0.21380910937046707, "grad_norm": 0.38863325119018555, "learning_rate": 3.9311616726760335e-06, "loss": 0.71, "step": 5159 }, { "epoch": 0.21385055327614075, "grad_norm": 0.424308180809021, "learning_rate": 3.930954453147665e-06, "loss": 0.6819, "step": 5160 }, { "epoch": 0.21389199718181442, "grad_norm": 0.4315939247608185, "learning_rate": 3.930747233619296e-06, "loss": 0.7378, "step": 5161 }, { "epoch": 0.21393344108748807, "grad_norm": 0.4201556146144867, "learning_rate": 3.9305400140909286e-06, "loss": 0.749, "step": 5162 }, { "epoch": 0.21397488499316175, "grad_norm": 0.41834041476249695, "learning_rate": 3.93033279456256e-06, "loss": 0.731, "step": 5163 }, { "epoch": 0.21401632889883543, "grad_norm": 0.426853746175766, "learning_rate": 3.930125575034191e-06, "loss": 0.7478, "step": 5164 }, { "epoch": 0.2140577728045091, "grad_norm": 0.42516541481018066, "learning_rate": 3.9299183555058236e-06, "loss": 0.7206, "step": 5165 }, { "epoch": 0.21409921671018275, "grad_norm": 0.39548060297966003, "learning_rate": 3.929711135977455e-06, "loss": 0.7258, "step": 5166 }, { "epoch": 0.21414066061585643, "grad_norm": 0.39803415536880493, "learning_rate": 3.929503916449086e-06, "loss": 0.6753, "step": 5167 }, { "epoch": 0.2141821045215301, "grad_norm": 0.445502907037735, "learning_rate": 3.929296696920718e-06, "loss": 0.7654, "step": 5168 }, { "epoch": 0.21422354842720379, "grad_norm": 
0.4254343509674072, "learning_rate": 3.92908947739235e-06, "loss": 0.7148, "step": 5169 }, { "epoch": 0.21426499233287746, "grad_norm": 0.41869983077049255, "learning_rate": 3.928882257863981e-06, "loss": 0.739, "step": 5170 }, { "epoch": 0.2143064362385511, "grad_norm": 0.412995308637619, "learning_rate": 3.9286750383356136e-06, "loss": 0.6746, "step": 5171 }, { "epoch": 0.2143478801442248, "grad_norm": 0.4200811982154846, "learning_rate": 3.928467818807244e-06, "loss": 0.7649, "step": 5172 }, { "epoch": 0.21438932404989847, "grad_norm": 0.43147769570350647, "learning_rate": 3.928260599278876e-06, "loss": 0.7271, "step": 5173 }, { "epoch": 0.21443076795557214, "grad_norm": 0.39612942934036255, "learning_rate": 3.9280533797505086e-06, "loss": 0.6746, "step": 5174 }, { "epoch": 0.2144722118612458, "grad_norm": 0.4001898467540741, "learning_rate": 3.92784616022214e-06, "loss": 0.7356, "step": 5175 }, { "epoch": 0.21451365576691947, "grad_norm": 0.43184083700180054, "learning_rate": 3.927638940693771e-06, "loss": 0.7747, "step": 5176 }, { "epoch": 0.21455509967259315, "grad_norm": 0.4445628523826599, "learning_rate": 3.927431721165403e-06, "loss": 0.7024, "step": 5177 }, { "epoch": 0.21459654357826682, "grad_norm": 0.4417478144168854, "learning_rate": 3.927224501637035e-06, "loss": 0.7415, "step": 5178 }, { "epoch": 0.21463798748394047, "grad_norm": 0.4569386839866638, "learning_rate": 3.927017282108666e-06, "loss": 0.7385, "step": 5179 }, { "epoch": 0.21467943138961415, "grad_norm": 0.4119894802570343, "learning_rate": 3.926810062580298e-06, "loss": 0.7297, "step": 5180 }, { "epoch": 0.21472087529528783, "grad_norm": 0.43150511384010315, "learning_rate": 3.926602843051929e-06, "loss": 0.677, "step": 5181 }, { "epoch": 0.2147623192009615, "grad_norm": 0.41464290022850037, "learning_rate": 3.926395623523561e-06, "loss": 0.7251, "step": 5182 }, { "epoch": 0.21480376310663518, "grad_norm": 0.4310956299304962, "learning_rate": 3.926188403995193e-06, "loss": 0.7439, 
"step": 5183 }, { "epoch": 0.21484520701230883, "grad_norm": 0.4162323772907257, "learning_rate": 3.925981184466824e-06, "loss": 0.6949, "step": 5184 }, { "epoch": 0.2148866509179825, "grad_norm": 0.43507465720176697, "learning_rate": 3.925773964938456e-06, "loss": 0.6674, "step": 5185 }, { "epoch": 0.21492809482365619, "grad_norm": 0.3878183364868164, "learning_rate": 3.925566745410088e-06, "loss": 0.6829, "step": 5186 }, { "epoch": 0.21496953872932986, "grad_norm": 0.40035155415534973, "learning_rate": 3.925359525881719e-06, "loss": 0.7157, "step": 5187 }, { "epoch": 0.2150109826350035, "grad_norm": 0.42390206456184387, "learning_rate": 3.9251523063533505e-06, "loss": 0.6993, "step": 5188 }, { "epoch": 0.2150524265406772, "grad_norm": 0.4005449414253235, "learning_rate": 3.924945086824983e-06, "loss": 0.6475, "step": 5189 }, { "epoch": 0.21509387044635087, "grad_norm": 0.4318193197250366, "learning_rate": 3.924737867296614e-06, "loss": 0.731, "step": 5190 }, { "epoch": 0.21513531435202454, "grad_norm": 0.40991804003715515, "learning_rate": 3.924530647768246e-06, "loss": 0.708, "step": 5191 }, { "epoch": 0.2151767582576982, "grad_norm": 0.4509611129760742, "learning_rate": 3.924323428239878e-06, "loss": 0.7427, "step": 5192 }, { "epoch": 0.21521820216337187, "grad_norm": 0.4485681653022766, "learning_rate": 3.924116208711509e-06, "loss": 0.7134, "step": 5193 }, { "epoch": 0.21525964606904555, "grad_norm": 0.44018790125846863, "learning_rate": 3.923908989183141e-06, "loss": 0.7583, "step": 5194 }, { "epoch": 0.21530108997471922, "grad_norm": 0.41066861152648926, "learning_rate": 3.923701769654773e-06, "loss": 0.6833, "step": 5195 }, { "epoch": 0.2153425338803929, "grad_norm": 0.4888826906681061, "learning_rate": 3.923494550126404e-06, "loss": 0.7869, "step": 5196 }, { "epoch": 0.21538397778606655, "grad_norm": 0.44207435846328735, "learning_rate": 3.9232873305980355e-06, "loss": 0.7661, "step": 5197 }, { "epoch": 0.21542542169174023, "grad_norm": 
0.5160230994224548, "learning_rate": 3.923080111069668e-06, "loss": 0.7111, "step": 5198 }, { "epoch": 0.2154668655974139, "grad_norm": 0.4555935561656952, "learning_rate": 3.922872891541299e-06, "loss": 0.7375, "step": 5199 }, { "epoch": 0.21550830950308758, "grad_norm": 0.41654545068740845, "learning_rate": 3.9226656720129305e-06, "loss": 0.7556, "step": 5200 }, { "epoch": 0.21554975340876123, "grad_norm": 0.43797725439071655, "learning_rate": 3.922458452484563e-06, "loss": 0.7522, "step": 5201 }, { "epoch": 0.2155911973144349, "grad_norm": 0.4216921925544739, "learning_rate": 3.922251232956194e-06, "loss": 0.6951, "step": 5202 }, { "epoch": 0.21563264122010858, "grad_norm": 0.43047675490379333, "learning_rate": 3.9220440134278255e-06, "loss": 0.6967, "step": 5203 }, { "epoch": 0.21567408512578226, "grad_norm": 0.40262022614479065, "learning_rate": 3.921836793899457e-06, "loss": 0.7256, "step": 5204 }, { "epoch": 0.21571552903145594, "grad_norm": 0.4223463237285614, "learning_rate": 3.921629574371089e-06, "loss": 0.7355, "step": 5205 }, { "epoch": 0.2157569729371296, "grad_norm": 0.4099085032939911, "learning_rate": 3.9214223548427205e-06, "loss": 0.7021, "step": 5206 }, { "epoch": 0.21579841684280326, "grad_norm": 0.4082989990711212, "learning_rate": 3.921215135314353e-06, "loss": 0.7234, "step": 5207 }, { "epoch": 0.21583986074847694, "grad_norm": 0.44395506381988525, "learning_rate": 3.921007915785983e-06, "loss": 0.7214, "step": 5208 }, { "epoch": 0.21588130465415062, "grad_norm": 0.4146428406238556, "learning_rate": 3.9208006962576155e-06, "loss": 0.6987, "step": 5209 }, { "epoch": 0.21592274855982427, "grad_norm": 0.43167275190353394, "learning_rate": 3.920593476729248e-06, "loss": 0.6279, "step": 5210 }, { "epoch": 0.21596419246549795, "grad_norm": 0.4153953492641449, "learning_rate": 3.920386257200879e-06, "loss": 0.71, "step": 5211 }, { "epoch": 0.21600563637117162, "grad_norm": 0.4432026743888855, "learning_rate": 3.9201790376725105e-06, "loss": 0.6833, 
"step": 5212 }, { "epoch": 0.2160470802768453, "grad_norm": 0.4030596613883972, "learning_rate": 3.919971818144142e-06, "loss": 0.7466, "step": 5213 }, { "epoch": 0.21608852418251895, "grad_norm": 0.4269682466983795, "learning_rate": 3.919764598615774e-06, "loss": 0.7031, "step": 5214 }, { "epoch": 0.21612996808819263, "grad_norm": 0.460428386926651, "learning_rate": 3.9195573790874055e-06, "loss": 0.8051, "step": 5215 }, { "epoch": 0.2161714119938663, "grad_norm": 0.43371573090553284, "learning_rate": 3.919350159559037e-06, "loss": 0.7063, "step": 5216 }, { "epoch": 0.21621285589953998, "grad_norm": 0.39581677317619324, "learning_rate": 3.919142940030669e-06, "loss": 0.6775, "step": 5217 }, { "epoch": 0.21625429980521366, "grad_norm": 0.4207910895347595, "learning_rate": 3.9189357205023005e-06, "loss": 0.6982, "step": 5218 }, { "epoch": 0.2162957437108873, "grad_norm": 0.4456447660923004, "learning_rate": 3.918728500973932e-06, "loss": 0.748, "step": 5219 }, { "epoch": 0.21633718761656098, "grad_norm": 0.4551488757133484, "learning_rate": 3.918521281445563e-06, "loss": 0.7275, "step": 5220 }, { "epoch": 0.21637863152223466, "grad_norm": 0.463957279920578, "learning_rate": 3.9183140619171956e-06, "loss": 0.7722, "step": 5221 }, { "epoch": 0.21642007542790834, "grad_norm": 0.419675350189209, "learning_rate": 3.918106842388827e-06, "loss": 0.7603, "step": 5222 }, { "epoch": 0.216461519333582, "grad_norm": 0.4407963752746582, "learning_rate": 3.917899622860459e-06, "loss": 0.7513, "step": 5223 }, { "epoch": 0.21650296323925566, "grad_norm": 0.39439186453819275, "learning_rate": 3.91769240333209e-06, "loss": 0.6663, "step": 5224 }, { "epoch": 0.21654440714492934, "grad_norm": 0.41965436935424805, "learning_rate": 3.917485183803722e-06, "loss": 0.761, "step": 5225 }, { "epoch": 0.21658585105060302, "grad_norm": 0.43240153789520264, "learning_rate": 3.917277964275354e-06, "loss": 0.7869, "step": 5226 }, { "epoch": 0.21662729495627667, "grad_norm": 0.4187496602535248, 
"learning_rate": 3.9170707447469856e-06, "loss": 0.7039, "step": 5227 }, { "epoch": 0.21666873886195034, "grad_norm": 0.4194868505001068, "learning_rate": 3.916863525218617e-06, "loss": 0.7087, "step": 5228 }, { "epoch": 0.21671018276762402, "grad_norm": 0.44700080156326294, "learning_rate": 3.916656305690248e-06, "loss": 0.7776, "step": 5229 }, { "epoch": 0.2167516266732977, "grad_norm": 0.43503013253211975, "learning_rate": 3.9164490861618806e-06, "loss": 0.7288, "step": 5230 }, { "epoch": 0.21679307057897138, "grad_norm": 0.4542178511619568, "learning_rate": 3.916241866633512e-06, "loss": 0.7188, "step": 5231 }, { "epoch": 0.21683451448464502, "grad_norm": 0.40519118309020996, "learning_rate": 3.916034647105143e-06, "loss": 0.7, "step": 5232 }, { "epoch": 0.2168759583903187, "grad_norm": 0.44479313492774963, "learning_rate": 3.915827427576775e-06, "loss": 0.7708, "step": 5233 }, { "epoch": 0.21691740229599238, "grad_norm": 0.4119117856025696, "learning_rate": 3.915620208048407e-06, "loss": 0.7158, "step": 5234 }, { "epoch": 0.21695884620166606, "grad_norm": 0.4093351662158966, "learning_rate": 3.915412988520038e-06, "loss": 0.689, "step": 5235 }, { "epoch": 0.2170002901073397, "grad_norm": 0.4301641285419464, "learning_rate": 3.91520576899167e-06, "loss": 0.7321, "step": 5236 }, { "epoch": 0.21704173401301338, "grad_norm": 0.4339520037174225, "learning_rate": 3.914998549463302e-06, "loss": 0.7112, "step": 5237 }, { "epoch": 0.21708317791868706, "grad_norm": 0.45186248421669006, "learning_rate": 3.914791329934933e-06, "loss": 0.7366, "step": 5238 }, { "epoch": 0.21712462182436074, "grad_norm": 0.431069016456604, "learning_rate": 3.9145841104065656e-06, "loss": 0.741, "step": 5239 }, { "epoch": 0.21716606573003439, "grad_norm": 0.4019047021865845, "learning_rate": 3.914376890878196e-06, "loss": 0.7255, "step": 5240 }, { "epoch": 0.21720750963570806, "grad_norm": 0.405895471572876, "learning_rate": 3.914169671349828e-06, "loss": 0.7096, "step": 5241 }, { "epoch": 
0.21724895354138174, "grad_norm": 0.40362638235092163, "learning_rate": 3.91396245182146e-06, "loss": 0.6694, "step": 5242 }, { "epoch": 0.21729039744705542, "grad_norm": 0.40513262152671814, "learning_rate": 3.913755232293092e-06, "loss": 0.6615, "step": 5243 }, { "epoch": 0.2173318413527291, "grad_norm": 0.43223991990089417, "learning_rate": 3.913548012764723e-06, "loss": 0.72, "step": 5244 }, { "epoch": 0.21737328525840274, "grad_norm": 0.4431764781475067, "learning_rate": 3.913340793236355e-06, "loss": 0.696, "step": 5245 }, { "epoch": 0.21741472916407642, "grad_norm": 0.4312267303466797, "learning_rate": 3.913133573707987e-06, "loss": 0.7383, "step": 5246 }, { "epoch": 0.2174561730697501, "grad_norm": 0.4163609445095062, "learning_rate": 3.912926354179618e-06, "loss": 0.6965, "step": 5247 }, { "epoch": 0.21749761697542377, "grad_norm": 0.41311073303222656, "learning_rate": 3.91271913465125e-06, "loss": 0.7644, "step": 5248 }, { "epoch": 0.21753906088109742, "grad_norm": 0.41359418630599976, "learning_rate": 3.912511915122881e-06, "loss": 0.7251, "step": 5249 }, { "epoch": 0.2175805047867711, "grad_norm": 0.45741909742355347, "learning_rate": 3.912304695594513e-06, "loss": 0.7701, "step": 5250 }, { "epoch": 0.21762194869244478, "grad_norm": 0.44259271025657654, "learning_rate": 3.912097476066145e-06, "loss": 0.7131, "step": 5251 }, { "epoch": 0.21766339259811845, "grad_norm": 0.4387892186641693, "learning_rate": 3.911890256537776e-06, "loss": 0.7439, "step": 5252 }, { "epoch": 0.2177048365037921, "grad_norm": 0.4638464152812958, "learning_rate": 3.911683037009408e-06, "loss": 0.7534, "step": 5253 }, { "epoch": 0.21774628040946578, "grad_norm": 0.3849570155143738, "learning_rate": 3.91147581748104e-06, "loss": 0.7159, "step": 5254 }, { "epoch": 0.21778772431513946, "grad_norm": 0.4461582601070404, "learning_rate": 3.911268597952671e-06, "loss": 0.7268, "step": 5255 }, { "epoch": 0.21782916822081314, "grad_norm": 0.39677193760871887, "learning_rate": 
3.9110613784243025e-06, "loss": 0.6819, "step": 5256 }, { "epoch": 0.2178706121264868, "grad_norm": 0.4425094723701477, "learning_rate": 3.910854158895935e-06, "loss": 0.781, "step": 5257 }, { "epoch": 0.21791205603216046, "grad_norm": 0.4975070059299469, "learning_rate": 3.910646939367566e-06, "loss": 0.7612, "step": 5258 }, { "epoch": 0.21795349993783414, "grad_norm": 0.39364317059516907, "learning_rate": 3.910439719839198e-06, "loss": 0.719, "step": 5259 }, { "epoch": 0.21799494384350782, "grad_norm": 0.3889557719230652, "learning_rate": 3.91023250031083e-06, "loss": 0.6968, "step": 5260 }, { "epoch": 0.2180363877491815, "grad_norm": 0.4194556474685669, "learning_rate": 3.910025280782461e-06, "loss": 0.7183, "step": 5261 }, { "epoch": 0.21807783165485514, "grad_norm": 0.4292806386947632, "learning_rate": 3.909818061254093e-06, "loss": 0.7314, "step": 5262 }, { "epoch": 0.21811927556052882, "grad_norm": 0.43367791175842285, "learning_rate": 3.909610841725725e-06, "loss": 0.7319, "step": 5263 }, { "epoch": 0.2181607194662025, "grad_norm": 0.43757525086402893, "learning_rate": 3.909403622197356e-06, "loss": 0.7578, "step": 5264 }, { "epoch": 0.21820216337187617, "grad_norm": 0.4455266296863556, "learning_rate": 3.9091964026689875e-06, "loss": 0.74, "step": 5265 }, { "epoch": 0.21824360727754982, "grad_norm": 0.42043888568878174, "learning_rate": 3.90898918314062e-06, "loss": 0.7893, "step": 5266 }, { "epoch": 0.2182850511832235, "grad_norm": 0.4178803861141205, "learning_rate": 3.908781963612251e-06, "loss": 0.686, "step": 5267 }, { "epoch": 0.21832649508889718, "grad_norm": 0.4013496935367584, "learning_rate": 3.9085747440838825e-06, "loss": 0.7212, "step": 5268 }, { "epoch": 0.21836793899457085, "grad_norm": 0.429502010345459, "learning_rate": 3.908367524555514e-06, "loss": 0.689, "step": 5269 }, { "epoch": 0.21840938290024453, "grad_norm": 0.4205033779144287, "learning_rate": 3.908160305027146e-06, "loss": 0.6893, "step": 5270 }, { "epoch": 0.21845082680591818, 
"grad_norm": 0.4210493266582489, "learning_rate": 3.9079530854987775e-06, "loss": 0.7507, "step": 5271 }, { "epoch": 0.21849227071159186, "grad_norm": 0.4226735532283783, "learning_rate": 3.907745865970409e-06, "loss": 0.7174, "step": 5272 }, { "epoch": 0.21853371461726553, "grad_norm": 0.4503912925720215, "learning_rate": 3.907538646442041e-06, "loss": 0.7654, "step": 5273 }, { "epoch": 0.2185751585229392, "grad_norm": 0.4206906259059906, "learning_rate": 3.9073314269136725e-06, "loss": 0.7302, "step": 5274 }, { "epoch": 0.21861660242861286, "grad_norm": 0.39331772923469543, "learning_rate": 3.907124207385305e-06, "loss": 0.6672, "step": 5275 }, { "epoch": 0.21865804633428654, "grad_norm": 0.4659024477005005, "learning_rate": 3.906916987856936e-06, "loss": 0.7498, "step": 5276 }, { "epoch": 0.21869949023996021, "grad_norm": 0.4138321280479431, "learning_rate": 3.9067097683285675e-06, "loss": 0.7126, "step": 5277 }, { "epoch": 0.2187409341456339, "grad_norm": 0.42757049202919006, "learning_rate": 3.9065025488002e-06, "loss": 0.735, "step": 5278 }, { "epoch": 0.21878237805130757, "grad_norm": 0.40373218059539795, "learning_rate": 3.906295329271831e-06, "loss": 0.6997, "step": 5279 }, { "epoch": 0.21882382195698122, "grad_norm": 0.464275985956192, "learning_rate": 3.9060881097434626e-06, "loss": 0.7349, "step": 5280 }, { "epoch": 0.2188652658626549, "grad_norm": 0.3973308205604553, "learning_rate": 3.905880890215094e-06, "loss": 0.6904, "step": 5281 }, { "epoch": 0.21890670976832857, "grad_norm": 0.41097933053970337, "learning_rate": 3.905673670686726e-06, "loss": 0.7002, "step": 5282 }, { "epoch": 0.21894815367400225, "grad_norm": 0.4299159646034241, "learning_rate": 3.9054664511583576e-06, "loss": 0.7346, "step": 5283 }, { "epoch": 0.2189895975796759, "grad_norm": 0.4582728147506714, "learning_rate": 3.905259231629989e-06, "loss": 0.6819, "step": 5284 }, { "epoch": 0.21903104148534958, "grad_norm": 0.41970616579055786, "learning_rate": 3.90505201210162e-06, "loss": 
0.7382, "step": 5285 }, { "epoch": 0.21907248539102325, "grad_norm": 0.43556076288223267, "learning_rate": 3.9048447925732526e-06, "loss": 0.7551, "step": 5286 }, { "epoch": 0.21911392929669693, "grad_norm": 0.42559918761253357, "learning_rate": 3.904637573044884e-06, "loss": 0.6956, "step": 5287 }, { "epoch": 0.21915537320237058, "grad_norm": 0.4457615315914154, "learning_rate": 3.904430353516515e-06, "loss": 0.7593, "step": 5288 }, { "epoch": 0.21919681710804426, "grad_norm": 0.3909178674221039, "learning_rate": 3.9042231339881476e-06, "loss": 0.6887, "step": 5289 }, { "epoch": 0.21923826101371793, "grad_norm": 0.44917765259742737, "learning_rate": 3.904015914459779e-06, "loss": 0.7339, "step": 5290 }, { "epoch": 0.2192797049193916, "grad_norm": 0.4159770607948303, "learning_rate": 3.903808694931411e-06, "loss": 0.7441, "step": 5291 }, { "epoch": 0.2193211488250653, "grad_norm": 0.4075734615325928, "learning_rate": 3.903601475403042e-06, "loss": 0.6919, "step": 5292 }, { "epoch": 0.21936259273073894, "grad_norm": 0.4347516596317291, "learning_rate": 3.903394255874674e-06, "loss": 0.762, "step": 5293 }, { "epoch": 0.2194040366364126, "grad_norm": 0.42917874455451965, "learning_rate": 3.903187036346305e-06, "loss": 0.7072, "step": 5294 }, { "epoch": 0.2194454805420863, "grad_norm": 0.407070517539978, "learning_rate": 3.9029798168179376e-06, "loss": 0.7308, "step": 5295 }, { "epoch": 0.21948692444775997, "grad_norm": 0.462415486574173, "learning_rate": 3.902772597289569e-06, "loss": 0.7507, "step": 5296 }, { "epoch": 0.21952836835343362, "grad_norm": 0.40350303053855896, "learning_rate": 3.9025653777612e-06, "loss": 0.6743, "step": 5297 }, { "epoch": 0.2195698122591073, "grad_norm": 0.4233470857143402, "learning_rate": 3.9023581582328326e-06, "loss": 0.6796, "step": 5298 }, { "epoch": 0.21961125616478097, "grad_norm": 0.4355199933052063, "learning_rate": 3.902150938704464e-06, "loss": 0.7158, "step": 5299 }, { "epoch": 0.21965270007045465, "grad_norm": 
0.4205434322357178, "learning_rate": 3.901943719176095e-06, "loss": 0.6758, "step": 5300 }, { "epoch": 0.2196941439761283, "grad_norm": 0.4295797646045685, "learning_rate": 3.901736499647727e-06, "loss": 0.7261, "step": 5301 }, { "epoch": 0.21973558788180197, "grad_norm": 0.4505961537361145, "learning_rate": 3.901529280119359e-06, "loss": 0.7158, "step": 5302 }, { "epoch": 0.21977703178747565, "grad_norm": 0.3953051269054413, "learning_rate": 3.90132206059099e-06, "loss": 0.7018, "step": 5303 }, { "epoch": 0.21981847569314933, "grad_norm": 0.4183301627635956, "learning_rate": 3.901114841062622e-06, "loss": 0.7075, "step": 5304 }, { "epoch": 0.219859919598823, "grad_norm": 0.4219602942466736, "learning_rate": 3.900907621534254e-06, "loss": 0.6714, "step": 5305 }, { "epoch": 0.21990136350449666, "grad_norm": 0.40538957715034485, "learning_rate": 3.900700402005885e-06, "loss": 0.7947, "step": 5306 }, { "epoch": 0.21994280741017033, "grad_norm": 0.4250020682811737, "learning_rate": 3.900493182477518e-06, "loss": 0.717, "step": 5307 }, { "epoch": 0.219984251315844, "grad_norm": 0.4219813644886017, "learning_rate": 3.900285962949148e-06, "loss": 0.6973, "step": 5308 }, { "epoch": 0.2200256952215177, "grad_norm": 0.42195188999176025, "learning_rate": 3.90007874342078e-06, "loss": 0.6969, "step": 5309 }, { "epoch": 0.22006713912719134, "grad_norm": 0.44586315751075745, "learning_rate": 3.899871523892412e-06, "loss": 0.7317, "step": 5310 }, { "epoch": 0.220108583032865, "grad_norm": 0.4271865785121918, "learning_rate": 3.899664304364044e-06, "loss": 0.6866, "step": 5311 }, { "epoch": 0.2201500269385387, "grad_norm": 0.4247032701969147, "learning_rate": 3.899457084835675e-06, "loss": 0.7034, "step": 5312 }, { "epoch": 0.22019147084421237, "grad_norm": 0.38316693902015686, "learning_rate": 3.899249865307307e-06, "loss": 0.6882, "step": 5313 }, { "epoch": 0.22023291474988602, "grad_norm": 0.3932279348373413, "learning_rate": 3.899042645778939e-06, "loss": 0.6691, "step": 5314 
}, { "epoch": 0.2202743586555597, "grad_norm": 0.4380730092525482, "learning_rate": 3.89883542625057e-06, "loss": 0.7395, "step": 5315 }, { "epoch": 0.22031580256123337, "grad_norm": 0.41117554903030396, "learning_rate": 3.898628206722202e-06, "loss": 0.7031, "step": 5316 }, { "epoch": 0.22035724646690705, "grad_norm": 0.4566909670829773, "learning_rate": 3.898420987193833e-06, "loss": 0.7732, "step": 5317 }, { "epoch": 0.22039869037258072, "grad_norm": 0.43347689509391785, "learning_rate": 3.898213767665465e-06, "loss": 0.7449, "step": 5318 }, { "epoch": 0.22044013427825437, "grad_norm": 0.41429945826530457, "learning_rate": 3.898006548137097e-06, "loss": 0.719, "step": 5319 }, { "epoch": 0.22048157818392805, "grad_norm": 0.41373735666275024, "learning_rate": 3.897799328608728e-06, "loss": 0.7329, "step": 5320 }, { "epoch": 0.22052302208960173, "grad_norm": 0.46192625164985657, "learning_rate": 3.8975921090803595e-06, "loss": 0.7244, "step": 5321 }, { "epoch": 0.2205644659952754, "grad_norm": 0.4624522626399994, "learning_rate": 3.897384889551992e-06, "loss": 0.748, "step": 5322 }, { "epoch": 0.22060590990094905, "grad_norm": 0.43876153230667114, "learning_rate": 3.897177670023624e-06, "loss": 0.7549, "step": 5323 }, { "epoch": 0.22064735380662273, "grad_norm": 0.45151281356811523, "learning_rate": 3.8969704504952545e-06, "loss": 0.7551, "step": 5324 }, { "epoch": 0.2206887977122964, "grad_norm": 0.43969374895095825, "learning_rate": 3.896763230966887e-06, "loss": 0.7244, "step": 5325 }, { "epoch": 0.22073024161797009, "grad_norm": 0.4198043942451477, "learning_rate": 3.896556011438518e-06, "loss": 0.7671, "step": 5326 }, { "epoch": 0.22077168552364373, "grad_norm": 0.43629544973373413, "learning_rate": 3.89634879191015e-06, "loss": 0.7296, "step": 5327 }, { "epoch": 0.2208131294293174, "grad_norm": 0.39687860012054443, "learning_rate": 3.896141572381782e-06, "loss": 0.7036, "step": 5328 }, { "epoch": 0.2208545733349911, "grad_norm": 0.427781343460083, 
"learning_rate": 3.895934352853413e-06, "loss": 0.7545, "step": 5329 }, { "epoch": 0.22089601724066477, "grad_norm": 0.4018625020980835, "learning_rate": 3.8957271333250445e-06, "loss": 0.7361, "step": 5330 }, { "epoch": 0.22093746114633844, "grad_norm": 0.4462703764438629, "learning_rate": 3.895519913796677e-06, "loss": 0.7405, "step": 5331 }, { "epoch": 0.2209789050520121, "grad_norm": 0.4512161612510681, "learning_rate": 3.895312694268308e-06, "loss": 0.7722, "step": 5332 }, { "epoch": 0.22102034895768577, "grad_norm": 0.3935202658176422, "learning_rate": 3.8951054747399395e-06, "loss": 0.7146, "step": 5333 }, { "epoch": 0.22106179286335945, "grad_norm": 0.4229908883571625, "learning_rate": 3.894898255211572e-06, "loss": 0.7461, "step": 5334 }, { "epoch": 0.22110323676903312, "grad_norm": 0.4277470111846924, "learning_rate": 3.894691035683203e-06, "loss": 0.7026, "step": 5335 }, { "epoch": 0.22114468067470677, "grad_norm": 0.45363834500312805, "learning_rate": 3.8944838161548345e-06, "loss": 0.6951, "step": 5336 }, { "epoch": 0.22118612458038045, "grad_norm": 0.4042293429374695, "learning_rate": 3.894276596626466e-06, "loss": 0.7598, "step": 5337 }, { "epoch": 0.22122756848605413, "grad_norm": 0.43923282623291016, "learning_rate": 3.894069377098098e-06, "loss": 0.7407, "step": 5338 }, { "epoch": 0.2212690123917278, "grad_norm": 0.44493502378463745, "learning_rate": 3.8938621575697296e-06, "loss": 0.7014, "step": 5339 }, { "epoch": 0.22131045629740145, "grad_norm": 0.41313377022743225, "learning_rate": 3.893654938041361e-06, "loss": 0.7126, "step": 5340 }, { "epoch": 0.22135190020307513, "grad_norm": 0.42617571353912354, "learning_rate": 3.893447718512993e-06, "loss": 0.7417, "step": 5341 }, { "epoch": 0.2213933441087488, "grad_norm": 0.4842333495616913, "learning_rate": 3.8932404989846246e-06, "loss": 0.7268, "step": 5342 }, { "epoch": 0.22143478801442248, "grad_norm": 0.4101858139038086, "learning_rate": 3.893033279456257e-06, "loss": 0.6903, "step": 5343 }, { 
"epoch": 0.22147623192009616, "grad_norm": 0.41965484619140625, "learning_rate": 3.892826059927888e-06, "loss": 0.7155, "step": 5344 }, { "epoch": 0.2215176758257698, "grad_norm": 0.40693676471710205, "learning_rate": 3.8926188403995196e-06, "loss": 0.7305, "step": 5345 }, { "epoch": 0.2215591197314435, "grad_norm": 0.39963477849960327, "learning_rate": 3.892411620871151e-06, "loss": 0.7334, "step": 5346 }, { "epoch": 0.22160056363711716, "grad_norm": 0.43732550740242004, "learning_rate": 3.892204401342783e-06, "loss": 0.769, "step": 5347 }, { "epoch": 0.22164200754279084, "grad_norm": 0.4208558201789856, "learning_rate": 3.8919971818144146e-06, "loss": 0.7488, "step": 5348 }, { "epoch": 0.2216834514484645, "grad_norm": 0.443558007478714, "learning_rate": 3.891789962286046e-06, "loss": 0.6896, "step": 5349 }, { "epoch": 0.22172489535413817, "grad_norm": 0.4308137595653534, "learning_rate": 3.891582742757678e-06, "loss": 0.745, "step": 5350 }, { "epoch": 0.22176633925981185, "grad_norm": 0.4351682662963867, "learning_rate": 3.8913755232293096e-06, "loss": 0.7507, "step": 5351 }, { "epoch": 0.22180778316548552, "grad_norm": 0.42666029930114746, "learning_rate": 3.891168303700941e-06, "loss": 0.67, "step": 5352 }, { "epoch": 0.2218492270711592, "grad_norm": 0.3992883265018463, "learning_rate": 3.890961084172572e-06, "loss": 0.6685, "step": 5353 }, { "epoch": 0.22189067097683285, "grad_norm": 0.4222527742385864, "learning_rate": 3.8907538646442046e-06, "loss": 0.771, "step": 5354 }, { "epoch": 0.22193211488250653, "grad_norm": 0.38464218378067017, "learning_rate": 3.890546645115836e-06, "loss": 0.7026, "step": 5355 }, { "epoch": 0.2219735587881802, "grad_norm": 0.44592520594596863, "learning_rate": 3.890339425587467e-06, "loss": 0.7336, "step": 5356 }, { "epoch": 0.22201500269385388, "grad_norm": 0.46838390827178955, "learning_rate": 3.8901322060590996e-06, "loss": 0.7388, "step": 5357 }, { "epoch": 0.22205644659952753, "grad_norm": 0.41290557384490967, 
"learning_rate": 3.889924986530731e-06, "loss": 0.7246, "step": 5358 }, { "epoch": 0.2220978905052012, "grad_norm": 0.4245964288711548, "learning_rate": 3.889717767002363e-06, "loss": 0.6963, "step": 5359 }, { "epoch": 0.22213933441087488, "grad_norm": 0.43396252393722534, "learning_rate": 3.889510547473994e-06, "loss": 0.7844, "step": 5360 }, { "epoch": 0.22218077831654856, "grad_norm": 0.3974953591823578, "learning_rate": 3.889303327945626e-06, "loss": 0.6871, "step": 5361 }, { "epoch": 0.2222222222222222, "grad_norm": 0.4315508306026459, "learning_rate": 3.889096108417257e-06, "loss": 0.7207, "step": 5362 }, { "epoch": 0.2222636661278959, "grad_norm": 0.42072367668151855, "learning_rate": 3.88888888888889e-06, "loss": 0.6936, "step": 5363 }, { "epoch": 0.22230511003356956, "grad_norm": 0.46010154485702515, "learning_rate": 3.888681669360521e-06, "loss": 0.7389, "step": 5364 }, { "epoch": 0.22234655393924324, "grad_norm": 0.4481925666332245, "learning_rate": 3.888474449832152e-06, "loss": 0.7495, "step": 5365 }, { "epoch": 0.22238799784491692, "grad_norm": 0.4129621982574463, "learning_rate": 3.888267230303785e-06, "loss": 0.7251, "step": 5366 }, { "epoch": 0.22242944175059057, "grad_norm": 0.3947811424732208, "learning_rate": 3.888060010775416e-06, "loss": 0.6549, "step": 5367 }, { "epoch": 0.22247088565626424, "grad_norm": 0.4491768777370453, "learning_rate": 3.887852791247047e-06, "loss": 0.7793, "step": 5368 }, { "epoch": 0.22251232956193792, "grad_norm": 0.42949792742729187, "learning_rate": 3.887645571718679e-06, "loss": 0.7639, "step": 5369 }, { "epoch": 0.2225537734676116, "grad_norm": 0.39363694190979004, "learning_rate": 3.887438352190311e-06, "loss": 0.7085, "step": 5370 }, { "epoch": 0.22259521737328525, "grad_norm": 0.3977954685688019, "learning_rate": 3.887231132661942e-06, "loss": 0.7415, "step": 5371 }, { "epoch": 0.22263666127895892, "grad_norm": 0.40267276763916016, "learning_rate": 3.887023913133574e-06, "loss": 0.7073, "step": 5372 }, { 
"epoch": 0.2226781051846326, "grad_norm": 0.4195236265659332, "learning_rate": 3.886816693605205e-06, "loss": 0.6488, "step": 5373 }, { "epoch": 0.22271954909030628, "grad_norm": 0.39649805426597595, "learning_rate": 3.886609474076837e-06, "loss": 0.6953, "step": 5374 }, { "epoch": 0.22276099299597993, "grad_norm": 0.459310382604599, "learning_rate": 3.88640225454847e-06, "loss": 0.7209, "step": 5375 }, { "epoch": 0.2228024369016536, "grad_norm": 0.3890974521636963, "learning_rate": 3.8861950350201e-06, "loss": 0.7395, "step": 5376 }, { "epoch": 0.22284388080732728, "grad_norm": 0.444986492395401, "learning_rate": 3.885987815491732e-06, "loss": 0.7712, "step": 5377 }, { "epoch": 0.22288532471300096, "grad_norm": 0.4290887117385864, "learning_rate": 3.885780595963364e-06, "loss": 0.7334, "step": 5378 }, { "epoch": 0.22292676861867464, "grad_norm": 0.411353200674057, "learning_rate": 3.885573376434996e-06, "loss": 0.7111, "step": 5379 }, { "epoch": 0.22296821252434829, "grad_norm": 0.40639862418174744, "learning_rate": 3.885366156906627e-06, "loss": 0.6843, "step": 5380 }, { "epoch": 0.22300965643002196, "grad_norm": 0.42232513427734375, "learning_rate": 3.885158937378259e-06, "loss": 0.7419, "step": 5381 }, { "epoch": 0.22305110033569564, "grad_norm": 0.4078530967235565, "learning_rate": 3.88495171784989e-06, "loss": 0.6921, "step": 5382 }, { "epoch": 0.22309254424136932, "grad_norm": 0.42062216997146606, "learning_rate": 3.884744498321522e-06, "loss": 0.7333, "step": 5383 }, { "epoch": 0.22313398814704297, "grad_norm": 0.38735029101371765, "learning_rate": 3.884537278793154e-06, "loss": 0.7354, "step": 5384 }, { "epoch": 0.22317543205271664, "grad_norm": 0.4239612817764282, "learning_rate": 3.884330059264785e-06, "loss": 0.6442, "step": 5385 }, { "epoch": 0.22321687595839032, "grad_norm": 0.4027920961380005, "learning_rate": 3.884122839736417e-06, "loss": 0.6699, "step": 5386 }, { "epoch": 0.223258319864064, "grad_norm": 0.41494491696357727, "learning_rate": 
3.883915620208049e-06, "loss": 0.7114, "step": 5387 }, { "epoch": 0.22329976376973765, "grad_norm": 0.4129941761493683, "learning_rate": 3.88370840067968e-06, "loss": 0.7188, "step": 5388 }, { "epoch": 0.22334120767541132, "grad_norm": 0.39488479495048523, "learning_rate": 3.8835011811513115e-06, "loss": 0.708, "step": 5389 }, { "epoch": 0.223382651581085, "grad_norm": 0.3991526663303375, "learning_rate": 3.883293961622944e-06, "loss": 0.7112, "step": 5390 }, { "epoch": 0.22342409548675868, "grad_norm": 0.4231930077075958, "learning_rate": 3.883086742094575e-06, "loss": 0.7133, "step": 5391 }, { "epoch": 0.22346553939243236, "grad_norm": 0.4067850112915039, "learning_rate": 3.8828795225662065e-06, "loss": 0.6809, "step": 5392 }, { "epoch": 0.223506983298106, "grad_norm": 0.47294390201568604, "learning_rate": 3.882672303037839e-06, "loss": 0.7595, "step": 5393 }, { "epoch": 0.22354842720377968, "grad_norm": 0.38852059841156006, "learning_rate": 3.88246508350947e-06, "loss": 0.7078, "step": 5394 }, { "epoch": 0.22358987110945336, "grad_norm": 0.4436073899269104, "learning_rate": 3.882257863981102e-06, "loss": 0.7249, "step": 5395 }, { "epoch": 0.22363131501512704, "grad_norm": 0.43385428190231323, "learning_rate": 3.882050644452734e-06, "loss": 0.7432, "step": 5396 }, { "epoch": 0.22367275892080069, "grad_norm": 0.4161283075809479, "learning_rate": 3.881843424924365e-06, "loss": 0.6836, "step": 5397 }, { "epoch": 0.22371420282647436, "grad_norm": 0.4175952672958374, "learning_rate": 3.8816362053959966e-06, "loss": 0.6556, "step": 5398 }, { "epoch": 0.22375564673214804, "grad_norm": 0.4315580129623413, "learning_rate": 3.881428985867629e-06, "loss": 0.7202, "step": 5399 }, { "epoch": 0.22379709063782172, "grad_norm": 0.4115661382675171, "learning_rate": 3.88122176633926e-06, "loss": 0.7024, "step": 5400 }, { "epoch": 0.22383853454349537, "grad_norm": 0.46493178606033325, "learning_rate": 3.8810145468108916e-06, "loss": 0.7329, "step": 5401 }, { "epoch": 
0.22387997844916904, "grad_norm": 0.42146313190460205, "learning_rate": 3.880807327282524e-06, "loss": 0.7344, "step": 5402 }, { "epoch": 0.22392142235484272, "grad_norm": 0.40090060234069824, "learning_rate": 3.880600107754155e-06, "loss": 0.7373, "step": 5403 }, { "epoch": 0.2239628662605164, "grad_norm": 0.43721842765808105, "learning_rate": 3.8803928882257866e-06, "loss": 0.7349, "step": 5404 }, { "epoch": 0.22400431016619007, "grad_norm": 0.4064861536026001, "learning_rate": 3.880185668697418e-06, "loss": 0.6781, "step": 5405 }, { "epoch": 0.22404575407186372, "grad_norm": 0.459197074174881, "learning_rate": 3.87997844916905e-06, "loss": 0.7908, "step": 5406 }, { "epoch": 0.2240871979775374, "grad_norm": 0.4414818584918976, "learning_rate": 3.8797712296406816e-06, "loss": 0.7632, "step": 5407 }, { "epoch": 0.22412864188321108, "grad_norm": 0.4600870609283447, "learning_rate": 3.879564010112313e-06, "loss": 0.6802, "step": 5408 }, { "epoch": 0.22417008578888475, "grad_norm": 0.4118979275226593, "learning_rate": 3.879356790583945e-06, "loss": 0.7485, "step": 5409 }, { "epoch": 0.2242115296945584, "grad_norm": 0.43717989325523376, "learning_rate": 3.8791495710555766e-06, "loss": 0.6829, "step": 5410 }, { "epoch": 0.22425297360023208, "grad_norm": 0.41334715485572815, "learning_rate": 3.878942351527209e-06, "loss": 0.6887, "step": 5411 }, { "epoch": 0.22429441750590576, "grad_norm": 0.46741175651550293, "learning_rate": 3.87873513199884e-06, "loss": 0.8096, "step": 5412 }, { "epoch": 0.22433586141157943, "grad_norm": 0.38367754220962524, "learning_rate": 3.8785279124704716e-06, "loss": 0.738, "step": 5413 }, { "epoch": 0.2243773053172531, "grad_norm": 0.41923055052757263, "learning_rate": 3.878320692942103e-06, "loss": 0.7153, "step": 5414 }, { "epoch": 0.22441874922292676, "grad_norm": 0.40232938528060913, "learning_rate": 3.878113473413735e-06, "loss": 0.6848, "step": 5415 }, { "epoch": 0.22446019312860044, "grad_norm": 0.43083250522613525, "learning_rate": 
3.8779062538853666e-06, "loss": 0.7502, "step": 5416 }, { "epoch": 0.22450163703427412, "grad_norm": 0.40873226523399353, "learning_rate": 3.877699034356998e-06, "loss": 0.7146, "step": 5417 }, { "epoch": 0.2245430809399478, "grad_norm": 0.4067993760108948, "learning_rate": 3.87749181482863e-06, "loss": 0.6741, "step": 5418 }, { "epoch": 0.22458452484562144, "grad_norm": 0.40270358324050903, "learning_rate": 3.877284595300262e-06, "loss": 0.7146, "step": 5419 }, { "epoch": 0.22462596875129512, "grad_norm": 0.4092886447906494, "learning_rate": 3.877077375771893e-06, "loss": 0.6782, "step": 5420 }, { "epoch": 0.2246674126569688, "grad_norm": 0.37673670053482056, "learning_rate": 3.876870156243524e-06, "loss": 0.7356, "step": 5421 }, { "epoch": 0.22470885656264247, "grad_norm": 0.4131222665309906, "learning_rate": 3.876662936715157e-06, "loss": 0.7356, "step": 5422 }, { "epoch": 0.22475030046831612, "grad_norm": 0.36227917671203613, "learning_rate": 3.876455717186788e-06, "loss": 0.6914, "step": 5423 }, { "epoch": 0.2247917443739898, "grad_norm": 0.5417972207069397, "learning_rate": 3.876248497658419e-06, "loss": 0.7161, "step": 5424 }, { "epoch": 0.22483318827966348, "grad_norm": 0.4109330177307129, "learning_rate": 3.876041278130051e-06, "loss": 0.7234, "step": 5425 }, { "epoch": 0.22487463218533715, "grad_norm": 0.4306797981262207, "learning_rate": 3.875834058601683e-06, "loss": 0.7214, "step": 5426 }, { "epoch": 0.22491607609101083, "grad_norm": 0.4222170412540436, "learning_rate": 3.875626839073315e-06, "loss": 0.761, "step": 5427 }, { "epoch": 0.22495751999668448, "grad_norm": 0.3889314532279968, "learning_rate": 3.875419619544946e-06, "loss": 0.6997, "step": 5428 }, { "epoch": 0.22499896390235816, "grad_norm": 0.43970316648483276, "learning_rate": 3.875212400016578e-06, "loss": 0.6753, "step": 5429 }, { "epoch": 0.22504040780803183, "grad_norm": 0.39412930607795715, "learning_rate": 3.875005180488209e-06, "loss": 0.6882, "step": 5430 }, { "epoch": 
0.2250818517137055, "grad_norm": 0.4443175792694092, "learning_rate": 3.874797960959842e-06, "loss": 0.7722, "step": 5431 }, { "epoch": 0.22512329561937916, "grad_norm": 0.387222021818161, "learning_rate": 3.874590741431473e-06, "loss": 0.678, "step": 5432 }, { "epoch": 0.22516473952505284, "grad_norm": 0.41984060406684875, "learning_rate": 3.874383521903104e-06, "loss": 0.7026, "step": 5433 }, { "epoch": 0.22520618343072651, "grad_norm": 0.4326939284801483, "learning_rate": 3.874176302374736e-06, "loss": 0.7444, "step": 5434 }, { "epoch": 0.2252476273364002, "grad_norm": 0.41396525502204895, "learning_rate": 3.873969082846368e-06, "loss": 0.6438, "step": 5435 }, { "epoch": 0.22528907124207384, "grad_norm": 0.44119128584861755, "learning_rate": 3.873761863317999e-06, "loss": 0.7363, "step": 5436 }, { "epoch": 0.22533051514774752, "grad_norm": 0.3955668807029724, "learning_rate": 3.873554643789631e-06, "loss": 0.7057, "step": 5437 }, { "epoch": 0.2253719590534212, "grad_norm": 0.45037102699279785, "learning_rate": 3.873347424261263e-06, "loss": 0.7327, "step": 5438 }, { "epoch": 0.22541340295909487, "grad_norm": 0.4175308644771576, "learning_rate": 3.873140204732894e-06, "loss": 0.7026, "step": 5439 }, { "epoch": 0.22545484686476855, "grad_norm": 0.4326030910015106, "learning_rate": 3.872932985204526e-06, "loss": 0.7246, "step": 5440 }, { "epoch": 0.2254962907704422, "grad_norm": 0.41220661997795105, "learning_rate": 3.872725765676157e-06, "loss": 0.7131, "step": 5441 }, { "epoch": 0.22553773467611588, "grad_norm": 0.4504355192184448, "learning_rate": 3.872518546147789e-06, "loss": 0.7356, "step": 5442 }, { "epoch": 0.22557917858178955, "grad_norm": 0.47715362906455994, "learning_rate": 3.872311326619421e-06, "loss": 0.7252, "step": 5443 }, { "epoch": 0.22562062248746323, "grad_norm": 0.41742831468582153, "learning_rate": 3.872104107091052e-06, "loss": 0.7037, "step": 5444 }, { "epoch": 0.22566206639313688, "grad_norm": 0.43994754552841187, "learning_rate": 
3.871896887562684e-06, "loss": 0.7026, "step": 5445 }, { "epoch": 0.22570351029881056, "grad_norm": 0.4159255623817444, "learning_rate": 3.871689668034316e-06, "loss": 0.7087, "step": 5446 }, { "epoch": 0.22574495420448423, "grad_norm": 0.43156740069389343, "learning_rate": 3.871482448505948e-06, "loss": 0.7041, "step": 5447 }, { "epoch": 0.2257863981101579, "grad_norm": 0.4280935227870941, "learning_rate": 3.871275228977579e-06, "loss": 0.6992, "step": 5448 }, { "epoch": 0.22582784201583156, "grad_norm": 0.4054677188396454, "learning_rate": 3.871068009449211e-06, "loss": 0.6553, "step": 5449 }, { "epoch": 0.22586928592150524, "grad_norm": 0.4261319637298584, "learning_rate": 3.870860789920842e-06, "loss": 0.728, "step": 5450 }, { "epoch": 0.2259107298271789, "grad_norm": 0.4054911434650421, "learning_rate": 3.870653570392474e-06, "loss": 0.7097, "step": 5451 }, { "epoch": 0.2259521737328526, "grad_norm": 0.42237380146980286, "learning_rate": 3.870446350864106e-06, "loss": 0.7277, "step": 5452 }, { "epoch": 0.22599361763852627, "grad_norm": 0.4473530352115631, "learning_rate": 3.870239131335737e-06, "loss": 0.7302, "step": 5453 }, { "epoch": 0.22603506154419992, "grad_norm": 0.40123751759529114, "learning_rate": 3.870031911807369e-06, "loss": 0.6848, "step": 5454 }, { "epoch": 0.2260765054498736, "grad_norm": 0.4291873574256897, "learning_rate": 3.869824692279001e-06, "loss": 0.7205, "step": 5455 }, { "epoch": 0.22611794935554727, "grad_norm": 0.40258029103279114, "learning_rate": 3.869617472750632e-06, "loss": 0.6764, "step": 5456 }, { "epoch": 0.22615939326122095, "grad_norm": 0.42286384105682373, "learning_rate": 3.8694102532222636e-06, "loss": 0.7451, "step": 5457 }, { "epoch": 0.2262008371668946, "grad_norm": 0.39434486627578735, "learning_rate": 3.869203033693896e-06, "loss": 0.7441, "step": 5458 }, { "epoch": 0.22624228107256827, "grad_norm": 0.4507620930671692, "learning_rate": 3.868995814165527e-06, "loss": 0.7102, "step": 5459 }, { "epoch": 
0.22628372497824195, "grad_norm": 0.43697381019592285, "learning_rate": 3.8687885946371586e-06, "loss": 0.7292, "step": 5460 }, { "epoch": 0.22632516888391563, "grad_norm": 0.4523143470287323, "learning_rate": 3.868581375108791e-06, "loss": 0.783, "step": 5461 }, { "epoch": 0.22636661278958928, "grad_norm": 0.44967952370643616, "learning_rate": 3.868374155580422e-06, "loss": 0.7114, "step": 5462 }, { "epoch": 0.22640805669526295, "grad_norm": 0.4223373830318451, "learning_rate": 3.868166936052054e-06, "loss": 0.7112, "step": 5463 }, { "epoch": 0.22644950060093663, "grad_norm": 0.4045344889163971, "learning_rate": 3.867959716523686e-06, "loss": 0.6956, "step": 5464 }, { "epoch": 0.2264909445066103, "grad_norm": 0.4269278049468994, "learning_rate": 3.867752496995317e-06, "loss": 0.7085, "step": 5465 }, { "epoch": 0.22653238841228399, "grad_norm": 0.4227130711078644, "learning_rate": 3.8675452774669486e-06, "loss": 0.7356, "step": 5466 }, { "epoch": 0.22657383231795764, "grad_norm": 0.4446948170661926, "learning_rate": 3.867338057938581e-06, "loss": 0.7344, "step": 5467 }, { "epoch": 0.2266152762236313, "grad_norm": 0.4144286811351776, "learning_rate": 3.867130838410212e-06, "loss": 0.6869, "step": 5468 }, { "epoch": 0.226656720129305, "grad_norm": 0.4508930742740631, "learning_rate": 3.8669236188818436e-06, "loss": 0.7537, "step": 5469 }, { "epoch": 0.22669816403497867, "grad_norm": 0.41933727264404297, "learning_rate": 3.866716399353476e-06, "loss": 0.7793, "step": 5470 }, { "epoch": 0.22673960794065232, "grad_norm": 0.4680531919002533, "learning_rate": 3.866509179825107e-06, "loss": 0.78, "step": 5471 }, { "epoch": 0.226781051846326, "grad_norm": 0.4368168115615845, "learning_rate": 3.8663019602967386e-06, "loss": 0.7441, "step": 5472 }, { "epoch": 0.22682249575199967, "grad_norm": 0.44176942110061646, "learning_rate": 3.86609474076837e-06, "loss": 0.7323, "step": 5473 }, { "epoch": 0.22686393965767335, "grad_norm": 0.408707857131958, "learning_rate": 
3.865887521240002e-06, "loss": 0.7119, "step": 5474 }, { "epoch": 0.226905383563347, "grad_norm": 0.4001304507255554, "learning_rate": 3.8656803017116336e-06, "loss": 0.6742, "step": 5475 }, { "epoch": 0.22694682746902067, "grad_norm": 0.42633527517318726, "learning_rate": 3.865473082183265e-06, "loss": 0.7061, "step": 5476 }, { "epoch": 0.22698827137469435, "grad_norm": 0.4415960907936096, "learning_rate": 3.865265862654896e-06, "loss": 0.719, "step": 5477 }, { "epoch": 0.22702971528036803, "grad_norm": 0.4212667644023895, "learning_rate": 3.865058643126529e-06, "loss": 0.6997, "step": 5478 }, { "epoch": 0.2270711591860417, "grad_norm": 0.4382275640964508, "learning_rate": 3.864851423598161e-06, "loss": 0.7509, "step": 5479 }, { "epoch": 0.22711260309171535, "grad_norm": 0.40567347407341003, "learning_rate": 3.864644204069792e-06, "loss": 0.7084, "step": 5480 }, { "epoch": 0.22715404699738903, "grad_norm": 0.417024701833725, "learning_rate": 3.864436984541424e-06, "loss": 0.7395, "step": 5481 }, { "epoch": 0.2271954909030627, "grad_norm": 0.4204524755477905, "learning_rate": 3.864229765013055e-06, "loss": 0.7161, "step": 5482 }, { "epoch": 0.22723693480873638, "grad_norm": 0.43334466218948364, "learning_rate": 3.864022545484687e-06, "loss": 0.7349, "step": 5483 }, { "epoch": 0.22727837871441003, "grad_norm": 0.428235799074173, "learning_rate": 3.863815325956319e-06, "loss": 0.7878, "step": 5484 }, { "epoch": 0.2273198226200837, "grad_norm": 0.46112340688705444, "learning_rate": 3.86360810642795e-06, "loss": 0.7576, "step": 5485 }, { "epoch": 0.2273612665257574, "grad_norm": 0.3978957235813141, "learning_rate": 3.863400886899581e-06, "loss": 0.6677, "step": 5486 }, { "epoch": 0.22740271043143107, "grad_norm": 0.41339531540870667, "learning_rate": 3.863193667371214e-06, "loss": 0.703, "step": 5487 }, { "epoch": 0.22744415433710474, "grad_norm": 0.4287998080253601, "learning_rate": 3.862986447842845e-06, "loss": 0.6953, "step": 5488 }, { "epoch": 0.2274855982427784, 
"grad_norm": 0.3878413140773773, "learning_rate": 3.862779228314476e-06, "loss": 0.7263, "step": 5489 }, { "epoch": 0.22752704214845207, "grad_norm": 0.40765807032585144, "learning_rate": 3.862572008786109e-06, "loss": 0.7437, "step": 5490 }, { "epoch": 0.22756848605412575, "grad_norm": 0.42820507287979126, "learning_rate": 3.86236478925774e-06, "loss": 0.6677, "step": 5491 }, { "epoch": 0.22760992995979942, "grad_norm": 0.3935137689113617, "learning_rate": 3.862157569729371e-06, "loss": 0.7051, "step": 5492 }, { "epoch": 0.22765137386547307, "grad_norm": 0.4106156826019287, "learning_rate": 3.861950350201003e-06, "loss": 0.6794, "step": 5493 }, { "epoch": 0.22769281777114675, "grad_norm": 0.4227731227874756, "learning_rate": 3.861743130672635e-06, "loss": 0.7446, "step": 5494 }, { "epoch": 0.22773426167682043, "grad_norm": 0.4191523790359497, "learning_rate": 3.861535911144266e-06, "loss": 0.7449, "step": 5495 }, { "epoch": 0.2277757055824941, "grad_norm": 0.42453816533088684, "learning_rate": 3.861328691615899e-06, "loss": 0.7128, "step": 5496 }, { "epoch": 0.22781714948816775, "grad_norm": 0.4352606534957886, "learning_rate": 3.86112147208753e-06, "loss": 0.6963, "step": 5497 }, { "epoch": 0.22785859339384143, "grad_norm": 0.4386427402496338, "learning_rate": 3.860914252559161e-06, "loss": 0.7229, "step": 5498 }, { "epoch": 0.2279000372995151, "grad_norm": 0.4371580481529236, "learning_rate": 3.860707033030794e-06, "loss": 0.7444, "step": 5499 }, { "epoch": 0.22794148120518878, "grad_norm": 0.4059861898422241, "learning_rate": 3.860499813502425e-06, "loss": 0.7134, "step": 5500 }, { "epoch": 0.22798292511086246, "grad_norm": 0.4379953145980835, "learning_rate": 3.860292593974056e-06, "loss": 0.7385, "step": 5501 }, { "epoch": 0.2280243690165361, "grad_norm": 0.40456753969192505, "learning_rate": 3.860085374445688e-06, "loss": 0.6488, "step": 5502 }, { "epoch": 0.2280658129222098, "grad_norm": 0.38490694761276245, "learning_rate": 3.85987815491732e-06, "loss": 
0.7454, "step": 5503 }, { "epoch": 0.22810725682788346, "grad_norm": 0.4183681607246399, "learning_rate": 3.859670935388951e-06, "loss": 0.7253, "step": 5504 }, { "epoch": 0.22814870073355714, "grad_norm": 0.40126121044158936, "learning_rate": 3.859463715860583e-06, "loss": 0.6815, "step": 5505 }, { "epoch": 0.2281901446392308, "grad_norm": 0.42413732409477234, "learning_rate": 3.859256496332215e-06, "loss": 0.6853, "step": 5506 }, { "epoch": 0.22823158854490447, "grad_norm": 0.43988099694252014, "learning_rate": 3.859049276803846e-06, "loss": 0.7333, "step": 5507 }, { "epoch": 0.22827303245057814, "grad_norm": 0.4104718565940857, "learning_rate": 3.858842057275478e-06, "loss": 0.7072, "step": 5508 }, { "epoch": 0.22831447635625182, "grad_norm": 0.43918296694755554, "learning_rate": 3.858634837747109e-06, "loss": 0.7798, "step": 5509 }, { "epoch": 0.22835592026192547, "grad_norm": 0.4656141996383667, "learning_rate": 3.858427618218741e-06, "loss": 0.7103, "step": 5510 }, { "epoch": 0.22839736416759915, "grad_norm": 0.42860352993011475, "learning_rate": 3.858220398690373e-06, "loss": 0.7073, "step": 5511 }, { "epoch": 0.22843880807327283, "grad_norm": 0.40856799483299255, "learning_rate": 3.858013179162004e-06, "loss": 0.7092, "step": 5512 }, { "epoch": 0.2284802519789465, "grad_norm": 0.43651264905929565, "learning_rate": 3.857805959633636e-06, "loss": 0.7434, "step": 5513 }, { "epoch": 0.22852169588462018, "grad_norm": 0.3919404447078705, "learning_rate": 3.857598740105268e-06, "loss": 0.6553, "step": 5514 }, { "epoch": 0.22856313979029383, "grad_norm": 0.439002126455307, "learning_rate": 3.8573915205769e-06, "loss": 0.7007, "step": 5515 }, { "epoch": 0.2286045836959675, "grad_norm": 0.46591916680336, "learning_rate": 3.857184301048531e-06, "loss": 0.7039, "step": 5516 }, { "epoch": 0.22864602760164118, "grad_norm": 0.41391071677207947, "learning_rate": 3.856977081520163e-06, "loss": 0.6882, "step": 5517 }, { "epoch": 0.22868747150731486, "grad_norm": 
0.483742892742157, "learning_rate": 3.856769861991794e-06, "loss": 0.7131, "step": 5518 }, { "epoch": 0.2287289154129885, "grad_norm": 0.4225747585296631, "learning_rate": 3.856562642463426e-06, "loss": 0.7288, "step": 5519 }, { "epoch": 0.2287703593186622, "grad_norm": 0.40649762749671936, "learning_rate": 3.856355422935058e-06, "loss": 0.7061, "step": 5520 }, { "epoch": 0.22881180322433586, "grad_norm": 0.42282813787460327, "learning_rate": 3.856148203406689e-06, "loss": 0.7305, "step": 5521 }, { "epoch": 0.22885324713000954, "grad_norm": 0.43030551075935364, "learning_rate": 3.855940983878321e-06, "loss": 0.6943, "step": 5522 }, { "epoch": 0.2288946910356832, "grad_norm": 0.403641939163208, "learning_rate": 3.855733764349953e-06, "loss": 0.6803, "step": 5523 }, { "epoch": 0.22893613494135687, "grad_norm": 0.42390596866607666, "learning_rate": 3.855526544821584e-06, "loss": 0.7913, "step": 5524 }, { "epoch": 0.22897757884703054, "grad_norm": 0.40827974677085876, "learning_rate": 3.8553193252932156e-06, "loss": 0.6919, "step": 5525 }, { "epoch": 0.22901902275270422, "grad_norm": 0.4869561195373535, "learning_rate": 3.855112105764848e-06, "loss": 0.791, "step": 5526 }, { "epoch": 0.2290604666583779, "grad_norm": 0.4569469690322876, "learning_rate": 3.854904886236479e-06, "loss": 0.7087, "step": 5527 }, { "epoch": 0.22910191056405155, "grad_norm": 0.40679633617401123, "learning_rate": 3.8546976667081106e-06, "loss": 0.7145, "step": 5528 }, { "epoch": 0.22914335446972522, "grad_norm": 0.41108059883117676, "learning_rate": 3.854490447179742e-06, "loss": 0.7341, "step": 5529 }, { "epoch": 0.2291847983753989, "grad_norm": 0.42208361625671387, "learning_rate": 3.854283227651374e-06, "loss": 0.752, "step": 5530 }, { "epoch": 0.22922624228107258, "grad_norm": 0.4092867076396942, "learning_rate": 3.854076008123006e-06, "loss": 0.7585, "step": 5531 }, { "epoch": 0.22926768618674623, "grad_norm": 0.43245115876197815, "learning_rate": 3.853868788594638e-06, "loss": 0.6981, 
"step": 5532 }, { "epoch": 0.2293091300924199, "grad_norm": 0.4605277180671692, "learning_rate": 3.853661569066269e-06, "loss": 0.8114, "step": 5533 }, { "epoch": 0.22935057399809358, "grad_norm": 0.4263192415237427, "learning_rate": 3.8534543495379006e-06, "loss": 0.6931, "step": 5534 }, { "epoch": 0.22939201790376726, "grad_norm": 0.42190173268318176, "learning_rate": 3.853247130009533e-06, "loss": 0.771, "step": 5535 }, { "epoch": 0.2294334618094409, "grad_norm": 0.3998081386089325, "learning_rate": 3.853039910481164e-06, "loss": 0.6938, "step": 5536 }, { "epoch": 0.22947490571511459, "grad_norm": 0.41955485939979553, "learning_rate": 3.852832690952796e-06, "loss": 0.7522, "step": 5537 }, { "epoch": 0.22951634962078826, "grad_norm": 0.4481874406337738, "learning_rate": 3.852625471424427e-06, "loss": 0.6958, "step": 5538 }, { "epoch": 0.22955779352646194, "grad_norm": 0.4355217218399048, "learning_rate": 3.852418251896059e-06, "loss": 0.7839, "step": 5539 }, { "epoch": 0.22959923743213562, "grad_norm": 0.45812365412712097, "learning_rate": 3.852211032367691e-06, "loss": 0.7026, "step": 5540 }, { "epoch": 0.22964068133780927, "grad_norm": 0.47549113631248474, "learning_rate": 3.852003812839322e-06, "loss": 0.7703, "step": 5541 }, { "epoch": 0.22968212524348294, "grad_norm": 0.460114985704422, "learning_rate": 3.851796593310954e-06, "loss": 0.7961, "step": 5542 }, { "epoch": 0.22972356914915662, "grad_norm": 0.3793071210384369, "learning_rate": 3.851589373782586e-06, "loss": 0.6823, "step": 5543 }, { "epoch": 0.2297650130548303, "grad_norm": 0.4346812665462494, "learning_rate": 3.851382154254217e-06, "loss": 0.697, "step": 5544 }, { "epoch": 0.22980645696050395, "grad_norm": 0.40741777420043945, "learning_rate": 3.851174934725848e-06, "loss": 0.7086, "step": 5545 }, { "epoch": 0.22984790086617762, "grad_norm": 0.4668141007423401, "learning_rate": 3.850967715197481e-06, "loss": 0.7402, "step": 5546 }, { "epoch": 0.2298893447718513, "grad_norm": 0.4044327139854431, 
"learning_rate": 3.850760495669112e-06, "loss": 0.7505, "step": 5547 }, { "epoch": 0.22993078867752498, "grad_norm": 0.44644707441329956, "learning_rate": 3.850553276140744e-06, "loss": 0.7373, "step": 5548 }, { "epoch": 0.22997223258319863, "grad_norm": 0.41388368606567383, "learning_rate": 3.850346056612376e-06, "loss": 0.7742, "step": 5549 }, { "epoch": 0.2300136764888723, "grad_norm": 0.42548173666000366, "learning_rate": 3.850138837084007e-06, "loss": 0.6958, "step": 5550 }, { "epoch": 0.23005512039454598, "grad_norm": 0.3944973647594452, "learning_rate": 3.849931617555639e-06, "loss": 0.7595, "step": 5551 }, { "epoch": 0.23009656430021966, "grad_norm": 0.4284839332103729, "learning_rate": 3.849724398027271e-06, "loss": 0.6643, "step": 5552 }, { "epoch": 0.23013800820589334, "grad_norm": 0.4222176969051361, "learning_rate": 3.849517178498902e-06, "loss": 0.7008, "step": 5553 }, { "epoch": 0.23017945211156698, "grad_norm": 0.4128127992153168, "learning_rate": 3.849309958970533e-06, "loss": 0.7179, "step": 5554 }, { "epoch": 0.23022089601724066, "grad_norm": 0.407051146030426, "learning_rate": 3.849102739442166e-06, "loss": 0.7373, "step": 5555 }, { "epoch": 0.23026233992291434, "grad_norm": 0.43033885955810547, "learning_rate": 3.848895519913797e-06, "loss": 0.7322, "step": 5556 }, { "epoch": 0.23030378382858802, "grad_norm": 0.39870449900627136, "learning_rate": 3.848688300385428e-06, "loss": 0.689, "step": 5557 }, { "epoch": 0.23034522773426166, "grad_norm": 0.4337127208709717, "learning_rate": 3.848481080857061e-06, "loss": 0.6991, "step": 5558 }, { "epoch": 0.23038667163993534, "grad_norm": 0.4392353892326355, "learning_rate": 3.848273861328692e-06, "loss": 0.7349, "step": 5559 }, { "epoch": 0.23042811554560902, "grad_norm": 0.4600617587566376, "learning_rate": 3.848066641800323e-06, "loss": 0.7358, "step": 5560 }, { "epoch": 0.2304695594512827, "grad_norm": 0.43855032324790955, "learning_rate": 3.847859422271955e-06, "loss": 0.7451, "step": 5561 }, { 
"epoch": 0.23051100335695637, "grad_norm": 0.42295798659324646, "learning_rate": 3.847652202743587e-06, "loss": 0.7455, "step": 5562 }, { "epoch": 0.23055244726263002, "grad_norm": 0.4431600868701935, "learning_rate": 3.847444983215218e-06, "loss": 0.8052, "step": 5563 }, { "epoch": 0.2305938911683037, "grad_norm": 0.3967609703540802, "learning_rate": 3.847237763686851e-06, "loss": 0.6902, "step": 5564 }, { "epoch": 0.23063533507397738, "grad_norm": 0.4342823922634125, "learning_rate": 3.847030544158481e-06, "loss": 0.7316, "step": 5565 }, { "epoch": 0.23067677897965105, "grad_norm": 0.44450509548187256, "learning_rate": 3.846823324630113e-06, "loss": 0.7703, "step": 5566 }, { "epoch": 0.2307182228853247, "grad_norm": 0.444663941860199, "learning_rate": 3.846616105101746e-06, "loss": 0.7219, "step": 5567 }, { "epoch": 0.23075966679099838, "grad_norm": 0.4459175169467926, "learning_rate": 3.846408885573377e-06, "loss": 0.7515, "step": 5568 }, { "epoch": 0.23080111069667206, "grad_norm": 0.4098242223262787, "learning_rate": 3.846201666045008e-06, "loss": 0.6958, "step": 5569 }, { "epoch": 0.23084255460234573, "grad_norm": 0.4208475649356842, "learning_rate": 3.84599444651664e-06, "loss": 0.7028, "step": 5570 }, { "epoch": 0.23088399850801938, "grad_norm": 0.4160793423652649, "learning_rate": 3.845787226988272e-06, "loss": 0.7197, "step": 5571 }, { "epoch": 0.23092544241369306, "grad_norm": 0.44672784209251404, "learning_rate": 3.845580007459903e-06, "loss": 0.7805, "step": 5572 }, { "epoch": 0.23096688631936674, "grad_norm": 0.4576667845249176, "learning_rate": 3.845372787931535e-06, "loss": 0.7522, "step": 5573 }, { "epoch": 0.23100833022504041, "grad_norm": 0.42562589049339294, "learning_rate": 3.845165568403167e-06, "loss": 0.7189, "step": 5574 }, { "epoch": 0.2310497741307141, "grad_norm": 0.4158920645713806, "learning_rate": 3.844958348874798e-06, "loss": 0.7389, "step": 5575 }, { "epoch": 0.23109121803638774, "grad_norm": 0.40517657995224, "learning_rate": 
3.84475112934643e-06, "loss": 0.658, "step": 5576 }, { "epoch": 0.23113266194206142, "grad_norm": 0.43797409534454346, "learning_rate": 3.844543909818061e-06, "loss": 0.7194, "step": 5577 }, { "epoch": 0.2311741058477351, "grad_norm": 0.4056747555732727, "learning_rate": 3.844336690289693e-06, "loss": 0.7466, "step": 5578 }, { "epoch": 0.23121554975340877, "grad_norm": 0.4311405122280121, "learning_rate": 3.844129470761325e-06, "loss": 0.7051, "step": 5579 }, { "epoch": 0.23125699365908242, "grad_norm": 0.40698176622390747, "learning_rate": 3.843922251232956e-06, "loss": 0.7419, "step": 5580 }, { "epoch": 0.2312984375647561, "grad_norm": 0.39689794182777405, "learning_rate": 3.8437150317045876e-06, "loss": 0.7014, "step": 5581 }, { "epoch": 0.23133988147042978, "grad_norm": 0.3983305096626282, "learning_rate": 3.84350781217622e-06, "loss": 0.6691, "step": 5582 }, { "epoch": 0.23138132537610345, "grad_norm": 0.399709552526474, "learning_rate": 3.843300592647852e-06, "loss": 0.6902, "step": 5583 }, { "epoch": 0.2314227692817771, "grad_norm": 0.3940114676952362, "learning_rate": 3.843093373119483e-06, "loss": 0.7532, "step": 5584 }, { "epoch": 0.23146421318745078, "grad_norm": 0.42690834403038025, "learning_rate": 3.842886153591115e-06, "loss": 0.7048, "step": 5585 }, { "epoch": 0.23150565709312446, "grad_norm": 0.40040233731269836, "learning_rate": 3.842678934062746e-06, "loss": 0.7151, "step": 5586 }, { "epoch": 0.23154710099879813, "grad_norm": 0.41486865282058716, "learning_rate": 3.842471714534378e-06, "loss": 0.7367, "step": 5587 }, { "epoch": 0.2315885449044718, "grad_norm": 0.40725836157798767, "learning_rate": 3.84226449500601e-06, "loss": 0.6962, "step": 5588 }, { "epoch": 0.23162998881014546, "grad_norm": 0.4552771747112274, "learning_rate": 3.842057275477641e-06, "loss": 0.7098, "step": 5589 }, { "epoch": 0.23167143271581914, "grad_norm": 0.4065553843975067, "learning_rate": 3.8418500559492726e-06, "loss": 0.6941, "step": 5590 }, { "epoch": 
0.2317128766214928, "grad_norm": 0.3973666727542877, "learning_rate": 3.841642836420905e-06, "loss": 0.7322, "step": 5591 }, { "epoch": 0.2317543205271665, "grad_norm": 0.4085070490837097, "learning_rate": 3.841435616892536e-06, "loss": 0.7295, "step": 5592 }, { "epoch": 0.23179576443284014, "grad_norm": 0.4124259352684021, "learning_rate": 3.8412283973641676e-06, "loss": 0.6962, "step": 5593 }, { "epoch": 0.23183720833851382, "grad_norm": 0.4161011278629303, "learning_rate": 3.8410211778358e-06, "loss": 0.6648, "step": 5594 }, { "epoch": 0.2318786522441875, "grad_norm": 0.43627315759658813, "learning_rate": 3.840813958307431e-06, "loss": 0.7158, "step": 5595 }, { "epoch": 0.23192009614986117, "grad_norm": 0.40295183658599854, "learning_rate": 3.840606738779063e-06, "loss": 0.7008, "step": 5596 }, { "epoch": 0.23196154005553482, "grad_norm": 0.43996402621269226, "learning_rate": 3.840399519250694e-06, "loss": 0.7021, "step": 5597 }, { "epoch": 0.2320029839612085, "grad_norm": 0.4422784447669983, "learning_rate": 3.840192299722326e-06, "loss": 0.7378, "step": 5598 }, { "epoch": 0.23204442786688217, "grad_norm": 0.4222504496574402, "learning_rate": 3.839985080193958e-06, "loss": 0.7156, "step": 5599 }, { "epoch": 0.23208587177255585, "grad_norm": 0.40697091817855835, "learning_rate": 3.83977786066559e-06, "loss": 0.6831, "step": 5600 }, { "epoch": 0.23212731567822953, "grad_norm": 0.41009506583213806, "learning_rate": 3.839570641137221e-06, "loss": 0.7036, "step": 5601 }, { "epoch": 0.23216875958390318, "grad_norm": 0.40400463342666626, "learning_rate": 3.839363421608853e-06, "loss": 0.7324, "step": 5602 }, { "epoch": 0.23221020348957686, "grad_norm": 0.4255528748035431, "learning_rate": 3.839156202080485e-06, "loss": 0.726, "step": 5603 }, { "epoch": 0.23225164739525053, "grad_norm": 0.4111621379852295, "learning_rate": 3.838948982552116e-06, "loss": 0.6974, "step": 5604 }, { "epoch": 0.2322930913009242, "grad_norm": 0.45280954241752625, "learning_rate": 
3.838741763023748e-06, "loss": 0.688, "step": 5605 }, { "epoch": 0.23233453520659786, "grad_norm": 0.4439070522785187, "learning_rate": 3.838534543495379e-06, "loss": 0.7565, "step": 5606 }, { "epoch": 0.23237597911227154, "grad_norm": 0.38374313712120056, "learning_rate": 3.838327323967011e-06, "loss": 0.7056, "step": 5607 }, { "epoch": 0.2324174230179452, "grad_norm": 0.4170466661453247, "learning_rate": 3.838120104438643e-06, "loss": 0.6824, "step": 5608 }, { "epoch": 0.2324588669236189, "grad_norm": 0.407936155796051, "learning_rate": 3.837912884910274e-06, "loss": 0.7627, "step": 5609 }, { "epoch": 0.23250031082929254, "grad_norm": 0.4167529046535492, "learning_rate": 3.837705665381906e-06, "loss": 0.6926, "step": 5610 }, { "epoch": 0.23254175473496622, "grad_norm": 0.39114439487457275, "learning_rate": 3.837498445853538e-06, "loss": 0.6617, "step": 5611 }, { "epoch": 0.2325831986406399, "grad_norm": 0.45526212453842163, "learning_rate": 3.837291226325169e-06, "loss": 0.7552, "step": 5612 }, { "epoch": 0.23262464254631357, "grad_norm": 0.4180341958999634, "learning_rate": 3.8370840067968e-06, "loss": 0.7297, "step": 5613 }, { "epoch": 0.23266608645198725, "grad_norm": 0.41361042857170105, "learning_rate": 3.836876787268433e-06, "loss": 0.6951, "step": 5614 }, { "epoch": 0.2327075303576609, "grad_norm": 0.45174503326416016, "learning_rate": 3.836669567740064e-06, "loss": 0.7603, "step": 5615 }, { "epoch": 0.23274897426333457, "grad_norm": 0.4098542630672455, "learning_rate": 3.836462348211696e-06, "loss": 0.6952, "step": 5616 }, { "epoch": 0.23279041816900825, "grad_norm": 0.4468437731266022, "learning_rate": 3.836255128683327e-06, "loss": 0.8069, "step": 5617 }, { "epoch": 0.23283186207468193, "grad_norm": 0.4475123882293701, "learning_rate": 3.836047909154959e-06, "loss": 0.7653, "step": 5618 }, { "epoch": 0.23287330598035558, "grad_norm": 0.41617894172668457, "learning_rate": 3.835840689626591e-06, "loss": 0.7305, "step": 5619 }, { "epoch": 
0.23291474988602925, "grad_norm": 0.43787604570388794, "learning_rate": 3.835633470098223e-06, "loss": 0.7214, "step": 5620 }, { "epoch": 0.23295619379170293, "grad_norm": 0.3733513355255127, "learning_rate": 3.835426250569854e-06, "loss": 0.7084, "step": 5621 }, { "epoch": 0.2329976376973766, "grad_norm": 0.38421791791915894, "learning_rate": 3.835219031041485e-06, "loss": 0.6968, "step": 5622 }, { "epoch": 0.23303908160305026, "grad_norm": 0.4115132987499237, "learning_rate": 3.835011811513118e-06, "loss": 0.7371, "step": 5623 }, { "epoch": 0.23308052550872393, "grad_norm": 0.41062331199645996, "learning_rate": 3.834804591984749e-06, "loss": 0.7639, "step": 5624 }, { "epoch": 0.2331219694143976, "grad_norm": 0.4384192228317261, "learning_rate": 3.83459737245638e-06, "loss": 0.75, "step": 5625 }, { "epoch": 0.2331634133200713, "grad_norm": 0.43209701776504517, "learning_rate": 3.834390152928012e-06, "loss": 0.7104, "step": 5626 }, { "epoch": 0.23320485722574497, "grad_norm": 0.4395718276500702, "learning_rate": 3.834182933399644e-06, "loss": 0.7358, "step": 5627 }, { "epoch": 0.23324630113141862, "grad_norm": 0.3931125998497009, "learning_rate": 3.833975713871275e-06, "loss": 0.675, "step": 5628 }, { "epoch": 0.2332877450370923, "grad_norm": 0.3964489996433258, "learning_rate": 3.833768494342907e-06, "loss": 0.7073, "step": 5629 }, { "epoch": 0.23332918894276597, "grad_norm": 0.42992570996284485, "learning_rate": 3.833561274814539e-06, "loss": 0.6967, "step": 5630 }, { "epoch": 0.23337063284843965, "grad_norm": 0.45284217596054077, "learning_rate": 3.83335405528617e-06, "loss": 0.7554, "step": 5631 }, { "epoch": 0.2334120767541133, "grad_norm": 0.4270950257778168, "learning_rate": 3.833146835757803e-06, "loss": 0.7035, "step": 5632 }, { "epoch": 0.23345352065978697, "grad_norm": 0.4388374984264374, "learning_rate": 3.832939616229433e-06, "loss": 0.7377, "step": 5633 }, { "epoch": 0.23349496456546065, "grad_norm": 0.4012737572193146, "learning_rate": 
3.832732396701065e-06, "loss": 0.6885, "step": 5634 }, { "epoch": 0.23353640847113433, "grad_norm": 0.4167604148387909, "learning_rate": 3.832525177172698e-06, "loss": 0.7905, "step": 5635 }, { "epoch": 0.233577852376808, "grad_norm": 0.4313300848007202, "learning_rate": 3.832317957644329e-06, "loss": 0.7479, "step": 5636 }, { "epoch": 0.23361929628248165, "grad_norm": 0.3826161324977875, "learning_rate": 3.83211073811596e-06, "loss": 0.6891, "step": 5637 }, { "epoch": 0.23366074018815533, "grad_norm": 0.4370412528514862, "learning_rate": 3.831903518587592e-06, "loss": 0.7036, "step": 5638 }, { "epoch": 0.233702184093829, "grad_norm": 0.4497278332710266, "learning_rate": 3.831696299059224e-06, "loss": 0.738, "step": 5639 }, { "epoch": 0.23374362799950268, "grad_norm": 0.4276197552680969, "learning_rate": 3.831489079530855e-06, "loss": 0.6699, "step": 5640 }, { "epoch": 0.23378507190517633, "grad_norm": 0.4678500294685364, "learning_rate": 3.831281860002487e-06, "loss": 0.7798, "step": 5641 }, { "epoch": 0.23382651581085, "grad_norm": 0.39157119393348694, "learning_rate": 3.831074640474118e-06, "loss": 0.7185, "step": 5642 }, { "epoch": 0.2338679597165237, "grad_norm": 0.451463907957077, "learning_rate": 3.83086742094575e-06, "loss": 0.7444, "step": 5643 }, { "epoch": 0.23390940362219736, "grad_norm": 0.40383991599082947, "learning_rate": 3.830660201417382e-06, "loss": 0.7285, "step": 5644 }, { "epoch": 0.23395084752787101, "grad_norm": 0.42612454295158386, "learning_rate": 3.830452981889013e-06, "loss": 0.7727, "step": 5645 }, { "epoch": 0.2339922914335447, "grad_norm": 0.4258905053138733, "learning_rate": 3.830245762360645e-06, "loss": 0.7695, "step": 5646 }, { "epoch": 0.23403373533921837, "grad_norm": 0.39262568950653076, "learning_rate": 3.830038542832277e-06, "loss": 0.6816, "step": 5647 }, { "epoch": 0.23407517924489205, "grad_norm": 0.421977162361145, "learning_rate": 3.829831323303909e-06, "loss": 0.7939, "step": 5648 }, { "epoch": 0.23411662315056572, 
"grad_norm": 0.43979400396347046, "learning_rate": 3.8296241037755396e-06, "loss": 0.7303, "step": 5649 }, { "epoch": 0.23415806705623937, "grad_norm": 0.4167293310165405, "learning_rate": 3.829416884247172e-06, "loss": 0.6978, "step": 5650 }, { "epoch": 0.23419951096191305, "grad_norm": 0.4585278332233429, "learning_rate": 3.829209664718803e-06, "loss": 0.7708, "step": 5651 }, { "epoch": 0.23424095486758673, "grad_norm": 0.41107773780822754, "learning_rate": 3.829002445190435e-06, "loss": 0.7233, "step": 5652 }, { "epoch": 0.2342823987732604, "grad_norm": 0.43208828568458557, "learning_rate": 3.828795225662067e-06, "loss": 0.6813, "step": 5653 }, { "epoch": 0.23432384267893405, "grad_norm": 0.4073016345500946, "learning_rate": 3.828588006133698e-06, "loss": 0.6816, "step": 5654 }, { "epoch": 0.23436528658460773, "grad_norm": 0.4553734362125397, "learning_rate": 3.8283807866053304e-06, "loss": 0.6709, "step": 5655 }, { "epoch": 0.2344067304902814, "grad_norm": 0.4247308671474457, "learning_rate": 3.828173567076962e-06, "loss": 0.7336, "step": 5656 }, { "epoch": 0.23444817439595508, "grad_norm": 0.37698736786842346, "learning_rate": 3.827966347548593e-06, "loss": 0.6797, "step": 5657 }, { "epoch": 0.23448961830162873, "grad_norm": 0.4262935221195221, "learning_rate": 3.827759128020225e-06, "loss": 0.7212, "step": 5658 }, { "epoch": 0.2345310622073024, "grad_norm": 0.4136400818824768, "learning_rate": 3.827551908491857e-06, "loss": 0.7256, "step": 5659 }, { "epoch": 0.2345725061129761, "grad_norm": 0.4240547716617584, "learning_rate": 3.827344688963488e-06, "loss": 0.6829, "step": 5660 }, { "epoch": 0.23461395001864976, "grad_norm": 0.4264974892139435, "learning_rate": 3.82713746943512e-06, "loss": 0.7017, "step": 5661 }, { "epoch": 0.23465539392432344, "grad_norm": 0.44271957874298096, "learning_rate": 3.826930249906752e-06, "loss": 0.7358, "step": 5662 }, { "epoch": 0.2346968378299971, "grad_norm": 0.3843737840652466, "learning_rate": 3.826723030378383e-06, "loss": 
0.6865, "step": 5663 }, { "epoch": 0.23473828173567077, "grad_norm": 0.454171359539032, "learning_rate": 3.826515810850015e-06, "loss": 0.7719, "step": 5664 }, { "epoch": 0.23477972564134444, "grad_norm": 0.4293948709964752, "learning_rate": 3.826308591321646e-06, "loss": 0.7341, "step": 5665 }, { "epoch": 0.23482116954701812, "grad_norm": 0.4440838098526001, "learning_rate": 3.826101371793278e-06, "loss": 0.7117, "step": 5666 }, { "epoch": 0.23486261345269177, "grad_norm": 0.4099890887737274, "learning_rate": 3.82589415226491e-06, "loss": 0.7352, "step": 5667 }, { "epoch": 0.23490405735836545, "grad_norm": 0.4061611294746399, "learning_rate": 3.825686932736542e-06, "loss": 0.7332, "step": 5668 }, { "epoch": 0.23494550126403912, "grad_norm": 0.4340148866176605, "learning_rate": 3.825479713208173e-06, "loss": 0.6975, "step": 5669 }, { "epoch": 0.2349869451697128, "grad_norm": 0.4196752905845642, "learning_rate": 3.825272493679805e-06, "loss": 0.6703, "step": 5670 }, { "epoch": 0.23502838907538645, "grad_norm": 0.455152302980423, "learning_rate": 3.825065274151437e-06, "loss": 0.8018, "step": 5671 }, { "epoch": 0.23506983298106013, "grad_norm": 0.4546992778778076, "learning_rate": 3.824858054623068e-06, "loss": 0.75, "step": 5672 }, { "epoch": 0.2351112768867338, "grad_norm": 0.42143386602401733, "learning_rate": 3.8246508350947e-06, "loss": 0.7273, "step": 5673 }, { "epoch": 0.23515272079240748, "grad_norm": 0.39995497465133667, "learning_rate": 3.824443615566331e-06, "loss": 0.7046, "step": 5674 }, { "epoch": 0.23519416469808116, "grad_norm": 0.429457426071167, "learning_rate": 3.824236396037963e-06, "loss": 0.7219, "step": 5675 }, { "epoch": 0.2352356086037548, "grad_norm": 0.41710224747657776, "learning_rate": 3.824029176509595e-06, "loss": 0.7292, "step": 5676 }, { "epoch": 0.23527705250942849, "grad_norm": 0.4525255858898163, "learning_rate": 3.823821956981226e-06, "loss": 0.692, "step": 5677 }, { "epoch": 0.23531849641510216, "grad_norm": 0.4212068021297455, 
"learning_rate": 3.823614737452857e-06, "loss": 0.7673, "step": 5678 }, { "epoch": 0.23535994032077584, "grad_norm": 0.43606922030448914, "learning_rate": 3.82340751792449e-06, "loss": 0.7627, "step": 5679 }, { "epoch": 0.2354013842264495, "grad_norm": 0.5065741539001465, "learning_rate": 3.823200298396121e-06, "loss": 0.7336, "step": 5680 }, { "epoch": 0.23544282813212317, "grad_norm": 0.44878125190734863, "learning_rate": 3.822993078867752e-06, "loss": 0.7256, "step": 5681 }, { "epoch": 0.23548427203779684, "grad_norm": 0.4345020651817322, "learning_rate": 3.822785859339385e-06, "loss": 0.7488, "step": 5682 }, { "epoch": 0.23552571594347052, "grad_norm": 0.42891213297843933, "learning_rate": 3.822578639811016e-06, "loss": 0.6921, "step": 5683 }, { "epoch": 0.23556715984914417, "grad_norm": 0.4383144676685333, "learning_rate": 3.822371420282648e-06, "loss": 0.7041, "step": 5684 }, { "epoch": 0.23560860375481785, "grad_norm": 0.3908795118331909, "learning_rate": 3.822164200754279e-06, "loss": 0.6926, "step": 5685 }, { "epoch": 0.23565004766049152, "grad_norm": 0.3841250240802765, "learning_rate": 3.821956981225911e-06, "loss": 0.738, "step": 5686 }, { "epoch": 0.2356914915661652, "grad_norm": 0.41454118490219116, "learning_rate": 3.821749761697542e-06, "loss": 0.6569, "step": 5687 }, { "epoch": 0.23573293547183888, "grad_norm": 0.43832072615623474, "learning_rate": 3.821542542169175e-06, "loss": 0.8132, "step": 5688 }, { "epoch": 0.23577437937751253, "grad_norm": 0.3900536894798279, "learning_rate": 3.821335322640806e-06, "loss": 0.6915, "step": 5689 }, { "epoch": 0.2358158232831862, "grad_norm": 0.3901899456977844, "learning_rate": 3.821128103112437e-06, "loss": 0.7581, "step": 5690 }, { "epoch": 0.23585726718885988, "grad_norm": 0.39633095264434814, "learning_rate": 3.82092088358407e-06, "loss": 0.6641, "step": 5691 }, { "epoch": 0.23589871109453356, "grad_norm": 0.42954111099243164, "learning_rate": 3.820713664055701e-06, "loss": 0.6895, "step": 5692 }, { 
"epoch": 0.2359401550002072, "grad_norm": 0.414821982383728, "learning_rate": 3.820506444527332e-06, "loss": 0.6901, "step": 5693 }, { "epoch": 0.23598159890588088, "grad_norm": 0.3972018361091614, "learning_rate": 3.820299224998964e-06, "loss": 0.769, "step": 5694 }, { "epoch": 0.23602304281155456, "grad_norm": 0.4027329683303833, "learning_rate": 3.820092005470596e-06, "loss": 0.7014, "step": 5695 }, { "epoch": 0.23606448671722824, "grad_norm": 0.4301128089427948, "learning_rate": 3.819884785942227e-06, "loss": 0.7363, "step": 5696 }, { "epoch": 0.23610593062290192, "grad_norm": 0.41944462060928345, "learning_rate": 3.819677566413859e-06, "loss": 0.7151, "step": 5697 }, { "epoch": 0.23614737452857557, "grad_norm": 0.4161551296710968, "learning_rate": 3.819470346885491e-06, "loss": 0.7068, "step": 5698 }, { "epoch": 0.23618881843424924, "grad_norm": 0.44620856642723083, "learning_rate": 3.819263127357122e-06, "loss": 0.7441, "step": 5699 }, { "epoch": 0.23623026233992292, "grad_norm": 0.3926694393157959, "learning_rate": 3.819055907828755e-06, "loss": 0.707, "step": 5700 }, { "epoch": 0.2362717062455966, "grad_norm": 0.44288885593414307, "learning_rate": 3.818848688300385e-06, "loss": 0.7017, "step": 5701 }, { "epoch": 0.23631315015127025, "grad_norm": 0.41688084602355957, "learning_rate": 3.818641468772017e-06, "loss": 0.7346, "step": 5702 }, { "epoch": 0.23635459405694392, "grad_norm": 0.4357593059539795, "learning_rate": 3.818434249243649e-06, "loss": 0.75, "step": 5703 }, { "epoch": 0.2363960379626176, "grad_norm": 0.4038029611110687, "learning_rate": 3.818227029715281e-06, "loss": 0.7051, "step": 5704 }, { "epoch": 0.23643748186829128, "grad_norm": 0.41408681869506836, "learning_rate": 3.818019810186912e-06, "loss": 0.6863, "step": 5705 }, { "epoch": 0.23647892577396493, "grad_norm": 0.42879465222358704, "learning_rate": 3.817812590658544e-06, "loss": 0.7566, "step": 5706 }, { "epoch": 0.2365203696796386, "grad_norm": 0.4013672173023224, "learning_rate": 
3.817605371130176e-06, "loss": 0.6781, "step": 5707 }, { "epoch": 0.23656181358531228, "grad_norm": 0.4084109961986542, "learning_rate": 3.817398151601807e-06, "loss": 0.7285, "step": 5708 }, { "epoch": 0.23660325749098596, "grad_norm": 0.3864114582538605, "learning_rate": 3.817190932073439e-06, "loss": 0.6968, "step": 5709 }, { "epoch": 0.23664470139665963, "grad_norm": 0.4046151340007782, "learning_rate": 3.81698371254507e-06, "loss": 0.7205, "step": 5710 }, { "epoch": 0.23668614530233328, "grad_norm": 0.4075384736061096, "learning_rate": 3.8167764930167024e-06, "loss": 0.7157, "step": 5711 }, { "epoch": 0.23672758920800696, "grad_norm": 0.49778109788894653, "learning_rate": 3.816569273488334e-06, "loss": 0.7288, "step": 5712 }, { "epoch": 0.23676903311368064, "grad_norm": 0.422605961561203, "learning_rate": 3.816362053959965e-06, "loss": 0.686, "step": 5713 }, { "epoch": 0.23681047701935432, "grad_norm": 0.4236895740032196, "learning_rate": 3.8161548344315974e-06, "loss": 0.7017, "step": 5714 }, { "epoch": 0.23685192092502796, "grad_norm": 0.42503538727760315, "learning_rate": 3.815947614903229e-06, "loss": 0.6646, "step": 5715 }, { "epoch": 0.23689336483070164, "grad_norm": 0.40171271562576294, "learning_rate": 3.815740395374861e-06, "loss": 0.7039, "step": 5716 }, { "epoch": 0.23693480873637532, "grad_norm": 0.46380874514579773, "learning_rate": 3.815533175846492e-06, "loss": 0.7178, "step": 5717 }, { "epoch": 0.236976252642049, "grad_norm": 0.4031495153903961, "learning_rate": 3.815325956318124e-06, "loss": 0.72, "step": 5718 }, { "epoch": 0.23701769654772264, "grad_norm": 0.40494564175605774, "learning_rate": 3.815118736789755e-06, "loss": 0.6987, "step": 5719 }, { "epoch": 0.23705914045339632, "grad_norm": 0.45907774567604065, "learning_rate": 3.8149115172613874e-06, "loss": 0.7384, "step": 5720 }, { "epoch": 0.23710058435907, "grad_norm": 0.40058204531669617, "learning_rate": 3.814704297733019e-06, "loss": 0.7114, "step": 5721 }, { "epoch": 
0.23714202826474368, "grad_norm": 0.4196268618106842, "learning_rate": 3.81449707820465e-06, "loss": 0.7091, "step": 5722 }, { "epoch": 0.23718347217041735, "grad_norm": 0.4563756287097931, "learning_rate": 3.814289858676282e-06, "loss": 0.7146, "step": 5723 }, { "epoch": 0.237224916076091, "grad_norm": 0.43273094296455383, "learning_rate": 3.8140826391479134e-06, "loss": 0.7554, "step": 5724 }, { "epoch": 0.23726635998176468, "grad_norm": 0.4640210270881653, "learning_rate": 3.8138754196195456e-06, "loss": 0.7822, "step": 5725 }, { "epoch": 0.23730780388743836, "grad_norm": 0.4169848561286926, "learning_rate": 3.8136682000911766e-06, "loss": 0.7122, "step": 5726 }, { "epoch": 0.23734924779311203, "grad_norm": 0.40893521904945374, "learning_rate": 3.813460980562809e-06, "loss": 0.6919, "step": 5727 }, { "epoch": 0.23739069169878568, "grad_norm": 0.40852832794189453, "learning_rate": 3.8132537610344402e-06, "loss": 0.7639, "step": 5728 }, { "epoch": 0.23743213560445936, "grad_norm": 0.3834085464477539, "learning_rate": 3.813046541506072e-06, "loss": 0.6548, "step": 5729 }, { "epoch": 0.23747357951013304, "grad_norm": 0.38973578810691833, "learning_rate": 3.8128393219777034e-06, "loss": 0.6938, "step": 5730 }, { "epoch": 0.23751502341580671, "grad_norm": 0.4480464458465576, "learning_rate": 3.8126321024493352e-06, "loss": 0.7878, "step": 5731 }, { "epoch": 0.23755646732148036, "grad_norm": 0.4180966019630432, "learning_rate": 3.812424882920967e-06, "loss": 0.6931, "step": 5732 }, { "epoch": 0.23759791122715404, "grad_norm": 0.3828777074813843, "learning_rate": 3.8122176633925984e-06, "loss": 0.6162, "step": 5733 }, { "epoch": 0.23763935513282772, "grad_norm": 0.4276162385940552, "learning_rate": 3.8120104438642302e-06, "loss": 0.7266, "step": 5734 }, { "epoch": 0.2376807990385014, "grad_norm": 0.47124335169792175, "learning_rate": 3.8118032243358616e-06, "loss": 0.7483, "step": 5735 }, { "epoch": 0.23772224294417507, "grad_norm": 0.44580909609794617, "learning_rate": 
3.8115960048074934e-06, "loss": 0.7059, "step": 5736 }, { "epoch": 0.23776368684984872, "grad_norm": 0.40236401557922363, "learning_rate": 3.811388785279125e-06, "loss": 0.7024, "step": 5737 }, { "epoch": 0.2378051307555224, "grad_norm": 0.41843754053115845, "learning_rate": 3.8111815657507566e-06, "loss": 0.6826, "step": 5738 }, { "epoch": 0.23784657466119608, "grad_norm": 0.40612679719924927, "learning_rate": 3.810974346222388e-06, "loss": 0.7069, "step": 5739 }, { "epoch": 0.23788801856686975, "grad_norm": 0.44002223014831543, "learning_rate": 3.81076712669402e-06, "loss": 0.7502, "step": 5740 }, { "epoch": 0.2379294624725434, "grad_norm": 0.4455436170101166, "learning_rate": 3.8105599071656516e-06, "loss": 0.7419, "step": 5741 }, { "epoch": 0.23797090637821708, "grad_norm": 0.40342798829078674, "learning_rate": 3.810352687637283e-06, "loss": 0.7393, "step": 5742 }, { "epoch": 0.23801235028389076, "grad_norm": 0.4313731789588928, "learning_rate": 3.8101454681089152e-06, "loss": 0.7395, "step": 5743 }, { "epoch": 0.23805379418956443, "grad_norm": 0.43516477942466736, "learning_rate": 3.809938248580546e-06, "loss": 0.7454, "step": 5744 }, { "epoch": 0.23809523809523808, "grad_norm": 0.4345818758010864, "learning_rate": 3.8097310290521784e-06, "loss": 0.7108, "step": 5745 }, { "epoch": 0.23813668200091176, "grad_norm": 0.41741958260536194, "learning_rate": 3.80952380952381e-06, "loss": 0.7057, "step": 5746 }, { "epoch": 0.23817812590658544, "grad_norm": 0.4487571120262146, "learning_rate": 3.8093165899954416e-06, "loss": 0.7417, "step": 5747 }, { "epoch": 0.2382195698122591, "grad_norm": 0.39634135365486145, "learning_rate": 3.809109370467073e-06, "loss": 0.7078, "step": 5748 }, { "epoch": 0.2382610137179328, "grad_norm": 0.41375893354415894, "learning_rate": 3.808902150938705e-06, "loss": 0.7096, "step": 5749 }, { "epoch": 0.23830245762360644, "grad_norm": 0.42929109930992126, "learning_rate": 3.8086949314103366e-06, "loss": 0.751, "step": 5750 }, { "epoch": 
0.23834390152928012, "grad_norm": 0.4596971273422241, "learning_rate": 3.808487711881968e-06, "loss": 0.7849, "step": 5751 }, { "epoch": 0.2383853454349538, "grad_norm": 0.41589468717575073, "learning_rate": 3.8082804923536e-06, "loss": 0.72, "step": 5752 }, { "epoch": 0.23842678934062747, "grad_norm": 0.4394682049751282, "learning_rate": 3.808073272825231e-06, "loss": 0.7524, "step": 5753 }, { "epoch": 0.23846823324630112, "grad_norm": 0.4704545736312866, "learning_rate": 3.807866053296863e-06, "loss": 0.6914, "step": 5754 }, { "epoch": 0.2385096771519748, "grad_norm": 0.4443422853946686, "learning_rate": 3.8076588337684944e-06, "loss": 0.7292, "step": 5755 }, { "epoch": 0.23855112105764847, "grad_norm": 0.43635162711143494, "learning_rate": 3.8074516142401262e-06, "loss": 0.7065, "step": 5756 }, { "epoch": 0.23859256496332215, "grad_norm": 0.43623578548431396, "learning_rate": 3.807244394711758e-06, "loss": 0.6941, "step": 5757 }, { "epoch": 0.2386340088689958, "grad_norm": 0.41745543479919434, "learning_rate": 3.8070371751833894e-06, "loss": 0.7542, "step": 5758 }, { "epoch": 0.23867545277466948, "grad_norm": 0.44767022132873535, "learning_rate": 3.8068299556550216e-06, "loss": 0.7456, "step": 5759 }, { "epoch": 0.23871689668034315, "grad_norm": 0.3962811529636383, "learning_rate": 3.8066227361266526e-06, "loss": 0.7224, "step": 5760 }, { "epoch": 0.23875834058601683, "grad_norm": 0.4212293326854706, "learning_rate": 3.806415516598285e-06, "loss": 0.7104, "step": 5761 }, { "epoch": 0.2387997844916905, "grad_norm": 0.414909690618515, "learning_rate": 3.8062082970699162e-06, "loss": 0.7109, "step": 5762 }, { "epoch": 0.23884122839736416, "grad_norm": 0.4372805953025818, "learning_rate": 3.806001077541548e-06, "loss": 0.7208, "step": 5763 }, { "epoch": 0.23888267230303784, "grad_norm": 0.4553775489330292, "learning_rate": 3.8057938580131794e-06, "loss": 0.6899, "step": 5764 }, { "epoch": 0.2389241162087115, "grad_norm": 0.45307597517967224, "learning_rate": 
3.8055866384848112e-06, "loss": 0.7515, "step": 5765 }, { "epoch": 0.2389655601143852, "grad_norm": 0.41489067673683167, "learning_rate": 3.805379418956443e-06, "loss": 0.7096, "step": 5766 }, { "epoch": 0.23900700402005884, "grad_norm": 0.4089105725288391, "learning_rate": 3.8051721994280744e-06, "loss": 0.7048, "step": 5767 }, { "epoch": 0.23904844792573252, "grad_norm": 0.4157990515232086, "learning_rate": 3.8049649798997062e-06, "loss": 0.6785, "step": 5768 }, { "epoch": 0.2390898918314062, "grad_norm": 0.4679083228111267, "learning_rate": 3.8047577603713376e-06, "loss": 0.7137, "step": 5769 }, { "epoch": 0.23913133573707987, "grad_norm": 0.42091599106788635, "learning_rate": 3.8045505408429694e-06, "loss": 0.7498, "step": 5770 }, { "epoch": 0.23917277964275355, "grad_norm": 0.4160477817058563, "learning_rate": 3.804343321314601e-06, "loss": 0.7266, "step": 5771 }, { "epoch": 0.2392142235484272, "grad_norm": 0.38867446780204773, "learning_rate": 3.8041361017862326e-06, "loss": 0.6802, "step": 5772 }, { "epoch": 0.23925566745410087, "grad_norm": 0.4366922676563263, "learning_rate": 3.803928882257864e-06, "loss": 0.7422, "step": 5773 }, { "epoch": 0.23929711135977455, "grad_norm": 0.4187376797199249, "learning_rate": 3.803721662729496e-06, "loss": 0.73, "step": 5774 }, { "epoch": 0.23933855526544823, "grad_norm": 0.3953743577003479, "learning_rate": 3.803514443201128e-06, "loss": 0.7271, "step": 5775 }, { "epoch": 0.23937999917112188, "grad_norm": 0.4389801323413849, "learning_rate": 3.803307223672759e-06, "loss": 0.6765, "step": 5776 }, { "epoch": 0.23942144307679555, "grad_norm": 0.4331945776939392, "learning_rate": 3.8031000041443912e-06, "loss": 0.6956, "step": 5777 }, { "epoch": 0.23946288698246923, "grad_norm": 0.4251064658164978, "learning_rate": 3.802892784616022e-06, "loss": 0.7072, "step": 5778 }, { "epoch": 0.2395043308881429, "grad_norm": 0.41855165362358093, "learning_rate": 3.8026855650876544e-06, "loss": 0.7515, "step": 5779 }, { "epoch": 
0.23954577479381656, "grad_norm": 0.40960487723350525, "learning_rate": 3.802478345559286e-06, "loss": 0.7339, "step": 5780 }, { "epoch": 0.23958721869949023, "grad_norm": 0.3710502088069916, "learning_rate": 3.8022711260309176e-06, "loss": 0.7004, "step": 5781 }, { "epoch": 0.2396286626051639, "grad_norm": 0.4348476827144623, "learning_rate": 3.802063906502549e-06, "loss": 0.7471, "step": 5782 }, { "epoch": 0.2396701065108376, "grad_norm": 0.40413665771484375, "learning_rate": 3.801856686974181e-06, "loss": 0.6738, "step": 5783 }, { "epoch": 0.23971155041651127, "grad_norm": 0.4238663911819458, "learning_rate": 3.8016494674458126e-06, "loss": 0.7302, "step": 5784 }, { "epoch": 0.23975299432218491, "grad_norm": 0.39110061526298523, "learning_rate": 3.801442247917444e-06, "loss": 0.7007, "step": 5785 }, { "epoch": 0.2397944382278586, "grad_norm": 0.4134226441383362, "learning_rate": 3.801235028389076e-06, "loss": 0.7375, "step": 5786 }, { "epoch": 0.23983588213353227, "grad_norm": 0.3887648284435272, "learning_rate": 3.8010278088607072e-06, "loss": 0.6868, "step": 5787 }, { "epoch": 0.23987732603920595, "grad_norm": 0.39514827728271484, "learning_rate": 3.800820589332339e-06, "loss": 0.7046, "step": 5788 }, { "epoch": 0.2399187699448796, "grad_norm": 0.4230422079563141, "learning_rate": 3.8006133698039704e-06, "loss": 0.7041, "step": 5789 }, { "epoch": 0.23996021385055327, "grad_norm": 0.4029048681259155, "learning_rate": 3.8004061502756022e-06, "loss": 0.7571, "step": 5790 }, { "epoch": 0.24000165775622695, "grad_norm": 0.39887550473213196, "learning_rate": 3.8001989307472336e-06, "loss": 0.6906, "step": 5791 }, { "epoch": 0.24004310166190063, "grad_norm": 0.41522154211997986, "learning_rate": 3.7999917112188654e-06, "loss": 0.6934, "step": 5792 }, { "epoch": 0.24008454556757428, "grad_norm": 0.3844720721244812, "learning_rate": 3.7997844916904976e-06, "loss": 0.6481, "step": 5793 }, { "epoch": 0.24012598947324795, "grad_norm": 0.4491621255874634, "learning_rate": 
3.7995772721621286e-06, "loss": 0.7603, "step": 5794 }, { "epoch": 0.24016743337892163, "grad_norm": 0.4107879400253296, "learning_rate": 3.799370052633761e-06, "loss": 0.7009, "step": 5795 }, { "epoch": 0.2402088772845953, "grad_norm": 0.4060125946998596, "learning_rate": 3.7991628331053922e-06, "loss": 0.7256, "step": 5796 }, { "epoch": 0.24025032119026898, "grad_norm": 0.44591009616851807, "learning_rate": 3.798955613577024e-06, "loss": 0.7134, "step": 5797 }, { "epoch": 0.24029176509594263, "grad_norm": 0.42751947045326233, "learning_rate": 3.7987483940486554e-06, "loss": 0.7222, "step": 5798 }, { "epoch": 0.2403332090016163, "grad_norm": 0.4028674066066742, "learning_rate": 3.7985411745202872e-06, "loss": 0.6912, "step": 5799 }, { "epoch": 0.24037465290729, "grad_norm": 0.4358348250389099, "learning_rate": 3.7983339549919186e-06, "loss": 0.7185, "step": 5800 }, { "epoch": 0.24041609681296366, "grad_norm": 0.44847407937049866, "learning_rate": 3.7981267354635504e-06, "loss": 0.8047, "step": 5801 }, { "epoch": 0.2404575407186373, "grad_norm": 0.44925713539123535, "learning_rate": 3.7979195159351822e-06, "loss": 0.7249, "step": 5802 }, { "epoch": 0.240498984624311, "grad_norm": 0.4048302471637726, "learning_rate": 3.7977122964068136e-06, "loss": 0.7048, "step": 5803 }, { "epoch": 0.24054042852998467, "grad_norm": 0.42482465505599976, "learning_rate": 3.7975050768784454e-06, "loss": 0.665, "step": 5804 }, { "epoch": 0.24058187243565834, "grad_norm": 0.44000980257987976, "learning_rate": 3.797297857350077e-06, "loss": 0.7388, "step": 5805 }, { "epoch": 0.240623316341332, "grad_norm": 0.4325239360332489, "learning_rate": 3.7970906378217086e-06, "loss": 0.7317, "step": 5806 }, { "epoch": 0.24066476024700567, "grad_norm": 0.4055592715740204, "learning_rate": 3.79688341829334e-06, "loss": 0.6676, "step": 5807 }, { "epoch": 0.24070620415267935, "grad_norm": 0.45745575428009033, "learning_rate": 3.796676198764972e-06, "loss": 0.7197, "step": 5808 }, { "epoch": 
0.24074764805835303, "grad_norm": 0.42545661330223083, "learning_rate": 3.796468979236603e-06, "loss": 0.7007, "step": 5809 }, { "epoch": 0.2407890919640267, "grad_norm": 0.39527949690818787, "learning_rate": 3.796261759708235e-06, "loss": 0.7102, "step": 5810 }, { "epoch": 0.24083053586970035, "grad_norm": 0.4333530366420746, "learning_rate": 3.7960545401798672e-06, "loss": 0.7373, "step": 5811 }, { "epoch": 0.24087197977537403, "grad_norm": 0.44869107007980347, "learning_rate": 3.795847320651498e-06, "loss": 0.7017, "step": 5812 }, { "epoch": 0.2409134236810477, "grad_norm": 0.40590861439704895, "learning_rate": 3.7956401011231304e-06, "loss": 0.6685, "step": 5813 }, { "epoch": 0.24095486758672138, "grad_norm": 0.43441474437713623, "learning_rate": 3.795432881594762e-06, "loss": 0.7646, "step": 5814 }, { "epoch": 0.24099631149239503, "grad_norm": 0.40566182136535645, "learning_rate": 3.7952256620663936e-06, "loss": 0.6803, "step": 5815 }, { "epoch": 0.2410377553980687, "grad_norm": 0.4118754267692566, "learning_rate": 3.795018442538025e-06, "loss": 0.7126, "step": 5816 }, { "epoch": 0.2410791993037424, "grad_norm": 0.4022071957588196, "learning_rate": 3.794811223009657e-06, "loss": 0.6919, "step": 5817 }, { "epoch": 0.24112064320941606, "grad_norm": 0.4032742977142334, "learning_rate": 3.7946040034812886e-06, "loss": 0.6829, "step": 5818 }, { "epoch": 0.2411620871150897, "grad_norm": 0.40466296672821045, "learning_rate": 3.79439678395292e-06, "loss": 0.6671, "step": 5819 }, { "epoch": 0.2412035310207634, "grad_norm": 0.43412697315216064, "learning_rate": 3.794189564424552e-06, "loss": 0.741, "step": 5820 }, { "epoch": 0.24124497492643707, "grad_norm": 0.4570331275463104, "learning_rate": 3.7939823448961832e-06, "loss": 0.7419, "step": 5821 }, { "epoch": 0.24128641883211074, "grad_norm": 0.4352063834667206, "learning_rate": 3.793775125367815e-06, "loss": 0.7368, "step": 5822 }, { "epoch": 0.24132786273778442, "grad_norm": 0.40237486362457275, "learning_rate": 
3.7935679058394464e-06, "loss": 0.7034, "step": 5823 }, { "epoch": 0.24136930664345807, "grad_norm": 0.4045490026473999, "learning_rate": 3.7933606863110782e-06, "loss": 0.7112, "step": 5824 }, { "epoch": 0.24141075054913175, "grad_norm": 0.41520121693611145, "learning_rate": 3.7931534667827096e-06, "loss": 0.7114, "step": 5825 }, { "epoch": 0.24145219445480542, "grad_norm": 0.4231642484664917, "learning_rate": 3.7929462472543414e-06, "loss": 0.7332, "step": 5826 }, { "epoch": 0.2414936383604791, "grad_norm": 0.46858036518096924, "learning_rate": 3.7927390277259737e-06, "loss": 0.7434, "step": 5827 }, { "epoch": 0.24153508226615275, "grad_norm": 0.4160548150539398, "learning_rate": 3.7925318081976046e-06, "loss": 0.729, "step": 5828 }, { "epoch": 0.24157652617182643, "grad_norm": 0.4762648642063141, "learning_rate": 3.792324588669237e-06, "loss": 0.7217, "step": 5829 }, { "epoch": 0.2416179700775001, "grad_norm": 0.41812923550605774, "learning_rate": 3.7921173691408682e-06, "loss": 0.7141, "step": 5830 }, { "epoch": 0.24165941398317378, "grad_norm": 0.41029757261276245, "learning_rate": 3.7919101496125e-06, "loss": 0.7533, "step": 5831 }, { "epoch": 0.24170085788884743, "grad_norm": 0.4139564335346222, "learning_rate": 3.7917029300841314e-06, "loss": 0.6831, "step": 5832 }, { "epoch": 0.2417423017945211, "grad_norm": 0.4286637306213379, "learning_rate": 3.7914957105557632e-06, "loss": 0.7441, "step": 5833 }, { "epoch": 0.24178374570019479, "grad_norm": 0.41436415910720825, "learning_rate": 3.7912884910273946e-06, "loss": 0.7361, "step": 5834 }, { "epoch": 0.24182518960586846, "grad_norm": 0.432330459356308, "learning_rate": 3.7910812714990264e-06, "loss": 0.7693, "step": 5835 }, { "epoch": 0.24186663351154214, "grad_norm": 0.47075730562210083, "learning_rate": 3.7908740519706582e-06, "loss": 0.7302, "step": 5836 }, { "epoch": 0.2419080774172158, "grad_norm": 0.4582519233226776, "learning_rate": 3.7906668324422896e-06, "loss": 0.7432, "step": 5837 }, { "epoch": 
0.24194952132288947, "grad_norm": 0.4032561182975769, "learning_rate": 3.7904596129139214e-06, "loss": 0.6575, "step": 5838 }, { "epoch": 0.24199096522856314, "grad_norm": 0.4440910220146179, "learning_rate": 3.790252393385553e-06, "loss": 0.667, "step": 5839 }, { "epoch": 0.24203240913423682, "grad_norm": 0.41109806299209595, "learning_rate": 3.7900451738571846e-06, "loss": 0.7581, "step": 5840 }, { "epoch": 0.24207385303991047, "grad_norm": 0.40728530287742615, "learning_rate": 3.789837954328816e-06, "loss": 0.7285, "step": 5841 }, { "epoch": 0.24211529694558415, "grad_norm": 0.4134881794452667, "learning_rate": 3.789630734800448e-06, "loss": 0.7385, "step": 5842 }, { "epoch": 0.24215674085125782, "grad_norm": 0.4340599477291107, "learning_rate": 3.789423515272079e-06, "loss": 0.7107, "step": 5843 }, { "epoch": 0.2421981847569315, "grad_norm": 0.4159688353538513, "learning_rate": 3.789216295743711e-06, "loss": 0.7017, "step": 5844 }, { "epoch": 0.24223962866260518, "grad_norm": 0.4615623950958252, "learning_rate": 3.7890090762153433e-06, "loss": 0.7747, "step": 5845 }, { "epoch": 0.24228107256827883, "grad_norm": 0.3711353540420532, "learning_rate": 3.7888018566869742e-06, "loss": 0.6298, "step": 5846 }, { "epoch": 0.2423225164739525, "grad_norm": 0.41842350363731384, "learning_rate": 3.7885946371586064e-06, "loss": 0.6559, "step": 5847 }, { "epoch": 0.24236396037962618, "grad_norm": 0.43508896231651306, "learning_rate": 3.788387417630238e-06, "loss": 0.701, "step": 5848 }, { "epoch": 0.24240540428529986, "grad_norm": 0.39882227778434753, "learning_rate": 3.7881801981018696e-06, "loss": 0.7327, "step": 5849 }, { "epoch": 0.2424468481909735, "grad_norm": 0.4302796721458435, "learning_rate": 3.787972978573501e-06, "loss": 0.72, "step": 5850 }, { "epoch": 0.24248829209664718, "grad_norm": 0.4106886088848114, "learning_rate": 3.787765759045133e-06, "loss": 0.6692, "step": 5851 }, { "epoch": 0.24252973600232086, "grad_norm": 0.44034191966056824, "learning_rate": 
3.7875585395167642e-06, "loss": 0.6552, "step": 5852 }, { "epoch": 0.24257117990799454, "grad_norm": 0.44143223762512207, "learning_rate": 3.787351319988396e-06, "loss": 0.7223, "step": 5853 }, { "epoch": 0.2426126238136682, "grad_norm": 0.4074268341064453, "learning_rate": 3.787144100460028e-06, "loss": 0.7234, "step": 5854 }, { "epoch": 0.24265406771934186, "grad_norm": 0.4701578617095947, "learning_rate": 3.7869368809316592e-06, "loss": 0.761, "step": 5855 }, { "epoch": 0.24269551162501554, "grad_norm": 0.4345472753047943, "learning_rate": 3.786729661403291e-06, "loss": 0.7097, "step": 5856 }, { "epoch": 0.24273695553068922, "grad_norm": 0.4199623763561249, "learning_rate": 3.7865224418749224e-06, "loss": 0.7659, "step": 5857 }, { "epoch": 0.2427783994363629, "grad_norm": 0.44015181064605713, "learning_rate": 3.7863152223465542e-06, "loss": 0.7378, "step": 5858 }, { "epoch": 0.24281984334203655, "grad_norm": 0.38976845145225525, "learning_rate": 3.7861080028181856e-06, "loss": 0.6832, "step": 5859 }, { "epoch": 0.24286128724771022, "grad_norm": 0.4354557991027832, "learning_rate": 3.7859007832898174e-06, "loss": 0.7, "step": 5860 }, { "epoch": 0.2429027311533839, "grad_norm": 0.436708003282547, "learning_rate": 3.785693563761449e-06, "loss": 0.731, "step": 5861 }, { "epoch": 0.24294417505905758, "grad_norm": 0.4433440864086151, "learning_rate": 3.7854863442330806e-06, "loss": 0.7194, "step": 5862 }, { "epoch": 0.24298561896473123, "grad_norm": 0.40738534927368164, "learning_rate": 3.785279124704713e-06, "loss": 0.6716, "step": 5863 }, { "epoch": 0.2430270628704049, "grad_norm": 0.42629101872444153, "learning_rate": 3.7850719051763442e-06, "loss": 0.681, "step": 5864 }, { "epoch": 0.24306850677607858, "grad_norm": 0.41795089840888977, "learning_rate": 3.784864685647976e-06, "loss": 0.6449, "step": 5865 }, { "epoch": 0.24310995068175226, "grad_norm": 0.44087672233581543, "learning_rate": 3.7846574661196074e-06, "loss": 0.6858, "step": 5866 }, { "epoch": 
0.2431513945874259, "grad_norm": 0.4304245114326477, "learning_rate": 3.7844502465912392e-06, "loss": 0.7803, "step": 5867 }, { "epoch": 0.24319283849309958, "grad_norm": 0.4216367304325104, "learning_rate": 3.7842430270628706e-06, "loss": 0.7319, "step": 5868 }, { "epoch": 0.24323428239877326, "grad_norm": 0.4494384825229645, "learning_rate": 3.7840358075345024e-06, "loss": 0.7861, "step": 5869 }, { "epoch": 0.24327572630444694, "grad_norm": 0.4380645751953125, "learning_rate": 3.7838285880061342e-06, "loss": 0.7434, "step": 5870 }, { "epoch": 0.24331717021012061, "grad_norm": 0.4153081476688385, "learning_rate": 3.7836213684777656e-06, "loss": 0.7573, "step": 5871 }, { "epoch": 0.24335861411579426, "grad_norm": 0.43208009004592896, "learning_rate": 3.7834141489493974e-06, "loss": 0.7185, "step": 5872 }, { "epoch": 0.24340005802146794, "grad_norm": 0.41795045137405396, "learning_rate": 3.783206929421029e-06, "loss": 0.6566, "step": 5873 }, { "epoch": 0.24344150192714162, "grad_norm": 0.46154290437698364, "learning_rate": 3.7829997098926606e-06, "loss": 0.7493, "step": 5874 }, { "epoch": 0.2434829458328153, "grad_norm": 0.405913382768631, "learning_rate": 3.782792490364292e-06, "loss": 0.6934, "step": 5875 }, { "epoch": 0.24352438973848894, "grad_norm": 0.44273924827575684, "learning_rate": 3.782585270835924e-06, "loss": 0.7294, "step": 5876 }, { "epoch": 0.24356583364416262, "grad_norm": 0.39289596676826477, "learning_rate": 3.7823780513075552e-06, "loss": 0.655, "step": 5877 }, { "epoch": 0.2436072775498363, "grad_norm": 0.37577953934669495, "learning_rate": 3.782170831779187e-06, "loss": 0.6777, "step": 5878 }, { "epoch": 0.24364872145550998, "grad_norm": 0.44401857256889343, "learning_rate": 3.7819636122508193e-06, "loss": 0.7168, "step": 5879 }, { "epoch": 0.24369016536118362, "grad_norm": 0.44974014163017273, "learning_rate": 3.7817563927224502e-06, "loss": 0.7056, "step": 5880 }, { "epoch": 0.2437316092668573, "grad_norm": 0.5078209638595581, 
"learning_rate": 3.7815491731940825e-06, "loss": 0.7122, "step": 5881 }, { "epoch": 0.24377305317253098, "grad_norm": 0.41901805996894836, "learning_rate": 3.781341953665714e-06, "loss": 0.7091, "step": 5882 }, { "epoch": 0.24381449707820466, "grad_norm": 0.4154927432537079, "learning_rate": 3.7811347341373456e-06, "loss": 0.7084, "step": 5883 }, { "epoch": 0.24385594098387833, "grad_norm": 0.3937971293926239, "learning_rate": 3.780927514608977e-06, "loss": 0.6868, "step": 5884 }, { "epoch": 0.24389738488955198, "grad_norm": 0.4110066592693329, "learning_rate": 3.780720295080609e-06, "loss": 0.7197, "step": 5885 }, { "epoch": 0.24393882879522566, "grad_norm": 0.45111915469169617, "learning_rate": 3.7805130755522402e-06, "loss": 0.7966, "step": 5886 }, { "epoch": 0.24398027270089934, "grad_norm": 0.3754710257053375, "learning_rate": 3.780305856023872e-06, "loss": 0.7288, "step": 5887 }, { "epoch": 0.244021716606573, "grad_norm": 0.4667139947414398, "learning_rate": 3.780098636495504e-06, "loss": 0.7539, "step": 5888 }, { "epoch": 0.24406316051224666, "grad_norm": 0.45286795496940613, "learning_rate": 3.7798914169671352e-06, "loss": 0.804, "step": 5889 }, { "epoch": 0.24410460441792034, "grad_norm": 0.4492534399032593, "learning_rate": 3.779684197438767e-06, "loss": 0.7129, "step": 5890 }, { "epoch": 0.24414604832359402, "grad_norm": 0.42626526951789856, "learning_rate": 3.7794769779103984e-06, "loss": 0.7632, "step": 5891 }, { "epoch": 0.2441874922292677, "grad_norm": 0.42425793409347534, "learning_rate": 3.7792697583820302e-06, "loss": 0.7212, "step": 5892 }, { "epoch": 0.24422893613494134, "grad_norm": 0.3978935778141022, "learning_rate": 3.7790625388536616e-06, "loss": 0.6788, "step": 5893 }, { "epoch": 0.24427038004061502, "grad_norm": 0.4289407730102539, "learning_rate": 3.7788553193252934e-06, "loss": 0.6829, "step": 5894 }, { "epoch": 0.2443118239462887, "grad_norm": 0.4366792142391205, "learning_rate": 3.778648099796925e-06, "loss": 0.7639, "step": 5895 }, { 
"epoch": 0.24435326785196237, "grad_norm": 0.41078776121139526, "learning_rate": 3.7784408802685566e-06, "loss": 0.7191, "step": 5896 }, { "epoch": 0.24439471175763605, "grad_norm": 0.3971204161643982, "learning_rate": 3.778233660740189e-06, "loss": 0.6943, "step": 5897 }, { "epoch": 0.2444361556633097, "grad_norm": 0.4188547134399414, "learning_rate": 3.7780264412118202e-06, "loss": 0.7273, "step": 5898 }, { "epoch": 0.24447759956898338, "grad_norm": 0.4125039875507355, "learning_rate": 3.777819221683452e-06, "loss": 0.7036, "step": 5899 }, { "epoch": 0.24451904347465706, "grad_norm": 0.42868760228157043, "learning_rate": 3.7776120021550834e-06, "loss": 0.7161, "step": 5900 }, { "epoch": 0.24456048738033073, "grad_norm": 0.4040088355541229, "learning_rate": 3.7774047826267152e-06, "loss": 0.7164, "step": 5901 }, { "epoch": 0.24460193128600438, "grad_norm": 0.4231020212173462, "learning_rate": 3.7771975630983466e-06, "loss": 0.6956, "step": 5902 }, { "epoch": 0.24464337519167806, "grad_norm": 0.40728503465652466, "learning_rate": 3.7769903435699784e-06, "loss": 0.7153, "step": 5903 }, { "epoch": 0.24468481909735174, "grad_norm": 0.4164527952671051, "learning_rate": 3.77678312404161e-06, "loss": 0.7344, "step": 5904 }, { "epoch": 0.2447262630030254, "grad_norm": 0.3739519715309143, "learning_rate": 3.7765759045132416e-06, "loss": 0.6434, "step": 5905 }, { "epoch": 0.24476770690869906, "grad_norm": 0.4463658034801483, "learning_rate": 3.7763686849848734e-06, "loss": 0.7329, "step": 5906 }, { "epoch": 0.24480915081437274, "grad_norm": 0.40119510889053345, "learning_rate": 3.776161465456505e-06, "loss": 0.667, "step": 5907 }, { "epoch": 0.24485059472004642, "grad_norm": 0.43785572052001953, "learning_rate": 3.7759542459281366e-06, "loss": 0.724, "step": 5908 }, { "epoch": 0.2448920386257201, "grad_norm": 0.4276801645755768, "learning_rate": 3.775747026399768e-06, "loss": 0.687, "step": 5909 }, { "epoch": 0.24493348253139377, "grad_norm": 0.41110527515411377, 
"learning_rate": 3.7755398068714e-06, "loss": 0.7173, "step": 5910 }, { "epoch": 0.24497492643706742, "grad_norm": 0.4754437804222107, "learning_rate": 3.7753325873430312e-06, "loss": 0.7399, "step": 5911 }, { "epoch": 0.2450163703427411, "grad_norm": 0.4362882077693939, "learning_rate": 3.775125367814663e-06, "loss": 0.6903, "step": 5912 }, { "epoch": 0.24505781424841477, "grad_norm": 0.3995305001735687, "learning_rate": 3.7749181482862944e-06, "loss": 0.6829, "step": 5913 }, { "epoch": 0.24509925815408845, "grad_norm": 0.4270041584968567, "learning_rate": 3.7747109287579267e-06, "loss": 0.7314, "step": 5914 }, { "epoch": 0.2451407020597621, "grad_norm": 0.41086867451667786, "learning_rate": 3.7745037092295585e-06, "loss": 0.7163, "step": 5915 }, { "epoch": 0.24518214596543578, "grad_norm": 0.4274124801158905, "learning_rate": 3.77429648970119e-06, "loss": 0.707, "step": 5916 }, { "epoch": 0.24522358987110945, "grad_norm": 0.38177773356437683, "learning_rate": 3.7740892701728217e-06, "loss": 0.6783, "step": 5917 }, { "epoch": 0.24526503377678313, "grad_norm": 0.4351160526275635, "learning_rate": 3.773882050644453e-06, "loss": 0.7225, "step": 5918 }, { "epoch": 0.2453064776824568, "grad_norm": 0.467419296503067, "learning_rate": 3.773674831116085e-06, "loss": 0.7566, "step": 5919 }, { "epoch": 0.24534792158813046, "grad_norm": 0.4119054675102234, "learning_rate": 3.7734676115877162e-06, "loss": 0.6718, "step": 5920 }, { "epoch": 0.24538936549380413, "grad_norm": 0.4179508686065674, "learning_rate": 3.773260392059348e-06, "loss": 0.6887, "step": 5921 }, { "epoch": 0.2454308093994778, "grad_norm": 0.4185778796672821, "learning_rate": 3.7730531725309794e-06, "loss": 0.7217, "step": 5922 }, { "epoch": 0.2454722533051515, "grad_norm": 0.42639216780662537, "learning_rate": 3.7728459530026112e-06, "loss": 0.6978, "step": 5923 }, { "epoch": 0.24551369721082514, "grad_norm": 0.40835005044937134, "learning_rate": 3.772638733474243e-06, "loss": 0.666, "step": 5924 }, { 
"epoch": 0.24555514111649882, "grad_norm": 0.433062881231308, "learning_rate": 3.7724315139458744e-06, "loss": 0.6959, "step": 5925 }, { "epoch": 0.2455965850221725, "grad_norm": 0.43335452675819397, "learning_rate": 3.7722242944175062e-06, "loss": 0.7346, "step": 5926 }, { "epoch": 0.24563802892784617, "grad_norm": 0.413692831993103, "learning_rate": 3.7720170748891376e-06, "loss": 0.7021, "step": 5927 }, { "epoch": 0.24567947283351982, "grad_norm": 0.40733984112739563, "learning_rate": 3.7718098553607694e-06, "loss": 0.7085, "step": 5928 }, { "epoch": 0.2457209167391935, "grad_norm": 0.4565483033657074, "learning_rate": 3.771602635832401e-06, "loss": 0.7649, "step": 5929 }, { "epoch": 0.24576236064486717, "grad_norm": 0.4183756411075592, "learning_rate": 3.7713954163040326e-06, "loss": 0.6854, "step": 5930 }, { "epoch": 0.24580380455054085, "grad_norm": 0.4421718120574951, "learning_rate": 3.771188196775665e-06, "loss": 0.7328, "step": 5931 }, { "epoch": 0.24584524845621453, "grad_norm": 0.44818130135536194, "learning_rate": 3.7709809772472963e-06, "loss": 0.7666, "step": 5932 }, { "epoch": 0.24588669236188818, "grad_norm": 0.46970635652542114, "learning_rate": 3.770773757718928e-06, "loss": 0.7161, "step": 5933 }, { "epoch": 0.24592813626756185, "grad_norm": 0.4007396697998047, "learning_rate": 3.7705665381905594e-06, "loss": 0.7092, "step": 5934 }, { "epoch": 0.24596958017323553, "grad_norm": 0.41251465678215027, "learning_rate": 3.7703593186621913e-06, "loss": 0.7422, "step": 5935 }, { "epoch": 0.2460110240789092, "grad_norm": 0.41428035497665405, "learning_rate": 3.7701520991338226e-06, "loss": 0.6802, "step": 5936 }, { "epoch": 0.24605246798458286, "grad_norm": 0.4202180504798889, "learning_rate": 3.7699448796054545e-06, "loss": 0.6816, "step": 5937 }, { "epoch": 0.24609391189025653, "grad_norm": 0.3958451449871063, "learning_rate": 3.769737660077086e-06, "loss": 0.7147, "step": 5938 }, { "epoch": 0.2461353557959302, "grad_norm": 0.41043153405189514, 
"learning_rate": 3.7695304405487176e-06, "loss": 0.7301, "step": 5939 }, { "epoch": 0.2461767997016039, "grad_norm": 0.42235997319221497, "learning_rate": 3.7693232210203495e-06, "loss": 0.7114, "step": 5940 }, { "epoch": 0.24621824360727754, "grad_norm": 0.3827250301837921, "learning_rate": 3.769116001491981e-06, "loss": 0.7188, "step": 5941 }, { "epoch": 0.24625968751295121, "grad_norm": 0.3816659152507782, "learning_rate": 3.7689087819636126e-06, "loss": 0.741, "step": 5942 }, { "epoch": 0.2463011314186249, "grad_norm": 0.4334506690502167, "learning_rate": 3.768701562435244e-06, "loss": 0.72, "step": 5943 }, { "epoch": 0.24634257532429857, "grad_norm": 0.4258240759372711, "learning_rate": 3.768494342906876e-06, "loss": 0.7051, "step": 5944 }, { "epoch": 0.24638401922997225, "grad_norm": 0.41385704278945923, "learning_rate": 3.7682871233785072e-06, "loss": 0.6339, "step": 5945 }, { "epoch": 0.2464254631356459, "grad_norm": 0.4213573932647705, "learning_rate": 3.768079903850139e-06, "loss": 0.7401, "step": 5946 }, { "epoch": 0.24646690704131957, "grad_norm": 0.37417614459991455, "learning_rate": 3.7678726843217704e-06, "loss": 0.7351, "step": 5947 }, { "epoch": 0.24650835094699325, "grad_norm": 0.4234601855278015, "learning_rate": 3.7676654647934027e-06, "loss": 0.7502, "step": 5948 }, { "epoch": 0.24654979485266693, "grad_norm": 0.42342886328697205, "learning_rate": 3.7674582452650345e-06, "loss": 0.6682, "step": 5949 }, { "epoch": 0.24659123875834058, "grad_norm": 0.4131332039833069, "learning_rate": 3.767251025736666e-06, "loss": 0.7109, "step": 5950 }, { "epoch": 0.24663268266401425, "grad_norm": 0.3708457052707672, "learning_rate": 3.7670438062082977e-06, "loss": 0.6621, "step": 5951 }, { "epoch": 0.24667412656968793, "grad_norm": 0.45090943574905396, "learning_rate": 3.766836586679929e-06, "loss": 0.7432, "step": 5952 }, { "epoch": 0.2467155704753616, "grad_norm": 0.4185495972633362, "learning_rate": 3.766629367151561e-06, "loss": 0.7048, "step": 5953 }, { 
"epoch": 0.24675701438103526, "grad_norm": 0.44534870982170105, "learning_rate": 3.7664221476231922e-06, "loss": 0.7883, "step": 5954 }, { "epoch": 0.24679845828670893, "grad_norm": 0.4371650815010071, "learning_rate": 3.766214928094824e-06, "loss": 0.7205, "step": 5955 }, { "epoch": 0.2468399021923826, "grad_norm": 0.4230118989944458, "learning_rate": 3.7660077085664554e-06, "loss": 0.7163, "step": 5956 }, { "epoch": 0.2468813460980563, "grad_norm": 0.4045245051383972, "learning_rate": 3.7658004890380872e-06, "loss": 0.7007, "step": 5957 }, { "epoch": 0.24692279000372996, "grad_norm": 0.4006343483924866, "learning_rate": 3.765593269509719e-06, "loss": 0.726, "step": 5958 }, { "epoch": 0.2469642339094036, "grad_norm": 0.4007108509540558, "learning_rate": 3.7653860499813504e-06, "loss": 0.6753, "step": 5959 }, { "epoch": 0.2470056778150773, "grad_norm": 0.477323055267334, "learning_rate": 3.7651788304529822e-06, "loss": 0.7179, "step": 5960 }, { "epoch": 0.24704712172075097, "grad_norm": 0.3884926438331604, "learning_rate": 3.7649716109246136e-06, "loss": 0.6909, "step": 5961 }, { "epoch": 0.24708856562642464, "grad_norm": 0.4601760804653168, "learning_rate": 3.7647643913962454e-06, "loss": 0.772, "step": 5962 }, { "epoch": 0.2471300095320983, "grad_norm": 0.46465587615966797, "learning_rate": 3.764557171867877e-06, "loss": 0.7383, "step": 5963 }, { "epoch": 0.24717145343777197, "grad_norm": 0.3981333076953888, "learning_rate": 3.7643499523395086e-06, "loss": 0.708, "step": 5964 }, { "epoch": 0.24721289734344565, "grad_norm": 0.41850435733795166, "learning_rate": 3.76414273281114e-06, "loss": 0.6968, "step": 5965 }, { "epoch": 0.24725434124911932, "grad_norm": 0.41070640087127686, "learning_rate": 3.7639355132827723e-06, "loss": 0.7349, "step": 5966 }, { "epoch": 0.24729578515479297, "grad_norm": 0.3784337639808655, "learning_rate": 3.763728293754404e-06, "loss": 0.6481, "step": 5967 }, { "epoch": 0.24733722906046665, "grad_norm": 0.43608611822128296, 
"learning_rate": 3.7635210742260355e-06, "loss": 0.7302, "step": 5968 }, { "epoch": 0.24737867296614033, "grad_norm": 0.45348963141441345, "learning_rate": 3.7633138546976673e-06, "loss": 0.7368, "step": 5969 }, { "epoch": 0.247420116871814, "grad_norm": 0.4023630917072296, "learning_rate": 3.7631066351692986e-06, "loss": 0.6952, "step": 5970 }, { "epoch": 0.24746156077748768, "grad_norm": 0.4500887095928192, "learning_rate": 3.7628994156409305e-06, "loss": 0.7659, "step": 5971 }, { "epoch": 0.24750300468316133, "grad_norm": 0.4325196146965027, "learning_rate": 3.762692196112562e-06, "loss": 0.7417, "step": 5972 }, { "epoch": 0.247544448588835, "grad_norm": 0.3835587203502655, "learning_rate": 3.7624849765841937e-06, "loss": 0.6768, "step": 5973 }, { "epoch": 0.24758589249450869, "grad_norm": 0.42258793115615845, "learning_rate": 3.762277757055825e-06, "loss": 0.7471, "step": 5974 }, { "epoch": 0.24762733640018236, "grad_norm": 0.4325827658176422, "learning_rate": 3.762070537527457e-06, "loss": 0.7296, "step": 5975 }, { "epoch": 0.247668780305856, "grad_norm": 0.44545701146125793, "learning_rate": 3.7618633179990887e-06, "loss": 0.7595, "step": 5976 }, { "epoch": 0.2477102242115297, "grad_norm": 0.42231130599975586, "learning_rate": 3.76165609847072e-06, "loss": 0.677, "step": 5977 }, { "epoch": 0.24775166811720337, "grad_norm": 0.40663230419158936, "learning_rate": 3.761448878942352e-06, "loss": 0.6736, "step": 5978 }, { "epoch": 0.24779311202287704, "grad_norm": 0.44123294949531555, "learning_rate": 3.7612416594139832e-06, "loss": 0.7395, "step": 5979 }, { "epoch": 0.24783455592855072, "grad_norm": 0.4406220316886902, "learning_rate": 3.761034439885615e-06, "loss": 0.7223, "step": 5980 }, { "epoch": 0.24787599983422437, "grad_norm": 0.42522433400154114, "learning_rate": 3.7608272203572464e-06, "loss": 0.7531, "step": 5981 }, { "epoch": 0.24791744373989805, "grad_norm": 0.4258928894996643, "learning_rate": 3.7606200008288787e-06, "loss": 0.7332, "step": 5982 }, { 
"epoch": 0.24795888764557172, "grad_norm": 0.45766928791999817, "learning_rate": 3.7604127813005096e-06, "loss": 0.7712, "step": 5983 }, { "epoch": 0.2480003315512454, "grad_norm": 0.41440561413764954, "learning_rate": 3.760205561772142e-06, "loss": 0.7668, "step": 5984 }, { "epoch": 0.24804177545691905, "grad_norm": 0.3836019039154053, "learning_rate": 3.7599983422437737e-06, "loss": 0.676, "step": 5985 }, { "epoch": 0.24808321936259273, "grad_norm": 0.4378919005393982, "learning_rate": 3.759791122715405e-06, "loss": 0.7791, "step": 5986 }, { "epoch": 0.2481246632682664, "grad_norm": 0.443541944026947, "learning_rate": 3.759583903187037e-06, "loss": 0.754, "step": 5987 }, { "epoch": 0.24816610717394008, "grad_norm": 0.4530448317527771, "learning_rate": 3.7593766836586682e-06, "loss": 0.7437, "step": 5988 }, { "epoch": 0.24820755107961373, "grad_norm": 0.41682401299476624, "learning_rate": 3.7591694641303e-06, "loss": 0.7129, "step": 5989 }, { "epoch": 0.2482489949852874, "grad_norm": 0.4373258650302887, "learning_rate": 3.7589622446019314e-06, "loss": 0.738, "step": 5990 }, { "epoch": 0.24829043889096108, "grad_norm": 0.4031035602092743, "learning_rate": 3.7587550250735633e-06, "loss": 0.7046, "step": 5991 }, { "epoch": 0.24833188279663476, "grad_norm": 0.40147364139556885, "learning_rate": 3.758547805545195e-06, "loss": 0.7228, "step": 5992 }, { "epoch": 0.24837332670230844, "grad_norm": 0.4440365731716156, "learning_rate": 3.7583405860168264e-06, "loss": 0.7961, "step": 5993 }, { "epoch": 0.2484147706079821, "grad_norm": 0.44299235939979553, "learning_rate": 3.7581333664884583e-06, "loss": 0.7114, "step": 5994 }, { "epoch": 0.24845621451365577, "grad_norm": 0.4473583996295929, "learning_rate": 3.7579261469600896e-06, "loss": 0.72, "step": 5995 }, { "epoch": 0.24849765841932944, "grad_norm": 0.41851088404655457, "learning_rate": 3.7577189274317215e-06, "loss": 0.6755, "step": 5996 }, { "epoch": 0.24853910232500312, "grad_norm": 0.4060959815979004, 
"learning_rate": 3.757511707903353e-06, "loss": 0.7395, "step": 5997 }, { "epoch": 0.24858054623067677, "grad_norm": 0.3918766677379608, "learning_rate": 3.7573044883749846e-06, "loss": 0.6843, "step": 5998 }, { "epoch": 0.24862199013635045, "grad_norm": 0.45000261068344116, "learning_rate": 3.757097268846616e-06, "loss": 0.7712, "step": 5999 }, { "epoch": 0.24866343404202412, "grad_norm": 0.4027317762374878, "learning_rate": 3.7568900493182483e-06, "loss": 0.7156, "step": 6000 }, { "epoch": 0.2487048779476978, "grad_norm": 0.3953678011894226, "learning_rate": 3.75668282978988e-06, "loss": 0.6929, "step": 6001 }, { "epoch": 0.24874632185337145, "grad_norm": 0.4020349383354187, "learning_rate": 3.7564756102615115e-06, "loss": 0.7173, "step": 6002 }, { "epoch": 0.24878776575904513, "grad_norm": 0.4084712564945221, "learning_rate": 3.7562683907331433e-06, "loss": 0.7018, "step": 6003 }, { "epoch": 0.2488292096647188, "grad_norm": 0.4320645332336426, "learning_rate": 3.7560611712047747e-06, "loss": 0.694, "step": 6004 }, { "epoch": 0.24887065357039248, "grad_norm": 0.4257984459400177, "learning_rate": 3.7558539516764065e-06, "loss": 0.6912, "step": 6005 }, { "epoch": 0.24891209747606616, "grad_norm": 0.437749445438385, "learning_rate": 3.755646732148038e-06, "loss": 0.7456, "step": 6006 }, { "epoch": 0.2489535413817398, "grad_norm": 0.4243311285972595, "learning_rate": 3.7554395126196697e-06, "loss": 0.7141, "step": 6007 }, { "epoch": 0.24899498528741348, "grad_norm": 0.4369007349014282, "learning_rate": 3.755232293091301e-06, "loss": 0.7524, "step": 6008 }, { "epoch": 0.24903642919308716, "grad_norm": 0.4096597731113434, "learning_rate": 3.755025073562933e-06, "loss": 0.7622, "step": 6009 }, { "epoch": 0.24907787309876084, "grad_norm": 0.41250327229499817, "learning_rate": 3.7548178540345647e-06, "loss": 0.718, "step": 6010 }, { "epoch": 0.2491193170044345, "grad_norm": 0.411704957485199, "learning_rate": 3.754610634506196e-06, "loss": 0.6879, "step": 6011 }, { 
"epoch": 0.24916076091010816, "grad_norm": 0.4320193827152252, "learning_rate": 3.754403414977828e-06, "loss": 0.6921, "step": 6012 }, { "epoch": 0.24920220481578184, "grad_norm": 0.3827728033065796, "learning_rate": 3.7541961954494592e-06, "loss": 0.6578, "step": 6013 }, { "epoch": 0.24924364872145552, "grad_norm": 0.42136314511299133, "learning_rate": 3.753988975921091e-06, "loss": 0.7383, "step": 6014 }, { "epoch": 0.24928509262712917, "grad_norm": 0.4044662117958069, "learning_rate": 3.7537817563927224e-06, "loss": 0.668, "step": 6015 }, { "epoch": 0.24932653653280284, "grad_norm": 0.41759005188941956, "learning_rate": 3.7535745368643547e-06, "loss": 0.6996, "step": 6016 }, { "epoch": 0.24936798043847652, "grad_norm": 0.4243796169757843, "learning_rate": 3.7533673173359856e-06, "loss": 0.7639, "step": 6017 }, { "epoch": 0.2494094243441502, "grad_norm": 0.42483577132225037, "learning_rate": 3.753160097807618e-06, "loss": 0.715, "step": 6018 }, { "epoch": 0.24945086824982388, "grad_norm": 0.39412689208984375, "learning_rate": 3.7529528782792497e-06, "loss": 0.6802, "step": 6019 }, { "epoch": 0.24949231215549753, "grad_norm": 0.4239161014556885, "learning_rate": 3.752745658750881e-06, "loss": 0.7122, "step": 6020 }, { "epoch": 0.2495337560611712, "grad_norm": 0.4078959822654724, "learning_rate": 3.752538439222513e-06, "loss": 0.6809, "step": 6021 }, { "epoch": 0.24957519996684488, "grad_norm": 0.42784857749938965, "learning_rate": 3.7523312196941443e-06, "loss": 0.7351, "step": 6022 }, { "epoch": 0.24961664387251856, "grad_norm": 0.42581865191459656, "learning_rate": 3.752124000165776e-06, "loss": 0.7458, "step": 6023 }, { "epoch": 0.2496580877781922, "grad_norm": 0.4087164103984833, "learning_rate": 3.7519167806374074e-06, "loss": 0.7249, "step": 6024 }, { "epoch": 0.24969953168386588, "grad_norm": 0.42763808369636536, "learning_rate": 3.7517095611090393e-06, "loss": 0.7197, "step": 6025 }, { "epoch": 0.24974097558953956, "grad_norm": 0.43708306550979614, 
"learning_rate": 3.7515023415806706e-06, "loss": 0.6758, "step": 6026 }, { "epoch": 0.24978241949521324, "grad_norm": 0.3713022470474243, "learning_rate": 3.7512951220523025e-06, "loss": 0.692, "step": 6027 }, { "epoch": 0.2498238634008869, "grad_norm": 0.37605151534080505, "learning_rate": 3.7510879025239343e-06, "loss": 0.6919, "step": 6028 }, { "epoch": 0.24986530730656056, "grad_norm": 0.38880184292793274, "learning_rate": 3.7508806829955656e-06, "loss": 0.7205, "step": 6029 }, { "epoch": 0.24990675121223424, "grad_norm": 0.4560619294643402, "learning_rate": 3.7506734634671975e-06, "loss": 0.6831, "step": 6030 }, { "epoch": 0.24994819511790792, "grad_norm": 0.447371244430542, "learning_rate": 3.750466243938829e-06, "loss": 0.7466, "step": 6031 }, { "epoch": 0.2499896390235816, "grad_norm": 0.4342488944530487, "learning_rate": 3.7502590244104607e-06, "loss": 0.7263, "step": 6032 }, { "epoch": 0.25003108292925524, "grad_norm": 0.4200589656829834, "learning_rate": 3.750051804882092e-06, "loss": 0.7305, "step": 6033 }, { "epoch": 0.2500725268349289, "grad_norm": 0.4387052059173584, "learning_rate": 3.7498445853537243e-06, "loss": 0.7075, "step": 6034 }, { "epoch": 0.2501139707406026, "grad_norm": 0.467589408159256, "learning_rate": 3.7496373658253552e-06, "loss": 0.7893, "step": 6035 }, { "epoch": 0.2501554146462763, "grad_norm": 0.3905572295188904, "learning_rate": 3.7494301462969875e-06, "loss": 0.6697, "step": 6036 }, { "epoch": 0.25019685855194995, "grad_norm": 0.4368402361869812, "learning_rate": 3.7492229267686193e-06, "loss": 0.7454, "step": 6037 }, { "epoch": 0.25023830245762363, "grad_norm": 0.46885940432548523, "learning_rate": 3.7490157072402507e-06, "loss": 0.7844, "step": 6038 }, { "epoch": 0.25027974636329725, "grad_norm": 0.40522655844688416, "learning_rate": 3.7488084877118825e-06, "loss": 0.7056, "step": 6039 }, { "epoch": 0.25032119026897093, "grad_norm": 0.4127856492996216, "learning_rate": 3.748601268183514e-06, "loss": 0.7169, "step": 6040 }, { 
"epoch": 0.2503626341746446, "grad_norm": 0.4249977767467499, "learning_rate": 3.7483940486551457e-06, "loss": 0.7151, "step": 6041 }, { "epoch": 0.2504040780803183, "grad_norm": 0.44575536251068115, "learning_rate": 3.748186829126777e-06, "loss": 0.709, "step": 6042 }, { "epoch": 0.25044552198599196, "grad_norm": 0.45445626974105835, "learning_rate": 3.747979609598409e-06, "loss": 0.7119, "step": 6043 }, { "epoch": 0.25048696589166564, "grad_norm": 0.3987843096256256, "learning_rate": 3.7477723900700402e-06, "loss": 0.6816, "step": 6044 }, { "epoch": 0.2505284097973393, "grad_norm": 0.45341265201568604, "learning_rate": 3.747565170541672e-06, "loss": 0.7505, "step": 6045 }, { "epoch": 0.250569853703013, "grad_norm": 0.4135962724685669, "learning_rate": 3.747357951013304e-06, "loss": 0.7034, "step": 6046 }, { "epoch": 0.25061129760868667, "grad_norm": 0.4489751160144806, "learning_rate": 3.7471507314849352e-06, "loss": 0.752, "step": 6047 }, { "epoch": 0.2506527415143603, "grad_norm": 0.4258221685886383, "learning_rate": 3.746943511956567e-06, "loss": 0.729, "step": 6048 }, { "epoch": 0.25069418542003397, "grad_norm": 0.44324541091918945, "learning_rate": 3.7467362924281984e-06, "loss": 0.7683, "step": 6049 }, { "epoch": 0.25073562932570764, "grad_norm": 0.41910234093666077, "learning_rate": 3.7465290728998307e-06, "loss": 0.7483, "step": 6050 }, { "epoch": 0.2507770732313813, "grad_norm": 0.4094471335411072, "learning_rate": 3.7463218533714616e-06, "loss": 0.6517, "step": 6051 }, { "epoch": 0.250818517137055, "grad_norm": 0.4462414085865021, "learning_rate": 3.746114633843094e-06, "loss": 0.7058, "step": 6052 }, { "epoch": 0.2508599610427287, "grad_norm": 0.434004545211792, "learning_rate": 3.7459074143147257e-06, "loss": 0.7698, "step": 6053 }, { "epoch": 0.25090140494840235, "grad_norm": 0.3918675184249878, "learning_rate": 3.745700194786357e-06, "loss": 0.6931, "step": 6054 }, { "epoch": 0.25094284885407603, "grad_norm": 0.3794451653957367, "learning_rate": 
3.745492975257989e-06, "loss": 0.7218, "step": 6055 }, { "epoch": 0.2509842927597497, "grad_norm": 0.4094288647174835, "learning_rate": 3.7452857557296203e-06, "loss": 0.7207, "step": 6056 }, { "epoch": 0.2510257366654233, "grad_norm": 0.46224015951156616, "learning_rate": 3.745078536201252e-06, "loss": 0.7512, "step": 6057 }, { "epoch": 0.251067180571097, "grad_norm": 0.4420699179172516, "learning_rate": 3.7448713166728835e-06, "loss": 0.7397, "step": 6058 }, { "epoch": 0.2511086244767707, "grad_norm": 0.39591434597969055, "learning_rate": 3.7446640971445153e-06, "loss": 0.6831, "step": 6059 }, { "epoch": 0.25115006838244436, "grad_norm": 0.43759599328041077, "learning_rate": 3.7444568776161466e-06, "loss": 0.7205, "step": 6060 }, { "epoch": 0.25119151228811804, "grad_norm": 0.43256527185440063, "learning_rate": 3.7442496580877785e-06, "loss": 0.7148, "step": 6061 }, { "epoch": 0.2512329561937917, "grad_norm": 0.41243574023246765, "learning_rate": 3.7440424385594103e-06, "loss": 0.7302, "step": 6062 }, { "epoch": 0.2512744000994654, "grad_norm": 0.44729259610176086, "learning_rate": 3.7438352190310417e-06, "loss": 0.7402, "step": 6063 }, { "epoch": 0.25131584400513907, "grad_norm": 0.4826046824455261, "learning_rate": 3.7436279995026735e-06, "loss": 0.7119, "step": 6064 }, { "epoch": 0.2513572879108127, "grad_norm": 0.464213490486145, "learning_rate": 3.743420779974305e-06, "loss": 0.7991, "step": 6065 }, { "epoch": 0.25139873181648636, "grad_norm": 0.4252159595489502, "learning_rate": 3.7432135604459367e-06, "loss": 0.7197, "step": 6066 }, { "epoch": 0.25144017572216004, "grad_norm": 0.4489288926124573, "learning_rate": 3.743006340917568e-06, "loss": 0.7103, "step": 6067 }, { "epoch": 0.2514816196278337, "grad_norm": 0.3889477252960205, "learning_rate": 3.7427991213892003e-06, "loss": 0.7043, "step": 6068 }, { "epoch": 0.2515230635335074, "grad_norm": 0.3897220194339752, "learning_rate": 3.7425919018608312e-06, "loss": 0.6821, "step": 6069 }, { "epoch": 
0.2515645074391811, "grad_norm": 0.45167189836502075, "learning_rate": 3.7423846823324635e-06, "loss": 0.7058, "step": 6070 }, { "epoch": 0.25160595134485475, "grad_norm": 0.4118085205554962, "learning_rate": 3.7421774628040953e-06, "loss": 0.7422, "step": 6071 }, { "epoch": 0.2516473952505284, "grad_norm": 0.41563406586647034, "learning_rate": 3.7419702432757267e-06, "loss": 0.6758, "step": 6072 }, { "epoch": 0.2516888391562021, "grad_norm": 0.3947673738002777, "learning_rate": 3.7417630237473585e-06, "loss": 0.7109, "step": 6073 }, { "epoch": 0.2517302830618757, "grad_norm": 0.4155599772930145, "learning_rate": 3.74155580421899e-06, "loss": 0.7319, "step": 6074 }, { "epoch": 0.2517717269675494, "grad_norm": 0.4301125109195709, "learning_rate": 3.7413485846906217e-06, "loss": 0.7153, "step": 6075 }, { "epoch": 0.2518131708732231, "grad_norm": 0.4532957971096039, "learning_rate": 3.741141365162253e-06, "loss": 0.7383, "step": 6076 }, { "epoch": 0.25185461477889676, "grad_norm": 0.41179510951042175, "learning_rate": 3.740934145633885e-06, "loss": 0.6641, "step": 6077 }, { "epoch": 0.25189605868457043, "grad_norm": 0.4385316073894501, "learning_rate": 3.7407269261055162e-06, "loss": 0.7422, "step": 6078 }, { "epoch": 0.2519375025902441, "grad_norm": 0.39749807119369507, "learning_rate": 3.740519706577148e-06, "loss": 0.7372, "step": 6079 }, { "epoch": 0.2519789464959178, "grad_norm": 0.4171895980834961, "learning_rate": 3.74031248704878e-06, "loss": 0.7681, "step": 6080 }, { "epoch": 0.25202039040159147, "grad_norm": 0.46200546622276306, "learning_rate": 3.7401052675204113e-06, "loss": 0.7395, "step": 6081 }, { "epoch": 0.25206183430726514, "grad_norm": 0.4416584074497223, "learning_rate": 3.739898047992043e-06, "loss": 0.7295, "step": 6082 }, { "epoch": 0.25210327821293876, "grad_norm": 0.4310278594493866, "learning_rate": 3.7396908284636744e-06, "loss": 0.7485, "step": 6083 }, { "epoch": 0.25214472211861244, "grad_norm": 0.4204119145870209, "learning_rate": 
3.7394836089353067e-06, "loss": 0.7156, "step": 6084 }, { "epoch": 0.2521861660242861, "grad_norm": 0.4342232048511505, "learning_rate": 3.7392763894069376e-06, "loss": 0.7324, "step": 6085 }, { "epoch": 0.2522276099299598, "grad_norm": 0.395269513130188, "learning_rate": 3.73906916987857e-06, "loss": 0.7253, "step": 6086 }, { "epoch": 0.25226905383563347, "grad_norm": 0.39830729365348816, "learning_rate": 3.7388619503502013e-06, "loss": 0.6663, "step": 6087 }, { "epoch": 0.25231049774130715, "grad_norm": 0.432003915309906, "learning_rate": 3.738654730821833e-06, "loss": 0.7439, "step": 6088 }, { "epoch": 0.2523519416469808, "grad_norm": 0.40886783599853516, "learning_rate": 3.738447511293465e-06, "loss": 0.6995, "step": 6089 }, { "epoch": 0.2523933855526545, "grad_norm": 0.44154804944992065, "learning_rate": 3.7382402917650963e-06, "loss": 0.7522, "step": 6090 }, { "epoch": 0.2524348294583282, "grad_norm": 0.4480719566345215, "learning_rate": 3.738033072236728e-06, "loss": 0.7439, "step": 6091 }, { "epoch": 0.2524762733640018, "grad_norm": 0.4115225672721863, "learning_rate": 3.7378258527083595e-06, "loss": 0.6628, "step": 6092 }, { "epoch": 0.2525177172696755, "grad_norm": 0.3956930339336395, "learning_rate": 3.7376186331799913e-06, "loss": 0.6376, "step": 6093 }, { "epoch": 0.25255916117534916, "grad_norm": 0.43507394194602966, "learning_rate": 3.7374114136516227e-06, "loss": 0.6757, "step": 6094 }, { "epoch": 0.25260060508102283, "grad_norm": 0.5457044243812561, "learning_rate": 3.7372041941232545e-06, "loss": 0.8149, "step": 6095 }, { "epoch": 0.2526420489866965, "grad_norm": 0.397545725107193, "learning_rate": 3.736996974594886e-06, "loss": 0.7065, "step": 6096 }, { "epoch": 0.2526834928923702, "grad_norm": 0.4298243522644043, "learning_rate": 3.7367897550665177e-06, "loss": 0.7568, "step": 6097 }, { "epoch": 0.25272493679804386, "grad_norm": 0.39709949493408203, "learning_rate": 3.7365825355381495e-06, "loss": 0.7032, "step": 6098 }, { "epoch": 
0.25276638070371754, "grad_norm": 0.4216078221797943, "learning_rate": 3.736375316009781e-06, "loss": 0.7374, "step": 6099 }, { "epoch": 0.25280782460939116, "grad_norm": 0.41981711983680725, "learning_rate": 3.736168096481413e-06, "loss": 0.6865, "step": 6100 }, { "epoch": 0.25284926851506484, "grad_norm": 0.4466809928417206, "learning_rate": 3.735960876953044e-06, "loss": 0.7103, "step": 6101 }, { "epoch": 0.2528907124207385, "grad_norm": 0.43563538789749146, "learning_rate": 3.7357536574246763e-06, "loss": 0.6917, "step": 6102 }, { "epoch": 0.2529321563264122, "grad_norm": 0.39142948389053345, "learning_rate": 3.7355464378963072e-06, "loss": 0.6704, "step": 6103 }, { "epoch": 0.25297360023208587, "grad_norm": 0.41214078664779663, "learning_rate": 3.7353392183679395e-06, "loss": 0.6733, "step": 6104 }, { "epoch": 0.25301504413775955, "grad_norm": 0.4458875358104706, "learning_rate": 3.735131998839571e-06, "loss": 0.7428, "step": 6105 }, { "epoch": 0.2530564880434332, "grad_norm": 0.41436514258384705, "learning_rate": 3.7349247793112027e-06, "loss": 0.734, "step": 6106 }, { "epoch": 0.2530979319491069, "grad_norm": 0.4259885549545288, "learning_rate": 3.7347175597828345e-06, "loss": 0.7358, "step": 6107 }, { "epoch": 0.2531393758547806, "grad_norm": 0.46017834544181824, "learning_rate": 3.734510340254466e-06, "loss": 0.7795, "step": 6108 }, { "epoch": 0.2531808197604542, "grad_norm": 0.42012926936149597, "learning_rate": 3.7343031207260977e-06, "loss": 0.7268, "step": 6109 }, { "epoch": 0.2532222636661279, "grad_norm": 0.4547656774520874, "learning_rate": 3.734095901197729e-06, "loss": 0.7327, "step": 6110 }, { "epoch": 0.25326370757180156, "grad_norm": 0.4279201924800873, "learning_rate": 3.733888681669361e-06, "loss": 0.7534, "step": 6111 }, { "epoch": 0.25330515147747523, "grad_norm": 0.41167742013931274, "learning_rate": 3.7336814621409923e-06, "loss": 0.7205, "step": 6112 }, { "epoch": 0.2533465953831489, "grad_norm": 0.4240303635597229, "learning_rate": 
3.733474242612624e-06, "loss": 0.6852, "step": 6113 }, { "epoch": 0.2533880392888226, "grad_norm": 0.4501185119152069, "learning_rate": 3.733267023084256e-06, "loss": 0.7407, "step": 6114 }, { "epoch": 0.25342948319449626, "grad_norm": 0.42156773805618286, "learning_rate": 3.7330598035558873e-06, "loss": 0.7356, "step": 6115 }, { "epoch": 0.25347092710016994, "grad_norm": 0.3955972194671631, "learning_rate": 3.732852584027519e-06, "loss": 0.676, "step": 6116 }, { "epoch": 0.2535123710058436, "grad_norm": 0.428305059671402, "learning_rate": 3.7326453644991505e-06, "loss": 0.707, "step": 6117 }, { "epoch": 0.25355381491151724, "grad_norm": 0.41422170400619507, "learning_rate": 3.7324381449707827e-06, "loss": 0.7705, "step": 6118 }, { "epoch": 0.2535952588171909, "grad_norm": 0.41554686427116394, "learning_rate": 3.7322309254424136e-06, "loss": 0.7056, "step": 6119 }, { "epoch": 0.2536367027228646, "grad_norm": 0.39503031969070435, "learning_rate": 3.732023705914046e-06, "loss": 0.6763, "step": 6120 }, { "epoch": 0.25367814662853827, "grad_norm": 0.394479364156723, "learning_rate": 3.7318164863856773e-06, "loss": 0.6619, "step": 6121 }, { "epoch": 0.25371959053421195, "grad_norm": 0.3825823664665222, "learning_rate": 3.731609266857309e-06, "loss": 0.6973, "step": 6122 }, { "epoch": 0.2537610344398856, "grad_norm": 0.43291887640953064, "learning_rate": 3.731402047328941e-06, "loss": 0.7185, "step": 6123 }, { "epoch": 0.2538024783455593, "grad_norm": 0.407031387090683, "learning_rate": 3.7311948278005723e-06, "loss": 0.6604, "step": 6124 }, { "epoch": 0.253843922251233, "grad_norm": 0.45129281282424927, "learning_rate": 3.730987608272204e-06, "loss": 0.7351, "step": 6125 }, { "epoch": 0.2538853661569066, "grad_norm": 0.43731164932250977, "learning_rate": 3.7307803887438355e-06, "loss": 0.6721, "step": 6126 }, { "epoch": 0.2539268100625803, "grad_norm": 0.4282631278038025, "learning_rate": 3.7305731692154673e-06, "loss": 0.7319, "step": 6127 }, { "epoch": 
0.25396825396825395, "grad_norm": 0.44185078144073486, "learning_rate": 3.7303659496870987e-06, "loss": 0.7065, "step": 6128 }, { "epoch": 0.25400969787392763, "grad_norm": 0.43341001868247986, "learning_rate": 3.7301587301587305e-06, "loss": 0.738, "step": 6129 }, { "epoch": 0.2540511417796013, "grad_norm": 0.38501355051994324, "learning_rate": 3.729951510630362e-06, "loss": 0.6528, "step": 6130 }, { "epoch": 0.254092585685275, "grad_norm": 0.407642126083374, "learning_rate": 3.7297442911019937e-06, "loss": 0.74, "step": 6131 }, { "epoch": 0.25413402959094866, "grad_norm": 0.4139276444911957, "learning_rate": 3.7295370715736255e-06, "loss": 0.6926, "step": 6132 }, { "epoch": 0.25417547349662234, "grad_norm": 0.43417662382125854, "learning_rate": 3.729329852045257e-06, "loss": 0.7432, "step": 6133 }, { "epoch": 0.254216917402296, "grad_norm": 0.4174732267856598, "learning_rate": 3.729122632516889e-06, "loss": 0.7297, "step": 6134 }, { "epoch": 0.25425836130796964, "grad_norm": 0.41027596592903137, "learning_rate": 3.72891541298852e-06, "loss": 0.7185, "step": 6135 }, { "epoch": 0.2542998052136433, "grad_norm": 0.43580448627471924, "learning_rate": 3.7287081934601523e-06, "loss": 0.752, "step": 6136 }, { "epoch": 0.254341249119317, "grad_norm": 0.4059290587902069, "learning_rate": 3.7285009739317832e-06, "loss": 0.7268, "step": 6137 }, { "epoch": 0.25438269302499067, "grad_norm": 0.39432117342948914, "learning_rate": 3.7282937544034155e-06, "loss": 0.7283, "step": 6138 }, { "epoch": 0.25442413693066435, "grad_norm": 0.4232621490955353, "learning_rate": 3.728086534875047e-06, "loss": 0.7236, "step": 6139 }, { "epoch": 0.254465580836338, "grad_norm": 0.4162512421607971, "learning_rate": 3.7278793153466787e-06, "loss": 0.7546, "step": 6140 }, { "epoch": 0.2545070247420117, "grad_norm": 0.4374372959136963, "learning_rate": 3.7276720958183105e-06, "loss": 0.7205, "step": 6141 }, { "epoch": 0.2545484686476854, "grad_norm": 0.4262143075466156, "learning_rate": 
3.727464876289942e-06, "loss": 0.7872, "step": 6142 }, { "epoch": 0.25458991255335905, "grad_norm": 0.4533160924911499, "learning_rate": 3.7272576567615737e-06, "loss": 0.7571, "step": 6143 }, { "epoch": 0.2546313564590327, "grad_norm": 0.39063358306884766, "learning_rate": 3.727050437233205e-06, "loss": 0.728, "step": 6144 }, { "epoch": 0.25467280036470635, "grad_norm": 0.39919576048851013, "learning_rate": 3.726843217704837e-06, "loss": 0.7412, "step": 6145 }, { "epoch": 0.25471424427038003, "grad_norm": 0.44663751125335693, "learning_rate": 3.7266359981764683e-06, "loss": 0.7273, "step": 6146 }, { "epoch": 0.2547556881760537, "grad_norm": 0.4171029329299927, "learning_rate": 3.7264287786481e-06, "loss": 0.7157, "step": 6147 }, { "epoch": 0.2547971320817274, "grad_norm": 0.40051400661468506, "learning_rate": 3.7262215591197315e-06, "loss": 0.6776, "step": 6148 }, { "epoch": 0.25483857598740106, "grad_norm": 0.42343100905418396, "learning_rate": 3.7260143395913633e-06, "loss": 0.7246, "step": 6149 }, { "epoch": 0.25488001989307474, "grad_norm": 0.42666757106781006, "learning_rate": 3.725807120062995e-06, "loss": 0.7214, "step": 6150 }, { "epoch": 0.2549214637987484, "grad_norm": 0.3996857702732086, "learning_rate": 3.7255999005346265e-06, "loss": 0.6819, "step": 6151 }, { "epoch": 0.25496290770442204, "grad_norm": 0.4599875807762146, "learning_rate": 3.7253926810062587e-06, "loss": 0.6921, "step": 6152 }, { "epoch": 0.2550043516100957, "grad_norm": 0.39232611656188965, "learning_rate": 3.7251854614778897e-06, "loss": 0.7358, "step": 6153 }, { "epoch": 0.2550457955157694, "grad_norm": 0.3988505005836487, "learning_rate": 3.724978241949522e-06, "loss": 0.7046, "step": 6154 }, { "epoch": 0.25508723942144307, "grad_norm": 0.41904374957084656, "learning_rate": 3.7247710224211533e-06, "loss": 0.7188, "step": 6155 }, { "epoch": 0.25512868332711675, "grad_norm": 0.40041491389274597, "learning_rate": 3.724563802892785e-06, "loss": 0.6318, "step": 6156 }, { "epoch": 
0.2551701272327904, "grad_norm": 0.4093829393386841, "learning_rate": 3.7243565833644165e-06, "loss": 0.6777, "step": 6157 }, { "epoch": 0.2552115711384641, "grad_norm": 0.400246262550354, "learning_rate": 3.7241493638360483e-06, "loss": 0.7234, "step": 6158 }, { "epoch": 0.2552530150441378, "grad_norm": 0.38729923963546753, "learning_rate": 3.72394214430768e-06, "loss": 0.7227, "step": 6159 }, { "epoch": 0.25529445894981145, "grad_norm": 0.41513344645500183, "learning_rate": 3.7237349247793115e-06, "loss": 0.7075, "step": 6160 }, { "epoch": 0.2553359028554851, "grad_norm": 0.41064581274986267, "learning_rate": 3.7235277052509433e-06, "loss": 0.6682, "step": 6161 }, { "epoch": 0.25537734676115875, "grad_norm": 0.42259982228279114, "learning_rate": 3.7233204857225747e-06, "loss": 0.7343, "step": 6162 }, { "epoch": 0.25541879066683243, "grad_norm": 0.44401171803474426, "learning_rate": 3.7231132661942065e-06, "loss": 0.7236, "step": 6163 }, { "epoch": 0.2554602345725061, "grad_norm": 0.4470435380935669, "learning_rate": 3.722906046665838e-06, "loss": 0.7195, "step": 6164 }, { "epoch": 0.2555016784781798, "grad_norm": 0.4186209738254547, "learning_rate": 3.7226988271374697e-06, "loss": 0.7407, "step": 6165 }, { "epoch": 0.25554312238385346, "grad_norm": 0.364193856716156, "learning_rate": 3.722491607609101e-06, "loss": 0.6737, "step": 6166 }, { "epoch": 0.25558456628952714, "grad_norm": 0.4577482044696808, "learning_rate": 3.722284388080733e-06, "loss": 0.7218, "step": 6167 }, { "epoch": 0.2556260101952008, "grad_norm": 0.4418712258338928, "learning_rate": 3.722077168552365e-06, "loss": 0.6958, "step": 6168 }, { "epoch": 0.2556674541008745, "grad_norm": 0.3981810510158539, "learning_rate": 3.721869949023996e-06, "loss": 0.7332, "step": 6169 }, { "epoch": 0.2557088980065481, "grad_norm": 0.43710967898368835, "learning_rate": 3.7216627294956283e-06, "loss": 0.7478, "step": 6170 }, { "epoch": 0.2557503419122218, "grad_norm": 0.42081883549690247, "learning_rate": 
3.7214555099672593e-06, "loss": 0.6965, "step": 6171 }, { "epoch": 0.25579178581789547, "grad_norm": 0.39096295833587646, "learning_rate": 3.7212482904388915e-06, "loss": 0.6886, "step": 6172 }, { "epoch": 0.25583322972356914, "grad_norm": 0.3917675018310547, "learning_rate": 3.721041070910523e-06, "loss": 0.627, "step": 6173 }, { "epoch": 0.2558746736292428, "grad_norm": 0.3938373327255249, "learning_rate": 3.7208338513821547e-06, "loss": 0.6943, "step": 6174 }, { "epoch": 0.2559161175349165, "grad_norm": 0.4196326434612274, "learning_rate": 3.7206266318537865e-06, "loss": 0.7389, "step": 6175 }, { "epoch": 0.2559575614405902, "grad_norm": 0.43998026847839355, "learning_rate": 3.720419412325418e-06, "loss": 0.7117, "step": 6176 }, { "epoch": 0.25599900534626385, "grad_norm": 0.43819454312324524, "learning_rate": 3.7202121927970497e-06, "loss": 0.7494, "step": 6177 }, { "epoch": 0.25604044925193753, "grad_norm": 0.45608949661254883, "learning_rate": 3.720004973268681e-06, "loss": 0.7378, "step": 6178 }, { "epoch": 0.25608189315761115, "grad_norm": 0.41671475768089294, "learning_rate": 3.719797753740313e-06, "loss": 0.7065, "step": 6179 }, { "epoch": 0.25612333706328483, "grad_norm": 0.4319727420806885, "learning_rate": 3.7195905342119443e-06, "loss": 0.717, "step": 6180 }, { "epoch": 0.2561647809689585, "grad_norm": 0.4710858464241028, "learning_rate": 3.719383314683576e-06, "loss": 0.7307, "step": 6181 }, { "epoch": 0.2562062248746322, "grad_norm": 0.40955880284309387, "learning_rate": 3.7191760951552075e-06, "loss": 0.6978, "step": 6182 }, { "epoch": 0.25624766878030586, "grad_norm": 0.41667839884757996, "learning_rate": 3.7189688756268393e-06, "loss": 0.6904, "step": 6183 }, { "epoch": 0.25628911268597954, "grad_norm": 0.42708152532577515, "learning_rate": 3.718761656098471e-06, "loss": 0.6689, "step": 6184 }, { "epoch": 0.2563305565916532, "grad_norm": 0.42344146966934204, "learning_rate": 3.7185544365701025e-06, "loss": 0.7117, "step": 6185 }, { "epoch": 
0.2563720004973269, "grad_norm": 0.4450227618217468, "learning_rate": 3.7183472170417347e-06, "loss": 0.7114, "step": 6186 }, { "epoch": 0.2564134444030005, "grad_norm": 0.4261973798274994, "learning_rate": 3.7181399975133657e-06, "loss": 0.7317, "step": 6187 }, { "epoch": 0.2564548883086742, "grad_norm": 0.40273383259773254, "learning_rate": 3.717932777984998e-06, "loss": 0.6539, "step": 6188 }, { "epoch": 0.25649633221434787, "grad_norm": 0.40947920083999634, "learning_rate": 3.7177255584566293e-06, "loss": 0.6924, "step": 6189 }, { "epoch": 0.25653777612002154, "grad_norm": 0.4407038986682892, "learning_rate": 3.717518338928261e-06, "loss": 0.7716, "step": 6190 }, { "epoch": 0.2565792200256952, "grad_norm": 0.4632432162761688, "learning_rate": 3.7173111193998925e-06, "loss": 0.6904, "step": 6191 }, { "epoch": 0.2566206639313689, "grad_norm": 0.43985557556152344, "learning_rate": 3.7171038998715243e-06, "loss": 0.7166, "step": 6192 }, { "epoch": 0.2566621078370426, "grad_norm": 0.4427506923675537, "learning_rate": 3.716896680343156e-06, "loss": 0.6951, "step": 6193 }, { "epoch": 0.25670355174271625, "grad_norm": 0.4563213884830475, "learning_rate": 3.7166894608147875e-06, "loss": 0.7432, "step": 6194 }, { "epoch": 0.25674499564838993, "grad_norm": 0.3936905860900879, "learning_rate": 3.7164822412864193e-06, "loss": 0.6648, "step": 6195 }, { "epoch": 0.25678643955406355, "grad_norm": 0.43082064390182495, "learning_rate": 3.7162750217580507e-06, "loss": 0.7024, "step": 6196 }, { "epoch": 0.2568278834597372, "grad_norm": 0.3919983208179474, "learning_rate": 3.7160678022296825e-06, "loss": 0.6605, "step": 6197 }, { "epoch": 0.2568693273654109, "grad_norm": 0.3916562497615814, "learning_rate": 3.715860582701314e-06, "loss": 0.667, "step": 6198 }, { "epoch": 0.2569107712710846, "grad_norm": 0.3910791277885437, "learning_rate": 3.7156533631729457e-06, "loss": 0.6899, "step": 6199 }, { "epoch": 0.25695221517675826, "grad_norm": 0.4630964398384094, "learning_rate": 
3.715446143644577e-06, "loss": 0.7576, "step": 6200 }, { "epoch": 0.25699365908243194, "grad_norm": 0.4109917879104614, "learning_rate": 3.715238924116209e-06, "loss": 0.6863, "step": 6201 }, { "epoch": 0.2570351029881056, "grad_norm": 0.5220915675163269, "learning_rate": 3.715031704587841e-06, "loss": 0.7163, "step": 6202 }, { "epoch": 0.2570765468937793, "grad_norm": 0.4042677879333496, "learning_rate": 3.714824485059472e-06, "loss": 0.733, "step": 6203 }, { "epoch": 0.25711799079945297, "grad_norm": 0.39604419469833374, "learning_rate": 3.7146172655311043e-06, "loss": 0.7205, "step": 6204 }, { "epoch": 0.2571594347051266, "grad_norm": 0.38982924818992615, "learning_rate": 3.7144100460027353e-06, "loss": 0.7111, "step": 6205 }, { "epoch": 0.25720087861080027, "grad_norm": 0.41776251792907715, "learning_rate": 3.7142028264743675e-06, "loss": 0.6851, "step": 6206 }, { "epoch": 0.25724232251647394, "grad_norm": 0.3976080119609833, "learning_rate": 3.713995606945999e-06, "loss": 0.7209, "step": 6207 }, { "epoch": 0.2572837664221476, "grad_norm": 0.45019590854644775, "learning_rate": 3.7137883874176307e-06, "loss": 0.7611, "step": 6208 }, { "epoch": 0.2573252103278213, "grad_norm": 0.42690035700798035, "learning_rate": 3.713581167889262e-06, "loss": 0.7322, "step": 6209 }, { "epoch": 0.257366654233495, "grad_norm": 0.43929794430732727, "learning_rate": 3.713373948360894e-06, "loss": 0.6943, "step": 6210 }, { "epoch": 0.25740809813916865, "grad_norm": 0.44643911719322205, "learning_rate": 3.7131667288325257e-06, "loss": 0.7761, "step": 6211 }, { "epoch": 0.2574495420448423, "grad_norm": 0.45257800817489624, "learning_rate": 3.712959509304157e-06, "loss": 0.7457, "step": 6212 }, { "epoch": 0.25749098595051595, "grad_norm": 0.43421316146850586, "learning_rate": 3.712752289775789e-06, "loss": 0.6964, "step": 6213 }, { "epoch": 0.2575324298561896, "grad_norm": 0.41608262062072754, "learning_rate": 3.7125450702474203e-06, "loss": 0.6831, "step": 6214 }, { "epoch": 
0.2575738737618633, "grad_norm": 0.4203833341598511, "learning_rate": 3.712337850719052e-06, "loss": 0.6833, "step": 6215 }, { "epoch": 0.257615317667537, "grad_norm": 0.41846519708633423, "learning_rate": 3.7121306311906835e-06, "loss": 0.6899, "step": 6216 }, { "epoch": 0.25765676157321066, "grad_norm": 0.4179471731185913, "learning_rate": 3.7119234116623153e-06, "loss": 0.7336, "step": 6217 }, { "epoch": 0.25769820547888433, "grad_norm": 0.43322238326072693, "learning_rate": 3.7117161921339467e-06, "loss": 0.7617, "step": 6218 }, { "epoch": 0.257739649384558, "grad_norm": 0.41138553619384766, "learning_rate": 3.7115089726055785e-06, "loss": 0.6938, "step": 6219 }, { "epoch": 0.2577810932902317, "grad_norm": 0.4735654890537262, "learning_rate": 3.7113017530772107e-06, "loss": 0.7441, "step": 6220 }, { "epoch": 0.25782253719590537, "grad_norm": 0.4648279547691345, "learning_rate": 3.7110945335488417e-06, "loss": 0.7776, "step": 6221 }, { "epoch": 0.257863981101579, "grad_norm": 0.40383368730545044, "learning_rate": 3.710887314020474e-06, "loss": 0.6841, "step": 6222 }, { "epoch": 0.25790542500725266, "grad_norm": 0.3935619592666626, "learning_rate": 3.7106800944921053e-06, "loss": 0.7061, "step": 6223 }, { "epoch": 0.25794686891292634, "grad_norm": 0.41908755898475647, "learning_rate": 3.710472874963737e-06, "loss": 0.6965, "step": 6224 }, { "epoch": 0.2579883128186, "grad_norm": 0.4447205662727356, "learning_rate": 3.7102656554353685e-06, "loss": 0.7329, "step": 6225 }, { "epoch": 0.2580297567242737, "grad_norm": 0.4485672116279602, "learning_rate": 3.7100584359070003e-06, "loss": 0.748, "step": 6226 }, { "epoch": 0.2580712006299474, "grad_norm": 0.4435478448867798, "learning_rate": 3.709851216378632e-06, "loss": 0.6873, "step": 6227 }, { "epoch": 0.25811264453562105, "grad_norm": 0.4243341088294983, "learning_rate": 3.7096439968502635e-06, "loss": 0.697, "step": 6228 }, { "epoch": 0.2581540884412947, "grad_norm": 0.41485750675201416, "learning_rate": 
3.7094367773218953e-06, "loss": 0.7192, "step": 6229 }, { "epoch": 0.2581955323469684, "grad_norm": 0.4284738302230835, "learning_rate": 3.7092295577935267e-06, "loss": 0.7148, "step": 6230 }, { "epoch": 0.258236976252642, "grad_norm": 0.43482398986816406, "learning_rate": 3.7090223382651585e-06, "loss": 0.6978, "step": 6231 }, { "epoch": 0.2582784201583157, "grad_norm": 0.40069153904914856, "learning_rate": 3.70881511873679e-06, "loss": 0.6829, "step": 6232 }, { "epoch": 0.2583198640639894, "grad_norm": 0.40957170724868774, "learning_rate": 3.7086078992084217e-06, "loss": 0.7054, "step": 6233 }, { "epoch": 0.25836130796966306, "grad_norm": 0.42304345965385437, "learning_rate": 3.708400679680053e-06, "loss": 0.7288, "step": 6234 }, { "epoch": 0.25840275187533673, "grad_norm": 0.4186275005340576, "learning_rate": 3.708193460151685e-06, "loss": 0.687, "step": 6235 }, { "epoch": 0.2584441957810104, "grad_norm": 0.47155824303627014, "learning_rate": 3.707986240623317e-06, "loss": 0.782, "step": 6236 }, { "epoch": 0.2584856396866841, "grad_norm": 0.45221614837646484, "learning_rate": 3.707779021094948e-06, "loss": 0.688, "step": 6237 }, { "epoch": 0.25852708359235776, "grad_norm": 0.4429072141647339, "learning_rate": 3.7075718015665803e-06, "loss": 0.7297, "step": 6238 }, { "epoch": 0.25856852749803144, "grad_norm": 0.39871254563331604, "learning_rate": 3.7073645820382113e-06, "loss": 0.7063, "step": 6239 }, { "epoch": 0.25860997140370506, "grad_norm": 0.40279820561408997, "learning_rate": 3.7071573625098435e-06, "loss": 0.704, "step": 6240 }, { "epoch": 0.25865141530937874, "grad_norm": 0.40577587485313416, "learning_rate": 3.706950142981475e-06, "loss": 0.7048, "step": 6241 }, { "epoch": 0.2586928592150524, "grad_norm": 0.46221402287483215, "learning_rate": 3.7067429234531067e-06, "loss": 0.7419, "step": 6242 }, { "epoch": 0.2587343031207261, "grad_norm": 0.38397467136383057, "learning_rate": 3.706535703924738e-06, "loss": 0.7422, "step": 6243 }, { "epoch": 
0.25877574702639977, "grad_norm": 0.4703851044178009, "learning_rate": 3.70632848439637e-06, "loss": 0.7024, "step": 6244 }, { "epoch": 0.25881719093207345, "grad_norm": 0.4419141113758087, "learning_rate": 3.7061212648680017e-06, "loss": 0.741, "step": 6245 }, { "epoch": 0.2588586348377471, "grad_norm": 0.4188176691532135, "learning_rate": 3.705914045339633e-06, "loss": 0.7383, "step": 6246 }, { "epoch": 0.2589000787434208, "grad_norm": 0.4133175015449524, "learning_rate": 3.705706825811265e-06, "loss": 0.6956, "step": 6247 }, { "epoch": 0.2589415226490944, "grad_norm": 0.43742525577545166, "learning_rate": 3.7054996062828963e-06, "loss": 0.7661, "step": 6248 }, { "epoch": 0.2589829665547681, "grad_norm": 0.4420631527900696, "learning_rate": 3.705292386754528e-06, "loss": 0.7517, "step": 6249 }, { "epoch": 0.2590244104604418, "grad_norm": 0.41893723607063293, "learning_rate": 3.7050851672261595e-06, "loss": 0.7678, "step": 6250 }, { "epoch": 0.25906585436611546, "grad_norm": 0.410117506980896, "learning_rate": 3.7048779476977913e-06, "loss": 0.6654, "step": 6251 }, { "epoch": 0.25910729827178913, "grad_norm": 0.41841989755630493, "learning_rate": 3.7046707281694227e-06, "loss": 0.6917, "step": 6252 }, { "epoch": 0.2591487421774628, "grad_norm": 0.40545976161956787, "learning_rate": 3.7044635086410545e-06, "loss": 0.7024, "step": 6253 }, { "epoch": 0.2591901860831365, "grad_norm": 0.4304497539997101, "learning_rate": 3.7042562891126867e-06, "loss": 0.7253, "step": 6254 }, { "epoch": 0.25923162998881016, "grad_norm": 0.41317278146743774, "learning_rate": 3.7040490695843177e-06, "loss": 0.676, "step": 6255 }, { "epoch": 0.25927307389448384, "grad_norm": 0.39634430408477783, "learning_rate": 3.70384185005595e-06, "loss": 0.6338, "step": 6256 }, { "epoch": 0.25931451780015746, "grad_norm": 0.4154151678085327, "learning_rate": 3.7036346305275813e-06, "loss": 0.6873, "step": 6257 }, { "epoch": 0.25935596170583114, "grad_norm": 0.40515556931495667, "learning_rate": 
3.703427410999213e-06, "loss": 0.7227, "step": 6258 }, { "epoch": 0.2593974056115048, "grad_norm": 0.38794848322868347, "learning_rate": 3.7032201914708445e-06, "loss": 0.7146, "step": 6259 }, { "epoch": 0.2594388495171785, "grad_norm": 0.40703046321868896, "learning_rate": 3.7030129719424763e-06, "loss": 0.7219, "step": 6260 }, { "epoch": 0.25948029342285217, "grad_norm": 0.4242015480995178, "learning_rate": 3.7028057524141077e-06, "loss": 0.7512, "step": 6261 }, { "epoch": 0.25952173732852585, "grad_norm": 0.42097732424736023, "learning_rate": 3.7025985328857395e-06, "loss": 0.7018, "step": 6262 }, { "epoch": 0.2595631812341995, "grad_norm": 0.4927029311656952, "learning_rate": 3.7023913133573713e-06, "loss": 0.7856, "step": 6263 }, { "epoch": 0.2596046251398732, "grad_norm": 0.4092736542224884, "learning_rate": 3.7021840938290027e-06, "loss": 0.7104, "step": 6264 }, { "epoch": 0.2596460690455469, "grad_norm": 0.42849522829055786, "learning_rate": 3.7019768743006345e-06, "loss": 0.7214, "step": 6265 }, { "epoch": 0.2596875129512205, "grad_norm": 0.4125194847583771, "learning_rate": 3.701769654772266e-06, "loss": 0.7129, "step": 6266 }, { "epoch": 0.2597289568568942, "grad_norm": 0.42058929800987244, "learning_rate": 3.7015624352438977e-06, "loss": 0.7307, "step": 6267 }, { "epoch": 0.25977040076256785, "grad_norm": 0.4356994330883026, "learning_rate": 3.701355215715529e-06, "loss": 0.7751, "step": 6268 }, { "epoch": 0.25981184466824153, "grad_norm": 0.4404783248901367, "learning_rate": 3.701147996187161e-06, "loss": 0.7881, "step": 6269 }, { "epoch": 0.2598532885739152, "grad_norm": 0.4452175796031952, "learning_rate": 3.7009407766587923e-06, "loss": 0.7576, "step": 6270 }, { "epoch": 0.2598947324795889, "grad_norm": 0.42158225178718567, "learning_rate": 3.700733557130424e-06, "loss": 0.6813, "step": 6271 }, { "epoch": 0.25993617638526256, "grad_norm": 0.4066883325576782, "learning_rate": 3.7005263376020563e-06, "loss": 0.7653, "step": 6272 }, { "epoch": 
0.25997762029093624, "grad_norm": 0.46558114886283875, "learning_rate": 3.7003191180736877e-06, "loss": 0.8022, "step": 6273 }, { "epoch": 0.26001906419660986, "grad_norm": 0.4042254090309143, "learning_rate": 3.7001118985453195e-06, "loss": 0.6973, "step": 6274 }, { "epoch": 0.26006050810228354, "grad_norm": 0.3820721209049225, "learning_rate": 3.699904679016951e-06, "loss": 0.6603, "step": 6275 }, { "epoch": 0.2601019520079572, "grad_norm": 0.4718785285949707, "learning_rate": 3.6996974594885827e-06, "loss": 0.7434, "step": 6276 }, { "epoch": 0.2601433959136309, "grad_norm": 0.4389110207557678, "learning_rate": 3.699490239960214e-06, "loss": 0.7476, "step": 6277 }, { "epoch": 0.26018483981930457, "grad_norm": 0.40768882632255554, "learning_rate": 3.699283020431846e-06, "loss": 0.6687, "step": 6278 }, { "epoch": 0.26022628372497825, "grad_norm": 0.4219058156013489, "learning_rate": 3.6990758009034773e-06, "loss": 0.7437, "step": 6279 }, { "epoch": 0.2602677276306519, "grad_norm": 0.43802395462989807, "learning_rate": 3.698868581375109e-06, "loss": 0.7358, "step": 6280 }, { "epoch": 0.2603091715363256, "grad_norm": 0.4397730827331543, "learning_rate": 3.698661361846741e-06, "loss": 0.7252, "step": 6281 }, { "epoch": 0.2603506154419993, "grad_norm": 0.4375289976596832, "learning_rate": 3.6984541423183723e-06, "loss": 0.6919, "step": 6282 }, { "epoch": 0.2603920593476729, "grad_norm": 0.41898784041404724, "learning_rate": 3.698246922790004e-06, "loss": 0.6738, "step": 6283 }, { "epoch": 0.2604335032533466, "grad_norm": 0.3902156352996826, "learning_rate": 3.6980397032616355e-06, "loss": 0.7195, "step": 6284 }, { "epoch": 0.26047494715902025, "grad_norm": 0.43567222356796265, "learning_rate": 3.6978324837332673e-06, "loss": 0.75, "step": 6285 }, { "epoch": 0.26051639106469393, "grad_norm": 0.4213079512119293, "learning_rate": 3.6976252642048987e-06, "loss": 0.6906, "step": 6286 }, { "epoch": 0.2605578349703676, "grad_norm": 0.44823721051216125, "learning_rate": 
3.6974180446765305e-06, "loss": 0.7073, "step": 6287 }, { "epoch": 0.2605992788760413, "grad_norm": 0.4250064194202423, "learning_rate": 3.6972108251481627e-06, "loss": 0.7151, "step": 6288 }, { "epoch": 0.26064072278171496, "grad_norm": 0.4127296805381775, "learning_rate": 3.6970036056197937e-06, "loss": 0.7314, "step": 6289 }, { "epoch": 0.26068216668738864, "grad_norm": 0.37382060289382935, "learning_rate": 3.696796386091426e-06, "loss": 0.6868, "step": 6290 }, { "epoch": 0.2607236105930623, "grad_norm": 0.40120387077331543, "learning_rate": 3.6965891665630573e-06, "loss": 0.7058, "step": 6291 }, { "epoch": 0.26076505449873594, "grad_norm": 0.4066137969493866, "learning_rate": 3.696381947034689e-06, "loss": 0.6912, "step": 6292 }, { "epoch": 0.2608064984044096, "grad_norm": 0.38882172107696533, "learning_rate": 3.6961747275063205e-06, "loss": 0.7222, "step": 6293 }, { "epoch": 0.2608479423100833, "grad_norm": 0.4207928478717804, "learning_rate": 3.6959675079779523e-06, "loss": 0.7476, "step": 6294 }, { "epoch": 0.26088938621575697, "grad_norm": 0.42194119095802307, "learning_rate": 3.6957602884495837e-06, "loss": 0.7114, "step": 6295 }, { "epoch": 0.26093083012143065, "grad_norm": 0.39714813232421875, "learning_rate": 3.6955530689212155e-06, "loss": 0.6753, "step": 6296 }, { "epoch": 0.2609722740271043, "grad_norm": 0.44001829624176025, "learning_rate": 3.6953458493928473e-06, "loss": 0.7146, "step": 6297 }, { "epoch": 0.261013717932778, "grad_norm": 0.41741615533828735, "learning_rate": 3.6951386298644787e-06, "loss": 0.7354, "step": 6298 }, { "epoch": 0.2610551618384517, "grad_norm": 0.4478265345096588, "learning_rate": 3.6949314103361105e-06, "loss": 0.7429, "step": 6299 }, { "epoch": 0.26109660574412535, "grad_norm": 0.42756351828575134, "learning_rate": 3.694724190807742e-06, "loss": 0.7056, "step": 6300 }, { "epoch": 0.261138049649799, "grad_norm": 0.4073184132575989, "learning_rate": 3.6945169712793737e-06, "loss": 0.7039, "step": 6301 }, { "epoch": 
0.26117949355547265, "grad_norm": 0.40883225202560425, "learning_rate": 3.694309751751005e-06, "loss": 0.7604, "step": 6302 }, { "epoch": 0.26122093746114633, "grad_norm": 0.42545434832572937, "learning_rate": 3.694102532222637e-06, "loss": 0.7839, "step": 6303 }, { "epoch": 0.26126238136682, "grad_norm": 0.44375982880592346, "learning_rate": 3.6938953126942683e-06, "loss": 0.7034, "step": 6304 }, { "epoch": 0.2613038252724937, "grad_norm": 0.41876929998397827, "learning_rate": 3.6936880931659e-06, "loss": 0.7417, "step": 6305 }, { "epoch": 0.26134526917816736, "grad_norm": 0.43917280435562134, "learning_rate": 3.6934808736375323e-06, "loss": 0.7205, "step": 6306 }, { "epoch": 0.26138671308384104, "grad_norm": 0.44418275356292725, "learning_rate": 3.6932736541091637e-06, "loss": 0.7347, "step": 6307 }, { "epoch": 0.2614281569895147, "grad_norm": 0.4193587005138397, "learning_rate": 3.6930664345807955e-06, "loss": 0.678, "step": 6308 }, { "epoch": 0.26146960089518834, "grad_norm": 0.4175031781196594, "learning_rate": 3.692859215052427e-06, "loss": 0.7229, "step": 6309 }, { "epoch": 0.261511044800862, "grad_norm": 0.4174012839794159, "learning_rate": 3.6926519955240587e-06, "loss": 0.6697, "step": 6310 }, { "epoch": 0.2615524887065357, "grad_norm": 0.4294160306453705, "learning_rate": 3.69244477599569e-06, "loss": 0.762, "step": 6311 }, { "epoch": 0.26159393261220937, "grad_norm": 0.4562845528125763, "learning_rate": 3.692237556467322e-06, "loss": 0.7053, "step": 6312 }, { "epoch": 0.26163537651788304, "grad_norm": 0.4365430176258087, "learning_rate": 3.6920303369389533e-06, "loss": 0.6494, "step": 6313 }, { "epoch": 0.2616768204235567, "grad_norm": 0.4535042345523834, "learning_rate": 3.691823117410585e-06, "loss": 0.7522, "step": 6314 }, { "epoch": 0.2617182643292304, "grad_norm": 0.4188600480556488, "learning_rate": 3.691615897882217e-06, "loss": 0.7795, "step": 6315 }, { "epoch": 0.2617597082349041, "grad_norm": 0.40750449895858765, "learning_rate": 
3.6914086783538483e-06, "loss": 0.7018, "step": 6316 }, { "epoch": 0.26180115214057775, "grad_norm": 0.42078661918640137, "learning_rate": 3.69120145882548e-06, "loss": 0.6948, "step": 6317 }, { "epoch": 0.2618425960462514, "grad_norm": 0.40222805738449097, "learning_rate": 3.6909942392971115e-06, "loss": 0.6687, "step": 6318 }, { "epoch": 0.26188403995192505, "grad_norm": 0.4088183045387268, "learning_rate": 3.6907870197687433e-06, "loss": 0.7329, "step": 6319 }, { "epoch": 0.26192548385759873, "grad_norm": 0.3934989869594574, "learning_rate": 3.6905798002403747e-06, "loss": 0.6893, "step": 6320 }, { "epoch": 0.2619669277632724, "grad_norm": 0.3926834464073181, "learning_rate": 3.6903725807120065e-06, "loss": 0.72, "step": 6321 }, { "epoch": 0.2620083716689461, "grad_norm": 0.435479998588562, "learning_rate": 3.690165361183638e-06, "loss": 0.7522, "step": 6322 }, { "epoch": 0.26204981557461976, "grad_norm": 0.4318770468235016, "learning_rate": 3.6899581416552697e-06, "loss": 0.7498, "step": 6323 }, { "epoch": 0.26209125948029344, "grad_norm": 0.4274018704891205, "learning_rate": 3.689750922126902e-06, "loss": 0.7146, "step": 6324 }, { "epoch": 0.2621327033859671, "grad_norm": 0.4307129383087158, "learning_rate": 3.6895437025985333e-06, "loss": 0.6587, "step": 6325 }, { "epoch": 0.2621741472916408, "grad_norm": 0.42974191904067993, "learning_rate": 3.689336483070165e-06, "loss": 0.7151, "step": 6326 }, { "epoch": 0.2622155911973144, "grad_norm": 0.4023016691207886, "learning_rate": 3.6891292635417965e-06, "loss": 0.6248, "step": 6327 }, { "epoch": 0.2622570351029881, "grad_norm": 0.38814854621887207, "learning_rate": 3.6889220440134283e-06, "loss": 0.728, "step": 6328 }, { "epoch": 0.26229847900866177, "grad_norm": 0.3987759053707123, "learning_rate": 3.6887148244850597e-06, "loss": 0.7273, "step": 6329 }, { "epoch": 0.26233992291433544, "grad_norm": 0.420589804649353, "learning_rate": 3.6885076049566915e-06, "loss": 0.7642, "step": 6330 }, { "epoch": 
0.2623813668200091, "grad_norm": 0.4020451307296753, "learning_rate": 3.688300385428323e-06, "loss": 0.6565, "step": 6331 }, { "epoch": 0.2624228107256828, "grad_norm": 0.41302037239074707, "learning_rate": 3.6880931658999547e-06, "loss": 0.7073, "step": 6332 }, { "epoch": 0.2624642546313565, "grad_norm": 0.4590699076652527, "learning_rate": 3.6878859463715865e-06, "loss": 0.674, "step": 6333 }, { "epoch": 0.26250569853703015, "grad_norm": 0.43627530336380005, "learning_rate": 3.687678726843218e-06, "loss": 0.7462, "step": 6334 }, { "epoch": 0.2625471424427038, "grad_norm": 0.41981321573257446, "learning_rate": 3.6874715073148497e-06, "loss": 0.7091, "step": 6335 }, { "epoch": 0.26258858634837745, "grad_norm": 0.40838518738746643, "learning_rate": 3.687264287786481e-06, "loss": 0.6886, "step": 6336 }, { "epoch": 0.26263003025405113, "grad_norm": 0.44184985756874084, "learning_rate": 3.687057068258113e-06, "loss": 0.6758, "step": 6337 }, { "epoch": 0.2626714741597248, "grad_norm": 0.38131433725357056, "learning_rate": 3.6868498487297443e-06, "loss": 0.6975, "step": 6338 }, { "epoch": 0.2627129180653985, "grad_norm": 0.38963258266448975, "learning_rate": 3.686642629201376e-06, "loss": 0.6221, "step": 6339 }, { "epoch": 0.26275436197107216, "grad_norm": 0.4219125509262085, "learning_rate": 3.6864354096730075e-06, "loss": 0.6963, "step": 6340 }, { "epoch": 0.26279580587674584, "grad_norm": 0.4498273432254791, "learning_rate": 3.6862281901446397e-06, "loss": 0.7437, "step": 6341 }, { "epoch": 0.2628372497824195, "grad_norm": 0.4100957214832306, "learning_rate": 3.6860209706162715e-06, "loss": 0.6973, "step": 6342 }, { "epoch": 0.2628786936880932, "grad_norm": 0.46511566638946533, "learning_rate": 3.685813751087903e-06, "loss": 0.7279, "step": 6343 }, { "epoch": 0.2629201375937668, "grad_norm": 0.43290814757347107, "learning_rate": 3.6856065315595347e-06, "loss": 0.7004, "step": 6344 }, { "epoch": 0.2629615814994405, "grad_norm": 0.4244616627693176, "learning_rate": 
3.685399312031166e-06, "loss": 0.7302, "step": 6345 }, { "epoch": 0.26300302540511417, "grad_norm": 0.41384050250053406, "learning_rate": 3.685192092502798e-06, "loss": 0.7363, "step": 6346 }, { "epoch": 0.26304446931078784, "grad_norm": 0.4220217168331146, "learning_rate": 3.6849848729744293e-06, "loss": 0.6934, "step": 6347 }, { "epoch": 0.2630859132164615, "grad_norm": 0.46823200583457947, "learning_rate": 3.684777653446061e-06, "loss": 0.7351, "step": 6348 }, { "epoch": 0.2631273571221352, "grad_norm": 0.4548875391483307, "learning_rate": 3.684570433917693e-06, "loss": 0.7567, "step": 6349 }, { "epoch": 0.2631688010278089, "grad_norm": 0.43491724133491516, "learning_rate": 3.6843632143893243e-06, "loss": 0.6829, "step": 6350 }, { "epoch": 0.26321024493348255, "grad_norm": 0.40387147665023804, "learning_rate": 3.684155994860956e-06, "loss": 0.6733, "step": 6351 }, { "epoch": 0.26325168883915623, "grad_norm": 0.4099508225917816, "learning_rate": 3.6839487753325875e-06, "loss": 0.7078, "step": 6352 }, { "epoch": 0.26329313274482985, "grad_norm": 0.42148905992507935, "learning_rate": 3.6837415558042193e-06, "loss": 0.7048, "step": 6353 }, { "epoch": 0.2633345766505035, "grad_norm": 0.450489342212677, "learning_rate": 3.6835343362758507e-06, "loss": 0.7576, "step": 6354 }, { "epoch": 0.2633760205561772, "grad_norm": 0.40780889987945557, "learning_rate": 3.6833271167474825e-06, "loss": 0.6941, "step": 6355 }, { "epoch": 0.2634174644618509, "grad_norm": 0.45322784781455994, "learning_rate": 3.683119897219114e-06, "loss": 0.7654, "step": 6356 }, { "epoch": 0.26345890836752456, "grad_norm": 0.39057061076164246, "learning_rate": 3.6829126776907457e-06, "loss": 0.7151, "step": 6357 }, { "epoch": 0.26350035227319824, "grad_norm": 0.39810916781425476, "learning_rate": 3.682705458162378e-06, "loss": 0.6925, "step": 6358 }, { "epoch": 0.2635417961788719, "grad_norm": 0.42373794317245483, "learning_rate": 3.6824982386340093e-06, "loss": 0.7617, "step": 6359 }, { "epoch": 
0.2635832400845456, "grad_norm": 0.44631630182266235, "learning_rate": 3.682291019105641e-06, "loss": 0.7673, "step": 6360 }, { "epoch": 0.2636246839902192, "grad_norm": 0.3960021734237671, "learning_rate": 3.6820837995772725e-06, "loss": 0.7035, "step": 6361 }, { "epoch": 0.2636661278958929, "grad_norm": 0.41545552015304565, "learning_rate": 3.6818765800489043e-06, "loss": 0.7275, "step": 6362 }, { "epoch": 0.26370757180156656, "grad_norm": 0.4479779303073883, "learning_rate": 3.6816693605205357e-06, "loss": 0.7133, "step": 6363 }, { "epoch": 0.26374901570724024, "grad_norm": 0.41297903656959534, "learning_rate": 3.6814621409921675e-06, "loss": 0.7292, "step": 6364 }, { "epoch": 0.2637904596129139, "grad_norm": 0.4408339858055115, "learning_rate": 3.681254921463799e-06, "loss": 0.7407, "step": 6365 }, { "epoch": 0.2638319035185876, "grad_norm": 0.42763832211494446, "learning_rate": 3.6810477019354307e-06, "loss": 0.7297, "step": 6366 }, { "epoch": 0.2638733474242613, "grad_norm": 0.46042579412460327, "learning_rate": 3.6808404824070625e-06, "loss": 0.7437, "step": 6367 }, { "epoch": 0.26391479132993495, "grad_norm": 0.44522392749786377, "learning_rate": 3.680633262878694e-06, "loss": 0.7048, "step": 6368 }, { "epoch": 0.2639562352356086, "grad_norm": 0.42436856031417847, "learning_rate": 3.6804260433503257e-06, "loss": 0.7113, "step": 6369 }, { "epoch": 0.26399767914128225, "grad_norm": 0.44359612464904785, "learning_rate": 3.680218823821957e-06, "loss": 0.7742, "step": 6370 }, { "epoch": 0.2640391230469559, "grad_norm": 0.41687852144241333, "learning_rate": 3.680011604293589e-06, "loss": 0.6907, "step": 6371 }, { "epoch": 0.2640805669526296, "grad_norm": 0.4312986731529236, "learning_rate": 3.6798043847652203e-06, "loss": 0.7466, "step": 6372 }, { "epoch": 0.2641220108583033, "grad_norm": 0.4330451488494873, "learning_rate": 3.679597165236852e-06, "loss": 0.6919, "step": 6373 }, { "epoch": 0.26416345476397696, "grad_norm": 0.43585044145584106, "learning_rate": 
3.6793899457084835e-06, "loss": 0.7639, "step": 6374 }, { "epoch": 0.26420489866965063, "grad_norm": 0.442184716463089, "learning_rate": 3.6791827261801157e-06, "loss": 0.7219, "step": 6375 }, { "epoch": 0.2642463425753243, "grad_norm": 0.41909539699554443, "learning_rate": 3.6789755066517475e-06, "loss": 0.7004, "step": 6376 }, { "epoch": 0.264287786480998, "grad_norm": 0.4056285619735718, "learning_rate": 3.678768287123379e-06, "loss": 0.6863, "step": 6377 }, { "epoch": 0.26432923038667167, "grad_norm": 0.4126576781272888, "learning_rate": 3.6785610675950107e-06, "loss": 0.7012, "step": 6378 }, { "epoch": 0.2643706742923453, "grad_norm": 0.4524768888950348, "learning_rate": 3.678353848066642e-06, "loss": 0.7181, "step": 6379 }, { "epoch": 0.26441211819801896, "grad_norm": 0.4047909677028656, "learning_rate": 3.678146628538274e-06, "loss": 0.748, "step": 6380 }, { "epoch": 0.26445356210369264, "grad_norm": 0.4494902789592743, "learning_rate": 3.6779394090099053e-06, "loss": 0.7419, "step": 6381 }, { "epoch": 0.2644950060093663, "grad_norm": 0.4608725309371948, "learning_rate": 3.677732189481537e-06, "loss": 0.762, "step": 6382 }, { "epoch": 0.26453644991504, "grad_norm": 0.4417451024055481, "learning_rate": 3.6775249699531685e-06, "loss": 0.7363, "step": 6383 }, { "epoch": 0.26457789382071367, "grad_norm": 0.40939566493034363, "learning_rate": 3.6773177504248003e-06, "loss": 0.7197, "step": 6384 }, { "epoch": 0.26461933772638735, "grad_norm": 0.41147902607917786, "learning_rate": 3.677110530896432e-06, "loss": 0.7529, "step": 6385 }, { "epoch": 0.264660781632061, "grad_norm": 0.41552749276161194, "learning_rate": 3.6769033113680635e-06, "loss": 0.6954, "step": 6386 }, { "epoch": 0.2647022255377347, "grad_norm": 0.4078802168369293, "learning_rate": 3.6766960918396953e-06, "loss": 0.6814, "step": 6387 }, { "epoch": 0.2647436694434083, "grad_norm": 0.4642527103424072, "learning_rate": 3.6764888723113267e-06, "loss": 0.739, "step": 6388 }, { "epoch": 
0.264785113349082, "grad_norm": 0.42460986971855164, "learning_rate": 3.6762816527829585e-06, "loss": 0.705, "step": 6389 }, { "epoch": 0.2648265572547557, "grad_norm": 0.42757096886634827, "learning_rate": 3.67607443325459e-06, "loss": 0.7211, "step": 6390 }, { "epoch": 0.26486800116042936, "grad_norm": 0.42514774203300476, "learning_rate": 3.6758672137262217e-06, "loss": 0.722, "step": 6391 }, { "epoch": 0.26490944506610303, "grad_norm": 0.4477381408214569, "learning_rate": 3.675659994197853e-06, "loss": 0.7505, "step": 6392 }, { "epoch": 0.2649508889717767, "grad_norm": 0.4089907705783844, "learning_rate": 3.6754527746694853e-06, "loss": 0.6439, "step": 6393 }, { "epoch": 0.2649923328774504, "grad_norm": 0.3891829252243042, "learning_rate": 3.675245555141117e-06, "loss": 0.7164, "step": 6394 }, { "epoch": 0.26503377678312406, "grad_norm": 0.43315836787223816, "learning_rate": 3.6750383356127485e-06, "loss": 0.6956, "step": 6395 }, { "epoch": 0.2650752206887977, "grad_norm": 0.4007335603237152, "learning_rate": 3.6748311160843803e-06, "loss": 0.6899, "step": 6396 }, { "epoch": 0.26511666459447136, "grad_norm": 0.4487016201019287, "learning_rate": 3.6746238965560117e-06, "loss": 0.7708, "step": 6397 }, { "epoch": 0.26515810850014504, "grad_norm": 0.44069451093673706, "learning_rate": 3.6744166770276435e-06, "loss": 0.6975, "step": 6398 }, { "epoch": 0.2651995524058187, "grad_norm": 0.46565812826156616, "learning_rate": 3.674209457499275e-06, "loss": 0.7306, "step": 6399 }, { "epoch": 0.2652409963114924, "grad_norm": 0.40225380659103394, "learning_rate": 3.6740022379709067e-06, "loss": 0.6874, "step": 6400 }, { "epoch": 0.26528244021716607, "grad_norm": 0.4467470645904541, "learning_rate": 3.673795018442538e-06, "loss": 0.7396, "step": 6401 }, { "epoch": 0.26532388412283975, "grad_norm": 0.406599223613739, "learning_rate": 3.67358779891417e-06, "loss": 0.6985, "step": 6402 }, { "epoch": 0.2653653280285134, "grad_norm": 0.42188259959220886, "learning_rate": 
3.6733805793858017e-06, "loss": 0.7122, "step": 6403 }, { "epoch": 0.2654067719341871, "grad_norm": 0.4489630162715912, "learning_rate": 3.673173359857433e-06, "loss": 0.6533, "step": 6404 }, { "epoch": 0.2654482158398607, "grad_norm": 0.4150291681289673, "learning_rate": 3.672966140329065e-06, "loss": 0.7026, "step": 6405 }, { "epoch": 0.2654896597455344, "grad_norm": 0.4480176270008087, "learning_rate": 3.6727589208006963e-06, "loss": 0.6926, "step": 6406 }, { "epoch": 0.2655311036512081, "grad_norm": 0.4263271689414978, "learning_rate": 3.672551701272328e-06, "loss": 0.6873, "step": 6407 }, { "epoch": 0.26557254755688176, "grad_norm": 0.3974429666996002, "learning_rate": 3.6723444817439595e-06, "loss": 0.6924, "step": 6408 }, { "epoch": 0.26561399146255543, "grad_norm": 0.447875440120697, "learning_rate": 3.6721372622155917e-06, "loss": 0.7415, "step": 6409 }, { "epoch": 0.2656554353682291, "grad_norm": 0.4001208245754242, "learning_rate": 3.6719300426872235e-06, "loss": 0.7051, "step": 6410 }, { "epoch": 0.2656968792739028, "grad_norm": 0.4122028648853302, "learning_rate": 3.671722823158855e-06, "loss": 0.7593, "step": 6411 }, { "epoch": 0.26573832317957646, "grad_norm": 0.4092101752758026, "learning_rate": 3.6715156036304867e-06, "loss": 0.7118, "step": 6412 }, { "epoch": 0.26577976708525014, "grad_norm": 0.4194504916667938, "learning_rate": 3.671308384102118e-06, "loss": 0.7118, "step": 6413 }, { "epoch": 0.26582121099092376, "grad_norm": 0.4180678427219391, "learning_rate": 3.67110116457375e-06, "loss": 0.7251, "step": 6414 }, { "epoch": 0.26586265489659744, "grad_norm": 0.43582284450531006, "learning_rate": 3.6708939450453813e-06, "loss": 0.7334, "step": 6415 }, { "epoch": 0.2659040988022711, "grad_norm": 0.4737831652164459, "learning_rate": 3.670686725517013e-06, "loss": 0.7511, "step": 6416 }, { "epoch": 0.2659455427079448, "grad_norm": 0.42427587509155273, "learning_rate": 3.6704795059886445e-06, "loss": 0.6963, "step": 6417 }, { "epoch": 
0.26598698661361847, "grad_norm": 0.436360239982605, "learning_rate": 3.6702722864602763e-06, "loss": 0.7324, "step": 6418 }, { "epoch": 0.26602843051929215, "grad_norm": 0.4255404770374298, "learning_rate": 3.670065066931908e-06, "loss": 0.7314, "step": 6419 }, { "epoch": 0.2660698744249658, "grad_norm": 0.4210447669029236, "learning_rate": 3.6698578474035395e-06, "loss": 0.7466, "step": 6420 }, { "epoch": 0.2661113183306395, "grad_norm": 0.4220668375492096, "learning_rate": 3.6696506278751713e-06, "loss": 0.7144, "step": 6421 }, { "epoch": 0.2661527622363131, "grad_norm": 0.42769214510917664, "learning_rate": 3.6694434083468027e-06, "loss": 0.7019, "step": 6422 }, { "epoch": 0.2661942061419868, "grad_norm": 0.4240381121635437, "learning_rate": 3.6692361888184345e-06, "loss": 0.702, "step": 6423 }, { "epoch": 0.2662356500476605, "grad_norm": 0.4884326159954071, "learning_rate": 3.669028969290066e-06, "loss": 0.7151, "step": 6424 }, { "epoch": 0.26627709395333415, "grad_norm": 0.43229031562805176, "learning_rate": 3.6688217497616977e-06, "loss": 0.7173, "step": 6425 }, { "epoch": 0.26631853785900783, "grad_norm": 0.4293804168701172, "learning_rate": 3.668614530233329e-06, "loss": 0.6862, "step": 6426 }, { "epoch": 0.2663599817646815, "grad_norm": 0.3997860252857208, "learning_rate": 3.6684073107049613e-06, "loss": 0.6982, "step": 6427 }, { "epoch": 0.2664014256703552, "grad_norm": 0.462211936712265, "learning_rate": 3.668200091176593e-06, "loss": 0.739, "step": 6428 }, { "epoch": 0.26644286957602886, "grad_norm": 0.3971467614173889, "learning_rate": 3.6679928716482245e-06, "loss": 0.6676, "step": 6429 }, { "epoch": 0.26648431348170254, "grad_norm": 0.4137811064720154, "learning_rate": 3.6677856521198563e-06, "loss": 0.7159, "step": 6430 }, { "epoch": 0.26652575738737616, "grad_norm": 0.4237821102142334, "learning_rate": 3.6675784325914877e-06, "loss": 0.7866, "step": 6431 }, { "epoch": 0.26656720129304984, "grad_norm": 0.41758179664611816, "learning_rate": 
3.6673712130631195e-06, "loss": 0.7096, "step": 6432 }, { "epoch": 0.2666086451987235, "grad_norm": 0.45064622163772583, "learning_rate": 3.667163993534751e-06, "loss": 0.6929, "step": 6433 }, { "epoch": 0.2666500891043972, "grad_norm": 0.4277627170085907, "learning_rate": 3.6669567740063827e-06, "loss": 0.761, "step": 6434 }, { "epoch": 0.26669153301007087, "grad_norm": 0.44265028834342957, "learning_rate": 3.666749554478014e-06, "loss": 0.7246, "step": 6435 }, { "epoch": 0.26673297691574455, "grad_norm": 0.4414396286010742, "learning_rate": 3.666542334949646e-06, "loss": 0.6768, "step": 6436 }, { "epoch": 0.2667744208214182, "grad_norm": 0.39849019050598145, "learning_rate": 3.6663351154212777e-06, "loss": 0.6882, "step": 6437 }, { "epoch": 0.2668158647270919, "grad_norm": 0.4140229821205139, "learning_rate": 3.666127895892909e-06, "loss": 0.7466, "step": 6438 }, { "epoch": 0.2668573086327656, "grad_norm": 0.431058406829834, "learning_rate": 3.665920676364541e-06, "loss": 0.7037, "step": 6439 }, { "epoch": 0.2668987525384392, "grad_norm": 0.3874143660068512, "learning_rate": 3.6657134568361723e-06, "loss": 0.7214, "step": 6440 }, { "epoch": 0.2669401964441129, "grad_norm": 0.4047558605670929, "learning_rate": 3.665506237307804e-06, "loss": 0.6979, "step": 6441 }, { "epoch": 0.26698164034978655, "grad_norm": 0.4260207414627075, "learning_rate": 3.6652990177794355e-06, "loss": 0.7369, "step": 6442 }, { "epoch": 0.26702308425546023, "grad_norm": 0.4203431308269501, "learning_rate": 3.6650917982510677e-06, "loss": 0.7595, "step": 6443 }, { "epoch": 0.2670645281611339, "grad_norm": 0.44023802876472473, "learning_rate": 3.6648845787226987e-06, "loss": 0.6681, "step": 6444 }, { "epoch": 0.2671059720668076, "grad_norm": 0.4268922209739685, "learning_rate": 3.664677359194331e-06, "loss": 0.7581, "step": 6445 }, { "epoch": 0.26714741597248126, "grad_norm": 0.4216556251049042, "learning_rate": 3.6644701396659627e-06, "loss": 0.7388, "step": 6446 }, { "epoch": 
0.26718885987815494, "grad_norm": 0.4321242570877075, "learning_rate": 3.664262920137594e-06, "loss": 0.7217, "step": 6447 }, { "epoch": 0.2672303037838286, "grad_norm": 0.4266868531703949, "learning_rate": 3.664055700609226e-06, "loss": 0.7427, "step": 6448 }, { "epoch": 0.26727174768950224, "grad_norm": 0.41267094016075134, "learning_rate": 3.6638484810808573e-06, "loss": 0.6991, "step": 6449 }, { "epoch": 0.2673131915951759, "grad_norm": 0.39347052574157715, "learning_rate": 3.663641261552489e-06, "loss": 0.7659, "step": 6450 }, { "epoch": 0.2673546355008496, "grad_norm": 0.425607830286026, "learning_rate": 3.6634340420241205e-06, "loss": 0.6931, "step": 6451 }, { "epoch": 0.26739607940652327, "grad_norm": 0.3932659924030304, "learning_rate": 3.6632268224957523e-06, "loss": 0.6582, "step": 6452 }, { "epoch": 0.26743752331219695, "grad_norm": 0.38975101709365845, "learning_rate": 3.6630196029673837e-06, "loss": 0.7083, "step": 6453 }, { "epoch": 0.2674789672178706, "grad_norm": 0.4292771518230438, "learning_rate": 3.6628123834390155e-06, "loss": 0.7197, "step": 6454 }, { "epoch": 0.2675204111235443, "grad_norm": 0.40929049253463745, "learning_rate": 3.6626051639106473e-06, "loss": 0.7073, "step": 6455 }, { "epoch": 0.267561855029218, "grad_norm": 0.41723617911338806, "learning_rate": 3.6623979443822787e-06, "loss": 0.6682, "step": 6456 }, { "epoch": 0.2676032989348916, "grad_norm": 0.4117160141468048, "learning_rate": 3.6621907248539105e-06, "loss": 0.7393, "step": 6457 }, { "epoch": 0.2676447428405653, "grad_norm": 0.4089653491973877, "learning_rate": 3.661983505325542e-06, "loss": 0.7305, "step": 6458 }, { "epoch": 0.26768618674623895, "grad_norm": 0.43648481369018555, "learning_rate": 3.661776285797174e-06, "loss": 0.6902, "step": 6459 }, { "epoch": 0.26772763065191263, "grad_norm": 0.40313035249710083, "learning_rate": 3.661569066268805e-06, "loss": 0.7368, "step": 6460 }, { "epoch": 0.2677690745575863, "grad_norm": 0.44201716780662537, "learning_rate": 
3.6613618467404373e-06, "loss": 0.741, "step": 6461 }, { "epoch": 0.26781051846326, "grad_norm": 0.4262681007385254, "learning_rate": 3.6611546272120683e-06, "loss": 0.6985, "step": 6462 }, { "epoch": 0.26785196236893366, "grad_norm": 0.42730364203453064, "learning_rate": 3.6609474076837005e-06, "loss": 0.7054, "step": 6463 }, { "epoch": 0.26789340627460734, "grad_norm": 0.41901320219039917, "learning_rate": 3.6607401881553323e-06, "loss": 0.7102, "step": 6464 }, { "epoch": 0.267934850180281, "grad_norm": 0.43879425525665283, "learning_rate": 3.6605329686269637e-06, "loss": 0.7192, "step": 6465 }, { "epoch": 0.26797629408595464, "grad_norm": 0.43288737535476685, "learning_rate": 3.6603257490985955e-06, "loss": 0.7283, "step": 6466 }, { "epoch": 0.2680177379916283, "grad_norm": 0.42803654074668884, "learning_rate": 3.660118529570227e-06, "loss": 0.6982, "step": 6467 }, { "epoch": 0.268059181897302, "grad_norm": 0.433649480342865, "learning_rate": 3.6599113100418587e-06, "loss": 0.6531, "step": 6468 }, { "epoch": 0.26810062580297567, "grad_norm": 0.4290996789932251, "learning_rate": 3.65970409051349e-06, "loss": 0.7219, "step": 6469 }, { "epoch": 0.26814206970864934, "grad_norm": 0.42780062556266785, "learning_rate": 3.659496870985122e-06, "loss": 0.7267, "step": 6470 }, { "epoch": 0.268183513614323, "grad_norm": 0.4274875521659851, "learning_rate": 3.6592896514567537e-06, "loss": 0.7134, "step": 6471 }, { "epoch": 0.2682249575199967, "grad_norm": 0.4348224401473999, "learning_rate": 3.659082431928385e-06, "loss": 0.7258, "step": 6472 }, { "epoch": 0.2682664014256704, "grad_norm": 0.39090046286582947, "learning_rate": 3.658875212400017e-06, "loss": 0.6897, "step": 6473 }, { "epoch": 0.26830784533134405, "grad_norm": 0.41403937339782715, "learning_rate": 3.6586679928716483e-06, "loss": 0.7417, "step": 6474 }, { "epoch": 0.2683492892370177, "grad_norm": 0.39958029985427856, "learning_rate": 3.65846077334328e-06, "loss": 0.6487, "step": 6475 }, { "epoch": 
0.26839073314269135, "grad_norm": 0.41907018423080444, "learning_rate": 3.6582535538149115e-06, "loss": 0.7037, "step": 6476 }, { "epoch": 0.26843217704836503, "grad_norm": 0.48555099964141846, "learning_rate": 3.6580463342865437e-06, "loss": 0.8104, "step": 6477 }, { "epoch": 0.2684736209540387, "grad_norm": 0.45586496591567993, "learning_rate": 3.6578391147581747e-06, "loss": 0.7585, "step": 6478 }, { "epoch": 0.2685150648597124, "grad_norm": 0.4613741636276245, "learning_rate": 3.657631895229807e-06, "loss": 0.7429, "step": 6479 }, { "epoch": 0.26855650876538606, "grad_norm": 0.42168542742729187, "learning_rate": 3.6574246757014387e-06, "loss": 0.709, "step": 6480 }, { "epoch": 0.26859795267105974, "grad_norm": 0.409851998090744, "learning_rate": 3.65721745617307e-06, "loss": 0.6951, "step": 6481 }, { "epoch": 0.2686393965767334, "grad_norm": 0.4115036129951477, "learning_rate": 3.657010236644702e-06, "loss": 0.7083, "step": 6482 }, { "epoch": 0.26868084048240704, "grad_norm": 0.4216599464416504, "learning_rate": 3.6568030171163333e-06, "loss": 0.7297, "step": 6483 }, { "epoch": 0.2687222843880807, "grad_norm": 0.41426679491996765, "learning_rate": 3.656595797587965e-06, "loss": 0.7537, "step": 6484 }, { "epoch": 0.2687637282937544, "grad_norm": 0.40590569376945496, "learning_rate": 3.6563885780595965e-06, "loss": 0.6902, "step": 6485 }, { "epoch": 0.26880517219942807, "grad_norm": 0.4273844361305237, "learning_rate": 3.6561813585312283e-06, "loss": 0.6809, "step": 6486 }, { "epoch": 0.26884661610510174, "grad_norm": 0.40438729524612427, "learning_rate": 3.6559741390028597e-06, "loss": 0.75, "step": 6487 }, { "epoch": 0.2688880600107754, "grad_norm": 0.4229685962200165, "learning_rate": 3.6557669194744915e-06, "loss": 0.7189, "step": 6488 }, { "epoch": 0.2689295039164491, "grad_norm": 0.3948942720890045, "learning_rate": 3.6555596999461233e-06, "loss": 0.7068, "step": 6489 }, { "epoch": 0.2689709478221228, "grad_norm": 0.4670775830745697, "learning_rate": 
3.6553524804177547e-06, "loss": 0.7797, "step": 6490 }, { "epoch": 0.26901239172779645, "grad_norm": 0.4109322428703308, "learning_rate": 3.6551452608893865e-06, "loss": 0.6995, "step": 6491 }, { "epoch": 0.2690538356334701, "grad_norm": 0.41405197978019714, "learning_rate": 3.654938041361018e-06, "loss": 0.7074, "step": 6492 }, { "epoch": 0.26909527953914375, "grad_norm": 0.40317216515541077, "learning_rate": 3.65473082183265e-06, "loss": 0.6707, "step": 6493 }, { "epoch": 0.2691367234448174, "grad_norm": 0.41362228989601135, "learning_rate": 3.654523602304281e-06, "loss": 0.7036, "step": 6494 }, { "epoch": 0.2691781673504911, "grad_norm": 0.4566660523414612, "learning_rate": 3.6543163827759133e-06, "loss": 0.7386, "step": 6495 }, { "epoch": 0.2692196112561648, "grad_norm": 0.411600798368454, "learning_rate": 3.6541091632475443e-06, "loss": 0.7126, "step": 6496 }, { "epoch": 0.26926105516183846, "grad_norm": 0.41716650128364563, "learning_rate": 3.6539019437191765e-06, "loss": 0.7178, "step": 6497 }, { "epoch": 0.26930249906751214, "grad_norm": 0.394574373960495, "learning_rate": 3.6536947241908083e-06, "loss": 0.731, "step": 6498 }, { "epoch": 0.2693439429731858, "grad_norm": 0.39140281081199646, "learning_rate": 3.6534875046624397e-06, "loss": 0.7139, "step": 6499 }, { "epoch": 0.2693853868788595, "grad_norm": 0.4075028598308563, "learning_rate": 3.6532802851340715e-06, "loss": 0.7336, "step": 6500 }, { "epoch": 0.2694268307845331, "grad_norm": 0.41810670495033264, "learning_rate": 3.653073065605703e-06, "loss": 0.7471, "step": 6501 }, { "epoch": 0.2694682746902068, "grad_norm": 0.4515833854675293, "learning_rate": 3.6528658460773347e-06, "loss": 0.7764, "step": 6502 }, { "epoch": 0.26950971859588047, "grad_norm": 0.4228217601776123, "learning_rate": 3.652658626548966e-06, "loss": 0.8015, "step": 6503 }, { "epoch": 0.26955116250155414, "grad_norm": 0.41787222027778625, "learning_rate": 3.652451407020598e-06, "loss": 0.6761, "step": 6504 }, { "epoch": 
0.2695926064072278, "grad_norm": 0.45666253566741943, "learning_rate": 3.6522441874922293e-06, "loss": 0.7216, "step": 6505 }, { "epoch": 0.2696340503129015, "grad_norm": 0.4375092089176178, "learning_rate": 3.652036967963861e-06, "loss": 0.7053, "step": 6506 }, { "epoch": 0.2696754942185752, "grad_norm": 0.44457709789276123, "learning_rate": 3.651829748435493e-06, "loss": 0.7341, "step": 6507 }, { "epoch": 0.26971693812424885, "grad_norm": 0.42417269945144653, "learning_rate": 3.6516225289071243e-06, "loss": 0.7122, "step": 6508 }, { "epoch": 0.26975838202992247, "grad_norm": 0.418937087059021, "learning_rate": 3.651415309378756e-06, "loss": 0.7085, "step": 6509 }, { "epoch": 0.26979982593559615, "grad_norm": 0.40689074993133545, "learning_rate": 3.6512080898503875e-06, "loss": 0.6692, "step": 6510 }, { "epoch": 0.2698412698412698, "grad_norm": 0.42415565252304077, "learning_rate": 3.6510008703220197e-06, "loss": 0.7255, "step": 6511 }, { "epoch": 0.2698827137469435, "grad_norm": 0.3947333097457886, "learning_rate": 3.6507936507936507e-06, "loss": 0.7051, "step": 6512 }, { "epoch": 0.2699241576526172, "grad_norm": 0.375267893075943, "learning_rate": 3.650586431265283e-06, "loss": 0.6907, "step": 6513 }, { "epoch": 0.26996560155829086, "grad_norm": 0.4119984805583954, "learning_rate": 3.6503792117369143e-06, "loss": 0.7532, "step": 6514 }, { "epoch": 0.27000704546396453, "grad_norm": 0.4136695861816406, "learning_rate": 3.650171992208546e-06, "loss": 0.7468, "step": 6515 }, { "epoch": 0.2700484893696382, "grad_norm": 0.41449809074401855, "learning_rate": 3.649964772680178e-06, "loss": 0.7236, "step": 6516 }, { "epoch": 0.2700899332753119, "grad_norm": 0.41555920243263245, "learning_rate": 3.6497575531518093e-06, "loss": 0.7454, "step": 6517 }, { "epoch": 0.2701313771809855, "grad_norm": 0.43922826647758484, "learning_rate": 3.649550333623441e-06, "loss": 0.7012, "step": 6518 }, { "epoch": 0.2701728210866592, "grad_norm": 0.3831327259540558, "learning_rate": 
3.6493431140950725e-06, "loss": 0.6974, "step": 6519 }, { "epoch": 0.27021426499233286, "grad_norm": 0.4077354967594147, "learning_rate": 3.6491358945667043e-06, "loss": 0.647, "step": 6520 }, { "epoch": 0.27025570889800654, "grad_norm": 0.4273907244205475, "learning_rate": 3.6489286750383357e-06, "loss": 0.7275, "step": 6521 }, { "epoch": 0.2702971528036802, "grad_norm": 0.4297737777233124, "learning_rate": 3.6487214555099675e-06, "loss": 0.7585, "step": 6522 }, { "epoch": 0.2703385967093539, "grad_norm": 0.4168475866317749, "learning_rate": 3.648514235981599e-06, "loss": 0.7, "step": 6523 }, { "epoch": 0.2703800406150276, "grad_norm": 0.4295629560947418, "learning_rate": 3.6483070164532307e-06, "loss": 0.7021, "step": 6524 }, { "epoch": 0.27042148452070125, "grad_norm": 0.41213661432266235, "learning_rate": 3.6480997969248625e-06, "loss": 0.741, "step": 6525 }, { "epoch": 0.2704629284263749, "grad_norm": 0.435043603181839, "learning_rate": 3.647892577396494e-06, "loss": 0.6699, "step": 6526 }, { "epoch": 0.27050437233204855, "grad_norm": 0.3998495638370514, "learning_rate": 3.647685357868126e-06, "loss": 0.7452, "step": 6527 }, { "epoch": 0.2705458162377222, "grad_norm": 0.3891395032405853, "learning_rate": 3.647478138339757e-06, "loss": 0.696, "step": 6528 }, { "epoch": 0.2705872601433959, "grad_norm": 0.40130409598350525, "learning_rate": 3.6472709188113893e-06, "loss": 0.6704, "step": 6529 }, { "epoch": 0.2706287040490696, "grad_norm": 0.4047229588031769, "learning_rate": 3.6470636992830203e-06, "loss": 0.713, "step": 6530 }, { "epoch": 0.27067014795474326, "grad_norm": 0.43221205472946167, "learning_rate": 3.6468564797546525e-06, "loss": 0.7435, "step": 6531 }, { "epoch": 0.27071159186041693, "grad_norm": 0.4334634244441986, "learning_rate": 3.6466492602262843e-06, "loss": 0.7073, "step": 6532 }, { "epoch": 0.2707530357660906, "grad_norm": 0.41183388233184814, "learning_rate": 3.6464420406979157e-06, "loss": 0.6488, "step": 6533 }, { "epoch": 
0.2707944796717643, "grad_norm": 0.42317378520965576, "learning_rate": 3.6462348211695475e-06, "loss": 0.696, "step": 6534 }, { "epoch": 0.27083592357743796, "grad_norm": 0.47166845202445984, "learning_rate": 3.646027601641179e-06, "loss": 0.7548, "step": 6535 }, { "epoch": 0.2708773674831116, "grad_norm": 0.40081077814102173, "learning_rate": 3.6458203821128107e-06, "loss": 0.7388, "step": 6536 }, { "epoch": 0.27091881138878526, "grad_norm": 0.4226260185241699, "learning_rate": 3.645613162584442e-06, "loss": 0.714, "step": 6537 }, { "epoch": 0.27096025529445894, "grad_norm": 0.4098697304725647, "learning_rate": 3.645405943056074e-06, "loss": 0.6904, "step": 6538 }, { "epoch": 0.2710016992001326, "grad_norm": 0.401472806930542, "learning_rate": 3.6451987235277053e-06, "loss": 0.7168, "step": 6539 }, { "epoch": 0.2710431431058063, "grad_norm": 0.4249964654445648, "learning_rate": 3.644991503999337e-06, "loss": 0.6854, "step": 6540 }, { "epoch": 0.27108458701147997, "grad_norm": 0.41084668040275574, "learning_rate": 3.644784284470969e-06, "loss": 0.6895, "step": 6541 }, { "epoch": 0.27112603091715365, "grad_norm": 0.42162275314331055, "learning_rate": 3.6445770649426003e-06, "loss": 0.7118, "step": 6542 }, { "epoch": 0.2711674748228273, "grad_norm": 0.40232178568840027, "learning_rate": 3.644369845414232e-06, "loss": 0.7129, "step": 6543 }, { "epoch": 0.27120891872850095, "grad_norm": 0.42122000455856323, "learning_rate": 3.6441626258858635e-06, "loss": 0.6743, "step": 6544 }, { "epoch": 0.2712503626341746, "grad_norm": 0.42902329564094543, "learning_rate": 3.6439554063574957e-06, "loss": 0.7581, "step": 6545 }, { "epoch": 0.2712918065398483, "grad_norm": 0.4251077473163605, "learning_rate": 3.6437481868291267e-06, "loss": 0.6899, "step": 6546 }, { "epoch": 0.271333250445522, "grad_norm": 0.44206947088241577, "learning_rate": 3.643540967300759e-06, "loss": 0.7224, "step": 6547 }, { "epoch": 0.27137469435119566, "grad_norm": 0.409263014793396, "learning_rate": 
3.6433337477723903e-06, "loss": 0.7388, "step": 6548 }, { "epoch": 0.27141613825686933, "grad_norm": 0.40656447410583496, "learning_rate": 3.643126528244022e-06, "loss": 0.6951, "step": 6549 }, { "epoch": 0.271457582162543, "grad_norm": 0.4202533960342407, "learning_rate": 3.642919308715654e-06, "loss": 0.7252, "step": 6550 }, { "epoch": 0.2714990260682167, "grad_norm": 0.39591488242149353, "learning_rate": 3.6427120891872853e-06, "loss": 0.7678, "step": 6551 }, { "epoch": 0.27154046997389036, "grad_norm": 0.4272144138813019, "learning_rate": 3.642504869658917e-06, "loss": 0.7366, "step": 6552 }, { "epoch": 0.271581913879564, "grad_norm": 0.38607990741729736, "learning_rate": 3.6422976501305485e-06, "loss": 0.6488, "step": 6553 }, { "epoch": 0.27162335778523766, "grad_norm": 0.43631815910339355, "learning_rate": 3.6420904306021803e-06, "loss": 0.6951, "step": 6554 }, { "epoch": 0.27166480169091134, "grad_norm": 0.4450920522212982, "learning_rate": 3.6418832110738117e-06, "loss": 0.6943, "step": 6555 }, { "epoch": 0.271706245596585, "grad_norm": 0.44615814089775085, "learning_rate": 3.6416759915454435e-06, "loss": 0.7791, "step": 6556 }, { "epoch": 0.2717476895022587, "grad_norm": 0.4156721532344818, "learning_rate": 3.641468772017075e-06, "loss": 0.7034, "step": 6557 }, { "epoch": 0.27178913340793237, "grad_norm": 0.41198232769966125, "learning_rate": 3.6412615524887067e-06, "loss": 0.7192, "step": 6558 }, { "epoch": 0.27183057731360605, "grad_norm": 0.412671834230423, "learning_rate": 3.6410543329603385e-06, "loss": 0.6755, "step": 6559 }, { "epoch": 0.2718720212192797, "grad_norm": 0.4309196174144745, "learning_rate": 3.64084711343197e-06, "loss": 0.6851, "step": 6560 }, { "epoch": 0.2719134651249534, "grad_norm": 0.3754064440727234, "learning_rate": 3.640639893903602e-06, "loss": 0.6581, "step": 6561 }, { "epoch": 0.271954909030627, "grad_norm": 0.40977543592453003, "learning_rate": 3.640432674375233e-06, "loss": 0.6667, "step": 6562 }, { "epoch": 
0.2719963529363007, "grad_norm": 0.43328800797462463, "learning_rate": 3.6402254548468653e-06, "loss": 0.6909, "step": 6563 }, { "epoch": 0.2720377968419744, "grad_norm": 0.4237446188926697, "learning_rate": 3.6400182353184963e-06, "loss": 0.7322, "step": 6564 }, { "epoch": 0.27207924074764805, "grad_norm": 0.3954063057899475, "learning_rate": 3.6398110157901285e-06, "loss": 0.6956, "step": 6565 }, { "epoch": 0.27212068465332173, "grad_norm": 0.4595375657081604, "learning_rate": 3.63960379626176e-06, "loss": 0.6622, "step": 6566 }, { "epoch": 0.2721621285589954, "grad_norm": 0.4188733696937561, "learning_rate": 3.6393965767333917e-06, "loss": 0.6924, "step": 6567 }, { "epoch": 0.2722035724646691, "grad_norm": 0.40549731254577637, "learning_rate": 3.6391893572050235e-06, "loss": 0.6572, "step": 6568 }, { "epoch": 0.27224501637034276, "grad_norm": 0.4261831045150757, "learning_rate": 3.638982137676655e-06, "loss": 0.7046, "step": 6569 }, { "epoch": 0.2722864602760164, "grad_norm": 0.4417162239551544, "learning_rate": 3.6387749181482867e-06, "loss": 0.688, "step": 6570 }, { "epoch": 0.27232790418169006, "grad_norm": 0.4300711452960968, "learning_rate": 3.638567698619918e-06, "loss": 0.7188, "step": 6571 }, { "epoch": 0.27236934808736374, "grad_norm": 0.40618598461151123, "learning_rate": 3.63836047909155e-06, "loss": 0.718, "step": 6572 }, { "epoch": 0.2724107919930374, "grad_norm": 0.41585955023765564, "learning_rate": 3.6381532595631813e-06, "loss": 0.7012, "step": 6573 }, { "epoch": 0.2724522358987111, "grad_norm": 0.39511069655418396, "learning_rate": 3.637946040034813e-06, "loss": 0.7192, "step": 6574 }, { "epoch": 0.27249367980438477, "grad_norm": 0.4753815829753876, "learning_rate": 3.6377388205064445e-06, "loss": 0.718, "step": 6575 }, { "epoch": 0.27253512371005845, "grad_norm": 0.3823046088218689, "learning_rate": 3.6375316009780763e-06, "loss": 0.6702, "step": 6576 }, { "epoch": 0.2725765676157321, "grad_norm": 0.4095743000507355, "learning_rate": 
3.637324381449708e-06, "loss": 0.7222, "step": 6577 }, { "epoch": 0.2726180115214058, "grad_norm": 0.4372406303882599, "learning_rate": 3.6371171619213395e-06, "loss": 0.8042, "step": 6578 }, { "epoch": 0.2726594554270794, "grad_norm": 0.4187566339969635, "learning_rate": 3.6369099423929718e-06, "loss": 0.7112, "step": 6579 }, { "epoch": 0.2727008993327531, "grad_norm": 0.433600515127182, "learning_rate": 3.6367027228646027e-06, "loss": 0.7097, "step": 6580 }, { "epoch": 0.2727423432384268, "grad_norm": 0.4430500864982605, "learning_rate": 3.636495503336235e-06, "loss": 0.6813, "step": 6581 }, { "epoch": 0.27278378714410045, "grad_norm": 0.42461520433425903, "learning_rate": 3.6362882838078663e-06, "loss": 0.7238, "step": 6582 }, { "epoch": 0.27282523104977413, "grad_norm": 0.3987359404563904, "learning_rate": 3.636081064279498e-06, "loss": 0.6707, "step": 6583 }, { "epoch": 0.2728666749554478, "grad_norm": 0.4060910940170288, "learning_rate": 3.6358738447511295e-06, "loss": 0.7251, "step": 6584 }, { "epoch": 0.2729081188611215, "grad_norm": 0.415551096200943, "learning_rate": 3.6356666252227613e-06, "loss": 0.7188, "step": 6585 }, { "epoch": 0.27294956276679516, "grad_norm": 0.3749302625656128, "learning_rate": 3.635459405694393e-06, "loss": 0.6985, "step": 6586 }, { "epoch": 0.27299100667246884, "grad_norm": 0.4277219772338867, "learning_rate": 3.6352521861660245e-06, "loss": 0.7454, "step": 6587 }, { "epoch": 0.27303245057814246, "grad_norm": 0.3968074917793274, "learning_rate": 3.6350449666376563e-06, "loss": 0.7117, "step": 6588 }, { "epoch": 0.27307389448381614, "grad_norm": 0.43151921033859253, "learning_rate": 3.6348377471092877e-06, "loss": 0.7217, "step": 6589 }, { "epoch": 0.2731153383894898, "grad_norm": 0.4035494327545166, "learning_rate": 3.6346305275809195e-06, "loss": 0.6826, "step": 6590 }, { "epoch": 0.2731567822951635, "grad_norm": 0.4098740518093109, "learning_rate": 3.634423308052551e-06, "loss": 0.6539, "step": 6591 }, { "epoch": 
0.27319822620083717, "grad_norm": 0.39176657795906067, "learning_rate": 3.6342160885241827e-06, "loss": 0.6987, "step": 6592 }, { "epoch": 0.27323967010651085, "grad_norm": 0.46942436695098877, "learning_rate": 3.6340088689958145e-06, "loss": 0.7546, "step": 6593 }, { "epoch": 0.2732811140121845, "grad_norm": 0.43935540318489075, "learning_rate": 3.633801649467446e-06, "loss": 0.7399, "step": 6594 }, { "epoch": 0.2733225579178582, "grad_norm": 0.4395289123058319, "learning_rate": 3.633594429939078e-06, "loss": 0.7263, "step": 6595 }, { "epoch": 0.2733640018235319, "grad_norm": 0.4925175607204437, "learning_rate": 3.633387210410709e-06, "loss": 0.7742, "step": 6596 }, { "epoch": 0.2734054457292055, "grad_norm": 0.433120459318161, "learning_rate": 3.6331799908823414e-06, "loss": 0.7405, "step": 6597 }, { "epoch": 0.2734468896348792, "grad_norm": 0.42321160435676575, "learning_rate": 3.6329727713539727e-06, "loss": 0.7244, "step": 6598 }, { "epoch": 0.27348833354055285, "grad_norm": 0.41757932305336, "learning_rate": 3.6327655518256045e-06, "loss": 0.692, "step": 6599 }, { "epoch": 0.27352977744622653, "grad_norm": 0.41712823510169983, "learning_rate": 3.632558332297236e-06, "loss": 0.6827, "step": 6600 }, { "epoch": 0.2735712213519002, "grad_norm": 0.4401552677154541, "learning_rate": 3.6323511127688677e-06, "loss": 0.7451, "step": 6601 }, { "epoch": 0.2736126652575739, "grad_norm": 0.37812578678131104, "learning_rate": 3.6321438932404996e-06, "loss": 0.6548, "step": 6602 }, { "epoch": 0.27365410916324756, "grad_norm": 0.40258845686912537, "learning_rate": 3.631936673712131e-06, "loss": 0.7045, "step": 6603 }, { "epoch": 0.27369555306892124, "grad_norm": 0.4452533721923828, "learning_rate": 3.6317294541837627e-06, "loss": 0.7125, "step": 6604 }, { "epoch": 0.27373699697459486, "grad_norm": 0.4513268768787384, "learning_rate": 3.631522234655394e-06, "loss": 0.7529, "step": 6605 }, { "epoch": 0.27377844088026854, "grad_norm": 0.4171219766139984, "learning_rate": 
3.631315015127026e-06, "loss": 0.7478, "step": 6606 }, { "epoch": 0.2738198847859422, "grad_norm": 0.4141135811805725, "learning_rate": 3.6311077955986573e-06, "loss": 0.6663, "step": 6607 }, { "epoch": 0.2738613286916159, "grad_norm": 0.39465829730033875, "learning_rate": 3.630900576070289e-06, "loss": 0.6835, "step": 6608 }, { "epoch": 0.27390277259728957, "grad_norm": 0.42372745275497437, "learning_rate": 3.6306933565419205e-06, "loss": 0.6804, "step": 6609 }, { "epoch": 0.27394421650296324, "grad_norm": 0.40289783477783203, "learning_rate": 3.6304861370135523e-06, "loss": 0.6603, "step": 6610 }, { "epoch": 0.2739856604086369, "grad_norm": 0.43755820393562317, "learning_rate": 3.630278917485184e-06, "loss": 0.7361, "step": 6611 }, { "epoch": 0.2740271043143106, "grad_norm": 0.4324570894241333, "learning_rate": 3.6300716979568155e-06, "loss": 0.7397, "step": 6612 }, { "epoch": 0.2740685482199843, "grad_norm": 0.4048880934715271, "learning_rate": 3.6298644784284478e-06, "loss": 0.7668, "step": 6613 }, { "epoch": 0.2741099921256579, "grad_norm": 0.41328638792037964, "learning_rate": 3.6296572589000787e-06, "loss": 0.645, "step": 6614 }, { "epoch": 0.2741514360313316, "grad_norm": 0.4019462466239929, "learning_rate": 3.629450039371711e-06, "loss": 0.7576, "step": 6615 }, { "epoch": 0.27419287993700525, "grad_norm": 0.39426904916763306, "learning_rate": 3.6292428198433423e-06, "loss": 0.6985, "step": 6616 }, { "epoch": 0.27423432384267893, "grad_norm": 0.40611618757247925, "learning_rate": 3.629035600314974e-06, "loss": 0.7344, "step": 6617 }, { "epoch": 0.2742757677483526, "grad_norm": 0.4193512499332428, "learning_rate": 3.6288283807866055e-06, "loss": 0.7065, "step": 6618 }, { "epoch": 0.2743172116540263, "grad_norm": 0.3867458403110504, "learning_rate": 3.6286211612582373e-06, "loss": 0.6637, "step": 6619 }, { "epoch": 0.27435865555969996, "grad_norm": 0.441089004278183, "learning_rate": 3.628413941729869e-06, "loss": 0.7155, "step": 6620 }, { "epoch": 
0.27440009946537364, "grad_norm": 0.417917400598526, "learning_rate": 3.6282067222015005e-06, "loss": 0.6486, "step": 6621 }, { "epoch": 0.2744415433710473, "grad_norm": 0.45735424757003784, "learning_rate": 3.6279995026731323e-06, "loss": 0.7744, "step": 6622 }, { "epoch": 0.27448298727672094, "grad_norm": 0.3985811173915863, "learning_rate": 3.6277922831447637e-06, "loss": 0.7168, "step": 6623 }, { "epoch": 0.2745244311823946, "grad_norm": 0.4091930389404297, "learning_rate": 3.6275850636163955e-06, "loss": 0.7261, "step": 6624 }, { "epoch": 0.2745658750880683, "grad_norm": 0.39693740010261536, "learning_rate": 3.627377844088027e-06, "loss": 0.7568, "step": 6625 }, { "epoch": 0.27460731899374197, "grad_norm": 0.38955074548721313, "learning_rate": 3.6271706245596587e-06, "loss": 0.6794, "step": 6626 }, { "epoch": 0.27464876289941564, "grad_norm": 0.4055379331111908, "learning_rate": 3.62696340503129e-06, "loss": 0.7133, "step": 6627 }, { "epoch": 0.2746902068050893, "grad_norm": 0.47656673192977905, "learning_rate": 3.626756185502922e-06, "loss": 0.7742, "step": 6628 }, { "epoch": 0.274731650710763, "grad_norm": 0.3985002040863037, "learning_rate": 3.626548965974554e-06, "loss": 0.7214, "step": 6629 }, { "epoch": 0.2747730946164367, "grad_norm": 0.3947330415248871, "learning_rate": 3.626341746446185e-06, "loss": 0.7168, "step": 6630 }, { "epoch": 0.2748145385221103, "grad_norm": 0.4155828356742859, "learning_rate": 3.6261345269178174e-06, "loss": 0.6669, "step": 6631 }, { "epoch": 0.274855982427784, "grad_norm": 0.4428500533103943, "learning_rate": 3.6259273073894487e-06, "loss": 0.7148, "step": 6632 }, { "epoch": 0.27489742633345765, "grad_norm": 0.3943454921245575, "learning_rate": 3.6257200878610806e-06, "loss": 0.6746, "step": 6633 }, { "epoch": 0.2749388702391313, "grad_norm": 0.454054594039917, "learning_rate": 3.625512868332712e-06, "loss": 0.7258, "step": 6634 }, { "epoch": 0.274980314144805, "grad_norm": 0.41878741979599, "learning_rate": 
3.6253056488043437e-06, "loss": 0.7151, "step": 6635 }, { "epoch": 0.2750217580504787, "grad_norm": 0.4162358045578003, "learning_rate": 3.625098429275975e-06, "loss": 0.7238, "step": 6636 }, { "epoch": 0.27506320195615236, "grad_norm": 0.46346625685691833, "learning_rate": 3.624891209747607e-06, "loss": 0.7388, "step": 6637 }, { "epoch": 0.27510464586182604, "grad_norm": 0.4319714307785034, "learning_rate": 3.6246839902192388e-06, "loss": 0.7329, "step": 6638 }, { "epoch": 0.2751460897674997, "grad_norm": 0.41148892045021057, "learning_rate": 3.62447677069087e-06, "loss": 0.7036, "step": 6639 }, { "epoch": 0.27518753367317333, "grad_norm": 0.4208109676837921, "learning_rate": 3.624269551162502e-06, "loss": 0.6866, "step": 6640 }, { "epoch": 0.275228977578847, "grad_norm": 0.4079609513282776, "learning_rate": 3.6240623316341333e-06, "loss": 0.6829, "step": 6641 }, { "epoch": 0.2752704214845207, "grad_norm": 0.39932799339294434, "learning_rate": 3.623855112105765e-06, "loss": 0.6521, "step": 6642 }, { "epoch": 0.27531186539019437, "grad_norm": 0.4090936481952667, "learning_rate": 3.6236478925773965e-06, "loss": 0.7455, "step": 6643 }, { "epoch": 0.27535330929586804, "grad_norm": 0.3901226222515106, "learning_rate": 3.6234406730490283e-06, "loss": 0.7371, "step": 6644 }, { "epoch": 0.2753947532015417, "grad_norm": 0.4074028432369232, "learning_rate": 3.6232334535206606e-06, "loss": 0.7798, "step": 6645 }, { "epoch": 0.2754361971072154, "grad_norm": 0.45360350608825684, "learning_rate": 3.6230262339922915e-06, "loss": 0.7544, "step": 6646 }, { "epoch": 0.2754776410128891, "grad_norm": 0.45333215594291687, "learning_rate": 3.6228190144639238e-06, "loss": 0.7432, "step": 6647 }, { "epoch": 0.27551908491856275, "grad_norm": 0.42594078183174133, "learning_rate": 3.6226117949355547e-06, "loss": 0.66, "step": 6648 }, { "epoch": 0.2755605288242364, "grad_norm": 0.41911768913269043, "learning_rate": 3.622404575407187e-06, "loss": 0.7388, "step": 6649 }, { "epoch": 
0.27560197272991005, "grad_norm": 0.4502609670162201, "learning_rate": 3.6221973558788183e-06, "loss": 0.6809, "step": 6650 }, { "epoch": 0.2756434166355837, "grad_norm": 0.4637426733970642, "learning_rate": 3.62199013635045e-06, "loss": 0.6959, "step": 6651 }, { "epoch": 0.2756848605412574, "grad_norm": 0.4150581359863281, "learning_rate": 3.6217829168220815e-06, "loss": 0.7444, "step": 6652 }, { "epoch": 0.2757263044469311, "grad_norm": 0.432466059923172, "learning_rate": 3.6215756972937133e-06, "loss": 0.7079, "step": 6653 }, { "epoch": 0.27576774835260476, "grad_norm": 0.38473695516586304, "learning_rate": 3.621368477765345e-06, "loss": 0.6846, "step": 6654 }, { "epoch": 0.27580919225827843, "grad_norm": 0.4081288278102875, "learning_rate": 3.6211612582369765e-06, "loss": 0.6968, "step": 6655 }, { "epoch": 0.2758506361639521, "grad_norm": 0.43915748596191406, "learning_rate": 3.6209540387086084e-06, "loss": 0.7622, "step": 6656 }, { "epoch": 0.2758920800696258, "grad_norm": 0.42115843296051025, "learning_rate": 3.6207468191802397e-06, "loss": 0.749, "step": 6657 }, { "epoch": 0.2759335239752994, "grad_norm": 0.4176490008831024, "learning_rate": 3.6205395996518715e-06, "loss": 0.7004, "step": 6658 }, { "epoch": 0.2759749678809731, "grad_norm": 0.4029833972454071, "learning_rate": 3.620332380123503e-06, "loss": 0.636, "step": 6659 }, { "epoch": 0.27601641178664676, "grad_norm": 0.42111310362815857, "learning_rate": 3.6201251605951347e-06, "loss": 0.7336, "step": 6660 }, { "epoch": 0.27605785569232044, "grad_norm": 0.432941734790802, "learning_rate": 3.619917941066766e-06, "loss": 0.7196, "step": 6661 }, { "epoch": 0.2760992995979941, "grad_norm": 0.4619463086128235, "learning_rate": 3.619710721538398e-06, "loss": 0.7312, "step": 6662 }, { "epoch": 0.2761407435036678, "grad_norm": 0.41966474056243896, "learning_rate": 3.61950350201003e-06, "loss": 0.7485, "step": 6663 }, { "epoch": 0.2761821874093415, "grad_norm": 0.4593070149421692, "learning_rate": 
3.619296282481661e-06, "loss": 0.707, "step": 6664 }, { "epoch": 0.27622363131501515, "grad_norm": 0.4630683660507202, "learning_rate": 3.6190890629532934e-06, "loss": 0.7207, "step": 6665 }, { "epoch": 0.27626507522068877, "grad_norm": 0.5042412281036377, "learning_rate": 3.6188818434249248e-06, "loss": 0.7375, "step": 6666 }, { "epoch": 0.27630651912636245, "grad_norm": 0.39843347668647766, "learning_rate": 3.6186746238965566e-06, "loss": 0.6777, "step": 6667 }, { "epoch": 0.2763479630320361, "grad_norm": 0.4366994798183441, "learning_rate": 3.618467404368188e-06, "loss": 0.7119, "step": 6668 }, { "epoch": 0.2763894069377098, "grad_norm": 0.4382764399051666, "learning_rate": 3.6182601848398198e-06, "loss": 0.7378, "step": 6669 }, { "epoch": 0.2764308508433835, "grad_norm": 0.4502507746219635, "learning_rate": 3.618052965311451e-06, "loss": 0.7451, "step": 6670 }, { "epoch": 0.27647229474905716, "grad_norm": 0.42975178360939026, "learning_rate": 3.617845745783083e-06, "loss": 0.7288, "step": 6671 }, { "epoch": 0.27651373865473083, "grad_norm": 0.4352637827396393, "learning_rate": 3.6176385262547148e-06, "loss": 0.7229, "step": 6672 }, { "epoch": 0.2765551825604045, "grad_norm": 0.4746386706829071, "learning_rate": 3.617431306726346e-06, "loss": 0.7759, "step": 6673 }, { "epoch": 0.2765966264660782, "grad_norm": 0.4371624290943146, "learning_rate": 3.617224087197978e-06, "loss": 0.7455, "step": 6674 }, { "epoch": 0.2766380703717518, "grad_norm": 0.4262458086013794, "learning_rate": 3.6170168676696093e-06, "loss": 0.6592, "step": 6675 }, { "epoch": 0.2766795142774255, "grad_norm": 0.44028881192207336, "learning_rate": 3.616809648141241e-06, "loss": 0.7246, "step": 6676 }, { "epoch": 0.27672095818309916, "grad_norm": 0.4979361891746521, "learning_rate": 3.6166024286128725e-06, "loss": 0.7272, "step": 6677 }, { "epoch": 0.27676240208877284, "grad_norm": 0.4421960711479187, "learning_rate": 3.6163952090845043e-06, "loss": 0.7173, "step": 6678 }, { "epoch": 
0.2768038459944465, "grad_norm": 0.4271281063556671, "learning_rate": 3.6161879895561357e-06, "loss": 0.7402, "step": 6679 }, { "epoch": 0.2768452899001202, "grad_norm": 0.4092029631137848, "learning_rate": 3.6159807700277675e-06, "loss": 0.6873, "step": 6680 }, { "epoch": 0.27688673380579387, "grad_norm": 0.3896249830722809, "learning_rate": 3.6157735504993998e-06, "loss": 0.6714, "step": 6681 }, { "epoch": 0.27692817771146755, "grad_norm": 0.429233580827713, "learning_rate": 3.6155663309710307e-06, "loss": 0.7041, "step": 6682 }, { "epoch": 0.2769696216171412, "grad_norm": 0.41778188943862915, "learning_rate": 3.615359111442663e-06, "loss": 0.713, "step": 6683 }, { "epoch": 0.27701106552281485, "grad_norm": 0.4221159517765045, "learning_rate": 3.6151518919142944e-06, "loss": 0.686, "step": 6684 }, { "epoch": 0.2770525094284885, "grad_norm": 0.41951772570610046, "learning_rate": 3.614944672385926e-06, "loss": 0.7151, "step": 6685 }, { "epoch": 0.2770939533341622, "grad_norm": 0.40577229857444763, "learning_rate": 3.6147374528575575e-06, "loss": 0.72, "step": 6686 }, { "epoch": 0.2771353972398359, "grad_norm": 0.41571900248527527, "learning_rate": 3.6145302333291894e-06, "loss": 0.7344, "step": 6687 }, { "epoch": 0.27717684114550956, "grad_norm": 0.4272485673427582, "learning_rate": 3.6143230138008207e-06, "loss": 0.733, "step": 6688 }, { "epoch": 0.27721828505118323, "grad_norm": 0.41691747307777405, "learning_rate": 3.6141157942724525e-06, "loss": 0.7324, "step": 6689 }, { "epoch": 0.2772597289568569, "grad_norm": 0.42767179012298584, "learning_rate": 3.6139085747440844e-06, "loss": 0.72, "step": 6690 }, { "epoch": 0.2773011728625306, "grad_norm": 0.40414050221443176, "learning_rate": 3.6137013552157157e-06, "loss": 0.7092, "step": 6691 }, { "epoch": 0.2773426167682042, "grad_norm": 0.4483755826950073, "learning_rate": 3.6134941356873476e-06, "loss": 0.7424, "step": 6692 }, { "epoch": 0.2773840606738779, "grad_norm": 0.38455766439437866, "learning_rate": 
3.613286916158979e-06, "loss": 0.7007, "step": 6693 }, { "epoch": 0.27742550457955156, "grad_norm": 0.39429235458374023, "learning_rate": 3.6130796966306107e-06, "loss": 0.6873, "step": 6694 }, { "epoch": 0.27746694848522524, "grad_norm": 0.46908038854599, "learning_rate": 3.612872477102242e-06, "loss": 0.6982, "step": 6695 }, { "epoch": 0.2775083923908989, "grad_norm": 0.3853103816509247, "learning_rate": 3.612665257573874e-06, "loss": 0.6516, "step": 6696 }, { "epoch": 0.2775498362965726, "grad_norm": 0.3900322914123535, "learning_rate": 3.6124580380455053e-06, "loss": 0.6677, "step": 6697 }, { "epoch": 0.27759128020224627, "grad_norm": 0.41675224900245667, "learning_rate": 3.612250818517137e-06, "loss": 0.719, "step": 6698 }, { "epoch": 0.27763272410791995, "grad_norm": 0.4015946388244629, "learning_rate": 3.6120435989887694e-06, "loss": 0.6934, "step": 6699 }, { "epoch": 0.2776741680135936, "grad_norm": 0.40435823798179626, "learning_rate": 3.6118363794604008e-06, "loss": 0.6809, "step": 6700 }, { "epoch": 0.27771561191926725, "grad_norm": 0.3971484303474426, "learning_rate": 3.6116291599320326e-06, "loss": 0.7065, "step": 6701 }, { "epoch": 0.2777570558249409, "grad_norm": 0.4255174994468689, "learning_rate": 3.611421940403664e-06, "loss": 0.7009, "step": 6702 }, { "epoch": 0.2777984997306146, "grad_norm": 0.41539624333381653, "learning_rate": 3.6112147208752958e-06, "loss": 0.7688, "step": 6703 }, { "epoch": 0.2778399436362883, "grad_norm": 0.4160275161266327, "learning_rate": 3.611007501346927e-06, "loss": 0.718, "step": 6704 }, { "epoch": 0.27788138754196196, "grad_norm": 0.4690508246421814, "learning_rate": 3.610800281818559e-06, "loss": 0.7031, "step": 6705 }, { "epoch": 0.27792283144763563, "grad_norm": 0.40456023812294006, "learning_rate": 3.6105930622901908e-06, "loss": 0.6951, "step": 6706 }, { "epoch": 0.2779642753533093, "grad_norm": 0.39568477869033813, "learning_rate": 3.610385842761822e-06, "loss": 0.7195, "step": 6707 }, { "epoch": 
0.278005719258983, "grad_norm": 0.41585689783096313, "learning_rate": 3.610178623233454e-06, "loss": 0.7043, "step": 6708 }, { "epoch": 0.27804716316465666, "grad_norm": 0.38726481795310974, "learning_rate": 3.6099714037050853e-06, "loss": 0.6836, "step": 6709 }, { "epoch": 0.2780886070703303, "grad_norm": 0.41844072937965393, "learning_rate": 3.609764184176717e-06, "loss": 0.7561, "step": 6710 }, { "epoch": 0.27813005097600396, "grad_norm": 0.41628044843673706, "learning_rate": 3.6095569646483485e-06, "loss": 0.6868, "step": 6711 }, { "epoch": 0.27817149488167764, "grad_norm": 0.408419132232666, "learning_rate": 3.6093497451199803e-06, "loss": 0.7351, "step": 6712 }, { "epoch": 0.2782129387873513, "grad_norm": 0.465655118227005, "learning_rate": 3.6091425255916117e-06, "loss": 0.7009, "step": 6713 }, { "epoch": 0.278254382693025, "grad_norm": 0.42494451999664307, "learning_rate": 3.6089353060632435e-06, "loss": 0.7006, "step": 6714 }, { "epoch": 0.27829582659869867, "grad_norm": 0.4616956114768982, "learning_rate": 3.6087280865348758e-06, "loss": 0.7642, "step": 6715 }, { "epoch": 0.27833727050437235, "grad_norm": 0.4089677631855011, "learning_rate": 3.6085208670065067e-06, "loss": 0.698, "step": 6716 }, { "epoch": 0.278378714410046, "grad_norm": 0.4243232011795044, "learning_rate": 3.608313647478139e-06, "loss": 0.7103, "step": 6717 }, { "epoch": 0.27842015831571965, "grad_norm": 0.39270323514938354, "learning_rate": 3.6081064279497704e-06, "loss": 0.677, "step": 6718 }, { "epoch": 0.2784616022213933, "grad_norm": 0.4386405646800995, "learning_rate": 3.607899208421402e-06, "loss": 0.693, "step": 6719 }, { "epoch": 0.278503046127067, "grad_norm": 0.4516928791999817, "learning_rate": 3.6076919888930336e-06, "loss": 0.7161, "step": 6720 }, { "epoch": 0.2785444900327407, "grad_norm": 0.43242600560188293, "learning_rate": 3.6074847693646654e-06, "loss": 0.7317, "step": 6721 }, { "epoch": 0.27858593393841435, "grad_norm": 0.4511552155017853, "learning_rate": 
3.6072775498362967e-06, "loss": 0.7212, "step": 6722 }, { "epoch": 0.27862737784408803, "grad_norm": 0.4081852436065674, "learning_rate": 3.6070703303079286e-06, "loss": 0.6965, "step": 6723 }, { "epoch": 0.2786688217497617, "grad_norm": 0.41443973779678345, "learning_rate": 3.6068631107795604e-06, "loss": 0.7186, "step": 6724 }, { "epoch": 0.2787102656554354, "grad_norm": 0.39324405789375305, "learning_rate": 3.6066558912511918e-06, "loss": 0.663, "step": 6725 }, { "epoch": 0.27875170956110906, "grad_norm": 0.43067094683647156, "learning_rate": 3.6064486717228236e-06, "loss": 0.7019, "step": 6726 }, { "epoch": 0.2787931534667827, "grad_norm": 0.43750375509262085, "learning_rate": 3.606241452194455e-06, "loss": 0.7188, "step": 6727 }, { "epoch": 0.27883459737245636, "grad_norm": 0.38726162910461426, "learning_rate": 3.6060342326660868e-06, "loss": 0.6537, "step": 6728 }, { "epoch": 0.27887604127813004, "grad_norm": 0.3972233533859253, "learning_rate": 3.605827013137718e-06, "loss": 0.7028, "step": 6729 }, { "epoch": 0.2789174851838037, "grad_norm": 0.4139542579650879, "learning_rate": 3.60561979360935e-06, "loss": 0.7017, "step": 6730 }, { "epoch": 0.2789589290894774, "grad_norm": 0.4411020874977112, "learning_rate": 3.6054125740809813e-06, "loss": 0.6698, "step": 6731 }, { "epoch": 0.27900037299515107, "grad_norm": 0.40187785029411316, "learning_rate": 3.605205354552613e-06, "loss": 0.6788, "step": 6732 }, { "epoch": 0.27904181690082475, "grad_norm": 0.41578540205955505, "learning_rate": 3.6049981350242454e-06, "loss": 0.681, "step": 6733 }, { "epoch": 0.2790832608064984, "grad_norm": 0.41805529594421387, "learning_rate": 3.6047909154958768e-06, "loss": 0.7115, "step": 6734 }, { "epoch": 0.2791247047121721, "grad_norm": 0.41423499584198, "learning_rate": 3.6045836959675086e-06, "loss": 0.6995, "step": 6735 }, { "epoch": 0.2791661486178457, "grad_norm": 0.43814992904663086, "learning_rate": 3.60437647643914e-06, "loss": 0.7383, "step": 6736 }, { "epoch": 
0.2792075925235194, "grad_norm": 0.4223230481147766, "learning_rate": 3.6041692569107718e-06, "loss": 0.7043, "step": 6737 }, { "epoch": 0.2792490364291931, "grad_norm": 0.47840362787246704, "learning_rate": 3.603962037382403e-06, "loss": 0.7167, "step": 6738 }, { "epoch": 0.27929048033486675, "grad_norm": 0.4244794249534607, "learning_rate": 3.603754817854035e-06, "loss": 0.646, "step": 6739 }, { "epoch": 0.27933192424054043, "grad_norm": 0.4094058871269226, "learning_rate": 3.6035475983256663e-06, "loss": 0.6912, "step": 6740 }, { "epoch": 0.2793733681462141, "grad_norm": 0.39633408188819885, "learning_rate": 3.603340378797298e-06, "loss": 0.7281, "step": 6741 }, { "epoch": 0.2794148120518878, "grad_norm": 0.4078458845615387, "learning_rate": 3.60313315926893e-06, "loss": 0.7327, "step": 6742 }, { "epoch": 0.27945625595756146, "grad_norm": 0.46389541029930115, "learning_rate": 3.6029259397405614e-06, "loss": 0.7939, "step": 6743 }, { "epoch": 0.27949769986323514, "grad_norm": 0.41833218932151794, "learning_rate": 3.602718720212193e-06, "loss": 0.739, "step": 6744 }, { "epoch": 0.27953914376890876, "grad_norm": 0.4221803545951843, "learning_rate": 3.6025115006838245e-06, "loss": 0.749, "step": 6745 }, { "epoch": 0.27958058767458244, "grad_norm": 0.3903099596500397, "learning_rate": 3.6023042811554564e-06, "loss": 0.7471, "step": 6746 }, { "epoch": 0.2796220315802561, "grad_norm": 0.4545293152332306, "learning_rate": 3.6020970616270877e-06, "loss": 0.6843, "step": 6747 }, { "epoch": 0.2796634754859298, "grad_norm": 0.4799537658691406, "learning_rate": 3.6018898420987196e-06, "loss": 0.7012, "step": 6748 }, { "epoch": 0.27970491939160347, "grad_norm": 0.4690294861793518, "learning_rate": 3.601682622570351e-06, "loss": 0.7593, "step": 6749 }, { "epoch": 0.27974636329727715, "grad_norm": 0.422396183013916, "learning_rate": 3.6014754030419827e-06, "loss": 0.7318, "step": 6750 }, { "epoch": 0.2797878072029508, "grad_norm": 0.4174599051475525, "learning_rate": 
3.601268183513615e-06, "loss": 0.6791, "step": 6751 }, { "epoch": 0.2798292511086245, "grad_norm": 0.4698418974876404, "learning_rate": 3.6010609639852464e-06, "loss": 0.7712, "step": 6752 }, { "epoch": 0.2798706950142981, "grad_norm": 0.42749327421188354, "learning_rate": 3.600853744456878e-06, "loss": 0.7096, "step": 6753 }, { "epoch": 0.2799121389199718, "grad_norm": 0.43996813893318176, "learning_rate": 3.6006465249285096e-06, "loss": 0.663, "step": 6754 }, { "epoch": 0.2799535828256455, "grad_norm": 0.4503937363624573, "learning_rate": 3.6004393054001414e-06, "loss": 0.7441, "step": 6755 }, { "epoch": 0.27999502673131915, "grad_norm": 0.4242357909679413, "learning_rate": 3.6002320858717728e-06, "loss": 0.7095, "step": 6756 }, { "epoch": 0.28003647063699283, "grad_norm": 0.3889995217323303, "learning_rate": 3.6000248663434046e-06, "loss": 0.6693, "step": 6757 }, { "epoch": 0.2800779145426665, "grad_norm": 0.4084670841693878, "learning_rate": 3.599817646815036e-06, "loss": 0.6809, "step": 6758 }, { "epoch": 0.2801193584483402, "grad_norm": 0.42046937346458435, "learning_rate": 3.5996104272866678e-06, "loss": 0.7136, "step": 6759 }, { "epoch": 0.28016080235401386, "grad_norm": 0.41826221346855164, "learning_rate": 3.5994032077582996e-06, "loss": 0.6919, "step": 6760 }, { "epoch": 0.28020224625968754, "grad_norm": 0.46046873927116394, "learning_rate": 3.599195988229931e-06, "loss": 0.7378, "step": 6761 }, { "epoch": 0.28024369016536116, "grad_norm": 0.40140989422798157, "learning_rate": 3.5989887687015628e-06, "loss": 0.687, "step": 6762 }, { "epoch": 0.28028513407103484, "grad_norm": 0.48575130105018616, "learning_rate": 3.598781549173194e-06, "loss": 0.7227, "step": 6763 }, { "epoch": 0.2803265779767085, "grad_norm": 0.41963785886764526, "learning_rate": 3.598574329644826e-06, "loss": 0.7074, "step": 6764 }, { "epoch": 0.2803680218823822, "grad_norm": 0.47318413853645325, "learning_rate": 3.5983671101164573e-06, "loss": 0.7209, "step": 6765 }, { "epoch": 
0.28040946578805587, "grad_norm": 0.41672495007514954, "learning_rate": 3.598159890588089e-06, "loss": 0.7214, "step": 6766 }, { "epoch": 0.28045090969372954, "grad_norm": 0.38916632533073425, "learning_rate": 3.5979526710597214e-06, "loss": 0.6873, "step": 6767 }, { "epoch": 0.2804923535994032, "grad_norm": 0.4564535319805145, "learning_rate": 3.5977454515313528e-06, "loss": 0.7455, "step": 6768 }, { "epoch": 0.2805337975050769, "grad_norm": 0.4205094873905182, "learning_rate": 3.5975382320029846e-06, "loss": 0.7141, "step": 6769 }, { "epoch": 0.2805752414107506, "grad_norm": 0.40840858221054077, "learning_rate": 3.597331012474616e-06, "loss": 0.7411, "step": 6770 }, { "epoch": 0.2806166853164242, "grad_norm": 0.4047994613647461, "learning_rate": 3.5971237929462478e-06, "loss": 0.7, "step": 6771 }, { "epoch": 0.2806581292220979, "grad_norm": 0.42194804549217224, "learning_rate": 3.596916573417879e-06, "loss": 0.6367, "step": 6772 }, { "epoch": 0.28069957312777155, "grad_norm": 0.4416385293006897, "learning_rate": 3.596709353889511e-06, "loss": 0.7307, "step": 6773 }, { "epoch": 0.28074101703344523, "grad_norm": 0.43495622277259827, "learning_rate": 3.5965021343611424e-06, "loss": 0.6785, "step": 6774 }, { "epoch": 0.2807824609391189, "grad_norm": 0.4550446569919586, "learning_rate": 3.596294914832774e-06, "loss": 0.7905, "step": 6775 }, { "epoch": 0.2808239048447926, "grad_norm": 0.3862614929676056, "learning_rate": 3.596087695304406e-06, "loss": 0.7422, "step": 6776 }, { "epoch": 0.28086534875046626, "grad_norm": 0.3790684938430786, "learning_rate": 3.5958804757760374e-06, "loss": 0.7302, "step": 6777 }, { "epoch": 0.28090679265613994, "grad_norm": 0.4111957848072052, "learning_rate": 3.595673256247669e-06, "loss": 0.7013, "step": 6778 }, { "epoch": 0.28094823656181356, "grad_norm": 0.37922751903533936, "learning_rate": 3.5954660367193006e-06, "loss": 0.682, "step": 6779 }, { "epoch": 0.28098968046748724, "grad_norm": 0.4079330563545227, "learning_rate": 
3.5952588171909324e-06, "loss": 0.7131, "step": 6780 }, { "epoch": 0.2810311243731609, "grad_norm": 0.39573752880096436, "learning_rate": 3.5950515976625637e-06, "loss": 0.738, "step": 6781 }, { "epoch": 0.2810725682788346, "grad_norm": 0.3898983597755432, "learning_rate": 3.5948443781341956e-06, "loss": 0.6562, "step": 6782 }, { "epoch": 0.28111401218450827, "grad_norm": 0.4119100272655487, "learning_rate": 3.594637158605827e-06, "loss": 0.7031, "step": 6783 }, { "epoch": 0.28115545609018194, "grad_norm": 0.41005808115005493, "learning_rate": 3.594429939077459e-06, "loss": 0.6927, "step": 6784 }, { "epoch": 0.2811968999958556, "grad_norm": 0.417418509721756, "learning_rate": 3.594222719549091e-06, "loss": 0.6768, "step": 6785 }, { "epoch": 0.2812383439015293, "grad_norm": 0.4223876893520355, "learning_rate": 3.5940155000207224e-06, "loss": 0.6718, "step": 6786 }, { "epoch": 0.281279787807203, "grad_norm": 0.39712852239608765, "learning_rate": 3.593808280492354e-06, "loss": 0.6742, "step": 6787 }, { "epoch": 0.2813212317128766, "grad_norm": 0.38664698600769043, "learning_rate": 3.5936010609639856e-06, "loss": 0.708, "step": 6788 }, { "epoch": 0.2813626756185503, "grad_norm": 0.41835319995880127, "learning_rate": 3.5933938414356174e-06, "loss": 0.6664, "step": 6789 }, { "epoch": 0.28140411952422395, "grad_norm": 0.4258338510990143, "learning_rate": 3.5931866219072488e-06, "loss": 0.7495, "step": 6790 }, { "epoch": 0.2814455634298976, "grad_norm": 0.41461727023124695, "learning_rate": 3.5929794023788806e-06, "loss": 0.7327, "step": 6791 }, { "epoch": 0.2814870073355713, "grad_norm": 0.4301237463951111, "learning_rate": 3.592772182850512e-06, "loss": 0.7136, "step": 6792 }, { "epoch": 0.281528451241245, "grad_norm": 0.45910102128982544, "learning_rate": 3.5925649633221438e-06, "loss": 0.6887, "step": 6793 }, { "epoch": 0.28156989514691866, "grad_norm": 0.40189725160598755, "learning_rate": 3.5923577437937756e-06, "loss": 0.6957, "step": 6794 }, { "epoch": 
0.28161133905259234, "grad_norm": 0.39384201169013977, "learning_rate": 3.592150524265407e-06, "loss": 0.7297, "step": 6795 }, { "epoch": 0.281652782958266, "grad_norm": 0.43027767539024353, "learning_rate": 3.5919433047370388e-06, "loss": 0.6921, "step": 6796 }, { "epoch": 0.28169422686393963, "grad_norm": 0.4064258337020874, "learning_rate": 3.59173608520867e-06, "loss": 0.7012, "step": 6797 }, { "epoch": 0.2817356707696133, "grad_norm": 0.47119030356407166, "learning_rate": 3.591528865680302e-06, "loss": 0.7407, "step": 6798 }, { "epoch": 0.281777114675287, "grad_norm": 0.422590047121048, "learning_rate": 3.5913216461519333e-06, "loss": 0.7405, "step": 6799 }, { "epoch": 0.28181855858096067, "grad_norm": 0.42778483033180237, "learning_rate": 3.591114426623565e-06, "loss": 0.7584, "step": 6800 }, { "epoch": 0.28186000248663434, "grad_norm": 0.49652472138404846, "learning_rate": 3.5909072070951965e-06, "loss": 0.7433, "step": 6801 }, { "epoch": 0.281901446392308, "grad_norm": 0.43851438164711, "learning_rate": 3.5906999875668288e-06, "loss": 0.7107, "step": 6802 }, { "epoch": 0.2819428902979817, "grad_norm": 0.4116474390029907, "learning_rate": 3.5904927680384606e-06, "loss": 0.7483, "step": 6803 }, { "epoch": 0.2819843342036554, "grad_norm": 0.4168466031551361, "learning_rate": 3.590285548510092e-06, "loss": 0.6995, "step": 6804 }, { "epoch": 0.28202577810932905, "grad_norm": 0.4503168761730194, "learning_rate": 3.5900783289817238e-06, "loss": 0.7612, "step": 6805 }, { "epoch": 0.28206722201500267, "grad_norm": 0.5053637623786926, "learning_rate": 3.589871109453355e-06, "loss": 0.7769, "step": 6806 }, { "epoch": 0.28210866592067635, "grad_norm": 0.3958262801170349, "learning_rate": 3.589663889924987e-06, "loss": 0.7206, "step": 6807 }, { "epoch": 0.28215010982635, "grad_norm": 0.42177364230155945, "learning_rate": 3.5894566703966184e-06, "loss": 0.7002, "step": 6808 }, { "epoch": 0.2821915537320237, "grad_norm": 0.41398656368255615, "learning_rate": 
3.58924945086825e-06, "loss": 0.6926, "step": 6809 }, { "epoch": 0.2822329976376974, "grad_norm": 0.4351273775100708, "learning_rate": 3.5890422313398816e-06, "loss": 0.7874, "step": 6810 }, { "epoch": 0.28227444154337106, "grad_norm": 0.42515799403190613, "learning_rate": 3.5888350118115134e-06, "loss": 0.7341, "step": 6811 }, { "epoch": 0.28231588544904473, "grad_norm": 0.4272284507751465, "learning_rate": 3.588627792283145e-06, "loss": 0.6775, "step": 6812 }, { "epoch": 0.2823573293547184, "grad_norm": 0.4340786337852478, "learning_rate": 3.5884205727547766e-06, "loss": 0.6982, "step": 6813 }, { "epoch": 0.28239877326039203, "grad_norm": 0.42572149634361267, "learning_rate": 3.5882133532264084e-06, "loss": 0.7102, "step": 6814 }, { "epoch": 0.2824402171660657, "grad_norm": 0.3968062996864319, "learning_rate": 3.5880061336980398e-06, "loss": 0.7163, "step": 6815 }, { "epoch": 0.2824816610717394, "grad_norm": 0.4265420436859131, "learning_rate": 3.5877989141696716e-06, "loss": 0.6886, "step": 6816 }, { "epoch": 0.28252310497741306, "grad_norm": 0.43440908193588257, "learning_rate": 3.587591694641303e-06, "loss": 0.6893, "step": 6817 }, { "epoch": 0.28256454888308674, "grad_norm": 0.4182482063770294, "learning_rate": 3.587384475112935e-06, "loss": 0.7516, "step": 6818 }, { "epoch": 0.2826059927887604, "grad_norm": 0.47019872069358826, "learning_rate": 3.587177255584566e-06, "loss": 0.7336, "step": 6819 }, { "epoch": 0.2826474366944341, "grad_norm": 0.43315190076828003, "learning_rate": 3.5869700360561984e-06, "loss": 0.7214, "step": 6820 }, { "epoch": 0.2826888806001078, "grad_norm": 0.41775181889533997, "learning_rate": 3.58676281652783e-06, "loss": 0.7312, "step": 6821 }, { "epoch": 0.28273032450578145, "grad_norm": 0.4487468898296356, "learning_rate": 3.5865555969994616e-06, "loss": 0.7766, "step": 6822 }, { "epoch": 0.28277176841145507, "grad_norm": 0.4046151041984558, "learning_rate": 3.5863483774710934e-06, "loss": 0.6927, "step": 6823 }, { "epoch": 
0.28281321231712875, "grad_norm": 0.3830421566963196, "learning_rate": 3.5861411579427248e-06, "loss": 0.6884, "step": 6824 }, { "epoch": 0.2828546562228024, "grad_norm": 0.4627036154270172, "learning_rate": 3.5859339384143566e-06, "loss": 0.6952, "step": 6825 }, { "epoch": 0.2828961001284761, "grad_norm": 0.4159393310546875, "learning_rate": 3.585726718885988e-06, "loss": 0.7522, "step": 6826 }, { "epoch": 0.2829375440341498, "grad_norm": 0.439156711101532, "learning_rate": 3.5855194993576198e-06, "loss": 0.7345, "step": 6827 }, { "epoch": 0.28297898793982346, "grad_norm": 0.42447394132614136, "learning_rate": 3.5853122798292516e-06, "loss": 0.7217, "step": 6828 }, { "epoch": 0.28302043184549713, "grad_norm": 0.4101831018924713, "learning_rate": 3.585105060300883e-06, "loss": 0.7296, "step": 6829 }, { "epoch": 0.2830618757511708, "grad_norm": 0.40400081872940063, "learning_rate": 3.5848978407725148e-06, "loss": 0.7402, "step": 6830 }, { "epoch": 0.2831033196568445, "grad_norm": 0.4275926649570465, "learning_rate": 3.584690621244146e-06, "loss": 0.7375, "step": 6831 }, { "epoch": 0.2831447635625181, "grad_norm": 0.44269418716430664, "learning_rate": 3.584483401715778e-06, "loss": 0.702, "step": 6832 }, { "epoch": 0.2831862074681918, "grad_norm": 0.4247515797615051, "learning_rate": 3.5842761821874094e-06, "loss": 0.6948, "step": 6833 }, { "epoch": 0.28322765137386546, "grad_norm": 0.39553365111351013, "learning_rate": 3.584068962659041e-06, "loss": 0.6577, "step": 6834 }, { "epoch": 0.28326909527953914, "grad_norm": 0.45518970489501953, "learning_rate": 3.5838617431306725e-06, "loss": 0.7421, "step": 6835 }, { "epoch": 0.2833105391852128, "grad_norm": 0.407031774520874, "learning_rate": 3.5836545236023048e-06, "loss": 0.6809, "step": 6836 }, { "epoch": 0.2833519830908865, "grad_norm": 0.4166657328605652, "learning_rate": 3.5834473040739366e-06, "loss": 0.693, "step": 6837 }, { "epoch": 0.28339342699656017, "grad_norm": 0.5113471150398254, "learning_rate": 
3.583240084545568e-06, "loss": 0.729, "step": 6838 }, { "epoch": 0.28343487090223385, "grad_norm": 0.4281917214393616, "learning_rate": 3.5830328650171998e-06, "loss": 0.7344, "step": 6839 }, { "epoch": 0.28347631480790747, "grad_norm": 0.40506628155708313, "learning_rate": 3.582825645488831e-06, "loss": 0.7354, "step": 6840 }, { "epoch": 0.28351775871358115, "grad_norm": 0.44882896542549133, "learning_rate": 3.582618425960463e-06, "loss": 0.7577, "step": 6841 }, { "epoch": 0.2835592026192548, "grad_norm": 0.4255295693874359, "learning_rate": 3.5824112064320944e-06, "loss": 0.6532, "step": 6842 }, { "epoch": 0.2836006465249285, "grad_norm": 0.38957056403160095, "learning_rate": 3.582203986903726e-06, "loss": 0.6298, "step": 6843 }, { "epoch": 0.2836420904306022, "grad_norm": 0.4765869081020355, "learning_rate": 3.5819967673753576e-06, "loss": 0.7773, "step": 6844 }, { "epoch": 0.28368353433627586, "grad_norm": 0.47083091735839844, "learning_rate": 3.5817895478469894e-06, "loss": 0.7004, "step": 6845 }, { "epoch": 0.28372497824194953, "grad_norm": 0.3907012939453125, "learning_rate": 3.581582328318621e-06, "loss": 0.6726, "step": 6846 }, { "epoch": 0.2837664221476232, "grad_norm": 0.39952942728996277, "learning_rate": 3.5813751087902526e-06, "loss": 0.667, "step": 6847 }, { "epoch": 0.2838078660532969, "grad_norm": 0.41507667303085327, "learning_rate": 3.5811678892618844e-06, "loss": 0.72, "step": 6848 }, { "epoch": 0.2838493099589705, "grad_norm": 0.39529845118522644, "learning_rate": 3.5809606697335158e-06, "loss": 0.6989, "step": 6849 }, { "epoch": 0.2838907538646442, "grad_norm": 0.4155879318714142, "learning_rate": 3.5807534502051476e-06, "loss": 0.7163, "step": 6850 }, { "epoch": 0.28393219777031786, "grad_norm": 0.42982950806617737, "learning_rate": 3.580546230676779e-06, "loss": 0.6882, "step": 6851 }, { "epoch": 0.28397364167599154, "grad_norm": 0.44255757331848145, "learning_rate": 3.580339011148411e-06, "loss": 0.7227, "step": 6852 }, { "epoch": 
0.2840150855816652, "grad_norm": 0.4067123532295227, "learning_rate": 3.580131791620042e-06, "loss": 0.739, "step": 6853 }, { "epoch": 0.2840565294873389, "grad_norm": 0.440277099609375, "learning_rate": 3.5799245720916744e-06, "loss": 0.6924, "step": 6854 }, { "epoch": 0.28409797339301257, "grad_norm": 0.38350799679756165, "learning_rate": 3.579717352563306e-06, "loss": 0.6819, "step": 6855 }, { "epoch": 0.28413941729868625, "grad_norm": 0.4033219516277313, "learning_rate": 3.5795101330349376e-06, "loss": 0.713, "step": 6856 }, { "epoch": 0.2841808612043599, "grad_norm": 0.4154597520828247, "learning_rate": 3.5793029135065694e-06, "loss": 0.7117, "step": 6857 }, { "epoch": 0.28422230511003355, "grad_norm": 0.40948623418807983, "learning_rate": 3.5790956939782008e-06, "loss": 0.6868, "step": 6858 }, { "epoch": 0.2842637490157072, "grad_norm": 0.4153733253479004, "learning_rate": 3.5788884744498326e-06, "loss": 0.6741, "step": 6859 }, { "epoch": 0.2843051929213809, "grad_norm": 0.4063993990421295, "learning_rate": 3.578681254921464e-06, "loss": 0.7068, "step": 6860 }, { "epoch": 0.2843466368270546, "grad_norm": 0.4236097037792206, "learning_rate": 3.5784740353930958e-06, "loss": 0.686, "step": 6861 }, { "epoch": 0.28438808073272825, "grad_norm": 0.4095933735370636, "learning_rate": 3.578266815864727e-06, "loss": 0.7007, "step": 6862 }, { "epoch": 0.28442952463840193, "grad_norm": 0.44229742884635925, "learning_rate": 3.578059596336359e-06, "loss": 0.6803, "step": 6863 }, { "epoch": 0.2844709685440756, "grad_norm": 0.3964507579803467, "learning_rate": 3.5778523768079908e-06, "loss": 0.6943, "step": 6864 }, { "epoch": 0.2845124124497493, "grad_norm": 0.4179035425186157, "learning_rate": 3.577645157279622e-06, "loss": 0.7395, "step": 6865 }, { "epoch": 0.28455385635542296, "grad_norm": 0.4197254180908203, "learning_rate": 3.577437937751254e-06, "loss": 0.6934, "step": 6866 }, { "epoch": 0.2845953002610966, "grad_norm": 0.39869406819343567, "learning_rate": 
3.5772307182228854e-06, "loss": 0.6782, "step": 6867 }, { "epoch": 0.28463674416677026, "grad_norm": 0.424480140209198, "learning_rate": 3.577023498694517e-06, "loss": 0.7069, "step": 6868 }, { "epoch": 0.28467818807244394, "grad_norm": 0.41718241572380066, "learning_rate": 3.5768162791661486e-06, "loss": 0.7478, "step": 6869 }, { "epoch": 0.2847196319781176, "grad_norm": 0.406290739774704, "learning_rate": 3.5766090596377808e-06, "loss": 0.6545, "step": 6870 }, { "epoch": 0.2847610758837913, "grad_norm": 0.4433014690876007, "learning_rate": 3.5764018401094117e-06, "loss": 0.7258, "step": 6871 }, { "epoch": 0.28480251978946497, "grad_norm": 0.4183829128742218, "learning_rate": 3.576194620581044e-06, "loss": 0.7249, "step": 6872 }, { "epoch": 0.28484396369513865, "grad_norm": 0.4561851918697357, "learning_rate": 3.575987401052676e-06, "loss": 0.7566, "step": 6873 }, { "epoch": 0.2848854076008123, "grad_norm": 0.384236216545105, "learning_rate": 3.575780181524307e-06, "loss": 0.6927, "step": 6874 }, { "epoch": 0.28492685150648595, "grad_norm": 0.44704267382621765, "learning_rate": 3.575572961995939e-06, "loss": 0.7361, "step": 6875 }, { "epoch": 0.2849682954121596, "grad_norm": 0.40363720059394836, "learning_rate": 3.5753657424675704e-06, "loss": 0.7205, "step": 6876 }, { "epoch": 0.2850097393178333, "grad_norm": 0.4272850453853607, "learning_rate": 3.575158522939202e-06, "loss": 0.7063, "step": 6877 }, { "epoch": 0.285051183223507, "grad_norm": 0.47055304050445557, "learning_rate": 3.5749513034108336e-06, "loss": 0.6899, "step": 6878 }, { "epoch": 0.28509262712918065, "grad_norm": 0.43875035643577576, "learning_rate": 3.5747440838824654e-06, "loss": 0.7505, "step": 6879 }, { "epoch": 0.28513407103485433, "grad_norm": 0.4094933271408081, "learning_rate": 3.5745368643540968e-06, "loss": 0.6255, "step": 6880 }, { "epoch": 0.285175514940528, "grad_norm": 0.45186007022857666, "learning_rate": 3.5743296448257286e-06, "loss": 0.7068, "step": 6881 }, { "epoch": 
0.2852169588462017, "grad_norm": 0.4162149727344513, "learning_rate": 3.5741224252973604e-06, "loss": 0.7461, "step": 6882 }, { "epoch": 0.28525840275187536, "grad_norm": 0.38705718517303467, "learning_rate": 3.5739152057689918e-06, "loss": 0.6702, "step": 6883 }, { "epoch": 0.285299846657549, "grad_norm": 0.41094326972961426, "learning_rate": 3.5737079862406236e-06, "loss": 0.7177, "step": 6884 }, { "epoch": 0.28534129056322266, "grad_norm": 0.4257148802280426, "learning_rate": 3.573500766712255e-06, "loss": 0.719, "step": 6885 }, { "epoch": 0.28538273446889634, "grad_norm": 0.39430439472198486, "learning_rate": 3.573293547183887e-06, "loss": 0.694, "step": 6886 }, { "epoch": 0.28542417837457, "grad_norm": 0.40021035075187683, "learning_rate": 3.573086327655518e-06, "loss": 0.6809, "step": 6887 }, { "epoch": 0.2854656222802437, "grad_norm": 0.42008695006370544, "learning_rate": 3.5728791081271504e-06, "loss": 0.6736, "step": 6888 }, { "epoch": 0.28550706618591737, "grad_norm": 0.4037550091743469, "learning_rate": 3.572671888598782e-06, "loss": 0.6841, "step": 6889 }, { "epoch": 0.28554851009159105, "grad_norm": 0.48485174775123596, "learning_rate": 3.5724646690704136e-06, "loss": 0.7343, "step": 6890 }, { "epoch": 0.2855899539972647, "grad_norm": 0.43097740411758423, "learning_rate": 3.5722574495420454e-06, "loss": 0.705, "step": 6891 }, { "epoch": 0.2856313979029384, "grad_norm": 0.39484497904777527, "learning_rate": 3.5720502300136768e-06, "loss": 0.7366, "step": 6892 }, { "epoch": 0.285672841808612, "grad_norm": 0.45459839701652527, "learning_rate": 3.5718430104853086e-06, "loss": 0.7476, "step": 6893 }, { "epoch": 0.2857142857142857, "grad_norm": 0.46201279759407043, "learning_rate": 3.57163579095694e-06, "loss": 0.7417, "step": 6894 }, { "epoch": 0.2857557296199594, "grad_norm": 0.41701561212539673, "learning_rate": 3.5714285714285718e-06, "loss": 0.708, "step": 6895 }, { "epoch": 0.28579717352563305, "grad_norm": 0.39320895075798035, "learning_rate": 
3.571221351900203e-06, "loss": 0.7002, "step": 6896 }, { "epoch": 0.28583861743130673, "grad_norm": 0.41212591528892517, "learning_rate": 3.571014132371835e-06, "loss": 0.7267, "step": 6897 }, { "epoch": 0.2858800613369804, "grad_norm": 0.44438445568084717, "learning_rate": 3.5708069128434668e-06, "loss": 0.7385, "step": 6898 }, { "epoch": 0.2859215052426541, "grad_norm": 0.4290604889392853, "learning_rate": 3.570599693315098e-06, "loss": 0.6769, "step": 6899 }, { "epoch": 0.28596294914832776, "grad_norm": 0.40066683292388916, "learning_rate": 3.57039247378673e-06, "loss": 0.6979, "step": 6900 }, { "epoch": 0.2860043930540014, "grad_norm": 0.4036765992641449, "learning_rate": 3.5701852542583614e-06, "loss": 0.7294, "step": 6901 }, { "epoch": 0.28604583695967506, "grad_norm": 0.4126715660095215, "learning_rate": 3.569978034729993e-06, "loss": 0.6887, "step": 6902 }, { "epoch": 0.28608728086534874, "grad_norm": 0.39421460032463074, "learning_rate": 3.5697708152016246e-06, "loss": 0.7019, "step": 6903 }, { "epoch": 0.2861287247710224, "grad_norm": 0.423803448677063, "learning_rate": 3.569563595673257e-06, "loss": 0.7495, "step": 6904 }, { "epoch": 0.2861701686766961, "grad_norm": 0.41758954524993896, "learning_rate": 3.5693563761448878e-06, "loss": 0.7222, "step": 6905 }, { "epoch": 0.28621161258236977, "grad_norm": 0.4164711833000183, "learning_rate": 3.56914915661652e-06, "loss": 0.6617, "step": 6906 }, { "epoch": 0.28625305648804344, "grad_norm": 0.44317251443862915, "learning_rate": 3.568941937088152e-06, "loss": 0.7107, "step": 6907 }, { "epoch": 0.2862945003937171, "grad_norm": 0.4078119993209839, "learning_rate": 3.568734717559783e-06, "loss": 0.6855, "step": 6908 }, { "epoch": 0.2863359442993908, "grad_norm": 0.4446542263031006, "learning_rate": 3.568527498031415e-06, "loss": 0.7556, "step": 6909 }, { "epoch": 0.2863773882050644, "grad_norm": 0.4360417425632477, "learning_rate": 3.5683202785030464e-06, "loss": 0.7273, "step": 6910 }, { "epoch": 
0.2864188321107381, "grad_norm": 0.44036340713500977, "learning_rate": 3.568113058974678e-06, "loss": 0.7649, "step": 6911 }, { "epoch": 0.2864602760164118, "grad_norm": 0.4655351936817169, "learning_rate": 3.5679058394463096e-06, "loss": 0.7517, "step": 6912 }, { "epoch": 0.28650171992208545, "grad_norm": 0.4113084375858307, "learning_rate": 3.5676986199179414e-06, "loss": 0.7412, "step": 6913 }, { "epoch": 0.28654316382775913, "grad_norm": 0.3831551671028137, "learning_rate": 3.5674914003895728e-06, "loss": 0.7151, "step": 6914 }, { "epoch": 0.2865846077334328, "grad_norm": 0.4323503375053406, "learning_rate": 3.5672841808612046e-06, "loss": 0.6804, "step": 6915 }, { "epoch": 0.2866260516391065, "grad_norm": 0.4357014298439026, "learning_rate": 3.5670769613328364e-06, "loss": 0.6738, "step": 6916 }, { "epoch": 0.28666749554478016, "grad_norm": 0.4165201783180237, "learning_rate": 3.5668697418044678e-06, "loss": 0.6687, "step": 6917 }, { "epoch": 0.28670893945045384, "grad_norm": 0.40601587295532227, "learning_rate": 3.5666625222760996e-06, "loss": 0.7288, "step": 6918 }, { "epoch": 0.28675038335612746, "grad_norm": 0.42080700397491455, "learning_rate": 3.566455302747731e-06, "loss": 0.6406, "step": 6919 }, { "epoch": 0.28679182726180114, "grad_norm": 0.4565685987472534, "learning_rate": 3.566248083219363e-06, "loss": 0.7178, "step": 6920 }, { "epoch": 0.2868332711674748, "grad_norm": 0.4114568829536438, "learning_rate": 3.566040863690994e-06, "loss": 0.6576, "step": 6921 }, { "epoch": 0.2868747150731485, "grad_norm": 0.437321275472641, "learning_rate": 3.5658336441626264e-06, "loss": 0.7432, "step": 6922 }, { "epoch": 0.28691615897882217, "grad_norm": 0.4223303496837616, "learning_rate": 3.5656264246342574e-06, "loss": 0.7227, "step": 6923 }, { "epoch": 0.28695760288449584, "grad_norm": 0.42915213108062744, "learning_rate": 3.5654192051058896e-06, "loss": 0.7006, "step": 6924 }, { "epoch": 0.2869990467901695, "grad_norm": 0.4071757197380066, "learning_rate": 
3.5652119855775214e-06, "loss": 0.6992, "step": 6925 }, { "epoch": 0.2870404906958432, "grad_norm": 0.4418925344944, "learning_rate": 3.5650047660491528e-06, "loss": 0.6819, "step": 6926 }, { "epoch": 0.2870819346015168, "grad_norm": 0.4335483908653259, "learning_rate": 3.5647975465207846e-06, "loss": 0.6946, "step": 6927 }, { "epoch": 0.2871233785071905, "grad_norm": 0.3746641278266907, "learning_rate": 3.564590326992416e-06, "loss": 0.7258, "step": 6928 }, { "epoch": 0.2871648224128642, "grad_norm": 0.3801139295101166, "learning_rate": 3.5643831074640478e-06, "loss": 0.6484, "step": 6929 }, { "epoch": 0.28720626631853785, "grad_norm": 0.380740225315094, "learning_rate": 3.564175887935679e-06, "loss": 0.6312, "step": 6930 }, { "epoch": 0.2872477102242115, "grad_norm": 0.4376373291015625, "learning_rate": 3.563968668407311e-06, "loss": 0.6968, "step": 6931 }, { "epoch": 0.2872891541298852, "grad_norm": 0.38756129145622253, "learning_rate": 3.5637614488789424e-06, "loss": 0.6603, "step": 6932 }, { "epoch": 0.2873305980355589, "grad_norm": 0.409757137298584, "learning_rate": 3.563554229350574e-06, "loss": 0.7363, "step": 6933 }, { "epoch": 0.28737204194123256, "grad_norm": 0.42089688777923584, "learning_rate": 3.563347009822206e-06, "loss": 0.6438, "step": 6934 }, { "epoch": 0.28741348584690624, "grad_norm": 0.4528174102306366, "learning_rate": 3.5631397902938374e-06, "loss": 0.7427, "step": 6935 }, { "epoch": 0.28745492975257986, "grad_norm": 0.4734955132007599, "learning_rate": 3.562932570765469e-06, "loss": 0.7549, "step": 6936 }, { "epoch": 0.28749637365825353, "grad_norm": 0.4542578160762787, "learning_rate": 3.5627253512371006e-06, "loss": 0.7195, "step": 6937 }, { "epoch": 0.2875378175639272, "grad_norm": 0.41403213143348694, "learning_rate": 3.562518131708733e-06, "loss": 0.6973, "step": 6938 }, { "epoch": 0.2875792614696009, "grad_norm": 0.4291495382785797, "learning_rate": 3.5623109121803638e-06, "loss": 0.762, "step": 6939 }, { "epoch": 
0.28762070537527457, "grad_norm": 0.45406877994537354, "learning_rate": 3.562103692651996e-06, "loss": 0.7214, "step": 6940 }, { "epoch": 0.28766214928094824, "grad_norm": 0.41754409670829773, "learning_rate": 3.5618964731236274e-06, "loss": 0.688, "step": 6941 }, { "epoch": 0.2877035931866219, "grad_norm": 0.4389185905456543, "learning_rate": 3.561689253595259e-06, "loss": 0.6749, "step": 6942 }, { "epoch": 0.2877450370922956, "grad_norm": 0.4193671941757202, "learning_rate": 3.561482034066891e-06, "loss": 0.6975, "step": 6943 }, { "epoch": 0.2877864809979693, "grad_norm": 0.4176039695739746, "learning_rate": 3.5612748145385224e-06, "loss": 0.6968, "step": 6944 }, { "epoch": 0.2878279249036429, "grad_norm": 0.4057181477546692, "learning_rate": 3.561067595010154e-06, "loss": 0.7524, "step": 6945 }, { "epoch": 0.2878693688093166, "grad_norm": 0.42737317085266113, "learning_rate": 3.5608603754817856e-06, "loss": 0.7729, "step": 6946 }, { "epoch": 0.28791081271499025, "grad_norm": 0.4156981408596039, "learning_rate": 3.5606531559534174e-06, "loss": 0.6925, "step": 6947 }, { "epoch": 0.2879522566206639, "grad_norm": 0.40141257643699646, "learning_rate": 3.5604459364250488e-06, "loss": 0.7211, "step": 6948 }, { "epoch": 0.2879937005263376, "grad_norm": 0.38518083095550537, "learning_rate": 3.5602387168966806e-06, "loss": 0.6863, "step": 6949 }, { "epoch": 0.2880351444320113, "grad_norm": 0.4188710153102875, "learning_rate": 3.5600314973683124e-06, "loss": 0.6549, "step": 6950 }, { "epoch": 0.28807658833768496, "grad_norm": 0.3895302414894104, "learning_rate": 3.5598242778399438e-06, "loss": 0.6522, "step": 6951 }, { "epoch": 0.28811803224335863, "grad_norm": 0.4401589632034302, "learning_rate": 3.5596170583115756e-06, "loss": 0.7512, "step": 6952 }, { "epoch": 0.2881594761490323, "grad_norm": 0.40924349427223206, "learning_rate": 3.559409838783207e-06, "loss": 0.6821, "step": 6953 }, { "epoch": 0.28820092005470593, "grad_norm": 0.464637815952301, "learning_rate": 
3.559202619254839e-06, "loss": 0.7324, "step": 6954 }, { "epoch": 0.2882423639603796, "grad_norm": 0.41231903433799744, "learning_rate": 3.55899539972647e-06, "loss": 0.7085, "step": 6955 }, { "epoch": 0.2882838078660533, "grad_norm": 0.43170279264450073, "learning_rate": 3.5587881801981024e-06, "loss": 0.7134, "step": 6956 }, { "epoch": 0.28832525177172696, "grad_norm": 0.39256665110588074, "learning_rate": 3.5585809606697338e-06, "loss": 0.7053, "step": 6957 }, { "epoch": 0.28836669567740064, "grad_norm": 0.42892512679100037, "learning_rate": 3.5583737411413656e-06, "loss": 0.6918, "step": 6958 }, { "epoch": 0.2884081395830743, "grad_norm": 0.43007582426071167, "learning_rate": 3.5581665216129974e-06, "loss": 0.6985, "step": 6959 }, { "epoch": 0.288449583488748, "grad_norm": 0.38796335458755493, "learning_rate": 3.557959302084629e-06, "loss": 0.7002, "step": 6960 }, { "epoch": 0.2884910273944217, "grad_norm": 0.42302998900413513, "learning_rate": 3.5577520825562606e-06, "loss": 0.7256, "step": 6961 }, { "epoch": 0.2885324713000953, "grad_norm": 0.42087680101394653, "learning_rate": 3.557544863027892e-06, "loss": 0.6429, "step": 6962 }, { "epoch": 0.28857391520576897, "grad_norm": 0.4813157618045807, "learning_rate": 3.557337643499524e-06, "loss": 0.7273, "step": 6963 }, { "epoch": 0.28861535911144265, "grad_norm": 0.4117436110973358, "learning_rate": 3.557130423971155e-06, "loss": 0.7234, "step": 6964 }, { "epoch": 0.2886568030171163, "grad_norm": 0.404603511095047, "learning_rate": 3.556923204442787e-06, "loss": 0.7378, "step": 6965 }, { "epoch": 0.28869824692279, "grad_norm": 0.46006280183792114, "learning_rate": 3.5567159849144184e-06, "loss": 0.7607, "step": 6966 }, { "epoch": 0.2887396908284637, "grad_norm": 0.43420031666755676, "learning_rate": 3.55650876538605e-06, "loss": 0.7297, "step": 6967 }, { "epoch": 0.28878113473413736, "grad_norm": 0.4212886095046997, "learning_rate": 3.556301545857682e-06, "loss": 0.7119, "step": 6968 }, { "epoch": 
0.28882257863981103, "grad_norm": 0.4272632598876953, "learning_rate": 3.5560943263293134e-06, "loss": 0.6956, "step": 6969 }, { "epoch": 0.2888640225454847, "grad_norm": 0.4428102672100067, "learning_rate": 3.555887106800945e-06, "loss": 0.7521, "step": 6970 }, { "epoch": 0.28890546645115833, "grad_norm": 0.4249163866043091, "learning_rate": 3.5556798872725766e-06, "loss": 0.6909, "step": 6971 }, { "epoch": 0.288946910356832, "grad_norm": 0.40772005915641785, "learning_rate": 3.555472667744209e-06, "loss": 0.7166, "step": 6972 }, { "epoch": 0.2889883542625057, "grad_norm": 0.4057495594024658, "learning_rate": 3.5552654482158398e-06, "loss": 0.6938, "step": 6973 }, { "epoch": 0.28902979816817936, "grad_norm": 0.3959791958332062, "learning_rate": 3.555058228687472e-06, "loss": 0.6771, "step": 6974 }, { "epoch": 0.28907124207385304, "grad_norm": 0.41590359807014465, "learning_rate": 3.5548510091591034e-06, "loss": 0.6859, "step": 6975 }, { "epoch": 0.2891126859795267, "grad_norm": 0.43379271030426025, "learning_rate": 3.554643789630735e-06, "loss": 0.7222, "step": 6976 }, { "epoch": 0.2891541298852004, "grad_norm": 0.40103286504745483, "learning_rate": 3.554436570102367e-06, "loss": 0.6851, "step": 6977 }, { "epoch": 0.28919557379087407, "grad_norm": 0.4490341544151306, "learning_rate": 3.5542293505739984e-06, "loss": 0.7628, "step": 6978 }, { "epoch": 0.28923701769654775, "grad_norm": 0.4162808060646057, "learning_rate": 3.55402213104563e-06, "loss": 0.7213, "step": 6979 }, { "epoch": 0.28927846160222137, "grad_norm": 0.43383705615997314, "learning_rate": 3.5538149115172616e-06, "loss": 0.7554, "step": 6980 }, { "epoch": 0.28931990550789505, "grad_norm": 0.42794209718704224, "learning_rate": 3.5536076919888934e-06, "loss": 0.7288, "step": 6981 }, { "epoch": 0.2893613494135687, "grad_norm": 0.39905351400375366, "learning_rate": 3.5534004724605248e-06, "loss": 0.7463, "step": 6982 }, { "epoch": 0.2894027933192424, "grad_norm": 0.4054827094078064, "learning_rate": 
3.5531932529321566e-06, "loss": 0.698, "step": 6983 }, { "epoch": 0.2894442372249161, "grad_norm": 0.4243659973144531, "learning_rate": 3.552986033403788e-06, "loss": 0.7156, "step": 6984 }, { "epoch": 0.28948568113058976, "grad_norm": 0.4119908809661865, "learning_rate": 3.5527788138754198e-06, "loss": 0.7126, "step": 6985 }, { "epoch": 0.28952712503626343, "grad_norm": 0.482086718082428, "learning_rate": 3.5525715943470516e-06, "loss": 0.7266, "step": 6986 }, { "epoch": 0.2895685689419371, "grad_norm": 0.40206649899482727, "learning_rate": 3.552364374818683e-06, "loss": 0.6976, "step": 6987 }, { "epoch": 0.28961001284761073, "grad_norm": 0.43516209721565247, "learning_rate": 3.552157155290315e-06, "loss": 0.7194, "step": 6988 }, { "epoch": 0.2896514567532844, "grad_norm": 0.41434088349342346, "learning_rate": 3.551949935761946e-06, "loss": 0.7152, "step": 6989 }, { "epoch": 0.2896929006589581, "grad_norm": 0.40670010447502136, "learning_rate": 3.5517427162335784e-06, "loss": 0.7114, "step": 6990 }, { "epoch": 0.28973434456463176, "grad_norm": 0.44849127531051636, "learning_rate": 3.55153549670521e-06, "loss": 0.6891, "step": 6991 }, { "epoch": 0.28977578847030544, "grad_norm": 0.42762190103530884, "learning_rate": 3.5513282771768416e-06, "loss": 0.7119, "step": 6992 }, { "epoch": 0.2898172323759791, "grad_norm": 0.3914492726325989, "learning_rate": 3.551121057648473e-06, "loss": 0.6821, "step": 6993 }, { "epoch": 0.2898586762816528, "grad_norm": 0.4504014551639557, "learning_rate": 3.550913838120105e-06, "loss": 0.7123, "step": 6994 }, { "epoch": 0.28990012018732647, "grad_norm": 0.4167000651359558, "learning_rate": 3.5507066185917366e-06, "loss": 0.6953, "step": 6995 }, { "epoch": 0.28994156409300015, "grad_norm": 0.4186878800392151, "learning_rate": 3.550499399063368e-06, "loss": 0.7194, "step": 6996 }, { "epoch": 0.28998300799867377, "grad_norm": 0.440375953912735, "learning_rate": 3.550292179535e-06, "loss": 0.6802, "step": 6997 }, { "epoch": 
0.29002445190434745, "grad_norm": 0.42046085000038147, "learning_rate": 3.550084960006631e-06, "loss": 0.7114, "step": 6998 }, { "epoch": 0.2900658958100211, "grad_norm": 0.43134018778800964, "learning_rate": 3.549877740478263e-06, "loss": 0.7512, "step": 6999 }, { "epoch": 0.2901073397156948, "grad_norm": 0.4230429530143738, "learning_rate": 3.5496705209498944e-06, "loss": 0.6779, "step": 7000 }, { "epoch": 0.2901487836213685, "grad_norm": 0.43294405937194824, "learning_rate": 3.549463301421526e-06, "loss": 0.7444, "step": 7001 }, { "epoch": 0.29019022752704215, "grad_norm": 0.4135044515132904, "learning_rate": 3.549256081893158e-06, "loss": 0.7222, "step": 7002 }, { "epoch": 0.29023167143271583, "grad_norm": 0.42034912109375, "learning_rate": 3.5490488623647894e-06, "loss": 0.7415, "step": 7003 }, { "epoch": 0.2902731153383895, "grad_norm": 0.4094456732273102, "learning_rate": 3.5488416428364216e-06, "loss": 0.7119, "step": 7004 }, { "epoch": 0.2903145592440632, "grad_norm": 0.42484256625175476, "learning_rate": 3.5486344233080526e-06, "loss": 0.7241, "step": 7005 }, { "epoch": 0.2903560031497368, "grad_norm": 0.40012502670288086, "learning_rate": 3.548427203779685e-06, "loss": 0.6891, "step": 7006 }, { "epoch": 0.2903974470554105, "grad_norm": 0.41363972425460815, "learning_rate": 3.5482199842513158e-06, "loss": 0.7004, "step": 7007 }, { "epoch": 0.29043889096108416, "grad_norm": 0.4156229794025421, "learning_rate": 3.548012764722948e-06, "loss": 0.7446, "step": 7008 }, { "epoch": 0.29048033486675784, "grad_norm": 0.4352450966835022, "learning_rate": 3.5478055451945794e-06, "loss": 0.7249, "step": 7009 }, { "epoch": 0.2905217787724315, "grad_norm": 0.44006669521331787, "learning_rate": 3.547598325666211e-06, "loss": 0.7742, "step": 7010 }, { "epoch": 0.2905632226781052, "grad_norm": 0.40405523777008057, "learning_rate": 3.547391106137843e-06, "loss": 0.7136, "step": 7011 }, { "epoch": 0.29060466658377887, "grad_norm": 0.4279027581214905, "learning_rate": 
3.5471838866094744e-06, "loss": 0.6897, "step": 7012 }, { "epoch": 0.29064611048945255, "grad_norm": 0.3949183523654938, "learning_rate": 3.546976667081106e-06, "loss": 0.6681, "step": 7013 }, { "epoch": 0.2906875543951262, "grad_norm": 0.3945504426956177, "learning_rate": 3.5467694475527376e-06, "loss": 0.6997, "step": 7014 }, { "epoch": 0.29072899830079985, "grad_norm": 0.4481295347213745, "learning_rate": 3.5465622280243694e-06, "loss": 0.6914, "step": 7015 }, { "epoch": 0.2907704422064735, "grad_norm": 0.4236062169075012, "learning_rate": 3.5463550084960008e-06, "loss": 0.7092, "step": 7016 }, { "epoch": 0.2908118861121472, "grad_norm": 0.4002746343612671, "learning_rate": 3.5461477889676326e-06, "loss": 0.6602, "step": 7017 }, { "epoch": 0.2908533300178209, "grad_norm": 0.40735870599746704, "learning_rate": 3.545940569439264e-06, "loss": 0.7043, "step": 7018 }, { "epoch": 0.29089477392349455, "grad_norm": 0.4325539767742157, "learning_rate": 3.545733349910896e-06, "loss": 0.6782, "step": 7019 }, { "epoch": 0.29093621782916823, "grad_norm": 0.41509175300598145, "learning_rate": 3.5455261303825276e-06, "loss": 0.7207, "step": 7020 }, { "epoch": 0.2909776617348419, "grad_norm": 0.48029980063438416, "learning_rate": 3.545318910854159e-06, "loss": 0.745, "step": 7021 }, { "epoch": 0.2910191056405156, "grad_norm": 0.49317941069602966, "learning_rate": 3.5451116913257912e-06, "loss": 0.7328, "step": 7022 }, { "epoch": 0.2910605495461892, "grad_norm": 0.362125039100647, "learning_rate": 3.544904471797422e-06, "loss": 0.6892, "step": 7023 }, { "epoch": 0.2911019934518629, "grad_norm": 0.45957720279693604, "learning_rate": 3.5446972522690544e-06, "loss": 0.7144, "step": 7024 }, { "epoch": 0.29114343735753656, "grad_norm": 0.410138338804245, "learning_rate": 3.544490032740686e-06, "loss": 0.7266, "step": 7025 }, { "epoch": 0.29118488126321024, "grad_norm": 0.4007231593132019, "learning_rate": 3.5442828132123176e-06, "loss": 0.6771, "step": 7026 }, { "epoch": 
0.2912263251688839, "grad_norm": 0.41032305359840393, "learning_rate": 3.544075593683949e-06, "loss": 0.7388, "step": 7027 }, { "epoch": 0.2912677690745576, "grad_norm": 0.4154440760612488, "learning_rate": 3.543868374155581e-06, "loss": 0.7224, "step": 7028 }, { "epoch": 0.29130921298023127, "grad_norm": 0.4457628130912781, "learning_rate": 3.5436611546272126e-06, "loss": 0.7827, "step": 7029 }, { "epoch": 0.29135065688590495, "grad_norm": 0.4295470714569092, "learning_rate": 3.543453935098844e-06, "loss": 0.7007, "step": 7030 }, { "epoch": 0.2913921007915786, "grad_norm": 0.43864497542381287, "learning_rate": 3.543246715570476e-06, "loss": 0.7307, "step": 7031 }, { "epoch": 0.29143354469725224, "grad_norm": 0.4205676019191742, "learning_rate": 3.543039496042107e-06, "loss": 0.7283, "step": 7032 }, { "epoch": 0.2914749886029259, "grad_norm": 0.4613238275051117, "learning_rate": 3.542832276513739e-06, "loss": 0.7434, "step": 7033 }, { "epoch": 0.2915164325085996, "grad_norm": 0.41564393043518066, "learning_rate": 3.5426250569853704e-06, "loss": 0.7327, "step": 7034 }, { "epoch": 0.2915578764142733, "grad_norm": 0.41695675253868103, "learning_rate": 3.542417837457002e-06, "loss": 0.7327, "step": 7035 }, { "epoch": 0.29159932031994695, "grad_norm": 0.4341282248497009, "learning_rate": 3.5422106179286336e-06, "loss": 0.7039, "step": 7036 }, { "epoch": 0.29164076422562063, "grad_norm": 0.4031048119068146, "learning_rate": 3.5420033984002654e-06, "loss": 0.717, "step": 7037 }, { "epoch": 0.2916822081312943, "grad_norm": 0.4074817895889282, "learning_rate": 3.5417961788718976e-06, "loss": 0.6619, "step": 7038 }, { "epoch": 0.291723652036968, "grad_norm": 0.4168263375759125, "learning_rate": 3.5415889593435286e-06, "loss": 0.6832, "step": 7039 }, { "epoch": 0.29176509594264166, "grad_norm": 0.3894229829311371, "learning_rate": 3.541381739815161e-06, "loss": 0.717, "step": 7040 }, { "epoch": 0.2918065398483153, "grad_norm": 0.45080697536468506, "learning_rate": 
3.5411745202867918e-06, "loss": 0.7322, "step": 7041 }, { "epoch": 0.29184798375398896, "grad_norm": 0.4704478681087494, "learning_rate": 3.540967300758424e-06, "loss": 0.7319, "step": 7042 }, { "epoch": 0.29188942765966264, "grad_norm": 0.45911282300949097, "learning_rate": 3.5407600812300554e-06, "loss": 0.7681, "step": 7043 }, { "epoch": 0.2919308715653363, "grad_norm": 0.4031442701816559, "learning_rate": 3.540552861701687e-06, "loss": 0.7283, "step": 7044 }, { "epoch": 0.29197231547101, "grad_norm": 0.41739749908447266, "learning_rate": 3.5403456421733186e-06, "loss": 0.6736, "step": 7045 }, { "epoch": 0.29201375937668367, "grad_norm": 0.41814297437667847, "learning_rate": 3.5401384226449504e-06, "loss": 0.6582, "step": 7046 }, { "epoch": 0.29205520328235735, "grad_norm": 0.46693477034568787, "learning_rate": 3.539931203116582e-06, "loss": 0.705, "step": 7047 }, { "epoch": 0.292096647188031, "grad_norm": 0.43323516845703125, "learning_rate": 3.5397239835882136e-06, "loss": 0.7058, "step": 7048 }, { "epoch": 0.29213809109370464, "grad_norm": 0.4054799973964691, "learning_rate": 3.5395167640598454e-06, "loss": 0.6619, "step": 7049 }, { "epoch": 0.2921795349993783, "grad_norm": 0.39226123690605164, "learning_rate": 3.539309544531477e-06, "loss": 0.741, "step": 7050 }, { "epoch": 0.292220978905052, "grad_norm": 0.39724302291870117, "learning_rate": 3.5391023250031086e-06, "loss": 0.6929, "step": 7051 }, { "epoch": 0.2922624228107257, "grad_norm": 0.41185876727104187, "learning_rate": 3.53889510547474e-06, "loss": 0.6763, "step": 7052 }, { "epoch": 0.29230386671639935, "grad_norm": 0.4077242314815521, "learning_rate": 3.538687885946372e-06, "loss": 0.6719, "step": 7053 }, { "epoch": 0.29234531062207303, "grad_norm": 0.4618271291255951, "learning_rate": 3.538480666418003e-06, "loss": 0.7434, "step": 7054 }, { "epoch": 0.2923867545277467, "grad_norm": 0.4621109068393707, "learning_rate": 3.538273446889635e-06, "loss": 0.7534, "step": 7055 }, { "epoch": 
0.2924281984334204, "grad_norm": 0.45911550521850586, "learning_rate": 3.5380662273612672e-06, "loss": 0.6941, "step": 7056 }, { "epoch": 0.29246964233909406, "grad_norm": 0.4212634265422821, "learning_rate": 3.537859007832898e-06, "loss": 0.6725, "step": 7057 }, { "epoch": 0.2925110862447677, "grad_norm": 0.4052969813346863, "learning_rate": 3.5376517883045304e-06, "loss": 0.6921, "step": 7058 }, { "epoch": 0.29255253015044136, "grad_norm": 0.40122491121292114, "learning_rate": 3.537444568776162e-06, "loss": 0.7219, "step": 7059 }, { "epoch": 0.29259397405611504, "grad_norm": 0.40673404932022095, "learning_rate": 3.5372373492477936e-06, "loss": 0.6597, "step": 7060 }, { "epoch": 0.2926354179617887, "grad_norm": 0.43931958079338074, "learning_rate": 3.537030129719425e-06, "loss": 0.6665, "step": 7061 }, { "epoch": 0.2926768618674624, "grad_norm": 0.4202190935611725, "learning_rate": 3.536822910191057e-06, "loss": 0.6526, "step": 7062 }, { "epoch": 0.29271830577313607, "grad_norm": 0.4012390375137329, "learning_rate": 3.5366156906626886e-06, "loss": 0.6934, "step": 7063 }, { "epoch": 0.29275974967880974, "grad_norm": 0.4491875469684601, "learning_rate": 3.53640847113432e-06, "loss": 0.7014, "step": 7064 }, { "epoch": 0.2928011935844834, "grad_norm": 0.44229698181152344, "learning_rate": 3.536201251605952e-06, "loss": 0.7367, "step": 7065 }, { "epoch": 0.2928426374901571, "grad_norm": 0.4277951717376709, "learning_rate": 3.535994032077583e-06, "loss": 0.6821, "step": 7066 }, { "epoch": 0.2928840813958307, "grad_norm": 0.40657714009284973, "learning_rate": 3.535786812549215e-06, "loss": 0.6873, "step": 7067 }, { "epoch": 0.2929255253015044, "grad_norm": 0.42000019550323486, "learning_rate": 3.5355795930208464e-06, "loss": 0.7175, "step": 7068 }, { "epoch": 0.2929669692071781, "grad_norm": 0.42083606123924255, "learning_rate": 3.535372373492478e-06, "loss": 0.7001, "step": 7069 }, { "epoch": 0.29300841311285175, "grad_norm": 0.41385912895202637, "learning_rate": 
3.5351651539641096e-06, "loss": 0.6873, "step": 7070 }, { "epoch": 0.29304985701852543, "grad_norm": 0.440414160490036, "learning_rate": 3.5349579344357414e-06, "loss": 0.7217, "step": 7071 }, { "epoch": 0.2930913009241991, "grad_norm": 0.3917098343372345, "learning_rate": 3.5347507149073736e-06, "loss": 0.7383, "step": 7072 }, { "epoch": 0.2931327448298728, "grad_norm": 0.4158962666988373, "learning_rate": 3.5345434953790046e-06, "loss": 0.7181, "step": 7073 }, { "epoch": 0.29317418873554646, "grad_norm": 0.4064818024635315, "learning_rate": 3.534336275850637e-06, "loss": 0.7064, "step": 7074 }, { "epoch": 0.2932156326412201, "grad_norm": 0.38659265637397766, "learning_rate": 3.5341290563222678e-06, "loss": 0.6716, "step": 7075 }, { "epoch": 0.29325707654689376, "grad_norm": 0.42484250664711, "learning_rate": 3.5339218367939e-06, "loss": 0.6854, "step": 7076 }, { "epoch": 0.29329852045256743, "grad_norm": 0.40913817286491394, "learning_rate": 3.5337146172655314e-06, "loss": 0.6979, "step": 7077 }, { "epoch": 0.2933399643582411, "grad_norm": 0.42239293456077576, "learning_rate": 3.5335073977371632e-06, "loss": 0.7402, "step": 7078 }, { "epoch": 0.2933814082639148, "grad_norm": 0.44987887144088745, "learning_rate": 3.5333001782087946e-06, "loss": 0.7437, "step": 7079 }, { "epoch": 0.29342285216958847, "grad_norm": 0.4089981019496918, "learning_rate": 3.5330929586804264e-06, "loss": 0.7224, "step": 7080 }, { "epoch": 0.29346429607526214, "grad_norm": 0.4088749885559082, "learning_rate": 3.5328857391520582e-06, "loss": 0.6968, "step": 7081 }, { "epoch": 0.2935057399809358, "grad_norm": 0.4087159335613251, "learning_rate": 3.5326785196236896e-06, "loss": 0.7311, "step": 7082 }, { "epoch": 0.2935471838866095, "grad_norm": 0.44354018568992615, "learning_rate": 3.5324713000953214e-06, "loss": 0.7124, "step": 7083 }, { "epoch": 0.2935886277922831, "grad_norm": 0.4237080514431, "learning_rate": 3.532264080566953e-06, "loss": 0.666, "step": 7084 }, { "epoch": 
0.2936300716979568, "grad_norm": 0.4014838635921478, "learning_rate": 3.5320568610385846e-06, "loss": 0.6754, "step": 7085 }, { "epoch": 0.2936715156036305, "grad_norm": 0.4048781096935272, "learning_rate": 3.531849641510216e-06, "loss": 0.6864, "step": 7086 }, { "epoch": 0.29371295950930415, "grad_norm": 0.44869160652160645, "learning_rate": 3.531642421981848e-06, "loss": 0.6904, "step": 7087 }, { "epoch": 0.2937544034149778, "grad_norm": 0.44338735938072205, "learning_rate": 3.531435202453479e-06, "loss": 0.7227, "step": 7088 }, { "epoch": 0.2937958473206515, "grad_norm": 0.44682776927948, "learning_rate": 3.531227982925111e-06, "loss": 0.7411, "step": 7089 }, { "epoch": 0.2938372912263252, "grad_norm": 0.42456531524658203, "learning_rate": 3.5310207633967432e-06, "loss": 0.7163, "step": 7090 }, { "epoch": 0.29387873513199886, "grad_norm": 0.44708770513534546, "learning_rate": 3.530813543868374e-06, "loss": 0.6919, "step": 7091 }, { "epoch": 0.29392017903767254, "grad_norm": 0.41836851835250854, "learning_rate": 3.5306063243400064e-06, "loss": 0.7162, "step": 7092 }, { "epoch": 0.29396162294334616, "grad_norm": 0.4414134621620178, "learning_rate": 3.530399104811638e-06, "loss": 0.7686, "step": 7093 }, { "epoch": 0.29400306684901983, "grad_norm": 0.4520927965641022, "learning_rate": 3.5301918852832696e-06, "loss": 0.7322, "step": 7094 }, { "epoch": 0.2940445107546935, "grad_norm": 0.43770256638526917, "learning_rate": 3.529984665754901e-06, "loss": 0.7314, "step": 7095 }, { "epoch": 0.2940859546603672, "grad_norm": 0.44659924507141113, "learning_rate": 3.529777446226533e-06, "loss": 0.6968, "step": 7096 }, { "epoch": 0.29412739856604087, "grad_norm": 0.46751853823661804, "learning_rate": 3.529570226698164e-06, "loss": 0.7615, "step": 7097 }, { "epoch": 0.29416884247171454, "grad_norm": 0.4149044156074524, "learning_rate": 3.529363007169796e-06, "loss": 0.73, "step": 7098 }, { "epoch": 0.2942102863773882, "grad_norm": 0.4128616750240326, "learning_rate": 
3.529155787641428e-06, "loss": 0.7405, "step": 7099 }, { "epoch": 0.2942517302830619, "grad_norm": 0.4283026456832886, "learning_rate": 3.528948568113059e-06, "loss": 0.6963, "step": 7100 }, { "epoch": 0.2942931741887356, "grad_norm": 0.4424697160720825, "learning_rate": 3.528741348584691e-06, "loss": 0.7268, "step": 7101 }, { "epoch": 0.2943346180944092, "grad_norm": 0.46526622772216797, "learning_rate": 3.5285341290563224e-06, "loss": 0.793, "step": 7102 }, { "epoch": 0.29437606200008287, "grad_norm": 0.40357786417007446, "learning_rate": 3.528326909527954e-06, "loss": 0.6758, "step": 7103 }, { "epoch": 0.29441750590575655, "grad_norm": 0.391897052526474, "learning_rate": 3.5281196899995856e-06, "loss": 0.729, "step": 7104 }, { "epoch": 0.2944589498114302, "grad_norm": 0.39721566438674927, "learning_rate": 3.5279124704712174e-06, "loss": 0.7028, "step": 7105 }, { "epoch": 0.2945003937171039, "grad_norm": 0.41112610697746277, "learning_rate": 3.5277052509428488e-06, "loss": 0.7286, "step": 7106 }, { "epoch": 0.2945418376227776, "grad_norm": 0.385834664106369, "learning_rate": 3.5274980314144806e-06, "loss": 0.6736, "step": 7107 }, { "epoch": 0.29458328152845126, "grad_norm": 0.45646584033966064, "learning_rate": 3.527290811886113e-06, "loss": 0.7043, "step": 7108 }, { "epoch": 0.29462472543412493, "grad_norm": 0.3908303678035736, "learning_rate": 3.527083592357744e-06, "loss": 0.7007, "step": 7109 }, { "epoch": 0.29466616933979856, "grad_norm": 0.4678192734718323, "learning_rate": 3.526876372829376e-06, "loss": 0.7385, "step": 7110 }, { "epoch": 0.29470761324547223, "grad_norm": 0.4291054308414459, "learning_rate": 3.5266691533010074e-06, "loss": 0.7261, "step": 7111 }, { "epoch": 0.2947490571511459, "grad_norm": 0.4021909236907959, "learning_rate": 3.5264619337726392e-06, "loss": 0.6973, "step": 7112 }, { "epoch": 0.2947905010568196, "grad_norm": 0.4399035573005676, "learning_rate": 3.5262547142442706e-06, "loss": 0.7483, "step": 7113 }, { "epoch": 
0.29483194496249326, "grad_norm": 0.4211803674697876, "learning_rate": 3.5260474947159024e-06, "loss": 0.6836, "step": 7114 }, { "epoch": 0.29487338886816694, "grad_norm": 0.40034130215644836, "learning_rate": 3.525840275187534e-06, "loss": 0.6674, "step": 7115 }, { "epoch": 0.2949148327738406, "grad_norm": 0.44668257236480713, "learning_rate": 3.5256330556591656e-06, "loss": 0.7388, "step": 7116 }, { "epoch": 0.2949562766795143, "grad_norm": 0.463304340839386, "learning_rate": 3.5254258361307974e-06, "loss": 0.7444, "step": 7117 }, { "epoch": 0.294997720585188, "grad_norm": 0.3869861364364624, "learning_rate": 3.525218616602429e-06, "loss": 0.75, "step": 7118 }, { "epoch": 0.2950391644908616, "grad_norm": 0.44453781843185425, "learning_rate": 3.5250113970740606e-06, "loss": 0.6742, "step": 7119 }, { "epoch": 0.29508060839653527, "grad_norm": 0.42569708824157715, "learning_rate": 3.524804177545692e-06, "loss": 0.6909, "step": 7120 }, { "epoch": 0.29512205230220895, "grad_norm": 0.40034160017967224, "learning_rate": 3.524596958017324e-06, "loss": 0.7253, "step": 7121 }, { "epoch": 0.2951634962078826, "grad_norm": 0.4612511098384857, "learning_rate": 3.524389738488955e-06, "loss": 0.7583, "step": 7122 }, { "epoch": 0.2952049401135563, "grad_norm": 0.45482492446899414, "learning_rate": 3.524182518960587e-06, "loss": 0.7405, "step": 7123 }, { "epoch": 0.29524638401923, "grad_norm": 0.42838335037231445, "learning_rate": 3.5239752994322192e-06, "loss": 0.739, "step": 7124 }, { "epoch": 0.29528782792490366, "grad_norm": 0.40979689359664917, "learning_rate": 3.52376807990385e-06, "loss": 0.7202, "step": 7125 }, { "epoch": 0.29532927183057733, "grad_norm": 0.44307535886764526, "learning_rate": 3.5235608603754824e-06, "loss": 0.6858, "step": 7126 }, { "epoch": 0.295370715736251, "grad_norm": 0.3891412913799286, "learning_rate": 3.523353640847114e-06, "loss": 0.6938, "step": 7127 }, { "epoch": 0.29541215964192463, "grad_norm": 0.4264249801635742, "learning_rate": 
3.5231464213187456e-06, "loss": 0.709, "step": 7128 }, { "epoch": 0.2954536035475983, "grad_norm": 0.3946525454521179, "learning_rate": 3.522939201790377e-06, "loss": 0.7429, "step": 7129 }, { "epoch": 0.295495047453272, "grad_norm": 0.4395090341567993, "learning_rate": 3.522731982262009e-06, "loss": 0.7383, "step": 7130 }, { "epoch": 0.29553649135894566, "grad_norm": 0.4664108157157898, "learning_rate": 3.52252476273364e-06, "loss": 0.7225, "step": 7131 }, { "epoch": 0.29557793526461934, "grad_norm": 0.4124675393104553, "learning_rate": 3.522317543205272e-06, "loss": 0.7067, "step": 7132 }, { "epoch": 0.295619379170293, "grad_norm": 0.4005729854106903, "learning_rate": 3.522110323676904e-06, "loss": 0.6813, "step": 7133 }, { "epoch": 0.2956608230759667, "grad_norm": 0.4342549443244934, "learning_rate": 3.521903104148535e-06, "loss": 0.7051, "step": 7134 }, { "epoch": 0.29570226698164037, "grad_norm": 0.4355202317237854, "learning_rate": 3.521695884620167e-06, "loss": 0.7397, "step": 7135 }, { "epoch": 0.295743710887314, "grad_norm": 0.41203951835632324, "learning_rate": 3.5214886650917984e-06, "loss": 0.6936, "step": 7136 }, { "epoch": 0.29578515479298767, "grad_norm": 0.41456541419029236, "learning_rate": 3.5212814455634302e-06, "loss": 0.7223, "step": 7137 }, { "epoch": 0.29582659869866135, "grad_norm": 0.40653249621391296, "learning_rate": 3.5210742260350616e-06, "loss": 0.6565, "step": 7138 }, { "epoch": 0.295868042604335, "grad_norm": 0.44600874185562134, "learning_rate": 3.5208670065066934e-06, "loss": 0.7428, "step": 7139 }, { "epoch": 0.2959094865100087, "grad_norm": 0.39629924297332764, "learning_rate": 3.520659786978325e-06, "loss": 0.7058, "step": 7140 }, { "epoch": 0.2959509304156824, "grad_norm": 0.4294497072696686, "learning_rate": 3.5204525674499566e-06, "loss": 0.6694, "step": 7141 }, { "epoch": 0.29599237432135606, "grad_norm": 0.41492316126823425, "learning_rate": 3.520245347921589e-06, "loss": 0.7421, "step": 7142 }, { "epoch": 
0.29603381822702973, "grad_norm": 0.43981289863586426, "learning_rate": 3.5200381283932202e-06, "loss": 0.7219, "step": 7143 }, { "epoch": 0.2960752621327034, "grad_norm": 0.4086593985557556, "learning_rate": 3.519830908864852e-06, "loss": 0.7058, "step": 7144 }, { "epoch": 0.29611670603837703, "grad_norm": 0.4153338074684143, "learning_rate": 3.5196236893364834e-06, "loss": 0.7452, "step": 7145 }, { "epoch": 0.2961581499440507, "grad_norm": 0.46243083477020264, "learning_rate": 3.5194164698081152e-06, "loss": 0.7588, "step": 7146 }, { "epoch": 0.2961995938497244, "grad_norm": 0.40634676814079285, "learning_rate": 3.5192092502797466e-06, "loss": 0.6588, "step": 7147 }, { "epoch": 0.29624103775539806, "grad_norm": 0.42810526490211487, "learning_rate": 3.5190020307513784e-06, "loss": 0.7185, "step": 7148 }, { "epoch": 0.29628248166107174, "grad_norm": 0.43551889061927795, "learning_rate": 3.51879481122301e-06, "loss": 0.7178, "step": 7149 }, { "epoch": 0.2963239255667454, "grad_norm": 0.42054837942123413, "learning_rate": 3.5185875916946416e-06, "loss": 0.7146, "step": 7150 }, { "epoch": 0.2963653694724191, "grad_norm": 0.4027163088321686, "learning_rate": 3.5183803721662734e-06, "loss": 0.6924, "step": 7151 }, { "epoch": 0.29640681337809277, "grad_norm": 0.3875315189361572, "learning_rate": 3.518173152637905e-06, "loss": 0.6847, "step": 7152 }, { "epoch": 0.29644825728376645, "grad_norm": 0.4663865566253662, "learning_rate": 3.5179659331095366e-06, "loss": 0.7527, "step": 7153 }, { "epoch": 0.29648970118944007, "grad_norm": 0.39562007784843445, "learning_rate": 3.517758713581168e-06, "loss": 0.7437, "step": 7154 }, { "epoch": 0.29653114509511375, "grad_norm": 0.46124398708343506, "learning_rate": 3.5175514940528e-06, "loss": 0.7937, "step": 7155 }, { "epoch": 0.2965725890007874, "grad_norm": 0.4048207998275757, "learning_rate": 3.517344274524431e-06, "loss": 0.623, "step": 7156 }, { "epoch": 0.2966140329064611, "grad_norm": 0.43110814690589905, "learning_rate": 
3.517137054996063e-06, "loss": 0.7168, "step": 7157 }, { "epoch": 0.2966554768121348, "grad_norm": 0.3786778151988983, "learning_rate": 3.5169298354676944e-06, "loss": 0.6995, "step": 7158 }, { "epoch": 0.29669692071780845, "grad_norm": 0.38718733191490173, "learning_rate": 3.516722615939326e-06, "loss": 0.6295, "step": 7159 }, { "epoch": 0.29673836462348213, "grad_norm": 0.41272595524787903, "learning_rate": 3.5165153964109584e-06, "loss": 0.7036, "step": 7160 }, { "epoch": 0.2967798085291558, "grad_norm": 0.4314512312412262, "learning_rate": 3.51630817688259e-06, "loss": 0.7661, "step": 7161 }, { "epoch": 0.2968212524348295, "grad_norm": 0.41737595200538635, "learning_rate": 3.5161009573542216e-06, "loss": 0.7366, "step": 7162 }, { "epoch": 0.2968626963405031, "grad_norm": 0.4425581693649292, "learning_rate": 3.515893737825853e-06, "loss": 0.772, "step": 7163 }, { "epoch": 0.2969041402461768, "grad_norm": 0.4189567267894745, "learning_rate": 3.515686518297485e-06, "loss": 0.7268, "step": 7164 }, { "epoch": 0.29694558415185046, "grad_norm": 0.4525361955165863, "learning_rate": 3.515479298769116e-06, "loss": 0.7756, "step": 7165 }, { "epoch": 0.29698702805752414, "grad_norm": 0.4020131528377533, "learning_rate": 3.515272079240748e-06, "loss": 0.6965, "step": 7166 }, { "epoch": 0.2970284719631978, "grad_norm": 0.4463595449924469, "learning_rate": 3.5150648597123794e-06, "loss": 0.6985, "step": 7167 }, { "epoch": 0.2970699158688715, "grad_norm": 0.4018239974975586, "learning_rate": 3.5148576401840112e-06, "loss": 0.7004, "step": 7168 }, { "epoch": 0.29711135977454517, "grad_norm": 0.40508419275283813, "learning_rate": 3.514650420655643e-06, "loss": 0.7307, "step": 7169 }, { "epoch": 0.29715280368021885, "grad_norm": 0.38697174191474915, "learning_rate": 3.5144432011272744e-06, "loss": 0.6548, "step": 7170 }, { "epoch": 0.29719424758589247, "grad_norm": 0.43206679821014404, "learning_rate": 3.5142359815989062e-06, "loss": 0.6857, "step": 7171 }, { "epoch": 
0.29723569149156615, "grad_norm": 0.4318749010562897, "learning_rate": 3.5140287620705376e-06, "loss": 0.7341, "step": 7172 }, { "epoch": 0.2972771353972398, "grad_norm": 0.42490702867507935, "learning_rate": 3.5138215425421694e-06, "loss": 0.7399, "step": 7173 }, { "epoch": 0.2973185793029135, "grad_norm": 0.3991842567920685, "learning_rate": 3.513614323013801e-06, "loss": 0.666, "step": 7174 }, { "epoch": 0.2973600232085872, "grad_norm": 0.3963738977909088, "learning_rate": 3.5134071034854326e-06, "loss": 0.6902, "step": 7175 }, { "epoch": 0.29740146711426085, "grad_norm": 0.4453830420970917, "learning_rate": 3.513199883957064e-06, "loss": 0.7308, "step": 7176 }, { "epoch": 0.29744291101993453, "grad_norm": 0.4318297803401947, "learning_rate": 3.5129926644286962e-06, "loss": 0.7273, "step": 7177 }, { "epoch": 0.2974843549256082, "grad_norm": 0.4356853663921356, "learning_rate": 3.512785444900328e-06, "loss": 0.7351, "step": 7178 }, { "epoch": 0.2975257988312819, "grad_norm": 0.4319220185279846, "learning_rate": 3.5125782253719594e-06, "loss": 0.7283, "step": 7179 }, { "epoch": 0.2975672427369555, "grad_norm": 0.39121752977371216, "learning_rate": 3.5123710058435912e-06, "loss": 0.6591, "step": 7180 }, { "epoch": 0.2976086866426292, "grad_norm": 0.4612372815608978, "learning_rate": 3.5121637863152226e-06, "loss": 0.7107, "step": 7181 }, { "epoch": 0.29765013054830286, "grad_norm": 0.4082043766975403, "learning_rate": 3.5119565667868544e-06, "loss": 0.7466, "step": 7182 }, { "epoch": 0.29769157445397654, "grad_norm": 0.41979148983955383, "learning_rate": 3.511749347258486e-06, "loss": 0.6819, "step": 7183 }, { "epoch": 0.2977330183596502, "grad_norm": 0.38272106647491455, "learning_rate": 3.5115421277301176e-06, "loss": 0.6716, "step": 7184 }, { "epoch": 0.2977744622653239, "grad_norm": 0.42935314774513245, "learning_rate": 3.5113349082017494e-06, "loss": 0.7081, "step": 7185 }, { "epoch": 0.29781590617099757, "grad_norm": 0.39754393696784973, "learning_rate": 
3.511127688673381e-06, "loss": 0.7283, "step": 7186 }, { "epoch": 0.29785735007667125, "grad_norm": 0.43284961581230164, "learning_rate": 3.5109204691450126e-06, "loss": 0.7068, "step": 7187 }, { "epoch": 0.2978987939823449, "grad_norm": 0.42626217007637024, "learning_rate": 3.510713249616644e-06, "loss": 0.6765, "step": 7188 }, { "epoch": 0.29794023788801854, "grad_norm": 0.3982051908969879, "learning_rate": 3.510506030088276e-06, "loss": 0.6838, "step": 7189 }, { "epoch": 0.2979816817936922, "grad_norm": 0.4121301472187042, "learning_rate": 3.510298810559907e-06, "loss": 0.7, "step": 7190 }, { "epoch": 0.2980231256993659, "grad_norm": 0.4281240999698639, "learning_rate": 3.510091591031539e-06, "loss": 0.7727, "step": 7191 }, { "epoch": 0.2980645696050396, "grad_norm": 0.44324633479118347, "learning_rate": 3.5098843715031704e-06, "loss": 0.7478, "step": 7192 }, { "epoch": 0.29810601351071325, "grad_norm": 0.4586889445781708, "learning_rate": 3.509677151974802e-06, "loss": 0.7642, "step": 7193 }, { "epoch": 0.29814745741638693, "grad_norm": 0.45172274112701416, "learning_rate": 3.5094699324464344e-06, "loss": 0.7288, "step": 7194 }, { "epoch": 0.2981889013220606, "grad_norm": 0.45836082100868225, "learning_rate": 3.509262712918066e-06, "loss": 0.7678, "step": 7195 }, { "epoch": 0.2982303452277343, "grad_norm": 0.4493940472602844, "learning_rate": 3.5090554933896976e-06, "loss": 0.7429, "step": 7196 }, { "epoch": 0.2982717891334079, "grad_norm": 0.4017223119735718, "learning_rate": 3.508848273861329e-06, "loss": 0.7212, "step": 7197 }, { "epoch": 0.2983132330390816, "grad_norm": 0.4682629704475403, "learning_rate": 3.508641054332961e-06, "loss": 0.7434, "step": 7198 }, { "epoch": 0.29835467694475526, "grad_norm": 0.4443550109863281, "learning_rate": 3.5084338348045922e-06, "loss": 0.6919, "step": 7199 }, { "epoch": 0.29839612085042894, "grad_norm": 0.43907901644706726, "learning_rate": 3.508226615276224e-06, "loss": 0.7657, "step": 7200 }, { "epoch": 
0.2984375647561026, "grad_norm": 0.4336368143558502, "learning_rate": 3.5080193957478554e-06, "loss": 0.7363, "step": 7201 }, { "epoch": 0.2984790086617763, "grad_norm": 0.4099287688732147, "learning_rate": 3.5078121762194872e-06, "loss": 0.7312, "step": 7202 }, { "epoch": 0.29852045256744997, "grad_norm": 0.4032919704914093, "learning_rate": 3.507604956691119e-06, "loss": 0.6934, "step": 7203 }, { "epoch": 0.29856189647312364, "grad_norm": 0.39714422821998596, "learning_rate": 3.5073977371627504e-06, "loss": 0.6987, "step": 7204 }, { "epoch": 0.2986033403787973, "grad_norm": 0.3657318353652954, "learning_rate": 3.5071905176343822e-06, "loss": 0.6366, "step": 7205 }, { "epoch": 0.29864478428447094, "grad_norm": 0.4507036805152893, "learning_rate": 3.5069832981060136e-06, "loss": 0.7117, "step": 7206 }, { "epoch": 0.2986862281901446, "grad_norm": 0.40027666091918945, "learning_rate": 3.5067760785776454e-06, "loss": 0.7222, "step": 7207 }, { "epoch": 0.2987276720958183, "grad_norm": 0.39342984557151794, "learning_rate": 3.506568859049277e-06, "loss": 0.6865, "step": 7208 }, { "epoch": 0.298769116001492, "grad_norm": 0.40302762389183044, "learning_rate": 3.5063616395209086e-06, "loss": 0.6825, "step": 7209 }, { "epoch": 0.29881055990716565, "grad_norm": 0.390133261680603, "learning_rate": 3.50615441999254e-06, "loss": 0.6792, "step": 7210 }, { "epoch": 0.29885200381283933, "grad_norm": 0.44662928581237793, "learning_rate": 3.5059472004641722e-06, "loss": 0.698, "step": 7211 }, { "epoch": 0.298893447718513, "grad_norm": 0.4587511122226715, "learning_rate": 3.505739980935804e-06, "loss": 0.7292, "step": 7212 }, { "epoch": 0.2989348916241867, "grad_norm": 0.41628575325012207, "learning_rate": 3.5055327614074354e-06, "loss": 0.7014, "step": 7213 }, { "epoch": 0.29897633552986036, "grad_norm": 0.4412250518798828, "learning_rate": 3.5053255418790672e-06, "loss": 0.7402, "step": 7214 }, { "epoch": 0.299017779435534, "grad_norm": 0.4092228412628174, "learning_rate": 
3.5051183223506986e-06, "loss": 0.7089, "step": 7215 }, { "epoch": 0.29905922334120766, "grad_norm": 0.4190283715724945, "learning_rate": 3.5049111028223304e-06, "loss": 0.7393, "step": 7216 }, { "epoch": 0.29910066724688134, "grad_norm": 0.4276978373527527, "learning_rate": 3.504703883293962e-06, "loss": 0.7036, "step": 7217 }, { "epoch": 0.299142111152555, "grad_norm": 0.4398876428604126, "learning_rate": 3.5044966637655936e-06, "loss": 0.6979, "step": 7218 }, { "epoch": 0.2991835550582287, "grad_norm": 0.4226187467575073, "learning_rate": 3.504289444237225e-06, "loss": 0.6531, "step": 7219 }, { "epoch": 0.29922499896390237, "grad_norm": 0.42949700355529785, "learning_rate": 3.504082224708857e-06, "loss": 0.6934, "step": 7220 }, { "epoch": 0.29926644286957604, "grad_norm": 0.4279370605945587, "learning_rate": 3.5038750051804886e-06, "loss": 0.7605, "step": 7221 }, { "epoch": 0.2993078867752497, "grad_norm": 0.41513577103614807, "learning_rate": 3.50366778565212e-06, "loss": 0.7103, "step": 7222 }, { "epoch": 0.2993493306809234, "grad_norm": 0.40499696135520935, "learning_rate": 3.503460566123752e-06, "loss": 0.6652, "step": 7223 }, { "epoch": 0.299390774586597, "grad_norm": 0.3943236768245697, "learning_rate": 3.503253346595383e-06, "loss": 0.6729, "step": 7224 }, { "epoch": 0.2994322184922707, "grad_norm": 0.4183778762817383, "learning_rate": 3.503046127067015e-06, "loss": 0.6957, "step": 7225 }, { "epoch": 0.2994736623979444, "grad_norm": 0.3920329809188843, "learning_rate": 3.5028389075386464e-06, "loss": 0.6736, "step": 7226 }, { "epoch": 0.29951510630361805, "grad_norm": 0.41843655705451965, "learning_rate": 3.5026316880102782e-06, "loss": 0.7083, "step": 7227 }, { "epoch": 0.2995565502092917, "grad_norm": 0.40758731961250305, "learning_rate": 3.5024244684819096e-06, "loss": 0.6938, "step": 7228 }, { "epoch": 0.2995979941149654, "grad_norm": 0.4295382499694824, "learning_rate": 3.502217248953542e-06, "loss": 0.6652, "step": 7229 }, { "epoch": 
0.2996394380206391, "grad_norm": 0.41953715682029724, "learning_rate": 3.5020100294251736e-06, "loss": 0.7487, "step": 7230 }, { "epoch": 0.29968088192631276, "grad_norm": 0.40718942880630493, "learning_rate": 3.501802809896805e-06, "loss": 0.7048, "step": 7231 }, { "epoch": 0.2997223258319864, "grad_norm": 0.43632203340530396, "learning_rate": 3.501595590368437e-06, "loss": 0.7059, "step": 7232 }, { "epoch": 0.29976376973766006, "grad_norm": 0.434617817401886, "learning_rate": 3.5013883708400682e-06, "loss": 0.6945, "step": 7233 }, { "epoch": 0.29980521364333373, "grad_norm": 0.4579848647117615, "learning_rate": 3.5011811513117e-06, "loss": 0.7041, "step": 7234 }, { "epoch": 0.2998466575490074, "grad_norm": 0.41240590810775757, "learning_rate": 3.5009739317833314e-06, "loss": 0.726, "step": 7235 }, { "epoch": 0.2998881014546811, "grad_norm": 0.4259759187698364, "learning_rate": 3.5007667122549632e-06, "loss": 0.7434, "step": 7236 }, { "epoch": 0.29992954536035477, "grad_norm": 0.4138964116573334, "learning_rate": 3.5005594927265946e-06, "loss": 0.6958, "step": 7237 }, { "epoch": 0.29997098926602844, "grad_norm": 0.4226977229118347, "learning_rate": 3.5003522731982264e-06, "loss": 0.7195, "step": 7238 }, { "epoch": 0.3000124331717021, "grad_norm": 0.3952260911464691, "learning_rate": 3.5001450536698582e-06, "loss": 0.6283, "step": 7239 }, { "epoch": 0.3000538770773758, "grad_norm": 0.415253609418869, "learning_rate": 3.4999378341414896e-06, "loss": 0.7053, "step": 7240 }, { "epoch": 0.3000953209830494, "grad_norm": 0.5198934078216553, "learning_rate": 3.4997306146131214e-06, "loss": 0.781, "step": 7241 }, { "epoch": 0.3001367648887231, "grad_norm": 0.4069458544254303, "learning_rate": 3.499523395084753e-06, "loss": 0.7332, "step": 7242 }, { "epoch": 0.3001782087943968, "grad_norm": 0.43869447708129883, "learning_rate": 3.4993161755563846e-06, "loss": 0.7126, "step": 7243 }, { "epoch": 0.30021965270007045, "grad_norm": 0.4011303186416626, "learning_rate": 
3.499108956028016e-06, "loss": 0.6982, "step": 7244 }, { "epoch": 0.3002610966057441, "grad_norm": 0.3755727708339691, "learning_rate": 3.4989017364996482e-06, "loss": 0.6907, "step": 7245 }, { "epoch": 0.3003025405114178, "grad_norm": 0.507211446762085, "learning_rate": 3.49869451697128e-06, "loss": 0.707, "step": 7246 }, { "epoch": 0.3003439844170915, "grad_norm": 0.4334816038608551, "learning_rate": 3.4984872974429114e-06, "loss": 0.7046, "step": 7247 }, { "epoch": 0.30038542832276516, "grad_norm": 0.4517782926559448, "learning_rate": 3.4982800779145432e-06, "loss": 0.7109, "step": 7248 }, { "epoch": 0.30042687222843883, "grad_norm": 0.45117029547691345, "learning_rate": 3.4980728583861746e-06, "loss": 0.7703, "step": 7249 }, { "epoch": 0.30046831613411246, "grad_norm": 0.4400414526462555, "learning_rate": 3.4978656388578064e-06, "loss": 0.76, "step": 7250 }, { "epoch": 0.30050976003978613, "grad_norm": 0.4033151865005493, "learning_rate": 3.497658419329438e-06, "loss": 0.7351, "step": 7251 }, { "epoch": 0.3005512039454598, "grad_norm": 0.3934030830860138, "learning_rate": 3.4974511998010696e-06, "loss": 0.6821, "step": 7252 }, { "epoch": 0.3005926478511335, "grad_norm": 0.3835507929325104, "learning_rate": 3.497243980272701e-06, "loss": 0.6487, "step": 7253 }, { "epoch": 0.30063409175680716, "grad_norm": 0.5462005138397217, "learning_rate": 3.497036760744333e-06, "loss": 0.7053, "step": 7254 }, { "epoch": 0.30067553566248084, "grad_norm": 0.41436243057250977, "learning_rate": 3.4968295412159646e-06, "loss": 0.6543, "step": 7255 }, { "epoch": 0.3007169795681545, "grad_norm": 0.41642627120018005, "learning_rate": 3.496622321687596e-06, "loss": 0.6919, "step": 7256 }, { "epoch": 0.3007584234738282, "grad_norm": 0.44936898350715637, "learning_rate": 3.496415102159228e-06, "loss": 0.7178, "step": 7257 }, { "epoch": 0.3007998673795018, "grad_norm": 0.43330711126327515, "learning_rate": 3.4962078826308592e-06, "loss": 0.7036, "step": 7258 }, { "epoch": 
0.3008413112851755, "grad_norm": 0.4651854634284973, "learning_rate": 3.496000663102491e-06, "loss": 0.707, "step": 7259 }, { "epoch": 0.30088275519084917, "grad_norm": 0.43786242604255676, "learning_rate": 3.4957934435741224e-06, "loss": 0.7412, "step": 7260 }, { "epoch": 0.30092419909652285, "grad_norm": 0.42168518900871277, "learning_rate": 3.4955862240457542e-06, "loss": 0.7844, "step": 7261 }, { "epoch": 0.3009656430021965, "grad_norm": 0.4050391912460327, "learning_rate": 3.4953790045173856e-06, "loss": 0.688, "step": 7262 }, { "epoch": 0.3010070869078702, "grad_norm": 0.45023709535598755, "learning_rate": 3.495171784989018e-06, "loss": 0.7639, "step": 7263 }, { "epoch": 0.3010485308135439, "grad_norm": 0.4256753623485565, "learning_rate": 3.4949645654606496e-06, "loss": 0.7042, "step": 7264 }, { "epoch": 0.30108997471921756, "grad_norm": 0.4467214345932007, "learning_rate": 3.494757345932281e-06, "loss": 0.7354, "step": 7265 }, { "epoch": 0.30113141862489123, "grad_norm": 0.4159295856952667, "learning_rate": 3.494550126403913e-06, "loss": 0.6807, "step": 7266 }, { "epoch": 0.30117286253056486, "grad_norm": 0.40448349714279175, "learning_rate": 3.4943429068755442e-06, "loss": 0.707, "step": 7267 }, { "epoch": 0.30121430643623853, "grad_norm": 0.4183114767074585, "learning_rate": 3.494135687347176e-06, "loss": 0.6997, "step": 7268 }, { "epoch": 0.3012557503419122, "grad_norm": 0.3931894898414612, "learning_rate": 3.4939284678188074e-06, "loss": 0.6808, "step": 7269 }, { "epoch": 0.3012971942475859, "grad_norm": 0.4223577380180359, "learning_rate": 3.4937212482904392e-06, "loss": 0.6349, "step": 7270 }, { "epoch": 0.30133863815325956, "grad_norm": 0.4641520380973816, "learning_rate": 3.4935140287620706e-06, "loss": 0.6782, "step": 7271 }, { "epoch": 0.30138008205893324, "grad_norm": 0.38913285732269287, "learning_rate": 3.4933068092337024e-06, "loss": 0.7092, "step": 7272 }, { "epoch": 0.3014215259646069, "grad_norm": 0.422799676656723, "learning_rate": 
3.4930995897053342e-06, "loss": 0.6954, "step": 7273 }, { "epoch": 0.3014629698702806, "grad_norm": 0.4143790602684021, "learning_rate": 3.4928923701769656e-06, "loss": 0.7059, "step": 7274 }, { "epoch": 0.30150441377595427, "grad_norm": 0.43554621934890747, "learning_rate": 3.4926851506485974e-06, "loss": 0.7664, "step": 7275 }, { "epoch": 0.3015458576816279, "grad_norm": 0.41336363554000854, "learning_rate": 3.492477931120229e-06, "loss": 0.7356, "step": 7276 }, { "epoch": 0.30158730158730157, "grad_norm": 0.444933146238327, "learning_rate": 3.4922707115918606e-06, "loss": 0.7192, "step": 7277 }, { "epoch": 0.30162874549297525, "grad_norm": 0.4312666952610016, "learning_rate": 3.492063492063492e-06, "loss": 0.7051, "step": 7278 }, { "epoch": 0.3016701893986489, "grad_norm": 0.4071757197380066, "learning_rate": 3.4918562725351242e-06, "loss": 0.6497, "step": 7279 }, { "epoch": 0.3017116333043226, "grad_norm": 0.41675958037376404, "learning_rate": 3.491649053006755e-06, "loss": 0.6895, "step": 7280 }, { "epoch": 0.3017530772099963, "grad_norm": 0.4068516194820404, "learning_rate": 3.4914418334783874e-06, "loss": 0.7742, "step": 7281 }, { "epoch": 0.30179452111566996, "grad_norm": 0.4245772063732147, "learning_rate": 3.4912346139500192e-06, "loss": 0.7061, "step": 7282 }, { "epoch": 0.30183596502134363, "grad_norm": 0.4134874641895294, "learning_rate": 3.4910273944216506e-06, "loss": 0.708, "step": 7283 }, { "epoch": 0.30187740892701725, "grad_norm": 0.40304699540138245, "learning_rate": 3.4908201748932824e-06, "loss": 0.6945, "step": 7284 }, { "epoch": 0.30191885283269093, "grad_norm": 0.4178912043571472, "learning_rate": 3.490612955364914e-06, "loss": 0.7441, "step": 7285 }, { "epoch": 0.3019602967383646, "grad_norm": 0.41995078325271606, "learning_rate": 3.4904057358365456e-06, "loss": 0.6744, "step": 7286 }, { "epoch": 0.3020017406440383, "grad_norm": 0.4135635197162628, "learning_rate": 3.490198516308177e-06, "loss": 0.6534, "step": 7287 }, { "epoch": 
0.30204318454971196, "grad_norm": 0.4651942253112793, "learning_rate": 3.489991296779809e-06, "loss": 0.7122, "step": 7288 }, { "epoch": 0.30208462845538564, "grad_norm": 0.41350629925727844, "learning_rate": 3.4897840772514402e-06, "loss": 0.7224, "step": 7289 }, { "epoch": 0.3021260723610593, "grad_norm": 0.4290896952152252, "learning_rate": 3.489576857723072e-06, "loss": 0.7522, "step": 7290 }, { "epoch": 0.302167516266733, "grad_norm": 0.41058680415153503, "learning_rate": 3.489369638194704e-06, "loss": 0.6831, "step": 7291 }, { "epoch": 0.30220896017240667, "grad_norm": 0.41430723667144775, "learning_rate": 3.4891624186663352e-06, "loss": 0.7046, "step": 7292 }, { "epoch": 0.3022504040780803, "grad_norm": 0.3930942714214325, "learning_rate": 3.488955199137967e-06, "loss": 0.7231, "step": 7293 }, { "epoch": 0.30229184798375397, "grad_norm": 0.44576120376586914, "learning_rate": 3.4887479796095984e-06, "loss": 0.6772, "step": 7294 }, { "epoch": 0.30233329188942765, "grad_norm": 0.4149821698665619, "learning_rate": 3.4885407600812302e-06, "loss": 0.699, "step": 7295 }, { "epoch": 0.3023747357951013, "grad_norm": 0.40366923809051514, "learning_rate": 3.4883335405528616e-06, "loss": 0.6826, "step": 7296 }, { "epoch": 0.302416179700775, "grad_norm": 0.44799765944480896, "learning_rate": 3.488126321024494e-06, "loss": 0.7537, "step": 7297 }, { "epoch": 0.3024576236064487, "grad_norm": 0.424443781375885, "learning_rate": 3.487919101496125e-06, "loss": 0.6985, "step": 7298 }, { "epoch": 0.30249906751212235, "grad_norm": 0.4113139510154724, "learning_rate": 3.487711881967757e-06, "loss": 0.7191, "step": 7299 }, { "epoch": 0.30254051141779603, "grad_norm": 0.43825313448905945, "learning_rate": 3.487504662439389e-06, "loss": 0.717, "step": 7300 }, { "epoch": 0.3025819553234697, "grad_norm": 0.42495718598365784, "learning_rate": 3.4872974429110202e-06, "loss": 0.7325, "step": 7301 }, { "epoch": 0.30262339922914333, "grad_norm": 0.42719680070877075, "learning_rate": 
3.487090223382652e-06, "loss": 0.7278, "step": 7302 }, { "epoch": 0.302664843134817, "grad_norm": 0.41216665506362915, "learning_rate": 3.4868830038542834e-06, "loss": 0.7305, "step": 7303 }, { "epoch": 0.3027062870404907, "grad_norm": 0.4190988540649414, "learning_rate": 3.4866757843259152e-06, "loss": 0.6643, "step": 7304 }, { "epoch": 0.30274773094616436, "grad_norm": 0.4264727830886841, "learning_rate": 3.4864685647975466e-06, "loss": 0.7004, "step": 7305 }, { "epoch": 0.30278917485183804, "grad_norm": 0.40712451934814453, "learning_rate": 3.4862613452691784e-06, "loss": 0.7065, "step": 7306 }, { "epoch": 0.3028306187575117, "grad_norm": 0.4183228015899658, "learning_rate": 3.4860541257408102e-06, "loss": 0.6849, "step": 7307 }, { "epoch": 0.3028720626631854, "grad_norm": 0.4363938271999359, "learning_rate": 3.4858469062124416e-06, "loss": 0.7085, "step": 7308 }, { "epoch": 0.30291350656885907, "grad_norm": 0.3858165144920349, "learning_rate": 3.4856396866840734e-06, "loss": 0.6718, "step": 7309 }, { "epoch": 0.30295495047453275, "grad_norm": 0.49259158968925476, "learning_rate": 3.485432467155705e-06, "loss": 0.7852, "step": 7310 }, { "epoch": 0.30299639438020637, "grad_norm": 0.3951805830001831, "learning_rate": 3.4852252476273366e-06, "loss": 0.6914, "step": 7311 }, { "epoch": 0.30303783828588005, "grad_norm": 0.40133509039878845, "learning_rate": 3.485018028098968e-06, "loss": 0.6826, "step": 7312 }, { "epoch": 0.3030792821915537, "grad_norm": 0.36809074878692627, "learning_rate": 3.4848108085706003e-06, "loss": 0.6423, "step": 7313 }, { "epoch": 0.3031207260972274, "grad_norm": 0.4135667383670807, "learning_rate": 3.4846035890422312e-06, "loss": 0.7102, "step": 7314 }, { "epoch": 0.3031621700029011, "grad_norm": 0.40837374329566956, "learning_rate": 3.4843963695138634e-06, "loss": 0.7173, "step": 7315 }, { "epoch": 0.30320361390857475, "grad_norm": 0.4230419993400574, "learning_rate": 3.4841891499854953e-06, "loss": 0.656, "step": 7316 }, { "epoch": 
0.30324505781424843, "grad_norm": 0.4288572669029236, "learning_rate": 3.4839819304571266e-06, "loss": 0.7461, "step": 7317 }, { "epoch": 0.3032865017199221, "grad_norm": 0.41343367099761963, "learning_rate": 3.4837747109287585e-06, "loss": 0.6979, "step": 7318 }, { "epoch": 0.30332794562559573, "grad_norm": 0.3919431269168854, "learning_rate": 3.48356749140039e-06, "loss": 0.6714, "step": 7319 }, { "epoch": 0.3033693895312694, "grad_norm": 0.4088151752948761, "learning_rate": 3.4833602718720216e-06, "loss": 0.7322, "step": 7320 }, { "epoch": 0.3034108334369431, "grad_norm": 0.40012434124946594, "learning_rate": 3.483153052343653e-06, "loss": 0.6758, "step": 7321 }, { "epoch": 0.30345227734261676, "grad_norm": 0.39157670736312866, "learning_rate": 3.482945832815285e-06, "loss": 0.6531, "step": 7322 }, { "epoch": 0.30349372124829044, "grad_norm": 0.49350011348724365, "learning_rate": 3.4827386132869162e-06, "loss": 0.7073, "step": 7323 }, { "epoch": 0.3035351651539641, "grad_norm": 0.41279998421669006, "learning_rate": 3.482531393758548e-06, "loss": 0.7061, "step": 7324 }, { "epoch": 0.3035766090596378, "grad_norm": 0.3914352357387543, "learning_rate": 3.48232417423018e-06, "loss": 0.6938, "step": 7325 }, { "epoch": 0.30361805296531147, "grad_norm": 0.41931289434432983, "learning_rate": 3.4821169547018112e-06, "loss": 0.7234, "step": 7326 }, { "epoch": 0.30365949687098515, "grad_norm": 0.4103632867336273, "learning_rate": 3.481909735173443e-06, "loss": 0.7018, "step": 7327 }, { "epoch": 0.30370094077665877, "grad_norm": 0.39839738607406616, "learning_rate": 3.4817025156450744e-06, "loss": 0.7063, "step": 7328 }, { "epoch": 0.30374238468233244, "grad_norm": 0.4106575846672058, "learning_rate": 3.4814952961167067e-06, "loss": 0.7324, "step": 7329 }, { "epoch": 0.3037838285880061, "grad_norm": 0.431655615568161, "learning_rate": 3.4812880765883376e-06, "loss": 0.6924, "step": 7330 }, { "epoch": 0.3038252724936798, "grad_norm": 0.40878739953041077, "learning_rate": 
3.48108085705997e-06, "loss": 0.7122, "step": 7331 }, { "epoch": 0.3038667163993535, "grad_norm": 0.4227137565612793, "learning_rate": 3.480873637531601e-06, "loss": 0.7437, "step": 7332 }, { "epoch": 0.30390816030502715, "grad_norm": 0.4016418159008026, "learning_rate": 3.480666418003233e-06, "loss": 0.6528, "step": 7333 }, { "epoch": 0.30394960421070083, "grad_norm": 0.4477592706680298, "learning_rate": 3.480459198474865e-06, "loss": 0.6904, "step": 7334 }, { "epoch": 0.3039910481163745, "grad_norm": 0.42992234230041504, "learning_rate": 3.4802519789464962e-06, "loss": 0.7183, "step": 7335 }, { "epoch": 0.3040324920220482, "grad_norm": 0.4285798966884613, "learning_rate": 3.480044759418128e-06, "loss": 0.7688, "step": 7336 }, { "epoch": 0.3040739359277218, "grad_norm": 0.4552157521247864, "learning_rate": 3.4798375398897594e-06, "loss": 0.7432, "step": 7337 }, { "epoch": 0.3041153798333955, "grad_norm": 0.41803181171417236, "learning_rate": 3.4796303203613912e-06, "loss": 0.6731, "step": 7338 }, { "epoch": 0.30415682373906916, "grad_norm": 0.40412840247154236, "learning_rate": 3.4794231008330226e-06, "loss": 0.714, "step": 7339 }, { "epoch": 0.30419826764474284, "grad_norm": 0.43529561161994934, "learning_rate": 3.4792158813046544e-06, "loss": 0.7042, "step": 7340 }, { "epoch": 0.3042397115504165, "grad_norm": 0.39643725752830505, "learning_rate": 3.479008661776286e-06, "loss": 0.6719, "step": 7341 }, { "epoch": 0.3042811554560902, "grad_norm": 0.4261438548564911, "learning_rate": 3.4788014422479176e-06, "loss": 0.7139, "step": 7342 }, { "epoch": 0.30432259936176387, "grad_norm": 0.40189990401268005, "learning_rate": 3.4785942227195494e-06, "loss": 0.7135, "step": 7343 }, { "epoch": 0.30436404326743755, "grad_norm": 0.4367200434207916, "learning_rate": 3.478387003191181e-06, "loss": 0.748, "step": 7344 }, { "epoch": 0.30440548717311117, "grad_norm": 0.38791799545288086, "learning_rate": 3.4781797836628126e-06, "loss": 0.6963, "step": 7345 }, { "epoch": 
0.30444693107878484, "grad_norm": 0.44051676988601685, "learning_rate": 3.477972564134444e-06, "loss": 0.7262, "step": 7346 }, { "epoch": 0.3044883749844585, "grad_norm": 0.43658021092414856, "learning_rate": 3.4777653446060763e-06, "loss": 0.6653, "step": 7347 }, { "epoch": 0.3045298188901322, "grad_norm": 0.48692843317985535, "learning_rate": 3.4775581250777072e-06, "loss": 0.7354, "step": 7348 }, { "epoch": 0.3045712627958059, "grad_norm": 0.41655632853507996, "learning_rate": 3.4773509055493395e-06, "loss": 0.7336, "step": 7349 }, { "epoch": 0.30461270670147955, "grad_norm": 0.38513699173927307, "learning_rate": 3.477143686020971e-06, "loss": 0.71, "step": 7350 }, { "epoch": 0.30465415060715323, "grad_norm": 0.39964449405670166, "learning_rate": 3.4769364664926026e-06, "loss": 0.6849, "step": 7351 }, { "epoch": 0.3046955945128269, "grad_norm": 0.38176125288009644, "learning_rate": 3.4767292469642345e-06, "loss": 0.6635, "step": 7352 }, { "epoch": 0.3047370384185006, "grad_norm": 0.44331446290016174, "learning_rate": 3.476522027435866e-06, "loss": 0.6855, "step": 7353 }, { "epoch": 0.3047784823241742, "grad_norm": 0.44695717096328735, "learning_rate": 3.4763148079074977e-06, "loss": 0.7458, "step": 7354 }, { "epoch": 0.3048199262298479, "grad_norm": 0.40700972080230713, "learning_rate": 3.476107588379129e-06, "loss": 0.7051, "step": 7355 }, { "epoch": 0.30486137013552156, "grad_norm": 0.39625978469848633, "learning_rate": 3.475900368850761e-06, "loss": 0.6316, "step": 7356 }, { "epoch": 0.30490281404119524, "grad_norm": 0.43081000447273254, "learning_rate": 3.4756931493223922e-06, "loss": 0.7179, "step": 7357 }, { "epoch": 0.3049442579468689, "grad_norm": 0.426923006772995, "learning_rate": 3.475485929794024e-06, "loss": 0.7241, "step": 7358 }, { "epoch": 0.3049857018525426, "grad_norm": 0.41886407136917114, "learning_rate": 3.475278710265656e-06, "loss": 0.6332, "step": 7359 }, { "epoch": 0.30502714575821627, "grad_norm": 0.4363551735877991, "learning_rate": 
3.4750714907372872e-06, "loss": 0.7268, "step": 7360 }, { "epoch": 0.30506858966388994, "grad_norm": 0.4262523353099823, "learning_rate": 3.474864271208919e-06, "loss": 0.6624, "step": 7361 }, { "epoch": 0.3051100335695636, "grad_norm": 0.4243740737438202, "learning_rate": 3.4746570516805504e-06, "loss": 0.739, "step": 7362 }, { "epoch": 0.30515147747523724, "grad_norm": 0.4773908257484436, "learning_rate": 3.4744498321521827e-06, "loss": 0.7805, "step": 7363 }, { "epoch": 0.3051929213809109, "grad_norm": 0.37355175614356995, "learning_rate": 3.4742426126238136e-06, "loss": 0.6504, "step": 7364 }, { "epoch": 0.3052343652865846, "grad_norm": 0.4607018530368805, "learning_rate": 3.474035393095446e-06, "loss": 0.7507, "step": 7365 }, { "epoch": 0.3052758091922583, "grad_norm": 0.4046276807785034, "learning_rate": 3.473828173567077e-06, "loss": 0.6639, "step": 7366 }, { "epoch": 0.30531725309793195, "grad_norm": 0.4054524600505829, "learning_rate": 3.473620954038709e-06, "loss": 0.7041, "step": 7367 }, { "epoch": 0.30535869700360563, "grad_norm": 0.42565783858299255, "learning_rate": 3.473413734510341e-06, "loss": 0.7205, "step": 7368 }, { "epoch": 0.3054001409092793, "grad_norm": 0.40289342403411865, "learning_rate": 3.4732065149819722e-06, "loss": 0.6892, "step": 7369 }, { "epoch": 0.305441584814953, "grad_norm": 0.4245118498802185, "learning_rate": 3.472999295453604e-06, "loss": 0.7468, "step": 7370 }, { "epoch": 0.30548302872062666, "grad_norm": 0.4195047914981842, "learning_rate": 3.4727920759252354e-06, "loss": 0.7258, "step": 7371 }, { "epoch": 0.3055244726263003, "grad_norm": 0.4067900776863098, "learning_rate": 3.4725848563968673e-06, "loss": 0.689, "step": 7372 }, { "epoch": 0.30556591653197396, "grad_norm": 0.41553857922554016, "learning_rate": 3.4723776368684986e-06, "loss": 0.6711, "step": 7373 }, { "epoch": 0.30560736043764763, "grad_norm": 0.4308988153934479, "learning_rate": 3.4721704173401304e-06, "loss": 0.7137, "step": 7374 }, { "epoch": 
0.3056488043433213, "grad_norm": 0.4417855441570282, "learning_rate": 3.471963197811762e-06, "loss": 0.7781, "step": 7375 }, { "epoch": 0.305690248248995, "grad_norm": 0.38502851128578186, "learning_rate": 3.4717559782833936e-06, "loss": 0.7012, "step": 7376 }, { "epoch": 0.30573169215466867, "grad_norm": 0.4085834324359894, "learning_rate": 3.4715487587550255e-06, "loss": 0.677, "step": 7377 }, { "epoch": 0.30577313606034234, "grad_norm": 0.4353998601436615, "learning_rate": 3.471341539226657e-06, "loss": 0.7164, "step": 7378 }, { "epoch": 0.305814579966016, "grad_norm": 0.41055506467819214, "learning_rate": 3.4711343196982886e-06, "loss": 0.6957, "step": 7379 }, { "epoch": 0.30585602387168964, "grad_norm": 0.4064086377620697, "learning_rate": 3.47092710016992e-06, "loss": 0.7722, "step": 7380 }, { "epoch": 0.3058974677773633, "grad_norm": 0.4350390136241913, "learning_rate": 3.4707198806415523e-06, "loss": 0.6581, "step": 7381 }, { "epoch": 0.305938911683037, "grad_norm": 0.4039255380630493, "learning_rate": 3.4705126611131832e-06, "loss": 0.6798, "step": 7382 }, { "epoch": 0.3059803555887107, "grad_norm": 0.41461119055747986, "learning_rate": 3.4703054415848155e-06, "loss": 0.6838, "step": 7383 }, { "epoch": 0.30602179949438435, "grad_norm": 0.41529521346092224, "learning_rate": 3.470098222056447e-06, "loss": 0.7444, "step": 7384 }, { "epoch": 0.306063243400058, "grad_norm": 0.4189828634262085, "learning_rate": 3.4698910025280787e-06, "loss": 0.7542, "step": 7385 }, { "epoch": 0.3061046873057317, "grad_norm": 0.4064129889011383, "learning_rate": 3.4696837829997105e-06, "loss": 0.7058, "step": 7386 }, { "epoch": 0.3061461312114054, "grad_norm": 0.41242772340774536, "learning_rate": 3.469476563471342e-06, "loss": 0.7073, "step": 7387 }, { "epoch": 0.30618757511707906, "grad_norm": 0.4247068762779236, "learning_rate": 3.4692693439429737e-06, "loss": 0.7131, "step": 7388 }, { "epoch": 0.3062290190227527, "grad_norm": 0.4286353886127472, "learning_rate": 
3.469062124414605e-06, "loss": 0.7241, "step": 7389 }, { "epoch": 0.30627046292842636, "grad_norm": 0.4292788505554199, "learning_rate": 3.468854904886237e-06, "loss": 0.6774, "step": 7390 }, { "epoch": 0.30631190683410003, "grad_norm": 0.4236053228378296, "learning_rate": 3.4686476853578682e-06, "loss": 0.7146, "step": 7391 }, { "epoch": 0.3063533507397737, "grad_norm": 0.40659981966018677, "learning_rate": 3.4684404658295e-06, "loss": 0.6735, "step": 7392 }, { "epoch": 0.3063947946454474, "grad_norm": 0.4336153566837311, "learning_rate": 3.4682332463011314e-06, "loss": 0.6946, "step": 7393 }, { "epoch": 0.30643623855112107, "grad_norm": 0.46234774589538574, "learning_rate": 3.4680260267727632e-06, "loss": 0.7273, "step": 7394 }, { "epoch": 0.30647768245679474, "grad_norm": 0.41391947865486145, "learning_rate": 3.467818807244395e-06, "loss": 0.7048, "step": 7395 }, { "epoch": 0.3065191263624684, "grad_norm": 0.4224640429019928, "learning_rate": 3.4676115877160264e-06, "loss": 0.7168, "step": 7396 }, { "epoch": 0.3065605702681421, "grad_norm": 0.44375935196876526, "learning_rate": 3.4674043681876587e-06, "loss": 0.7197, "step": 7397 }, { "epoch": 0.3066020141738157, "grad_norm": 0.39048343896865845, "learning_rate": 3.4671971486592896e-06, "loss": 0.6904, "step": 7398 }, { "epoch": 0.3066434580794894, "grad_norm": 0.4068012237548828, "learning_rate": 3.466989929130922e-06, "loss": 0.709, "step": 7399 }, { "epoch": 0.30668490198516307, "grad_norm": 0.4248158037662506, "learning_rate": 3.466782709602553e-06, "loss": 0.7651, "step": 7400 }, { "epoch": 0.30672634589083675, "grad_norm": 0.38290929794311523, "learning_rate": 3.466575490074185e-06, "loss": 0.6323, "step": 7401 }, { "epoch": 0.3067677897965104, "grad_norm": 0.39681991934776306, "learning_rate": 3.4663682705458164e-06, "loss": 0.6722, "step": 7402 }, { "epoch": 0.3068092337021841, "grad_norm": 0.48744985461235046, "learning_rate": 3.4661610510174483e-06, "loss": 0.7, "step": 7403 }, { "epoch": 
0.3068506776078578, "grad_norm": 0.4756367802619934, "learning_rate": 3.46595383148908e-06, "loss": 0.7634, "step": 7404 }, { "epoch": 0.30689212151353146, "grad_norm": 0.3931790292263031, "learning_rate": 3.4657466119607114e-06, "loss": 0.6831, "step": 7405 }, { "epoch": 0.3069335654192051, "grad_norm": 0.40859824419021606, "learning_rate": 3.4655393924323433e-06, "loss": 0.6821, "step": 7406 }, { "epoch": 0.30697500932487876, "grad_norm": 0.4261351227760315, "learning_rate": 3.4653321729039746e-06, "loss": 0.6842, "step": 7407 }, { "epoch": 0.30701645323055243, "grad_norm": 0.4002237021923065, "learning_rate": 3.4651249533756065e-06, "loss": 0.7214, "step": 7408 }, { "epoch": 0.3070578971362261, "grad_norm": 0.40172311663627625, "learning_rate": 3.464917733847238e-06, "loss": 0.6337, "step": 7409 }, { "epoch": 0.3070993410418998, "grad_norm": 0.44855910539627075, "learning_rate": 3.4647105143188696e-06, "loss": 0.7794, "step": 7410 }, { "epoch": 0.30714078494757346, "grad_norm": 0.42929014563560486, "learning_rate": 3.464503294790501e-06, "loss": 0.7444, "step": 7411 }, { "epoch": 0.30718222885324714, "grad_norm": 0.38817235827445984, "learning_rate": 3.464296075262133e-06, "loss": 0.6581, "step": 7412 }, { "epoch": 0.3072236727589208, "grad_norm": 0.40362638235092163, "learning_rate": 3.4640888557337647e-06, "loss": 0.7319, "step": 7413 }, { "epoch": 0.3072651166645945, "grad_norm": 0.39868125319480896, "learning_rate": 3.463881636205396e-06, "loss": 0.645, "step": 7414 }, { "epoch": 0.3073065605702681, "grad_norm": 0.3945135176181793, "learning_rate": 3.4636744166770283e-06, "loss": 0.6547, "step": 7415 }, { "epoch": 0.3073480044759418, "grad_norm": 0.43538111448287964, "learning_rate": 3.4634671971486592e-06, "loss": 0.7075, "step": 7416 }, { "epoch": 0.30738944838161547, "grad_norm": 0.4041697084903717, "learning_rate": 3.4632599776202915e-06, "loss": 0.6976, "step": 7417 }, { "epoch": 0.30743089228728915, "grad_norm": 0.4234633147716522, "learning_rate": 
3.463052758091923e-06, "loss": 0.6483, "step": 7418 }, { "epoch": 0.3074723361929628, "grad_norm": 0.40138617157936096, "learning_rate": 3.4628455385635547e-06, "loss": 0.6821, "step": 7419 }, { "epoch": 0.3075137800986365, "grad_norm": 0.3986906409263611, "learning_rate": 3.4626383190351865e-06, "loss": 0.7122, "step": 7420 }, { "epoch": 0.3075552240043102, "grad_norm": 0.40693172812461853, "learning_rate": 3.462431099506818e-06, "loss": 0.7253, "step": 7421 }, { "epoch": 0.30759666790998386, "grad_norm": 0.40595442056655884, "learning_rate": 3.4622238799784497e-06, "loss": 0.6517, "step": 7422 }, { "epoch": 0.30763811181565753, "grad_norm": 0.4506520926952362, "learning_rate": 3.462016660450081e-06, "loss": 0.6719, "step": 7423 }, { "epoch": 0.30767955572133115, "grad_norm": 0.4135625660419464, "learning_rate": 3.461809440921713e-06, "loss": 0.736, "step": 7424 }, { "epoch": 0.30772099962700483, "grad_norm": 0.4359142780303955, "learning_rate": 3.4616022213933442e-06, "loss": 0.6891, "step": 7425 }, { "epoch": 0.3077624435326785, "grad_norm": 0.41457247734069824, "learning_rate": 3.461395001864976e-06, "loss": 0.6521, "step": 7426 }, { "epoch": 0.3078038874383522, "grad_norm": 0.3983946144580841, "learning_rate": 3.4611877823366074e-06, "loss": 0.686, "step": 7427 }, { "epoch": 0.30784533134402586, "grad_norm": 0.41855570673942566, "learning_rate": 3.4609805628082392e-06, "loss": 0.7656, "step": 7428 }, { "epoch": 0.30788677524969954, "grad_norm": 0.42434927821159363, "learning_rate": 3.460773343279871e-06, "loss": 0.7097, "step": 7429 }, { "epoch": 0.3079282191553732, "grad_norm": 0.3782005310058594, "learning_rate": 3.4605661237515024e-06, "loss": 0.6736, "step": 7430 }, { "epoch": 0.3079696630610469, "grad_norm": 0.4046684503555298, "learning_rate": 3.4603589042231347e-06, "loss": 0.6934, "step": 7431 }, { "epoch": 0.30801110696672057, "grad_norm": 0.39992770552635193, "learning_rate": 3.4601516846947656e-06, "loss": 0.6533, "step": 7432 }, { "epoch": 
0.3080525508723942, "grad_norm": 0.3961031138896942, "learning_rate": 3.459944465166398e-06, "loss": 0.7212, "step": 7433 }, { "epoch": 0.30809399477806787, "grad_norm": 0.4134383201599121, "learning_rate": 3.459737245638029e-06, "loss": 0.679, "step": 7434 }, { "epoch": 0.30813543868374155, "grad_norm": 0.3673158884048462, "learning_rate": 3.459530026109661e-06, "loss": 0.6405, "step": 7435 }, { "epoch": 0.3081768825894152, "grad_norm": 0.41948065161705017, "learning_rate": 3.4593228065812925e-06, "loss": 0.7307, "step": 7436 }, { "epoch": 0.3082183264950889, "grad_norm": 0.4199807643890381, "learning_rate": 3.4591155870529243e-06, "loss": 0.6355, "step": 7437 }, { "epoch": 0.3082597704007626, "grad_norm": 0.4437977075576782, "learning_rate": 3.458908367524556e-06, "loss": 0.7123, "step": 7438 }, { "epoch": 0.30830121430643626, "grad_norm": 0.42537516355514526, "learning_rate": 3.4587011479961875e-06, "loss": 0.7021, "step": 7439 }, { "epoch": 0.30834265821210993, "grad_norm": 0.462179034948349, "learning_rate": 3.4584939284678193e-06, "loss": 0.7058, "step": 7440 }, { "epoch": 0.30838410211778355, "grad_norm": 0.5247779488563538, "learning_rate": 3.4582867089394506e-06, "loss": 0.7861, "step": 7441 }, { "epoch": 0.30842554602345723, "grad_norm": 0.4208361804485321, "learning_rate": 3.4580794894110825e-06, "loss": 0.6921, "step": 7442 }, { "epoch": 0.3084669899291309, "grad_norm": 0.41391265392303467, "learning_rate": 3.457872269882714e-06, "loss": 0.6554, "step": 7443 }, { "epoch": 0.3085084338348046, "grad_norm": 0.4514371454715729, "learning_rate": 3.4576650503543457e-06, "loss": 0.7461, "step": 7444 }, { "epoch": 0.30854987774047826, "grad_norm": 0.43196672201156616, "learning_rate": 3.457457830825977e-06, "loss": 0.6942, "step": 7445 }, { "epoch": 0.30859132164615194, "grad_norm": 0.41849032044410706, "learning_rate": 3.457250611297609e-06, "loss": 0.7034, "step": 7446 }, { "epoch": 0.3086327655518256, "grad_norm": 0.38648521900177, "learning_rate": 
3.4570433917692407e-06, "loss": 0.666, "step": 7447 }, { "epoch": 0.3086742094574993, "grad_norm": 0.40261563658714294, "learning_rate": 3.456836172240872e-06, "loss": 0.6942, "step": 7448 }, { "epoch": 0.30871565336317297, "grad_norm": 0.39538857340812683, "learning_rate": 3.4566289527125043e-06, "loss": 0.6466, "step": 7449 }, { "epoch": 0.3087570972688466, "grad_norm": 0.432583212852478, "learning_rate": 3.4564217331841352e-06, "loss": 0.7451, "step": 7450 }, { "epoch": 0.30879854117452027, "grad_norm": 0.4139600098133087, "learning_rate": 3.4562145136557675e-06, "loss": 0.6829, "step": 7451 }, { "epoch": 0.30883998508019395, "grad_norm": 0.3965872824192047, "learning_rate": 3.456007294127399e-06, "loss": 0.7104, "step": 7452 }, { "epoch": 0.3088814289858676, "grad_norm": 0.4589243531227112, "learning_rate": 3.4558000745990307e-06, "loss": 0.7174, "step": 7453 }, { "epoch": 0.3089228728915413, "grad_norm": 0.41212204098701477, "learning_rate": 3.455592855070662e-06, "loss": 0.7429, "step": 7454 }, { "epoch": 0.308964316797215, "grad_norm": 0.41620928049087524, "learning_rate": 3.455385635542294e-06, "loss": 0.7114, "step": 7455 }, { "epoch": 0.30900576070288865, "grad_norm": 0.4345521330833435, "learning_rate": 3.4551784160139257e-06, "loss": 0.748, "step": 7456 }, { "epoch": 0.30904720460856233, "grad_norm": 0.40595024824142456, "learning_rate": 3.454971196485557e-06, "loss": 0.6959, "step": 7457 }, { "epoch": 0.309088648514236, "grad_norm": 0.4046982526779175, "learning_rate": 3.454763976957189e-06, "loss": 0.7261, "step": 7458 }, { "epoch": 0.30913009241990963, "grad_norm": 0.41742590069770813, "learning_rate": 3.4545567574288203e-06, "loss": 0.7285, "step": 7459 }, { "epoch": 0.3091715363255833, "grad_norm": 0.4170762300491333, "learning_rate": 3.454349537900452e-06, "loss": 0.667, "step": 7460 }, { "epoch": 0.309212980231257, "grad_norm": 0.40838125348091125, "learning_rate": 3.4541423183720834e-06, "loss": 0.6522, "step": 7461 }, { "epoch": 
0.30925442413693066, "grad_norm": 0.4063316285610199, "learning_rate": 3.4539350988437153e-06, "loss": 0.7156, "step": 7462 }, { "epoch": 0.30929586804260434, "grad_norm": 0.3804436922073364, "learning_rate": 3.4537278793153466e-06, "loss": 0.6378, "step": 7463 }, { "epoch": 0.309337311948278, "grad_norm": 0.40576303005218506, "learning_rate": 3.4535206597869784e-06, "loss": 0.7069, "step": 7464 }, { "epoch": 0.3093787558539517, "grad_norm": 0.4195157289505005, "learning_rate": 3.4533134402586107e-06, "loss": 0.7415, "step": 7465 }, { "epoch": 0.30942019975962537, "grad_norm": 0.3904954195022583, "learning_rate": 3.4531062207302416e-06, "loss": 0.6456, "step": 7466 }, { "epoch": 0.309461643665299, "grad_norm": 0.421353816986084, "learning_rate": 3.452899001201874e-06, "loss": 0.7053, "step": 7467 }, { "epoch": 0.30950308757097267, "grad_norm": 0.41370323300361633, "learning_rate": 3.452691781673505e-06, "loss": 0.6846, "step": 7468 }, { "epoch": 0.30954453147664635, "grad_norm": 0.4078402817249298, "learning_rate": 3.452484562145137e-06, "loss": 0.7607, "step": 7469 }, { "epoch": 0.30958597538232, "grad_norm": 0.4107280671596527, "learning_rate": 3.4522773426167685e-06, "loss": 0.7, "step": 7470 }, { "epoch": 0.3096274192879937, "grad_norm": 0.3857530951499939, "learning_rate": 3.4520701230884003e-06, "loss": 0.6826, "step": 7471 }, { "epoch": 0.3096688631936674, "grad_norm": 0.4017072319984436, "learning_rate": 3.4518629035600317e-06, "loss": 0.6921, "step": 7472 }, { "epoch": 0.30971030709934105, "grad_norm": 0.46790146827697754, "learning_rate": 3.4516556840316635e-06, "loss": 0.7645, "step": 7473 }, { "epoch": 0.30975175100501473, "grad_norm": 0.44236353039741516, "learning_rate": 3.4514484645032953e-06, "loss": 0.7515, "step": 7474 }, { "epoch": 0.3097931949106884, "grad_norm": 0.4386170208454132, "learning_rate": 3.4512412449749267e-06, "loss": 0.729, "step": 7475 }, { "epoch": 0.30983463881636203, "grad_norm": 0.4028629958629608, "learning_rate": 
3.4510340254465585e-06, "loss": 0.6648, "step": 7476 }, { "epoch": 0.3098760827220357, "grad_norm": 0.445488303899765, "learning_rate": 3.45082680591819e-06, "loss": 0.7395, "step": 7477 }, { "epoch": 0.3099175266277094, "grad_norm": 0.3786115348339081, "learning_rate": 3.4506195863898217e-06, "loss": 0.6415, "step": 7478 }, { "epoch": 0.30995897053338306, "grad_norm": 0.4242904484272003, "learning_rate": 3.450412366861453e-06, "loss": 0.7103, "step": 7479 }, { "epoch": 0.31000041443905674, "grad_norm": 0.43441250920295715, "learning_rate": 3.450205147333085e-06, "loss": 0.7158, "step": 7480 }, { "epoch": 0.3100418583447304, "grad_norm": 0.40602102875709534, "learning_rate": 3.4499979278047167e-06, "loss": 0.6924, "step": 7481 }, { "epoch": 0.3100833022504041, "grad_norm": 0.4194507598876953, "learning_rate": 3.449790708276348e-06, "loss": 0.7148, "step": 7482 }, { "epoch": 0.31012474615607777, "grad_norm": 0.44745177030563354, "learning_rate": 3.4495834887479803e-06, "loss": 0.7522, "step": 7483 }, { "epoch": 0.31016619006175145, "grad_norm": 0.3976851999759674, "learning_rate": 3.4493762692196112e-06, "loss": 0.6609, "step": 7484 }, { "epoch": 0.31020763396742507, "grad_norm": 0.3988339602947235, "learning_rate": 3.4491690496912435e-06, "loss": 0.7292, "step": 7485 }, { "epoch": 0.31024907787309874, "grad_norm": 0.39881035685539246, "learning_rate": 3.448961830162875e-06, "loss": 0.7434, "step": 7486 }, { "epoch": 0.3102905217787724, "grad_norm": 0.39574187994003296, "learning_rate": 3.4487546106345067e-06, "loss": 0.6707, "step": 7487 }, { "epoch": 0.3103319656844461, "grad_norm": 0.440719872713089, "learning_rate": 3.448547391106138e-06, "loss": 0.7405, "step": 7488 }, { "epoch": 0.3103734095901198, "grad_norm": 0.41781434416770935, "learning_rate": 3.44834017157777e-06, "loss": 0.6636, "step": 7489 }, { "epoch": 0.31041485349579345, "grad_norm": 0.38664522767066956, "learning_rate": 3.4481329520494017e-06, "loss": 0.7581, "step": 7490 }, { "epoch": 
0.31045629740146713, "grad_norm": 0.4182446300983429, "learning_rate": 3.447925732521033e-06, "loss": 0.7524, "step": 7491 }, { "epoch": 0.3104977413071408, "grad_norm": 0.43499240279197693, "learning_rate": 3.447718512992665e-06, "loss": 0.7274, "step": 7492 }, { "epoch": 0.31053918521281443, "grad_norm": 0.4100394546985626, "learning_rate": 3.4475112934642963e-06, "loss": 0.7344, "step": 7493 }, { "epoch": 0.3105806291184881, "grad_norm": 0.4300372898578644, "learning_rate": 3.447304073935928e-06, "loss": 0.7051, "step": 7494 }, { "epoch": 0.3106220730241618, "grad_norm": 0.40647703409194946, "learning_rate": 3.4470968544075595e-06, "loss": 0.7255, "step": 7495 }, { "epoch": 0.31066351692983546, "grad_norm": 0.3884662985801697, "learning_rate": 3.4468896348791913e-06, "loss": 0.6992, "step": 7496 }, { "epoch": 0.31070496083550914, "grad_norm": 0.39692962169647217, "learning_rate": 3.4466824153508226e-06, "loss": 0.7073, "step": 7497 }, { "epoch": 0.3107464047411828, "grad_norm": 0.3854995369911194, "learning_rate": 3.4464751958224545e-06, "loss": 0.6799, "step": 7498 }, { "epoch": 0.3107878486468565, "grad_norm": 0.42676448822021484, "learning_rate": 3.4462679762940867e-06, "loss": 0.7096, "step": 7499 }, { "epoch": 0.31082929255253017, "grad_norm": 0.46065542101860046, "learning_rate": 3.4460607567657176e-06, "loss": 0.6981, "step": 7500 }, { "epoch": 0.31087073645820384, "grad_norm": 0.41962409019470215, "learning_rate": 3.44585353723735e-06, "loss": 0.7021, "step": 7501 }, { "epoch": 0.31091218036387747, "grad_norm": 0.3986532986164093, "learning_rate": 3.4456463177089813e-06, "loss": 0.7167, "step": 7502 }, { "epoch": 0.31095362426955114, "grad_norm": 0.4155408442020416, "learning_rate": 3.445439098180613e-06, "loss": 0.7473, "step": 7503 }, { "epoch": 0.3109950681752248, "grad_norm": 0.4450230300426483, "learning_rate": 3.4452318786522445e-06, "loss": 0.6719, "step": 7504 }, { "epoch": 0.3110365120808985, "grad_norm": 0.4236619472503662, "learning_rate": 
3.4450246591238763e-06, "loss": 0.7209, "step": 7505 }, { "epoch": 0.3110779559865722, "grad_norm": 0.4422362744808197, "learning_rate": 3.4448174395955077e-06, "loss": 0.7454, "step": 7506 }, { "epoch": 0.31111939989224585, "grad_norm": 0.39725545048713684, "learning_rate": 3.4446102200671395e-06, "loss": 0.6923, "step": 7507 }, { "epoch": 0.31116084379791953, "grad_norm": 0.4980255961418152, "learning_rate": 3.4444030005387713e-06, "loss": 0.7163, "step": 7508 }, { "epoch": 0.3112022877035932, "grad_norm": 0.42044591903686523, "learning_rate": 3.4441957810104027e-06, "loss": 0.6964, "step": 7509 }, { "epoch": 0.3112437316092669, "grad_norm": 0.40436574816703796, "learning_rate": 3.4439885614820345e-06, "loss": 0.7174, "step": 7510 }, { "epoch": 0.3112851755149405, "grad_norm": 0.39719322323799133, "learning_rate": 3.443781341953666e-06, "loss": 0.7346, "step": 7511 }, { "epoch": 0.3113266194206142, "grad_norm": 0.39710134267807007, "learning_rate": 3.4435741224252977e-06, "loss": 0.728, "step": 7512 }, { "epoch": 0.31136806332628786, "grad_norm": 0.40367019176483154, "learning_rate": 3.443366902896929e-06, "loss": 0.6814, "step": 7513 }, { "epoch": 0.31140950723196154, "grad_norm": 0.4167819917201996, "learning_rate": 3.443159683368561e-06, "loss": 0.704, "step": 7514 }, { "epoch": 0.3114509511376352, "grad_norm": 0.4126543402671814, "learning_rate": 3.4429524638401922e-06, "loss": 0.7334, "step": 7515 }, { "epoch": 0.3114923950433089, "grad_norm": 0.41189107298851013, "learning_rate": 3.442745244311824e-06, "loss": 0.736, "step": 7516 }, { "epoch": 0.31153383894898257, "grad_norm": 0.4293951690196991, "learning_rate": 3.4425380247834563e-06, "loss": 0.6775, "step": 7517 }, { "epoch": 0.31157528285465624, "grad_norm": 0.4264795780181885, "learning_rate": 3.4423308052550873e-06, "loss": 0.7024, "step": 7518 }, { "epoch": 0.3116167267603299, "grad_norm": 0.41424986720085144, "learning_rate": 3.4421235857267195e-06, "loss": 0.6647, "step": 7519 }, { "epoch": 
0.31165817066600354, "grad_norm": 0.3883225619792938, "learning_rate": 3.441916366198351e-06, "loss": 0.6848, "step": 7520 }, { "epoch": 0.3116996145716772, "grad_norm": 0.40998461842536926, "learning_rate": 3.4417091466699827e-06, "loss": 0.7324, "step": 7521 }, { "epoch": 0.3117410584773509, "grad_norm": 0.4122975766658783, "learning_rate": 3.441501927141614e-06, "loss": 0.6619, "step": 7522 }, { "epoch": 0.3117825023830246, "grad_norm": 0.3984352946281433, "learning_rate": 3.441294707613246e-06, "loss": 0.7292, "step": 7523 }, { "epoch": 0.31182394628869825, "grad_norm": 0.4168083965778351, "learning_rate": 3.4410874880848773e-06, "loss": 0.7017, "step": 7524 }, { "epoch": 0.3118653901943719, "grad_norm": 0.45293116569519043, "learning_rate": 3.440880268556509e-06, "loss": 0.709, "step": 7525 }, { "epoch": 0.3119068341000456, "grad_norm": 0.4320991635322571, "learning_rate": 3.440673049028141e-06, "loss": 0.7201, "step": 7526 }, { "epoch": 0.3119482780057193, "grad_norm": 0.40693241357803345, "learning_rate": 3.4404658294997723e-06, "loss": 0.663, "step": 7527 }, { "epoch": 0.3119897219113929, "grad_norm": 0.3822488784790039, "learning_rate": 3.440258609971404e-06, "loss": 0.6321, "step": 7528 }, { "epoch": 0.3120311658170666, "grad_norm": 0.42916402220726013, "learning_rate": 3.4400513904430355e-06, "loss": 0.7495, "step": 7529 }, { "epoch": 0.31207260972274026, "grad_norm": 0.39259234070777893, "learning_rate": 3.4398441709146673e-06, "loss": 0.7396, "step": 7530 }, { "epoch": 0.31211405362841393, "grad_norm": 0.41370290517807007, "learning_rate": 3.4396369513862987e-06, "loss": 0.7477, "step": 7531 }, { "epoch": 0.3121554975340876, "grad_norm": 0.4350496232509613, "learning_rate": 3.4394297318579305e-06, "loss": 0.7299, "step": 7532 }, { "epoch": 0.3121969414397613, "grad_norm": 0.4280523955821991, "learning_rate": 3.439222512329562e-06, "loss": 0.6707, "step": 7533 }, { "epoch": 0.31223838534543497, "grad_norm": 0.4487139582633972, "learning_rate": 
3.4390152928011937e-06, "loss": 0.7424, "step": 7534 }, { "epoch": 0.31227982925110864, "grad_norm": 0.37774088978767395, "learning_rate": 3.438808073272826e-06, "loss": 0.6804, "step": 7535 }, { "epoch": 0.3123212731567823, "grad_norm": 0.4136647880077362, "learning_rate": 3.4386008537444573e-06, "loss": 0.7026, "step": 7536 }, { "epoch": 0.31236271706245594, "grad_norm": 0.4362996220588684, "learning_rate": 3.438393634216089e-06, "loss": 0.6705, "step": 7537 }, { "epoch": 0.3124041609681296, "grad_norm": 0.455662339925766, "learning_rate": 3.4381864146877205e-06, "loss": 0.7585, "step": 7538 }, { "epoch": 0.3124456048738033, "grad_norm": 0.407144159078598, "learning_rate": 3.4379791951593523e-06, "loss": 0.7109, "step": 7539 }, { "epoch": 0.312487048779477, "grad_norm": 0.42984285950660706, "learning_rate": 3.4377719756309837e-06, "loss": 0.6954, "step": 7540 }, { "epoch": 0.31252849268515065, "grad_norm": 0.3828597962856293, "learning_rate": 3.4375647561026155e-06, "loss": 0.6338, "step": 7541 }, { "epoch": 0.3125699365908243, "grad_norm": 0.4313989281654358, "learning_rate": 3.4373575365742473e-06, "loss": 0.6616, "step": 7542 }, { "epoch": 0.312611380496498, "grad_norm": 0.4203551411628723, "learning_rate": 3.4371503170458787e-06, "loss": 0.7487, "step": 7543 }, { "epoch": 0.3126528244021717, "grad_norm": 0.42185747623443604, "learning_rate": 3.4369430975175105e-06, "loss": 0.689, "step": 7544 }, { "epoch": 0.31269426830784536, "grad_norm": 0.40505892038345337, "learning_rate": 3.436735877989142e-06, "loss": 0.693, "step": 7545 }, { "epoch": 0.312735712213519, "grad_norm": 0.43437132239341736, "learning_rate": 3.4365286584607737e-06, "loss": 0.7076, "step": 7546 }, { "epoch": 0.31277715611919266, "grad_norm": 0.4146994650363922, "learning_rate": 3.436321438932405e-06, "loss": 0.7051, "step": 7547 }, { "epoch": 0.31281860002486633, "grad_norm": 0.41935038566589355, "learning_rate": 3.436114219404037e-06, "loss": 0.7714, "step": 7548 }, { "epoch": 
0.31286004393054, "grad_norm": 0.4182562232017517, "learning_rate": 3.4359069998756683e-06, "loss": 0.7354, "step": 7549 }, { "epoch": 0.3129014878362137, "grad_norm": 0.4311809837818146, "learning_rate": 3.4356997803473e-06, "loss": 0.6664, "step": 7550 }, { "epoch": 0.31294293174188736, "grad_norm": 0.41274723410606384, "learning_rate": 3.4354925608189323e-06, "loss": 0.7095, "step": 7551 }, { "epoch": 0.31298437564756104, "grad_norm": 0.4006761908531189, "learning_rate": 3.4352853412905633e-06, "loss": 0.72, "step": 7552 }, { "epoch": 0.3130258195532347, "grad_norm": 0.40339601039886475, "learning_rate": 3.4350781217621955e-06, "loss": 0.6765, "step": 7553 }, { "epoch": 0.31306726345890834, "grad_norm": 0.41138267517089844, "learning_rate": 3.434870902233827e-06, "loss": 0.676, "step": 7554 }, { "epoch": 0.313108707364582, "grad_norm": 0.4002581238746643, "learning_rate": 3.4346636827054587e-06, "loss": 0.6326, "step": 7555 }, { "epoch": 0.3131501512702557, "grad_norm": 0.43722182512283325, "learning_rate": 3.43445646317709e-06, "loss": 0.7369, "step": 7556 }, { "epoch": 0.31319159517592937, "grad_norm": 0.44610583782196045, "learning_rate": 3.434249243648722e-06, "loss": 0.7502, "step": 7557 }, { "epoch": 0.31323303908160305, "grad_norm": 0.40255096554756165, "learning_rate": 3.4340420241203533e-06, "loss": 0.693, "step": 7558 }, { "epoch": 0.3132744829872767, "grad_norm": 0.3938567042350769, "learning_rate": 3.433834804591985e-06, "loss": 0.7058, "step": 7559 }, { "epoch": 0.3133159268929504, "grad_norm": 0.4168805181980133, "learning_rate": 3.433627585063617e-06, "loss": 0.6844, "step": 7560 }, { "epoch": 0.3133573707986241, "grad_norm": 0.38529545068740845, "learning_rate": 3.4334203655352483e-06, "loss": 0.6611, "step": 7561 }, { "epoch": 0.31339881470429776, "grad_norm": 0.3977561891078949, "learning_rate": 3.43321314600688e-06, "loss": 0.7312, "step": 7562 }, { "epoch": 0.3134402586099714, "grad_norm": 0.4147842824459076, "learning_rate": 
3.4330059264785115e-06, "loss": 0.7148, "step": 7563 }, { "epoch": 0.31348170251564506, "grad_norm": 0.3993547856807709, "learning_rate": 3.4327987069501433e-06, "loss": 0.7006, "step": 7564 }, { "epoch": 0.31352314642131873, "grad_norm": 0.4317115843296051, "learning_rate": 3.4325914874217747e-06, "loss": 0.7109, "step": 7565 }, { "epoch": 0.3135645903269924, "grad_norm": 0.4191565215587616, "learning_rate": 3.4323842678934065e-06, "loss": 0.6929, "step": 7566 }, { "epoch": 0.3136060342326661, "grad_norm": 0.4135379493236542, "learning_rate": 3.432177048365038e-06, "loss": 0.7278, "step": 7567 }, { "epoch": 0.31364747813833976, "grad_norm": 0.4381425976753235, "learning_rate": 3.4319698288366697e-06, "loss": 0.6702, "step": 7568 }, { "epoch": 0.31368892204401344, "grad_norm": 0.42175841331481934, "learning_rate": 3.431762609308302e-06, "loss": 0.7126, "step": 7569 }, { "epoch": 0.3137303659496871, "grad_norm": 0.44989556074142456, "learning_rate": 3.4315553897799333e-06, "loss": 0.7339, "step": 7570 }, { "epoch": 0.3137718098553608, "grad_norm": 0.4091661870479584, "learning_rate": 3.431348170251565e-06, "loss": 0.7267, "step": 7571 }, { "epoch": 0.3138132537610344, "grad_norm": 0.4093273878097534, "learning_rate": 3.4311409507231965e-06, "loss": 0.7229, "step": 7572 }, { "epoch": 0.3138546976667081, "grad_norm": 0.4181423783302307, "learning_rate": 3.4309337311948283e-06, "loss": 0.7021, "step": 7573 }, { "epoch": 0.31389614157238177, "grad_norm": 0.48012322187423706, "learning_rate": 3.4307265116664597e-06, "loss": 0.7498, "step": 7574 }, { "epoch": 0.31393758547805545, "grad_norm": 0.4160819351673126, "learning_rate": 3.4305192921380915e-06, "loss": 0.7339, "step": 7575 }, { "epoch": 0.3139790293837291, "grad_norm": 0.40453293919563293, "learning_rate": 3.430312072609723e-06, "loss": 0.686, "step": 7576 }, { "epoch": 0.3140204732894028, "grad_norm": 0.4028432369232178, "learning_rate": 3.4301048530813547e-06, "loss": 0.6974, "step": 7577 }, { "epoch": 
0.3140619171950765, "grad_norm": 0.41426563262939453, "learning_rate": 3.4298976335529865e-06, "loss": 0.7039, "step": 7578 }, { "epoch": 0.31410336110075016, "grad_norm": 0.39181146025657654, "learning_rate": 3.429690414024618e-06, "loss": 0.7236, "step": 7579 }, { "epoch": 0.31414480500642383, "grad_norm": 0.4060283303260803, "learning_rate": 3.4294831944962497e-06, "loss": 0.6846, "step": 7580 }, { "epoch": 0.31418624891209745, "grad_norm": 0.42515116930007935, "learning_rate": 3.429275974967881e-06, "loss": 0.6862, "step": 7581 }, { "epoch": 0.31422769281777113, "grad_norm": 0.417490154504776, "learning_rate": 3.429068755439513e-06, "loss": 0.6772, "step": 7582 }, { "epoch": 0.3142691367234448, "grad_norm": 0.3897988200187683, "learning_rate": 3.4288615359111443e-06, "loss": 0.6956, "step": 7583 }, { "epoch": 0.3143105806291185, "grad_norm": 0.41970372200012207, "learning_rate": 3.428654316382776e-06, "loss": 0.7405, "step": 7584 }, { "epoch": 0.31435202453479216, "grad_norm": 0.41606375575065613, "learning_rate": 3.4284470968544075e-06, "loss": 0.6985, "step": 7585 }, { "epoch": 0.31439346844046584, "grad_norm": 0.4188237190246582, "learning_rate": 3.4282398773260393e-06, "loss": 0.7332, "step": 7586 }, { "epoch": 0.3144349123461395, "grad_norm": 0.4534447491168976, "learning_rate": 3.4280326577976715e-06, "loss": 0.7744, "step": 7587 }, { "epoch": 0.3144763562518132, "grad_norm": 0.38659927248954773, "learning_rate": 3.427825438269303e-06, "loss": 0.7052, "step": 7588 }, { "epoch": 0.3145178001574868, "grad_norm": 0.41620782017707825, "learning_rate": 3.4276182187409347e-06, "loss": 0.6715, "step": 7589 }, { "epoch": 0.3145592440631605, "grad_norm": 0.39819517731666565, "learning_rate": 3.427410999212566e-06, "loss": 0.6671, "step": 7590 }, { "epoch": 0.31460068796883417, "grad_norm": 0.4335789382457733, "learning_rate": 3.427203779684198e-06, "loss": 0.76, "step": 7591 }, { "epoch": 0.31464213187450785, "grad_norm": 0.42374077439308167, "learning_rate": 
3.4269965601558293e-06, "loss": 0.6885, "step": 7592 }, { "epoch": 0.3146835757801815, "grad_norm": 0.4187897741794586, "learning_rate": 3.426789340627461e-06, "loss": 0.7407, "step": 7593 }, { "epoch": 0.3147250196858552, "grad_norm": 0.39622336626052856, "learning_rate": 3.4265821210990925e-06, "loss": 0.7104, "step": 7594 }, { "epoch": 0.3147664635915289, "grad_norm": 0.44573697447776794, "learning_rate": 3.4263749015707243e-06, "loss": 0.682, "step": 7595 }, { "epoch": 0.31480790749720255, "grad_norm": 0.4246913194656372, "learning_rate": 3.426167682042356e-06, "loss": 0.7302, "step": 7596 }, { "epoch": 0.31484935140287623, "grad_norm": 0.4266548752784729, "learning_rate": 3.4259604625139875e-06, "loss": 0.7114, "step": 7597 }, { "epoch": 0.31489079530854985, "grad_norm": 0.4314194619655609, "learning_rate": 3.4257532429856193e-06, "loss": 0.6843, "step": 7598 }, { "epoch": 0.31493223921422353, "grad_norm": 0.41192060708999634, "learning_rate": 3.4255460234572507e-06, "loss": 0.7031, "step": 7599 }, { "epoch": 0.3149736831198972, "grad_norm": 0.42709439992904663, "learning_rate": 3.4253388039288825e-06, "loss": 0.7706, "step": 7600 }, { "epoch": 0.3150151270255709, "grad_norm": 0.41548824310302734, "learning_rate": 3.425131584400514e-06, "loss": 0.6832, "step": 7601 }, { "epoch": 0.31505657093124456, "grad_norm": 0.4305908679962158, "learning_rate": 3.4249243648721457e-06, "loss": 0.7131, "step": 7602 }, { "epoch": 0.31509801483691824, "grad_norm": 0.4105145335197449, "learning_rate": 3.424717145343778e-06, "loss": 0.7451, "step": 7603 }, { "epoch": 0.3151394587425919, "grad_norm": 0.441808819770813, "learning_rate": 3.4245099258154093e-06, "loss": 0.7212, "step": 7604 }, { "epoch": 0.3151809026482656, "grad_norm": 0.4478551149368286, "learning_rate": 3.424302706287041e-06, "loss": 0.697, "step": 7605 }, { "epoch": 0.31522234655393927, "grad_norm": 0.39708220958709717, "learning_rate": 3.4240954867586725e-06, "loss": 0.6709, "step": 7606 }, { "epoch": 
0.3152637904596129, "grad_norm": 0.4461628794670105, "learning_rate": 3.4238882672303043e-06, "loss": 0.7662, "step": 7607 }, { "epoch": 0.31530523436528657, "grad_norm": 0.42614296078681946, "learning_rate": 3.4236810477019357e-06, "loss": 0.7605, "step": 7608 }, { "epoch": 0.31534667827096025, "grad_norm": 0.4535657465457916, "learning_rate": 3.4234738281735675e-06, "loss": 0.6689, "step": 7609 }, { "epoch": 0.3153881221766339, "grad_norm": 0.4463537931442261, "learning_rate": 3.423266608645199e-06, "loss": 0.7075, "step": 7610 }, { "epoch": 0.3154295660823076, "grad_norm": 0.4315204620361328, "learning_rate": 3.4230593891168307e-06, "loss": 0.698, "step": 7611 }, { "epoch": 0.3154710099879813, "grad_norm": 0.38476037979125977, "learning_rate": 3.4228521695884625e-06, "loss": 0.7104, "step": 7612 }, { "epoch": 0.31551245389365495, "grad_norm": 0.3910635709762573, "learning_rate": 3.422644950060094e-06, "loss": 0.7151, "step": 7613 }, { "epoch": 0.31555389779932863, "grad_norm": 0.43930676579475403, "learning_rate": 3.4224377305317257e-06, "loss": 0.7388, "step": 7614 }, { "epoch": 0.31559534170500225, "grad_norm": 0.3941642642021179, "learning_rate": 3.422230511003357e-06, "loss": 0.7253, "step": 7615 }, { "epoch": 0.31563678561067593, "grad_norm": 0.4005005657672882, "learning_rate": 3.422023291474989e-06, "loss": 0.7275, "step": 7616 }, { "epoch": 0.3156782295163496, "grad_norm": 0.4259610176086426, "learning_rate": 3.4218160719466203e-06, "loss": 0.7018, "step": 7617 }, { "epoch": 0.3157196734220233, "grad_norm": 0.43708252906799316, "learning_rate": 3.421608852418252e-06, "loss": 0.7396, "step": 7618 }, { "epoch": 0.31576111732769696, "grad_norm": 0.4148063063621521, "learning_rate": 3.4214016328898835e-06, "loss": 0.7219, "step": 7619 }, { "epoch": 0.31580256123337064, "grad_norm": 0.38472887873649597, "learning_rate": 3.4211944133615153e-06, "loss": 0.7014, "step": 7620 }, { "epoch": 0.3158440051390443, "grad_norm": 0.4026165306568146, "learning_rate": 
3.4209871938331475e-06, "loss": 0.6952, "step": 7621 }, { "epoch": 0.315885449044718, "grad_norm": 0.4526183605194092, "learning_rate": 3.420779974304779e-06, "loss": 0.7534, "step": 7622 }, { "epoch": 0.31592689295039167, "grad_norm": 0.4179258346557617, "learning_rate": 3.4205727547764107e-06, "loss": 0.7466, "step": 7623 }, { "epoch": 0.3159683368560653, "grad_norm": 0.3909228444099426, "learning_rate": 3.420365535248042e-06, "loss": 0.6909, "step": 7624 }, { "epoch": 0.31600978076173897, "grad_norm": 0.42192474007606506, "learning_rate": 3.420158315719674e-06, "loss": 0.7324, "step": 7625 }, { "epoch": 0.31605122466741264, "grad_norm": 0.3940717875957489, "learning_rate": 3.4199510961913053e-06, "loss": 0.6636, "step": 7626 }, { "epoch": 0.3160926685730863, "grad_norm": 0.4045552611351013, "learning_rate": 3.419743876662937e-06, "loss": 0.6958, "step": 7627 }, { "epoch": 0.31613411247876, "grad_norm": 0.41316014528274536, "learning_rate": 3.4195366571345685e-06, "loss": 0.6833, "step": 7628 }, { "epoch": 0.3161755563844337, "grad_norm": 0.4057060480117798, "learning_rate": 3.4193294376062003e-06, "loss": 0.7013, "step": 7629 }, { "epoch": 0.31621700029010735, "grad_norm": 0.41709405183792114, "learning_rate": 3.419122218077832e-06, "loss": 0.7163, "step": 7630 }, { "epoch": 0.31625844419578103, "grad_norm": 0.4237930178642273, "learning_rate": 3.4189149985494635e-06, "loss": 0.6481, "step": 7631 }, { "epoch": 0.3162998881014547, "grad_norm": 0.4220014214515686, "learning_rate": 3.4187077790210953e-06, "loss": 0.7122, "step": 7632 }, { "epoch": 0.31634133200712833, "grad_norm": 0.41987738013267517, "learning_rate": 3.4185005594927267e-06, "loss": 0.7341, "step": 7633 }, { "epoch": 0.316382775912802, "grad_norm": 0.4061524271965027, "learning_rate": 3.4182933399643585e-06, "loss": 0.7395, "step": 7634 }, { "epoch": 0.3164242198184757, "grad_norm": 0.42098337411880493, "learning_rate": 3.41808612043599e-06, "loss": 0.7239, "step": 7635 }, { "epoch": 
0.31646566372414936, "grad_norm": 0.4204918146133423, "learning_rate": 3.4178789009076217e-06, "loss": 0.6733, "step": 7636 }, { "epoch": 0.31650710762982304, "grad_norm": 0.4350239038467407, "learning_rate": 3.417671681379253e-06, "loss": 0.7383, "step": 7637 }, { "epoch": 0.3165485515354967, "grad_norm": 0.4121612012386322, "learning_rate": 3.4174644618508853e-06, "loss": 0.72, "step": 7638 }, { "epoch": 0.3165899954411704, "grad_norm": 0.41749104857444763, "learning_rate": 3.417257242322517e-06, "loss": 0.7292, "step": 7639 }, { "epoch": 0.31663143934684407, "grad_norm": 0.3663554787635803, "learning_rate": 3.4170500227941485e-06, "loss": 0.676, "step": 7640 }, { "epoch": 0.3166728832525177, "grad_norm": 0.4047391712665558, "learning_rate": 3.4168428032657803e-06, "loss": 0.6873, "step": 7641 }, { "epoch": 0.31671432715819137, "grad_norm": 0.44748350977897644, "learning_rate": 3.4166355837374117e-06, "loss": 0.7056, "step": 7642 }, { "epoch": 0.31675577106386504, "grad_norm": 0.45415085554122925, "learning_rate": 3.4164283642090435e-06, "loss": 0.6814, "step": 7643 }, { "epoch": 0.3167972149695387, "grad_norm": 0.46014922857284546, "learning_rate": 3.416221144680675e-06, "loss": 0.7327, "step": 7644 }, { "epoch": 0.3168386588752124, "grad_norm": 0.44210028648376465, "learning_rate": 3.4160139251523067e-06, "loss": 0.7363, "step": 7645 }, { "epoch": 0.3168801027808861, "grad_norm": 0.39721691608428955, "learning_rate": 3.415806705623938e-06, "loss": 0.6648, "step": 7646 }, { "epoch": 0.31692154668655975, "grad_norm": 0.4421832263469696, "learning_rate": 3.41559948609557e-06, "loss": 0.7488, "step": 7647 }, { "epoch": 0.31696299059223343, "grad_norm": 0.41342490911483765, "learning_rate": 3.4153922665672017e-06, "loss": 0.7532, "step": 7648 }, { "epoch": 0.3170044344979071, "grad_norm": 0.44288313388824463, "learning_rate": 3.415185047038833e-06, "loss": 0.7102, "step": 7649 }, { "epoch": 0.3170458784035807, "grad_norm": 0.375647634267807, "learning_rate": 
3.414977827510465e-06, "loss": 0.678, "step": 7650 }, { "epoch": 0.3170873223092544, "grad_norm": 0.3705850839614868, "learning_rate": 3.4147706079820963e-06, "loss": 0.6506, "step": 7651 }, { "epoch": 0.3171287662149281, "grad_norm": 0.4182378351688385, "learning_rate": 3.414563388453728e-06, "loss": 0.7155, "step": 7652 }, { "epoch": 0.31717021012060176, "grad_norm": 0.37628793716430664, "learning_rate": 3.4143561689253595e-06, "loss": 0.6556, "step": 7653 }, { "epoch": 0.31721165402627544, "grad_norm": 0.3786916732788086, "learning_rate": 3.4141489493969913e-06, "loss": 0.6709, "step": 7654 }, { "epoch": 0.3172530979319491, "grad_norm": 0.39087650179862976, "learning_rate": 3.4139417298686227e-06, "loss": 0.708, "step": 7655 }, { "epoch": 0.3172945418376228, "grad_norm": 0.3908731937408447, "learning_rate": 3.413734510340255e-06, "loss": 0.7054, "step": 7656 }, { "epoch": 0.31733598574329647, "grad_norm": 0.4352070689201355, "learning_rate": 3.4135272908118867e-06, "loss": 0.7251, "step": 7657 }, { "epoch": 0.31737742964897014, "grad_norm": 0.3858291804790497, "learning_rate": 3.413320071283518e-06, "loss": 0.6615, "step": 7658 }, { "epoch": 0.31741887355464377, "grad_norm": 0.49656426906585693, "learning_rate": 3.41311285175515e-06, "loss": 0.688, "step": 7659 }, { "epoch": 0.31746031746031744, "grad_norm": 0.3792707622051239, "learning_rate": 3.4129056322267813e-06, "loss": 0.6516, "step": 7660 }, { "epoch": 0.3175017613659911, "grad_norm": 0.41389650106430054, "learning_rate": 3.412698412698413e-06, "loss": 0.7004, "step": 7661 }, { "epoch": 0.3175432052716648, "grad_norm": 0.46229544281959534, "learning_rate": 3.4124911931700445e-06, "loss": 0.7537, "step": 7662 }, { "epoch": 0.3175846491773385, "grad_norm": 0.39482879638671875, "learning_rate": 3.4122839736416763e-06, "loss": 0.6746, "step": 7663 }, { "epoch": 0.31762609308301215, "grad_norm": 0.41032156348228455, "learning_rate": 3.412076754113308e-06, "loss": 0.6869, "step": 7664 }, { "epoch": 
0.31766753698868583, "grad_norm": 0.4453418254852295, "learning_rate": 3.4118695345849395e-06, "loss": 0.7162, "step": 7665 }, { "epoch": 0.3177089808943595, "grad_norm": 0.42089638113975525, "learning_rate": 3.4116623150565713e-06, "loss": 0.7058, "step": 7666 }, { "epoch": 0.3177504248000332, "grad_norm": 0.43652886152267456, "learning_rate": 3.4114550955282027e-06, "loss": 0.7461, "step": 7667 }, { "epoch": 0.3177918687057068, "grad_norm": 0.45390647649765015, "learning_rate": 3.4112478759998345e-06, "loss": 0.7411, "step": 7668 }, { "epoch": 0.3178333126113805, "grad_norm": 0.3878803551197052, "learning_rate": 3.411040656471466e-06, "loss": 0.6814, "step": 7669 }, { "epoch": 0.31787475651705416, "grad_norm": 0.4003022015094757, "learning_rate": 3.4108334369430977e-06, "loss": 0.6626, "step": 7670 }, { "epoch": 0.31791620042272783, "grad_norm": 0.3987198770046234, "learning_rate": 3.410626217414729e-06, "loss": 0.6571, "step": 7671 }, { "epoch": 0.3179576443284015, "grad_norm": 0.40823686122894287, "learning_rate": 3.4104189978863613e-06, "loss": 0.6941, "step": 7672 }, { "epoch": 0.3179990882340752, "grad_norm": 0.40657660365104675, "learning_rate": 3.410211778357993e-06, "loss": 0.741, "step": 7673 }, { "epoch": 0.31804053213974887, "grad_norm": 0.44792336225509644, "learning_rate": 3.4100045588296245e-06, "loss": 0.738, "step": 7674 }, { "epoch": 0.31808197604542254, "grad_norm": 0.4176732301712036, "learning_rate": 3.4097973393012563e-06, "loss": 0.7351, "step": 7675 }, { "epoch": 0.31812341995109616, "grad_norm": 0.41925331950187683, "learning_rate": 3.4095901197728877e-06, "loss": 0.7174, "step": 7676 }, { "epoch": 0.31816486385676984, "grad_norm": 0.4190998673439026, "learning_rate": 3.4093829002445195e-06, "loss": 0.7007, "step": 7677 }, { "epoch": 0.3182063077624435, "grad_norm": 0.40392783284187317, "learning_rate": 3.409175680716151e-06, "loss": 0.6675, "step": 7678 }, { "epoch": 0.3182477516681172, "grad_norm": 0.42512425780296326, "learning_rate": 
3.4089684611877827e-06, "loss": 0.7351, "step": 7679 }, { "epoch": 0.3182891955737909, "grad_norm": 0.4120903015136719, "learning_rate": 3.408761241659414e-06, "loss": 0.6718, "step": 7680 }, { "epoch": 0.31833063947946455, "grad_norm": 0.432298481464386, "learning_rate": 3.408554022131046e-06, "loss": 0.7559, "step": 7681 }, { "epoch": 0.3183720833851382, "grad_norm": 0.43162813782691956, "learning_rate": 3.4083468026026777e-06, "loss": 0.73, "step": 7682 }, { "epoch": 0.3184135272908119, "grad_norm": 0.44006428122520447, "learning_rate": 3.408139583074309e-06, "loss": 0.7114, "step": 7683 }, { "epoch": 0.3184549711964856, "grad_norm": 0.45050138235092163, "learning_rate": 3.407932363545941e-06, "loss": 0.71, "step": 7684 }, { "epoch": 0.3184964151021592, "grad_norm": 0.4031083583831787, "learning_rate": 3.4077251440175723e-06, "loss": 0.6733, "step": 7685 }, { "epoch": 0.3185378590078329, "grad_norm": 0.44399768114089966, "learning_rate": 3.407517924489204e-06, "loss": 0.7546, "step": 7686 }, { "epoch": 0.31857930291350656, "grad_norm": 0.3915066719055176, "learning_rate": 3.4073107049608355e-06, "loss": 0.6843, "step": 7687 }, { "epoch": 0.31862074681918023, "grad_norm": 0.4070793092250824, "learning_rate": 3.4071034854324677e-06, "loss": 0.7227, "step": 7688 }, { "epoch": 0.3186621907248539, "grad_norm": 0.42345187067985535, "learning_rate": 3.4068962659040987e-06, "loss": 0.7073, "step": 7689 }, { "epoch": 0.3187036346305276, "grad_norm": 0.3930114209651947, "learning_rate": 3.406689046375731e-06, "loss": 0.7393, "step": 7690 }, { "epoch": 0.31874507853620127, "grad_norm": 0.40518513321876526, "learning_rate": 3.4064818268473627e-06, "loss": 0.7024, "step": 7691 }, { "epoch": 0.31878652244187494, "grad_norm": 0.39365819096565247, "learning_rate": 3.406274607318994e-06, "loss": 0.6772, "step": 7692 }, { "epoch": 0.3188279663475486, "grad_norm": 0.4130285680294037, "learning_rate": 3.406067387790626e-06, "loss": 0.6395, "step": 7693 }, { "epoch": 
0.31886941025322224, "grad_norm": 0.4543284475803375, "learning_rate": 3.4058601682622573e-06, "loss": 0.7036, "step": 7694 }, { "epoch": 0.3189108541588959, "grad_norm": 0.43021634221076965, "learning_rate": 3.405652948733889e-06, "loss": 0.6968, "step": 7695 }, { "epoch": 0.3189522980645696, "grad_norm": 0.4006253182888031, "learning_rate": 3.4054457292055205e-06, "loss": 0.7056, "step": 7696 }, { "epoch": 0.31899374197024327, "grad_norm": 0.40772032737731934, "learning_rate": 3.4052385096771523e-06, "loss": 0.6943, "step": 7697 }, { "epoch": 0.31903518587591695, "grad_norm": 0.43194499611854553, "learning_rate": 3.4050312901487837e-06, "loss": 0.698, "step": 7698 }, { "epoch": 0.3190766297815906, "grad_norm": 0.43614739179611206, "learning_rate": 3.4048240706204155e-06, "loss": 0.6924, "step": 7699 }, { "epoch": 0.3191180736872643, "grad_norm": 0.3805427849292755, "learning_rate": 3.4046168510920473e-06, "loss": 0.6582, "step": 7700 }, { "epoch": 0.319159517592938, "grad_norm": 0.42810508608818054, "learning_rate": 3.4044096315636787e-06, "loss": 0.7832, "step": 7701 }, { "epoch": 0.3192009614986116, "grad_norm": 0.4575166702270508, "learning_rate": 3.4042024120353105e-06, "loss": 0.7075, "step": 7702 }, { "epoch": 0.3192424054042853, "grad_norm": 0.4254007041454315, "learning_rate": 3.403995192506942e-06, "loss": 0.6859, "step": 7703 }, { "epoch": 0.31928384930995896, "grad_norm": 0.41642487049102783, "learning_rate": 3.4037879729785737e-06, "loss": 0.677, "step": 7704 }, { "epoch": 0.31932529321563263, "grad_norm": 0.3975803554058075, "learning_rate": 3.403580753450205e-06, "loss": 0.6422, "step": 7705 }, { "epoch": 0.3193667371213063, "grad_norm": 0.381899356842041, "learning_rate": 3.4033735339218373e-06, "loss": 0.6702, "step": 7706 }, { "epoch": 0.31940818102698, "grad_norm": 0.4365960657596588, "learning_rate": 3.4031663143934683e-06, "loss": 0.7109, "step": 7707 }, { "epoch": 0.31944962493265366, "grad_norm": 0.42308947443962097, "learning_rate": 
3.4029590948651005e-06, "loss": 0.7271, "step": 7708 }, { "epoch": 0.31949106883832734, "grad_norm": 0.4323892295360565, "learning_rate": 3.4027518753367323e-06, "loss": 0.688, "step": 7709 }, { "epoch": 0.319532512744001, "grad_norm": 0.41515398025512695, "learning_rate": 3.4025446558083637e-06, "loss": 0.7493, "step": 7710 }, { "epoch": 0.31957395664967464, "grad_norm": 0.40579625964164734, "learning_rate": 3.4023374362799955e-06, "loss": 0.6913, "step": 7711 }, { "epoch": 0.3196154005553483, "grad_norm": 0.3911464214324951, "learning_rate": 3.402130216751627e-06, "loss": 0.6416, "step": 7712 }, { "epoch": 0.319656844461022, "grad_norm": 0.491960346698761, "learning_rate": 3.4019229972232587e-06, "loss": 0.7424, "step": 7713 }, { "epoch": 0.31969828836669567, "grad_norm": 0.4209902584552765, "learning_rate": 3.40171577769489e-06, "loss": 0.6841, "step": 7714 }, { "epoch": 0.31973973227236935, "grad_norm": 0.4620499610900879, "learning_rate": 3.401508558166522e-06, "loss": 0.7905, "step": 7715 }, { "epoch": 0.319781176178043, "grad_norm": 0.4331238269805908, "learning_rate": 3.4013013386381533e-06, "loss": 0.7689, "step": 7716 }, { "epoch": 0.3198226200837167, "grad_norm": 0.4361741244792938, "learning_rate": 3.401094119109785e-06, "loss": 0.7283, "step": 7717 }, { "epoch": 0.3198640639893904, "grad_norm": 0.3759622871875763, "learning_rate": 3.400886899581417e-06, "loss": 0.7046, "step": 7718 }, { "epoch": 0.31990550789506406, "grad_norm": 0.4229751527309418, "learning_rate": 3.4006796800530483e-06, "loss": 0.7019, "step": 7719 }, { "epoch": 0.3199469518007377, "grad_norm": 0.40966150164604187, "learning_rate": 3.40047246052468e-06, "loss": 0.7168, "step": 7720 }, { "epoch": 0.31998839570641135, "grad_norm": 0.39007389545440674, "learning_rate": 3.4002652409963115e-06, "loss": 0.7107, "step": 7721 }, { "epoch": 0.32002983961208503, "grad_norm": 0.3803401291370392, "learning_rate": 3.4000580214679437e-06, "loss": 0.6495, "step": 7722 }, { "epoch": 
0.3200712835177587, "grad_norm": 0.4482669234275818, "learning_rate": 3.3998508019395747e-06, "loss": 0.7312, "step": 7723 }, { "epoch": 0.3201127274234324, "grad_norm": 0.4213234782218933, "learning_rate": 3.399643582411207e-06, "loss": 0.6666, "step": 7724 }, { "epoch": 0.32015417132910606, "grad_norm": 0.4391288161277771, "learning_rate": 3.3994363628828387e-06, "loss": 0.7227, "step": 7725 }, { "epoch": 0.32019561523477974, "grad_norm": 0.37178272008895874, "learning_rate": 3.39922914335447e-06, "loss": 0.6552, "step": 7726 }, { "epoch": 0.3202370591404534, "grad_norm": 0.4243561327457428, "learning_rate": 3.399021923826102e-06, "loss": 0.7279, "step": 7727 }, { "epoch": 0.3202785030461271, "grad_norm": 0.42968595027923584, "learning_rate": 3.3988147042977333e-06, "loss": 0.7424, "step": 7728 }, { "epoch": 0.3203199469518007, "grad_norm": 0.3924400210380554, "learning_rate": 3.398607484769365e-06, "loss": 0.6787, "step": 7729 }, { "epoch": 0.3203613908574744, "grad_norm": 0.39011120796203613, "learning_rate": 3.3984002652409965e-06, "loss": 0.7041, "step": 7730 }, { "epoch": 0.32040283476314807, "grad_norm": 0.4148184061050415, "learning_rate": 3.3981930457126283e-06, "loss": 0.7153, "step": 7731 }, { "epoch": 0.32044427866882175, "grad_norm": 0.40022215247154236, "learning_rate": 3.3979858261842597e-06, "loss": 0.629, "step": 7732 }, { "epoch": 0.3204857225744954, "grad_norm": 0.4232144355773926, "learning_rate": 3.3977786066558915e-06, "loss": 0.7671, "step": 7733 }, { "epoch": 0.3205271664801691, "grad_norm": 0.38317179679870605, "learning_rate": 3.3975713871275233e-06, "loss": 0.7117, "step": 7734 }, { "epoch": 0.3205686103858428, "grad_norm": 0.4519965350627899, "learning_rate": 3.3973641675991547e-06, "loss": 0.7112, "step": 7735 }, { "epoch": 0.32061005429151646, "grad_norm": 0.45592793822288513, "learning_rate": 3.3971569480707865e-06, "loss": 0.7454, "step": 7736 }, { "epoch": 0.3206514981971901, "grad_norm": 0.4002833068370819, "learning_rate": 
3.396949728542418e-06, "loss": 0.6779, "step": 7737 }, { "epoch": 0.32069294210286375, "grad_norm": 0.445517897605896, "learning_rate": 3.3967425090140497e-06, "loss": 0.7224, "step": 7738 }, { "epoch": 0.32073438600853743, "grad_norm": 0.37436121702194214, "learning_rate": 3.396535289485681e-06, "loss": 0.6455, "step": 7739 }, { "epoch": 0.3207758299142111, "grad_norm": 0.4186210632324219, "learning_rate": 3.3963280699573133e-06, "loss": 0.7043, "step": 7740 }, { "epoch": 0.3208172738198848, "grad_norm": 0.42991721630096436, "learning_rate": 3.3961208504289443e-06, "loss": 0.7201, "step": 7741 }, { "epoch": 0.32085871772555846, "grad_norm": 0.4248584806919098, "learning_rate": 3.3959136309005765e-06, "loss": 0.7576, "step": 7742 }, { "epoch": 0.32090016163123214, "grad_norm": 0.3895716667175293, "learning_rate": 3.3957064113722083e-06, "loss": 0.6924, "step": 7743 }, { "epoch": 0.3209416055369058, "grad_norm": 0.405115008354187, "learning_rate": 3.3954991918438397e-06, "loss": 0.6803, "step": 7744 }, { "epoch": 0.3209830494425795, "grad_norm": 0.41199082136154175, "learning_rate": 3.3952919723154715e-06, "loss": 0.6843, "step": 7745 }, { "epoch": 0.3210244933482531, "grad_norm": 0.4533744752407074, "learning_rate": 3.395084752787103e-06, "loss": 0.7114, "step": 7746 }, { "epoch": 0.3210659372539268, "grad_norm": 0.49249210953712463, "learning_rate": 3.3948775332587347e-06, "loss": 0.7623, "step": 7747 }, { "epoch": 0.32110738115960047, "grad_norm": 0.4302029311656952, "learning_rate": 3.394670313730366e-06, "loss": 0.738, "step": 7748 }, { "epoch": 0.32114882506527415, "grad_norm": 0.4031790494918823, "learning_rate": 3.394463094201998e-06, "loss": 0.6936, "step": 7749 }, { "epoch": 0.3211902689709478, "grad_norm": 0.42486757040023804, "learning_rate": 3.3942558746736293e-06, "loss": 0.6865, "step": 7750 }, { "epoch": 0.3212317128766215, "grad_norm": 0.42226642370224, "learning_rate": 3.394048655145261e-06, "loss": 0.7073, "step": 7751 }, { "epoch": 
0.3212731567822952, "grad_norm": 0.4190801978111267, "learning_rate": 3.393841435616893e-06, "loss": 0.7295, "step": 7752 }, { "epoch": 0.32131460068796885, "grad_norm": 0.3991812765598297, "learning_rate": 3.3936342160885243e-06, "loss": 0.6738, "step": 7753 }, { "epoch": 0.32135604459364253, "grad_norm": 0.40885910391807556, "learning_rate": 3.393426996560156e-06, "loss": 0.6794, "step": 7754 }, { "epoch": 0.32139748849931615, "grad_norm": 0.4149489402770996, "learning_rate": 3.3932197770317875e-06, "loss": 0.7, "step": 7755 }, { "epoch": 0.32143893240498983, "grad_norm": 0.39827853441238403, "learning_rate": 3.3930125575034197e-06, "loss": 0.6685, "step": 7756 }, { "epoch": 0.3214803763106635, "grad_norm": 0.3996274173259735, "learning_rate": 3.3928053379750507e-06, "loss": 0.6794, "step": 7757 }, { "epoch": 0.3215218202163372, "grad_norm": 0.4261578321456909, "learning_rate": 3.392598118446683e-06, "loss": 0.7463, "step": 7758 }, { "epoch": 0.32156326412201086, "grad_norm": 0.4278627634048462, "learning_rate": 3.392390898918314e-06, "loss": 0.7573, "step": 7759 }, { "epoch": 0.32160470802768454, "grad_norm": 0.5598564743995667, "learning_rate": 3.392183679389946e-06, "loss": 0.7283, "step": 7760 }, { "epoch": 0.3216461519333582, "grad_norm": 0.4112405776977539, "learning_rate": 3.391976459861578e-06, "loss": 0.6989, "step": 7761 }, { "epoch": 0.3216875958390319, "grad_norm": 0.43529078364372253, "learning_rate": 3.3917692403332093e-06, "loss": 0.6851, "step": 7762 }, { "epoch": 0.3217290397447055, "grad_norm": 0.4011897146701813, "learning_rate": 3.391562020804841e-06, "loss": 0.7075, "step": 7763 }, { "epoch": 0.3217704836503792, "grad_norm": 0.40968990325927734, "learning_rate": 3.3913548012764725e-06, "loss": 0.7292, "step": 7764 }, { "epoch": 0.32181192755605287, "grad_norm": 0.4049062430858612, "learning_rate": 3.3911475817481043e-06, "loss": 0.7002, "step": 7765 }, { "epoch": 0.32185337146172655, "grad_norm": 0.42602822184562683, "learning_rate": 
3.3909403622197357e-06, "loss": 0.7131, "step": 7766 }, { "epoch": 0.3218948153674002, "grad_norm": 0.4166877269744873, "learning_rate": 3.3907331426913675e-06, "loss": 0.7592, "step": 7767 }, { "epoch": 0.3219362592730739, "grad_norm": 0.46128278970718384, "learning_rate": 3.390525923162999e-06, "loss": 0.7183, "step": 7768 }, { "epoch": 0.3219777031787476, "grad_norm": 0.4284079670906067, "learning_rate": 3.3903187036346307e-06, "loss": 0.6708, "step": 7769 }, { "epoch": 0.32201914708442125, "grad_norm": 0.3837057948112488, "learning_rate": 3.3901114841062625e-06, "loss": 0.6599, "step": 7770 }, { "epoch": 0.32206059099009493, "grad_norm": 0.43382352590560913, "learning_rate": 3.389904264577894e-06, "loss": 0.7249, "step": 7771 }, { "epoch": 0.32210203489576855, "grad_norm": 0.42883485555648804, "learning_rate": 3.3896970450495257e-06, "loss": 0.7001, "step": 7772 }, { "epoch": 0.32214347880144223, "grad_norm": 0.4049244821071625, "learning_rate": 3.389489825521157e-06, "loss": 0.7267, "step": 7773 }, { "epoch": 0.3221849227071159, "grad_norm": 0.4542079269886017, "learning_rate": 3.3892826059927893e-06, "loss": 0.7319, "step": 7774 }, { "epoch": 0.3222263666127896, "grad_norm": 0.41243627667427063, "learning_rate": 3.3890753864644203e-06, "loss": 0.7002, "step": 7775 }, { "epoch": 0.32226781051846326, "grad_norm": 0.4199390709400177, "learning_rate": 3.3888681669360525e-06, "loss": 0.7174, "step": 7776 }, { "epoch": 0.32230925442413694, "grad_norm": 0.4294116497039795, "learning_rate": 3.3886609474076843e-06, "loss": 0.707, "step": 7777 }, { "epoch": 0.3223506983298106, "grad_norm": 0.43246713280677795, "learning_rate": 3.3884537278793157e-06, "loss": 0.686, "step": 7778 }, { "epoch": 0.3223921422354843, "grad_norm": 0.40518781542778015, "learning_rate": 3.3882465083509475e-06, "loss": 0.6604, "step": 7779 }, { "epoch": 0.32243358614115797, "grad_norm": 0.4680253863334656, "learning_rate": 3.388039288822579e-06, "loss": 0.7688, "step": 7780 }, { "epoch": 
0.3224750300468316, "grad_norm": 0.39582008123397827, "learning_rate": 3.3878320692942107e-06, "loss": 0.6768, "step": 7781 }, { "epoch": 0.32251647395250527, "grad_norm": 0.39064061641693115, "learning_rate": 3.387624849765842e-06, "loss": 0.6823, "step": 7782 }, { "epoch": 0.32255791785817894, "grad_norm": 0.4117547571659088, "learning_rate": 3.387417630237474e-06, "loss": 0.7107, "step": 7783 }, { "epoch": 0.3225993617638526, "grad_norm": 0.4325833022594452, "learning_rate": 3.3872104107091053e-06, "loss": 0.691, "step": 7784 }, { "epoch": 0.3226408056695263, "grad_norm": 0.43816766142845154, "learning_rate": 3.387003191180737e-06, "loss": 0.6528, "step": 7785 }, { "epoch": 0.3226822495752, "grad_norm": 0.41078343987464905, "learning_rate": 3.386795971652369e-06, "loss": 0.6763, "step": 7786 }, { "epoch": 0.32272369348087365, "grad_norm": 0.46850088238716125, "learning_rate": 3.3865887521240003e-06, "loss": 0.7546, "step": 7787 }, { "epoch": 0.32276513738654733, "grad_norm": 0.3931032419204712, "learning_rate": 3.386381532595632e-06, "loss": 0.6455, "step": 7788 }, { "epoch": 0.322806581292221, "grad_norm": 0.41496604681015015, "learning_rate": 3.3861743130672635e-06, "loss": 0.7097, "step": 7789 }, { "epoch": 0.32284802519789463, "grad_norm": 0.4326360821723938, "learning_rate": 3.3859670935388957e-06, "loss": 0.714, "step": 7790 }, { "epoch": 0.3228894691035683, "grad_norm": 0.4061557352542877, "learning_rate": 3.3857598740105267e-06, "loss": 0.6421, "step": 7791 }, { "epoch": 0.322930913009242, "grad_norm": 0.4397035241127014, "learning_rate": 3.385552654482159e-06, "loss": 0.783, "step": 7792 }, { "epoch": 0.32297235691491566, "grad_norm": 0.43344151973724365, "learning_rate": 3.38534543495379e-06, "loss": 0.6632, "step": 7793 }, { "epoch": 0.32301380082058934, "grad_norm": 0.39462539553642273, "learning_rate": 3.385138215425422e-06, "loss": 0.7057, "step": 7794 }, { "epoch": 0.323055244726263, "grad_norm": 0.4175146520137787, "learning_rate": 
3.384930995897054e-06, "loss": 0.684, "step": 7795 }, { "epoch": 0.3230966886319367, "grad_norm": 0.4242911636829376, "learning_rate": 3.3847237763686853e-06, "loss": 0.6768, "step": 7796 }, { "epoch": 0.32313813253761037, "grad_norm": 0.3953508138656616, "learning_rate": 3.384516556840317e-06, "loss": 0.6902, "step": 7797 }, { "epoch": 0.323179576443284, "grad_norm": 0.42934197187423706, "learning_rate": 3.3843093373119485e-06, "loss": 0.6953, "step": 7798 }, { "epoch": 0.32322102034895767, "grad_norm": 0.44775304198265076, "learning_rate": 3.3841021177835803e-06, "loss": 0.7391, "step": 7799 }, { "epoch": 0.32326246425463134, "grad_norm": 0.4067079424858093, "learning_rate": 3.3838948982552117e-06, "loss": 0.7035, "step": 7800 }, { "epoch": 0.323303908160305, "grad_norm": 0.43671005964279175, "learning_rate": 3.3836876787268435e-06, "loss": 0.7458, "step": 7801 }, { "epoch": 0.3233453520659787, "grad_norm": 0.42380115389823914, "learning_rate": 3.383480459198475e-06, "loss": 0.7039, "step": 7802 }, { "epoch": 0.3233867959716524, "grad_norm": 0.4287997782230377, "learning_rate": 3.3832732396701067e-06, "loss": 0.7029, "step": 7803 }, { "epoch": 0.32342823987732605, "grad_norm": 0.47196006774902344, "learning_rate": 3.3830660201417385e-06, "loss": 0.7653, "step": 7804 }, { "epoch": 0.32346968378299973, "grad_norm": 0.4171411991119385, "learning_rate": 3.38285880061337e-06, "loss": 0.6947, "step": 7805 }, { "epoch": 0.3235111276886734, "grad_norm": 0.4181687533855438, "learning_rate": 3.3826515810850017e-06, "loss": 0.709, "step": 7806 }, { "epoch": 0.323552571594347, "grad_norm": 0.4172302484512329, "learning_rate": 3.382444361556633e-06, "loss": 0.7303, "step": 7807 }, { "epoch": 0.3235940155000207, "grad_norm": 0.39349624514579773, "learning_rate": 3.3822371420282653e-06, "loss": 0.6726, "step": 7808 }, { "epoch": 0.3236354594056944, "grad_norm": 0.4072658121585846, "learning_rate": 3.3820299224998963e-06, "loss": 0.7061, "step": 7809 }, { "epoch": 
0.32367690331136806, "grad_norm": 0.41630786657333374, "learning_rate": 3.3818227029715285e-06, "loss": 0.7384, "step": 7810 }, { "epoch": 0.32371834721704174, "grad_norm": 0.42587098479270935, "learning_rate": 3.38161548344316e-06, "loss": 0.7151, "step": 7811 }, { "epoch": 0.3237597911227154, "grad_norm": 0.3995664715766907, "learning_rate": 3.3814082639147917e-06, "loss": 0.6643, "step": 7812 }, { "epoch": 0.3238012350283891, "grad_norm": 0.43177494406700134, "learning_rate": 3.3812010443864235e-06, "loss": 0.7104, "step": 7813 }, { "epoch": 0.32384267893406277, "grad_norm": 0.4404847025871277, "learning_rate": 3.380993824858055e-06, "loss": 0.7566, "step": 7814 }, { "epoch": 0.32388412283973644, "grad_norm": 0.4310963451862335, "learning_rate": 3.3807866053296867e-06, "loss": 0.6676, "step": 7815 }, { "epoch": 0.32392556674541007, "grad_norm": 0.4372182786464691, "learning_rate": 3.380579385801318e-06, "loss": 0.7456, "step": 7816 }, { "epoch": 0.32396701065108374, "grad_norm": 0.4283396005630493, "learning_rate": 3.38037216627295e-06, "loss": 0.7419, "step": 7817 }, { "epoch": 0.3240084545567574, "grad_norm": 0.40074852108955383, "learning_rate": 3.3801649467445813e-06, "loss": 0.7122, "step": 7818 }, { "epoch": 0.3240498984624311, "grad_norm": 0.4354152977466583, "learning_rate": 3.379957727216213e-06, "loss": 0.7269, "step": 7819 }, { "epoch": 0.3240913423681048, "grad_norm": 0.4093610644340515, "learning_rate": 3.3797505076878445e-06, "loss": 0.6879, "step": 7820 }, { "epoch": 0.32413278627377845, "grad_norm": 0.4472990930080414, "learning_rate": 3.3795432881594763e-06, "loss": 0.6843, "step": 7821 }, { "epoch": 0.3241742301794521, "grad_norm": 0.4268062114715576, "learning_rate": 3.379336068631108e-06, "loss": 0.6139, "step": 7822 }, { "epoch": 0.3242156740851258, "grad_norm": 0.4400743544101715, "learning_rate": 3.3791288491027395e-06, "loss": 0.7249, "step": 7823 }, { "epoch": 0.3242571179907994, "grad_norm": 0.42358335852622986, "learning_rate": 
3.3789216295743717e-06, "loss": 0.6815, "step": 7824 }, { "epoch": 0.3242985618964731, "grad_norm": 0.4414313733577728, "learning_rate": 3.3787144100460027e-06, "loss": 0.7075, "step": 7825 }, { "epoch": 0.3243400058021468, "grad_norm": 0.4234028160572052, "learning_rate": 3.378507190517635e-06, "loss": 0.7173, "step": 7826 }, { "epoch": 0.32438144970782046, "grad_norm": 0.4101279377937317, "learning_rate": 3.3782999709892663e-06, "loss": 0.7098, "step": 7827 }, { "epoch": 0.32442289361349413, "grad_norm": 0.423184335231781, "learning_rate": 3.378092751460898e-06, "loss": 0.6576, "step": 7828 }, { "epoch": 0.3244643375191678, "grad_norm": 0.41824302077293396, "learning_rate": 3.3778855319325295e-06, "loss": 0.6261, "step": 7829 }, { "epoch": 0.3245057814248415, "grad_norm": 0.428160697221756, "learning_rate": 3.3776783124041613e-06, "loss": 0.7188, "step": 7830 }, { "epoch": 0.32454722533051517, "grad_norm": 0.40940743684768677, "learning_rate": 3.377471092875793e-06, "loss": 0.7662, "step": 7831 }, { "epoch": 0.32458866923618884, "grad_norm": 0.47021254897117615, "learning_rate": 3.3772638733474245e-06, "loss": 0.7878, "step": 7832 }, { "epoch": 0.32463011314186246, "grad_norm": 0.42245328426361084, "learning_rate": 3.3770566538190563e-06, "loss": 0.7271, "step": 7833 }, { "epoch": 0.32467155704753614, "grad_norm": 0.42557018995285034, "learning_rate": 3.3768494342906877e-06, "loss": 0.7012, "step": 7834 }, { "epoch": 0.3247130009532098, "grad_norm": 0.43708133697509766, "learning_rate": 3.3766422147623195e-06, "loss": 0.6951, "step": 7835 }, { "epoch": 0.3247544448588835, "grad_norm": 0.46211957931518555, "learning_rate": 3.376434995233951e-06, "loss": 0.7648, "step": 7836 }, { "epoch": 0.3247958887645572, "grad_norm": 0.3861435055732727, "learning_rate": 3.3762277757055827e-06, "loss": 0.6456, "step": 7837 }, { "epoch": 0.32483733267023085, "grad_norm": 0.38586193323135376, "learning_rate": 3.3760205561772145e-06, "loss": 0.6901, "step": 7838 }, { "epoch": 
0.3248787765759045, "grad_norm": 0.4112374484539032, "learning_rate": 3.375813336648846e-06, "loss": 0.6674, "step": 7839 }, { "epoch": 0.3249202204815782, "grad_norm": 0.3697628974914551, "learning_rate": 3.3756061171204777e-06, "loss": 0.6833, "step": 7840 }, { "epoch": 0.3249616643872519, "grad_norm": 0.45484375953674316, "learning_rate": 3.375398897592109e-06, "loss": 0.7439, "step": 7841 }, { "epoch": 0.3250031082929255, "grad_norm": 0.4441000521183014, "learning_rate": 3.3751916780637413e-06, "loss": 0.7068, "step": 7842 }, { "epoch": 0.3250445521985992, "grad_norm": 0.4480298161506653, "learning_rate": 3.3749844585353723e-06, "loss": 0.7402, "step": 7843 }, { "epoch": 0.32508599610427286, "grad_norm": 0.3955453634262085, "learning_rate": 3.3747772390070045e-06, "loss": 0.658, "step": 7844 }, { "epoch": 0.32512744000994653, "grad_norm": 0.41058430075645447, "learning_rate": 3.374570019478636e-06, "loss": 0.6842, "step": 7845 }, { "epoch": 0.3251688839156202, "grad_norm": 0.41125938296318054, "learning_rate": 3.3743627999502677e-06, "loss": 0.7217, "step": 7846 }, { "epoch": 0.3252103278212939, "grad_norm": 0.4203391373157501, "learning_rate": 3.3741555804218995e-06, "loss": 0.6921, "step": 7847 }, { "epoch": 0.32525177172696756, "grad_norm": 0.43751221895217896, "learning_rate": 3.373948360893531e-06, "loss": 0.7444, "step": 7848 }, { "epoch": 0.32529321563264124, "grad_norm": 0.434929758310318, "learning_rate": 3.3737411413651627e-06, "loss": 0.7935, "step": 7849 }, { "epoch": 0.32533465953831486, "grad_norm": 0.44635188579559326, "learning_rate": 3.373533921836794e-06, "loss": 0.731, "step": 7850 }, { "epoch": 0.32537610344398854, "grad_norm": 0.45439258217811584, "learning_rate": 3.373326702308426e-06, "loss": 0.7327, "step": 7851 }, { "epoch": 0.3254175473496622, "grad_norm": 0.4235478937625885, "learning_rate": 3.3731194827800573e-06, "loss": 0.6847, "step": 7852 }, { "epoch": 0.3254589912553359, "grad_norm": 0.41780802607536316, "learning_rate": 
3.372912263251689e-06, "loss": 0.6902, "step": 7853 }, { "epoch": 0.32550043516100957, "grad_norm": 0.39976009726524353, "learning_rate": 3.3727050437233205e-06, "loss": 0.6765, "step": 7854 }, { "epoch": 0.32554187906668325, "grad_norm": 0.40875163674354553, "learning_rate": 3.3724978241949523e-06, "loss": 0.7148, "step": 7855 }, { "epoch": 0.3255833229723569, "grad_norm": 0.40105557441711426, "learning_rate": 3.372290604666584e-06, "loss": 0.6748, "step": 7856 }, { "epoch": 0.3256247668780306, "grad_norm": 0.39805611968040466, "learning_rate": 3.3720833851382155e-06, "loss": 0.7112, "step": 7857 }, { "epoch": 0.3256662107837043, "grad_norm": 0.4086209833621979, "learning_rate": 3.3718761656098477e-06, "loss": 0.744, "step": 7858 }, { "epoch": 0.3257076546893779, "grad_norm": 0.4500320255756378, "learning_rate": 3.3716689460814787e-06, "loss": 0.7458, "step": 7859 }, { "epoch": 0.3257490985950516, "grad_norm": 0.42177948355674744, "learning_rate": 3.371461726553111e-06, "loss": 0.7153, "step": 7860 }, { "epoch": 0.32579054250072526, "grad_norm": 0.4524421691894531, "learning_rate": 3.3712545070247423e-06, "loss": 0.6619, "step": 7861 }, { "epoch": 0.32583198640639893, "grad_norm": 0.4058699905872345, "learning_rate": 3.371047287496374e-06, "loss": 0.6265, "step": 7862 }, { "epoch": 0.3258734303120726, "grad_norm": 0.457205206155777, "learning_rate": 3.3708400679680055e-06, "loss": 0.7405, "step": 7863 }, { "epoch": 0.3259148742177463, "grad_norm": 0.4275912642478943, "learning_rate": 3.3706328484396373e-06, "loss": 0.74, "step": 7864 }, { "epoch": 0.32595631812341996, "grad_norm": 0.41272756457328796, "learning_rate": 3.370425628911269e-06, "loss": 0.645, "step": 7865 }, { "epoch": 0.32599776202909364, "grad_norm": 0.3926021158695221, "learning_rate": 3.3702184093829005e-06, "loss": 0.6965, "step": 7866 }, { "epoch": 0.3260392059347673, "grad_norm": 0.39076176285743713, "learning_rate": 3.3700111898545323e-06, "loss": 0.6956, "step": 7867 }, { "epoch": 
0.32608064984044094, "grad_norm": 0.42536666989326477, "learning_rate": 3.3698039703261637e-06, "loss": 0.6896, "step": 7868 }, { "epoch": 0.3261220937461146, "grad_norm": 0.3805718421936035, "learning_rate": 3.3695967507977955e-06, "loss": 0.6987, "step": 7869 }, { "epoch": 0.3261635376517883, "grad_norm": 0.3970760703086853, "learning_rate": 3.369389531269427e-06, "loss": 0.7068, "step": 7870 }, { "epoch": 0.32620498155746197, "grad_norm": 0.44920942187309265, "learning_rate": 3.3691823117410587e-06, "loss": 0.7263, "step": 7871 }, { "epoch": 0.32624642546313565, "grad_norm": 0.42040398716926575, "learning_rate": 3.36897509221269e-06, "loss": 0.7151, "step": 7872 }, { "epoch": 0.3262878693688093, "grad_norm": 0.4255923926830292, "learning_rate": 3.368767872684322e-06, "loss": 0.7292, "step": 7873 }, { "epoch": 0.326329313274483, "grad_norm": 0.41248470544815063, "learning_rate": 3.368560653155954e-06, "loss": 0.7375, "step": 7874 }, { "epoch": 0.3263707571801567, "grad_norm": 0.42222097516059875, "learning_rate": 3.368353433627585e-06, "loss": 0.7206, "step": 7875 }, { "epoch": 0.32641220108583036, "grad_norm": 0.389122873544693, "learning_rate": 3.3681462140992173e-06, "loss": 0.6531, "step": 7876 }, { "epoch": 0.326453644991504, "grad_norm": 0.44389376044273376, "learning_rate": 3.3679389945708483e-06, "loss": 0.7, "step": 7877 }, { "epoch": 0.32649508889717765, "grad_norm": 0.39713814854621887, "learning_rate": 3.3677317750424805e-06, "loss": 0.719, "step": 7878 }, { "epoch": 0.32653653280285133, "grad_norm": 0.4517744779586792, "learning_rate": 3.367524555514112e-06, "loss": 0.7649, "step": 7879 }, { "epoch": 0.326577976708525, "grad_norm": 0.4171895980834961, "learning_rate": 3.3673173359857437e-06, "loss": 0.696, "step": 7880 }, { "epoch": 0.3266194206141987, "grad_norm": 0.46063557267189026, "learning_rate": 3.367110116457375e-06, "loss": 0.7676, "step": 7881 }, { "epoch": 0.32666086451987236, "grad_norm": 0.4462823271751404, "learning_rate": 
3.366902896929007e-06, "loss": 0.7163, "step": 7882 }, { "epoch": 0.32670230842554604, "grad_norm": 0.4188307225704193, "learning_rate": 3.3666956774006387e-06, "loss": 0.696, "step": 7883 }, { "epoch": 0.3267437523312197, "grad_norm": 0.41473913192749023, "learning_rate": 3.36648845787227e-06, "loss": 0.6676, "step": 7884 }, { "epoch": 0.32678519623689334, "grad_norm": 0.42960110306739807, "learning_rate": 3.366281238343902e-06, "loss": 0.687, "step": 7885 }, { "epoch": 0.326826640142567, "grad_norm": 0.40732842683792114, "learning_rate": 3.3660740188155333e-06, "loss": 0.6691, "step": 7886 }, { "epoch": 0.3268680840482407, "grad_norm": 0.43849077820777893, "learning_rate": 3.365866799287165e-06, "loss": 0.7089, "step": 7887 }, { "epoch": 0.32690952795391437, "grad_norm": 0.4181516170501709, "learning_rate": 3.3656595797587965e-06, "loss": 0.6753, "step": 7888 }, { "epoch": 0.32695097185958805, "grad_norm": 0.4406169652938843, "learning_rate": 3.3654523602304283e-06, "loss": 0.7026, "step": 7889 }, { "epoch": 0.3269924157652617, "grad_norm": 0.4158298671245575, "learning_rate": 3.3652451407020597e-06, "loss": 0.6913, "step": 7890 }, { "epoch": 0.3270338596709354, "grad_norm": 0.44252318143844604, "learning_rate": 3.3650379211736915e-06, "loss": 0.7432, "step": 7891 }, { "epoch": 0.3270753035766091, "grad_norm": 0.42553484439849854, "learning_rate": 3.3648307016453237e-06, "loss": 0.7247, "step": 7892 }, { "epoch": 0.32711674748228275, "grad_norm": 0.42196255922317505, "learning_rate": 3.3646234821169547e-06, "loss": 0.7103, "step": 7893 }, { "epoch": 0.3271581913879564, "grad_norm": 0.42846500873565674, "learning_rate": 3.364416262588587e-06, "loss": 0.7192, "step": 7894 }, { "epoch": 0.32719963529363005, "grad_norm": 0.395893394947052, "learning_rate": 3.3642090430602183e-06, "loss": 0.6829, "step": 7895 }, { "epoch": 0.32724107919930373, "grad_norm": 0.41526922583580017, "learning_rate": 3.36400182353185e-06, "loss": 0.7207, "step": 7896 }, { "epoch": 
0.3272825231049774, "grad_norm": 0.3700304627418518, "learning_rate": 3.3637946040034815e-06, "loss": 0.6886, "step": 7897 }, { "epoch": 0.3273239670106511, "grad_norm": 0.41648176312446594, "learning_rate": 3.3635873844751133e-06, "loss": 0.7234, "step": 7898 }, { "epoch": 0.32736541091632476, "grad_norm": 0.43109601736068726, "learning_rate": 3.363380164946745e-06, "loss": 0.7903, "step": 7899 }, { "epoch": 0.32740685482199844, "grad_norm": 0.4177792966365814, "learning_rate": 3.3631729454183765e-06, "loss": 0.7197, "step": 7900 }, { "epoch": 0.3274482987276721, "grad_norm": 0.4625702500343323, "learning_rate": 3.3629657258900083e-06, "loss": 0.7349, "step": 7901 }, { "epoch": 0.3274897426333458, "grad_norm": 0.39170798659324646, "learning_rate": 3.3627585063616397e-06, "loss": 0.707, "step": 7902 }, { "epoch": 0.3275311865390194, "grad_norm": 0.4053094983100891, "learning_rate": 3.3625512868332715e-06, "loss": 0.7285, "step": 7903 }, { "epoch": 0.3275726304446931, "grad_norm": 0.4121449291706085, "learning_rate": 3.362344067304903e-06, "loss": 0.7043, "step": 7904 }, { "epoch": 0.32761407435036677, "grad_norm": 0.43291008472442627, "learning_rate": 3.3621368477765347e-06, "loss": 0.6731, "step": 7905 }, { "epoch": 0.32765551825604045, "grad_norm": 0.41992101073265076, "learning_rate": 3.361929628248166e-06, "loss": 0.7046, "step": 7906 }, { "epoch": 0.3276969621617141, "grad_norm": 0.4211881756782532, "learning_rate": 3.361722408719798e-06, "loss": 0.7202, "step": 7907 }, { "epoch": 0.3277384060673878, "grad_norm": 0.4671046733856201, "learning_rate": 3.36151518919143e-06, "loss": 0.7828, "step": 7908 }, { "epoch": 0.3277798499730615, "grad_norm": 0.4107086658477783, "learning_rate": 3.361307969663061e-06, "loss": 0.6829, "step": 7909 }, { "epoch": 0.32782129387873515, "grad_norm": 0.39492759108543396, "learning_rate": 3.3611007501346933e-06, "loss": 0.676, "step": 7910 }, { "epoch": 0.3278627377844088, "grad_norm": 0.41530975699424744, "learning_rate": 
3.3608935306063243e-06, "loss": 0.6831, "step": 7911 }, { "epoch": 0.32790418169008245, "grad_norm": 0.39978304505348206, "learning_rate": 3.3606863110779565e-06, "loss": 0.718, "step": 7912 }, { "epoch": 0.32794562559575613, "grad_norm": 0.42126598954200745, "learning_rate": 3.360479091549588e-06, "loss": 0.7046, "step": 7913 }, { "epoch": 0.3279870695014298, "grad_norm": 0.4389324188232422, "learning_rate": 3.3602718720212197e-06, "loss": 0.7102, "step": 7914 }, { "epoch": 0.3280285134071035, "grad_norm": 0.4299924671649933, "learning_rate": 3.360064652492851e-06, "loss": 0.6799, "step": 7915 }, { "epoch": 0.32806995731277716, "grad_norm": 0.4222584366798401, "learning_rate": 3.359857432964483e-06, "loss": 0.6741, "step": 7916 }, { "epoch": 0.32811140121845084, "grad_norm": 0.44245246052742004, "learning_rate": 3.3596502134361147e-06, "loss": 0.7117, "step": 7917 }, { "epoch": 0.3281528451241245, "grad_norm": 0.39610254764556885, "learning_rate": 3.359442993907746e-06, "loss": 0.7102, "step": 7918 }, { "epoch": 0.3281942890297982, "grad_norm": 0.406148225069046, "learning_rate": 3.359235774379378e-06, "loss": 0.7131, "step": 7919 }, { "epoch": 0.3282357329354718, "grad_norm": 0.40056148171424866, "learning_rate": 3.3590285548510093e-06, "loss": 0.7061, "step": 7920 }, { "epoch": 0.3282771768411455, "grad_norm": 0.41547709703445435, "learning_rate": 3.358821335322641e-06, "loss": 0.7144, "step": 7921 }, { "epoch": 0.32831862074681917, "grad_norm": 0.4306262731552124, "learning_rate": 3.3586141157942725e-06, "loss": 0.741, "step": 7922 }, { "epoch": 0.32836006465249284, "grad_norm": 0.40384969115257263, "learning_rate": 3.3584068962659043e-06, "loss": 0.6812, "step": 7923 }, { "epoch": 0.3284015085581665, "grad_norm": 0.38736841082572937, "learning_rate": 3.3581996767375357e-06, "loss": 0.707, "step": 7924 }, { "epoch": 0.3284429524638402, "grad_norm": 0.3965844511985779, "learning_rate": 3.3579924572091675e-06, "loss": 0.6904, "step": 7925 }, { "epoch": 
0.3284843963695139, "grad_norm": 0.4387584924697876, "learning_rate": 3.3577852376807997e-06, "loss": 0.6973, "step": 7926 }, { "epoch": 0.32852584027518755, "grad_norm": 0.40261057019233704, "learning_rate": 3.3575780181524307e-06, "loss": 0.6661, "step": 7927 }, { "epoch": 0.32856728418086123, "grad_norm": 0.6934207081794739, "learning_rate": 3.357370798624063e-06, "loss": 0.6597, "step": 7928 }, { "epoch": 0.32860872808653485, "grad_norm": 0.4315175414085388, "learning_rate": 3.3571635790956943e-06, "loss": 0.6582, "step": 7929 }, { "epoch": 0.32865017199220853, "grad_norm": 0.46121904253959656, "learning_rate": 3.356956359567326e-06, "loss": 0.7393, "step": 7930 }, { "epoch": 0.3286916158978822, "grad_norm": 0.4215610921382904, "learning_rate": 3.3567491400389575e-06, "loss": 0.6438, "step": 7931 }, { "epoch": 0.3287330598035559, "grad_norm": 0.4495464861392975, "learning_rate": 3.3565419205105893e-06, "loss": 0.7747, "step": 7932 }, { "epoch": 0.32877450370922956, "grad_norm": 0.40555819869041443, "learning_rate": 3.3563347009822207e-06, "loss": 0.6471, "step": 7933 }, { "epoch": 0.32881594761490324, "grad_norm": 0.42446568608283997, "learning_rate": 3.3561274814538525e-06, "loss": 0.7407, "step": 7934 }, { "epoch": 0.3288573915205769, "grad_norm": 0.4170495867729187, "learning_rate": 3.3559202619254843e-06, "loss": 0.6912, "step": 7935 }, { "epoch": 0.3288988354262506, "grad_norm": 0.4335777461528778, "learning_rate": 3.3557130423971157e-06, "loss": 0.7092, "step": 7936 }, { "epoch": 0.32894027933192427, "grad_norm": 0.4119087755680084, "learning_rate": 3.3555058228687475e-06, "loss": 0.6326, "step": 7937 }, { "epoch": 0.3289817232375979, "grad_norm": 0.41475436091423035, "learning_rate": 3.355298603340379e-06, "loss": 0.6975, "step": 7938 }, { "epoch": 0.32902316714327157, "grad_norm": 0.41199180483818054, "learning_rate": 3.3550913838120107e-06, "loss": 0.7263, "step": 7939 }, { "epoch": 0.32906461104894524, "grad_norm": 0.40641072392463684, 
"learning_rate": 3.354884164283642e-06, "loss": 0.662, "step": 7940 }, { "epoch": 0.3291060549546189, "grad_norm": 0.430259644985199, "learning_rate": 3.354676944755274e-06, "loss": 0.7074, "step": 7941 }, { "epoch": 0.3291474988602926, "grad_norm": 0.38428762555122375, "learning_rate": 3.3544697252269053e-06, "loss": 0.693, "step": 7942 }, { "epoch": 0.3291889427659663, "grad_norm": 0.389007031917572, "learning_rate": 3.354262505698537e-06, "loss": 0.7434, "step": 7943 }, { "epoch": 0.32923038667163995, "grad_norm": 0.4037756323814392, "learning_rate": 3.3540552861701693e-06, "loss": 0.6674, "step": 7944 }, { "epoch": 0.32927183057731363, "grad_norm": 0.3862777650356293, "learning_rate": 3.3538480666418003e-06, "loss": 0.6833, "step": 7945 }, { "epoch": 0.32931327448298725, "grad_norm": 0.3925323188304901, "learning_rate": 3.3536408471134325e-06, "loss": 0.7014, "step": 7946 }, { "epoch": 0.3293547183886609, "grad_norm": 0.40981143712997437, "learning_rate": 3.353433627585064e-06, "loss": 0.668, "step": 7947 }, { "epoch": 0.3293961622943346, "grad_norm": 0.41306132078170776, "learning_rate": 3.3532264080566957e-06, "loss": 0.676, "step": 7948 }, { "epoch": 0.3294376062000083, "grad_norm": 0.40375250577926636, "learning_rate": 3.353019188528327e-06, "loss": 0.6605, "step": 7949 }, { "epoch": 0.32947905010568196, "grad_norm": 0.4091317355632782, "learning_rate": 3.352811968999959e-06, "loss": 0.6715, "step": 7950 }, { "epoch": 0.32952049401135564, "grad_norm": 0.4747426211833954, "learning_rate": 3.3526047494715903e-06, "loss": 0.6719, "step": 7951 }, { "epoch": 0.3295619379170293, "grad_norm": 0.47717249393463135, "learning_rate": 3.352397529943222e-06, "loss": 0.7605, "step": 7952 }, { "epoch": 0.329603381822703, "grad_norm": 0.4491930902004242, "learning_rate": 3.352190310414854e-06, "loss": 0.7102, "step": 7953 }, { "epoch": 0.32964482572837667, "grad_norm": 0.40875571966171265, "learning_rate": 3.3519830908864853e-06, "loss": 0.7361, "step": 7954 }, { "epoch": 
0.3296862696340503, "grad_norm": 0.4491513669490814, "learning_rate": 3.351775871358117e-06, "loss": 0.7107, "step": 7955 }, { "epoch": 0.32972771353972397, "grad_norm": 0.3997478485107422, "learning_rate": 3.3515686518297485e-06, "loss": 0.6857, "step": 7956 }, { "epoch": 0.32976915744539764, "grad_norm": 0.42646029591560364, "learning_rate": 3.3513614323013803e-06, "loss": 0.6965, "step": 7957 }, { "epoch": 0.3298106013510713, "grad_norm": 0.4220108091831207, "learning_rate": 3.3511542127730117e-06, "loss": 0.7163, "step": 7958 }, { "epoch": 0.329852045256745, "grad_norm": 0.4226192533969879, "learning_rate": 3.3509469932446435e-06, "loss": 0.7607, "step": 7959 }, { "epoch": 0.3298934891624187, "grad_norm": 0.39844655990600586, "learning_rate": 3.3507397737162758e-06, "loss": 0.6787, "step": 7960 }, { "epoch": 0.32993493306809235, "grad_norm": 0.4171030521392822, "learning_rate": 3.3505325541879067e-06, "loss": 0.6893, "step": 7961 }, { "epoch": 0.32997637697376603, "grad_norm": 0.4193304181098938, "learning_rate": 3.350325334659539e-06, "loss": 0.7031, "step": 7962 }, { "epoch": 0.3300178208794397, "grad_norm": 0.39458978176116943, "learning_rate": 3.3501181151311703e-06, "loss": 0.696, "step": 7963 }, { "epoch": 0.3300592647851133, "grad_norm": 0.39896777272224426, "learning_rate": 3.349910895602802e-06, "loss": 0.7295, "step": 7964 }, { "epoch": 0.330100708690787, "grad_norm": 0.4222836196422577, "learning_rate": 3.3497036760744335e-06, "loss": 0.726, "step": 7965 }, { "epoch": 0.3301421525964607, "grad_norm": 0.4341786503791809, "learning_rate": 3.3494964565460653e-06, "loss": 0.7219, "step": 7966 }, { "epoch": 0.33018359650213436, "grad_norm": 0.37430259585380554, "learning_rate": 3.3492892370176967e-06, "loss": 0.6487, "step": 7967 }, { "epoch": 0.33022504040780803, "grad_norm": 0.41345641016960144, "learning_rate": 3.3490820174893285e-06, "loss": 0.6907, "step": 7968 }, { "epoch": 0.3302664843134817, "grad_norm": 0.3859317898750305, "learning_rate": 
3.3488747979609603e-06, "loss": 0.6832, "step": 7969 }, { "epoch": 0.3303079282191554, "grad_norm": 0.42162033915519714, "learning_rate": 3.3486675784325917e-06, "loss": 0.6761, "step": 7970 }, { "epoch": 0.33034937212482907, "grad_norm": 0.40062597393989563, "learning_rate": 3.3484603589042235e-06, "loss": 0.6755, "step": 7971 }, { "epoch": 0.3303908160305027, "grad_norm": 0.39600634574890137, "learning_rate": 3.348253139375855e-06, "loss": 0.6921, "step": 7972 }, { "epoch": 0.33043225993617636, "grad_norm": 0.44521716237068176, "learning_rate": 3.3480459198474867e-06, "loss": 0.7075, "step": 7973 }, { "epoch": 0.33047370384185004, "grad_norm": 0.4115220904350281, "learning_rate": 3.347838700319118e-06, "loss": 0.7109, "step": 7974 }, { "epoch": 0.3305151477475237, "grad_norm": 0.45592111349105835, "learning_rate": 3.34763148079075e-06, "loss": 0.696, "step": 7975 }, { "epoch": 0.3305565916531974, "grad_norm": 0.45308563113212585, "learning_rate": 3.3474242612623813e-06, "loss": 0.7649, "step": 7976 }, { "epoch": 0.3305980355588711, "grad_norm": 0.3784540295600891, "learning_rate": 3.347217041734013e-06, "loss": 0.6897, "step": 7977 }, { "epoch": 0.33063947946454475, "grad_norm": 0.4064774513244629, "learning_rate": 3.3470098222056454e-06, "loss": 0.6992, "step": 7978 }, { "epoch": 0.3306809233702184, "grad_norm": 0.39900341629981995, "learning_rate": 3.3468026026772763e-06, "loss": 0.6704, "step": 7979 }, { "epoch": 0.3307223672758921, "grad_norm": 0.41627398133277893, "learning_rate": 3.3465953831489085e-06, "loss": 0.6719, "step": 7980 }, { "epoch": 0.3307638111815657, "grad_norm": 0.45161765813827515, "learning_rate": 3.34638816362054e-06, "loss": 0.6938, "step": 7981 }, { "epoch": 0.3308052550872394, "grad_norm": 0.49716946482658386, "learning_rate": 3.3461809440921717e-06, "loss": 0.7283, "step": 7982 }, { "epoch": 0.3308466989929131, "grad_norm": 0.3851609528064728, "learning_rate": 3.345973724563803e-06, "loss": 0.7109, "step": 7983 }, { "epoch": 
0.33088814289858676, "grad_norm": 0.39520880579948425, "learning_rate": 3.345766505035435e-06, "loss": 0.7156, "step": 7984 }, { "epoch": 0.33092958680426043, "grad_norm": 0.449020117521286, "learning_rate": 3.3455592855070663e-06, "loss": 0.7263, "step": 7985 }, { "epoch": 0.3309710307099341, "grad_norm": 0.43262919783592224, "learning_rate": 3.345352065978698e-06, "loss": 0.6943, "step": 7986 }, { "epoch": 0.3310124746156078, "grad_norm": 0.4304153025150299, "learning_rate": 3.34514484645033e-06, "loss": 0.7317, "step": 7987 }, { "epoch": 0.33105391852128147, "grad_norm": 0.38980814814567566, "learning_rate": 3.3449376269219613e-06, "loss": 0.6842, "step": 7988 }, { "epoch": 0.33109536242695514, "grad_norm": 0.44170382618904114, "learning_rate": 3.344730407393593e-06, "loss": 0.7313, "step": 7989 }, { "epoch": 0.33113680633262876, "grad_norm": 0.40107449889183044, "learning_rate": 3.3445231878652245e-06, "loss": 0.6997, "step": 7990 }, { "epoch": 0.33117825023830244, "grad_norm": 0.38986918330192566, "learning_rate": 3.3443159683368563e-06, "loss": 0.7222, "step": 7991 }, { "epoch": 0.3312196941439761, "grad_norm": 0.40724360942840576, "learning_rate": 3.3441087488084877e-06, "loss": 0.7181, "step": 7992 }, { "epoch": 0.3312611380496498, "grad_norm": 0.4041632115840912, "learning_rate": 3.3439015292801195e-06, "loss": 0.7472, "step": 7993 }, { "epoch": 0.33130258195532347, "grad_norm": 0.4228819012641907, "learning_rate": 3.343694309751751e-06, "loss": 0.7063, "step": 7994 }, { "epoch": 0.33134402586099715, "grad_norm": 0.4062122106552124, "learning_rate": 3.3434870902233827e-06, "loss": 0.7089, "step": 7995 }, { "epoch": 0.3313854697666708, "grad_norm": 0.4325317442417145, "learning_rate": 3.343279870695015e-06, "loss": 0.7194, "step": 7996 }, { "epoch": 0.3314269136723445, "grad_norm": 0.6145809292793274, "learning_rate": 3.3430726511666463e-06, "loss": 0.661, "step": 7997 }, { "epoch": 0.3314683575780182, "grad_norm": 0.43180668354034424, "learning_rate": 
3.342865431638278e-06, "loss": 0.749, "step": 7998 }, { "epoch": 0.3315098014836918, "grad_norm": 0.43506962060928345, "learning_rate": 3.3426582121099095e-06, "loss": 0.7119, "step": 7999 }, { "epoch": 0.3315512453893655, "grad_norm": 0.43424952030181885, "learning_rate": 3.3424509925815413e-06, "loss": 0.6917, "step": 8000 }, { "epoch": 0.33159268929503916, "grad_norm": 0.41163647174835205, "learning_rate": 3.3422437730531727e-06, "loss": 0.7488, "step": 8001 }, { "epoch": 0.33163413320071283, "grad_norm": 0.3894725441932678, "learning_rate": 3.3420365535248045e-06, "loss": 0.7036, "step": 8002 }, { "epoch": 0.3316755771063865, "grad_norm": 0.402765691280365, "learning_rate": 3.341829333996436e-06, "loss": 0.7283, "step": 8003 }, { "epoch": 0.3317170210120602, "grad_norm": 0.46130695939064026, "learning_rate": 3.3416221144680677e-06, "loss": 0.7168, "step": 8004 }, { "epoch": 0.33175846491773386, "grad_norm": 0.39759549498558044, "learning_rate": 3.3414148949396995e-06, "loss": 0.636, "step": 8005 }, { "epoch": 0.33179990882340754, "grad_norm": 0.40847623348236084, "learning_rate": 3.341207675411331e-06, "loss": 0.7113, "step": 8006 }, { "epoch": 0.33184135272908116, "grad_norm": 0.42142438888549805, "learning_rate": 3.3410004558829627e-06, "loss": 0.7239, "step": 8007 }, { "epoch": 0.33188279663475484, "grad_norm": 0.41945144534111023, "learning_rate": 3.340793236354594e-06, "loss": 0.7556, "step": 8008 }, { "epoch": 0.3319242405404285, "grad_norm": 0.41355016827583313, "learning_rate": 3.340586016826226e-06, "loss": 0.652, "step": 8009 }, { "epoch": 0.3319656844461022, "grad_norm": 0.43096327781677246, "learning_rate": 3.3403787972978573e-06, "loss": 0.7314, "step": 8010 }, { "epoch": 0.33200712835177587, "grad_norm": 0.4124286472797394, "learning_rate": 3.340171577769489e-06, "loss": 0.7734, "step": 8011 }, { "epoch": 0.33204857225744955, "grad_norm": 0.4384773373603821, "learning_rate": 3.3399643582411205e-06, "loss": 0.71, "step": 8012 }, { "epoch": 
0.3320900161631232, "grad_norm": 0.38740506768226624, "learning_rate": 3.3397571387127527e-06, "loss": 0.6921, "step": 8013 }, { "epoch": 0.3321314600687969, "grad_norm": 0.40994343161582947, "learning_rate": 3.3395499191843846e-06, "loss": 0.6926, "step": 8014 }, { "epoch": 0.3321729039744706, "grad_norm": 0.4233691394329071, "learning_rate": 3.339342699656016e-06, "loss": 0.6865, "step": 8015 }, { "epoch": 0.3322143478801442, "grad_norm": 0.4100450873374939, "learning_rate": 3.3391354801276477e-06, "loss": 0.6619, "step": 8016 }, { "epoch": 0.3322557917858179, "grad_norm": 0.43379610776901245, "learning_rate": 3.338928260599279e-06, "loss": 0.7024, "step": 8017 }, { "epoch": 0.33229723569149155, "grad_norm": 0.45424965023994446, "learning_rate": 3.338721041070911e-06, "loss": 0.7092, "step": 8018 }, { "epoch": 0.33233867959716523, "grad_norm": 0.3991762101650238, "learning_rate": 3.3385138215425423e-06, "loss": 0.6206, "step": 8019 }, { "epoch": 0.3323801235028389, "grad_norm": 0.4172744154930115, "learning_rate": 3.338306602014174e-06, "loss": 0.7134, "step": 8020 }, { "epoch": 0.3324215674085126, "grad_norm": 0.40873926877975464, "learning_rate": 3.338099382485806e-06, "loss": 0.7091, "step": 8021 }, { "epoch": 0.33246301131418626, "grad_norm": 0.40145280957221985, "learning_rate": 3.3378921629574373e-06, "loss": 0.7051, "step": 8022 }, { "epoch": 0.33250445521985994, "grad_norm": 0.40661489963531494, "learning_rate": 3.337684943429069e-06, "loss": 0.6621, "step": 8023 }, { "epoch": 0.3325458991255336, "grad_norm": 0.4530370235443115, "learning_rate": 3.3374777239007005e-06, "loss": 0.6912, "step": 8024 }, { "epoch": 0.33258734303120724, "grad_norm": 0.47130855917930603, "learning_rate": 3.3372705043723323e-06, "loss": 0.7631, "step": 8025 }, { "epoch": 0.3326287869368809, "grad_norm": 0.4066319167613983, "learning_rate": 3.3370632848439637e-06, "loss": 0.6731, "step": 8026 }, { "epoch": 0.3326702308425546, "grad_norm": 0.4065719246864319, "learning_rate": 
3.3368560653155955e-06, "loss": 0.6581, "step": 8027 }, { "epoch": 0.33271167474822827, "grad_norm": 0.41268390417099, "learning_rate": 3.336648845787227e-06, "loss": 0.7188, "step": 8028 }, { "epoch": 0.33275311865390195, "grad_norm": 0.39808282256126404, "learning_rate": 3.3364416262588587e-06, "loss": 0.7048, "step": 8029 }, { "epoch": 0.3327945625595756, "grad_norm": 0.38352862000465393, "learning_rate": 3.336234406730491e-06, "loss": 0.6505, "step": 8030 }, { "epoch": 0.3328360064652493, "grad_norm": 0.42006441950798035, "learning_rate": 3.3360271872021223e-06, "loss": 0.6383, "step": 8031 }, { "epoch": 0.332877450370923, "grad_norm": 0.4129132926464081, "learning_rate": 3.335819967673754e-06, "loss": 0.6748, "step": 8032 }, { "epoch": 0.3329188942765966, "grad_norm": 0.4353955388069153, "learning_rate": 3.3356127481453855e-06, "loss": 0.6965, "step": 8033 }, { "epoch": 0.3329603381822703, "grad_norm": 0.6213743686676025, "learning_rate": 3.3354055286170173e-06, "loss": 0.7064, "step": 8034 }, { "epoch": 0.33300178208794395, "grad_norm": 0.5031179785728455, "learning_rate": 3.3351983090886487e-06, "loss": 0.7524, "step": 8035 }, { "epoch": 0.33304322599361763, "grad_norm": 0.410678505897522, "learning_rate": 3.3349910895602805e-06, "loss": 0.6936, "step": 8036 }, { "epoch": 0.3330846698992913, "grad_norm": 0.4394047260284424, "learning_rate": 3.334783870031912e-06, "loss": 0.7131, "step": 8037 }, { "epoch": 0.333126113804965, "grad_norm": 0.40699484944343567, "learning_rate": 3.3345766505035437e-06, "loss": 0.7354, "step": 8038 }, { "epoch": 0.33316755771063866, "grad_norm": 0.40327244997024536, "learning_rate": 3.3343694309751755e-06, "loss": 0.6936, "step": 8039 }, { "epoch": 0.33320900161631234, "grad_norm": 0.4139373302459717, "learning_rate": 3.334162211446807e-06, "loss": 0.7258, "step": 8040 }, { "epoch": 0.333250445521986, "grad_norm": 0.4306830167770386, "learning_rate": 3.3339549919184387e-06, "loss": 0.7273, "step": 8041 }, { "epoch": 
0.33329188942765964, "grad_norm": 0.42024728655815125, "learning_rate": 3.33374777239007e-06, "loss": 0.7168, "step": 8042 }, { "epoch": 0.3333333333333333, "grad_norm": 0.39892956614494324, "learning_rate": 3.333540552861702e-06, "loss": 0.7073, "step": 8043 }, { "epoch": 0.333374777239007, "grad_norm": 0.3866526782512665, "learning_rate": 3.3333333333333333e-06, "loss": 0.6536, "step": 8044 }, { "epoch": 0.33341622114468067, "grad_norm": 0.4210661053657532, "learning_rate": 3.333126113804965e-06, "loss": 0.6608, "step": 8045 }, { "epoch": 0.33345766505035435, "grad_norm": 0.437215119600296, "learning_rate": 3.3329188942765965e-06, "loss": 0.6973, "step": 8046 }, { "epoch": 0.333499108956028, "grad_norm": 0.3843875229358673, "learning_rate": 3.3327116747482288e-06, "loss": 0.6906, "step": 8047 }, { "epoch": 0.3335405528617017, "grad_norm": 0.40978240966796875, "learning_rate": 3.3325044552198606e-06, "loss": 0.7297, "step": 8048 }, { "epoch": 0.3335819967673754, "grad_norm": 0.41272062063217163, "learning_rate": 3.332297235691492e-06, "loss": 0.6727, "step": 8049 }, { "epoch": 0.33362344067304905, "grad_norm": 0.42441073060035706, "learning_rate": 3.3320900161631238e-06, "loss": 0.7423, "step": 8050 }, { "epoch": 0.3336648845787227, "grad_norm": 0.4119737148284912, "learning_rate": 3.331882796634755e-06, "loss": 0.7327, "step": 8051 }, { "epoch": 0.33370632848439635, "grad_norm": 0.41272541880607605, "learning_rate": 3.331675577106387e-06, "loss": 0.6826, "step": 8052 }, { "epoch": 0.33374777239007003, "grad_norm": 0.38396868109703064, "learning_rate": 3.3314683575780183e-06, "loss": 0.6704, "step": 8053 }, { "epoch": 0.3337892162957437, "grad_norm": 0.4227725863456726, "learning_rate": 3.33126113804965e-06, "loss": 0.7148, "step": 8054 }, { "epoch": 0.3338306602014174, "grad_norm": 0.4149216413497925, "learning_rate": 3.3310539185212815e-06, "loss": 0.7008, "step": 8055 }, { "epoch": 0.33387210410709106, "grad_norm": 
0.45245110988616943, "learning_rate": 3.3308466989929133e-06, "loss": 0.7629, "step": 8056 }, { "epoch": 0.33391354801276474, "grad_norm": 0.4650709331035614, "learning_rate": 3.330639479464545e-06, "loss": 0.7327, "step": 8057 }, { "epoch": 0.3339549919184384, "grad_norm": 0.4578043520450592, "learning_rate": 3.3304322599361765e-06, "loss": 0.7603, "step": 8058 }, { "epoch": 0.33399643582411204, "grad_norm": 0.42209184169769287, "learning_rate": 3.3302250404078083e-06, "loss": 0.7404, "step": 8059 }, { "epoch": 0.3340378797297857, "grad_norm": 0.40769118070602417, "learning_rate": 3.3300178208794397e-06, "loss": 0.6794, "step": 8060 }, { "epoch": 0.3340793236354594, "grad_norm": 0.4127490222454071, "learning_rate": 3.3298106013510715e-06, "loss": 0.665, "step": 8061 }, { "epoch": 0.33412076754113307, "grad_norm": 0.4418104290962219, "learning_rate": 3.329603381822703e-06, "loss": 0.7222, "step": 8062 }, { "epoch": 0.33416221144680675, "grad_norm": 0.38394927978515625, "learning_rate": 3.3293961622943347e-06, "loss": 0.6786, "step": 8063 }, { "epoch": 0.3342036553524804, "grad_norm": 0.47822070121765137, "learning_rate": 3.329188942765966e-06, "loss": 0.6959, "step": 8064 }, { "epoch": 0.3342450992581541, "grad_norm": 0.4010542035102844, "learning_rate": 3.3289817232375984e-06, "loss": 0.7151, "step": 8065 }, { "epoch": 0.3342865431638278, "grad_norm": 0.43703991174697876, "learning_rate": 3.32877450370923e-06, "loss": 0.7244, "step": 8066 }, { "epoch": 0.33432798706950145, "grad_norm": 0.406598299741745, "learning_rate": 3.3285672841808615e-06, "loss": 0.6538, "step": 8067 }, { "epoch": 0.3343694309751751, "grad_norm": 0.4346665143966675, "learning_rate": 3.3283600646524934e-06, "loss": 0.7294, "step": 8068 }, { "epoch": 0.33441087488084875, "grad_norm": 0.3880501091480255, "learning_rate": 3.3281528451241247e-06, "loss": 0.651, "step": 8069 }, { "epoch": 0.33445231878652243, "grad_norm": 0.43470486998558044, "learning_rate": 3.3279456255957566e-06, "loss": 
0.7113, "step": 8070 }, { "epoch": 0.3344937626921961, "grad_norm": 0.4513741731643677, "learning_rate": 3.327738406067388e-06, "loss": 0.766, "step": 8071 }, { "epoch": 0.3345352065978698, "grad_norm": 0.3939751982688904, "learning_rate": 3.3275311865390197e-06, "loss": 0.699, "step": 8072 }, { "epoch": 0.33457665050354346, "grad_norm": 0.46824631094932556, "learning_rate": 3.327323967010651e-06, "loss": 0.7915, "step": 8073 }, { "epoch": 0.33461809440921714, "grad_norm": 0.4007798433303833, "learning_rate": 3.327116747482283e-06, "loss": 0.7129, "step": 8074 }, { "epoch": 0.3346595383148908, "grad_norm": 0.39239993691444397, "learning_rate": 3.3269095279539147e-06, "loss": 0.6271, "step": 8075 }, { "epoch": 0.3347009822205645, "grad_norm": 0.4036101698875427, "learning_rate": 3.326702308425546e-06, "loss": 0.6714, "step": 8076 }, { "epoch": 0.3347424261262381, "grad_norm": 0.42456308007240295, "learning_rate": 3.326495088897178e-06, "loss": 0.6897, "step": 8077 }, { "epoch": 0.3347838700319118, "grad_norm": 0.38069862127304077, "learning_rate": 3.3262878693688093e-06, "loss": 0.7139, "step": 8078 }, { "epoch": 0.33482531393758547, "grad_norm": 0.43674829602241516, "learning_rate": 3.326080649840441e-06, "loss": 0.6842, "step": 8079 }, { "epoch": 0.33486675784325914, "grad_norm": 0.3938743472099304, "learning_rate": 3.3258734303120725e-06, "loss": 0.6982, "step": 8080 }, { "epoch": 0.3349082017489328, "grad_norm": 0.4254458546638489, "learning_rate": 3.3256662107837048e-06, "loss": 0.7183, "step": 8081 }, { "epoch": 0.3349496456546065, "grad_norm": 0.43785691261291504, "learning_rate": 3.3254589912553366e-06, "loss": 0.7653, "step": 8082 }, { "epoch": 0.3349910895602802, "grad_norm": 0.3793331980705261, "learning_rate": 3.325251771726968e-06, "loss": 0.702, "step": 8083 }, { "epoch": 0.33503253346595385, "grad_norm": 0.463458389043808, "learning_rate": 3.3250445521985998e-06, "loss": 0.6414, "step": 8084 }, { "epoch": 0.33507397737162753, "grad_norm": 
0.4247121810913086, "learning_rate": 3.324837332670231e-06, "loss": 0.6907, "step": 8085 }, { "epoch": 0.33511542127730115, "grad_norm": 0.44032782316207886, "learning_rate": 3.324630113141863e-06, "loss": 0.731, "step": 8086 }, { "epoch": 0.33515686518297483, "grad_norm": 0.4050964117050171, "learning_rate": 3.3244228936134943e-06, "loss": 0.6572, "step": 8087 }, { "epoch": 0.3351983090886485, "grad_norm": 0.3922918140888214, "learning_rate": 3.324215674085126e-06, "loss": 0.6924, "step": 8088 }, { "epoch": 0.3352397529943222, "grad_norm": 0.4149254560470581, "learning_rate": 3.3240084545567575e-06, "loss": 0.7607, "step": 8089 }, { "epoch": 0.33528119689999586, "grad_norm": 0.5018079876899719, "learning_rate": 3.3238012350283893e-06, "loss": 0.7526, "step": 8090 }, { "epoch": 0.33532264080566954, "grad_norm": 0.37619051337242126, "learning_rate": 3.323594015500021e-06, "loss": 0.6567, "step": 8091 }, { "epoch": 0.3353640847113432, "grad_norm": 0.41074663400650024, "learning_rate": 3.3233867959716525e-06, "loss": 0.7114, "step": 8092 }, { "epoch": 0.3354055286170169, "grad_norm": 0.3720768690109253, "learning_rate": 3.3231795764432843e-06, "loss": 0.6676, "step": 8093 }, { "epoch": 0.3354469725226905, "grad_norm": 0.47178930044174194, "learning_rate": 3.3229723569149157e-06, "loss": 0.7031, "step": 8094 }, { "epoch": 0.3354884164283642, "grad_norm": 0.4005848169326782, "learning_rate": 3.3227651373865475e-06, "loss": 0.7202, "step": 8095 }, { "epoch": 0.33552986033403787, "grad_norm": 0.46273112297058105, "learning_rate": 3.322557917858179e-06, "loss": 0.688, "step": 8096 }, { "epoch": 0.33557130423971154, "grad_norm": 0.41446107625961304, "learning_rate": 3.3223506983298107e-06, "loss": 0.7024, "step": 8097 }, { "epoch": 0.3356127481453852, "grad_norm": 0.40680378675460815, "learning_rate": 3.322143478801442e-06, "loss": 0.6638, "step": 8098 }, { "epoch": 0.3356541920510589, "grad_norm": 0.43839231133461, "learning_rate": 3.3219362592730744e-06, "loss": 0.7456, 
"step": 8099 }, { "epoch": 0.3356956359567326, "grad_norm": 0.4191763997077942, "learning_rate": 3.321729039744706e-06, "loss": 0.6825, "step": 8100 }, { "epoch": 0.33573707986240625, "grad_norm": 0.4088877737522125, "learning_rate": 3.3215218202163376e-06, "loss": 0.6689, "step": 8101 }, { "epoch": 0.33577852376807993, "grad_norm": 0.3875562250614166, "learning_rate": 3.3213146006879694e-06, "loss": 0.6389, "step": 8102 }, { "epoch": 0.33581996767375355, "grad_norm": 0.4289272427558899, "learning_rate": 3.3211073811596007e-06, "loss": 0.7537, "step": 8103 }, { "epoch": 0.3358614115794272, "grad_norm": 0.4227634370326996, "learning_rate": 3.3209001616312326e-06, "loss": 0.6819, "step": 8104 }, { "epoch": 0.3359028554851009, "grad_norm": 0.39751020073890686, "learning_rate": 3.320692942102864e-06, "loss": 0.709, "step": 8105 }, { "epoch": 0.3359442993907746, "grad_norm": 0.4498789608478546, "learning_rate": 3.3204857225744958e-06, "loss": 0.7274, "step": 8106 }, { "epoch": 0.33598574329644826, "grad_norm": 0.4376446306705475, "learning_rate": 3.320278503046127e-06, "loss": 0.6887, "step": 8107 }, { "epoch": 0.33602718720212194, "grad_norm": 0.4088791012763977, "learning_rate": 3.320071283517759e-06, "loss": 0.6881, "step": 8108 }, { "epoch": 0.3360686311077956, "grad_norm": 0.38699325919151306, "learning_rate": 3.3198640639893908e-06, "loss": 0.6748, "step": 8109 }, { "epoch": 0.3361100750134693, "grad_norm": 0.4062426686286926, "learning_rate": 3.319656844461022e-06, "loss": 0.6898, "step": 8110 }, { "epoch": 0.33615151891914297, "grad_norm": 0.39614295959472656, "learning_rate": 3.319449624932654e-06, "loss": 0.6908, "step": 8111 }, { "epoch": 0.3361929628248166, "grad_norm": 0.43543753027915955, "learning_rate": 3.3192424054042853e-06, "loss": 0.676, "step": 8112 }, { "epoch": 0.33623440673049027, "grad_norm": 0.42491409182548523, "learning_rate": 3.319035185875917e-06, "loss": 0.7034, "step": 8113 }, { "epoch": 0.33627585063616394, "grad_norm": 
0.41535019874572754, "learning_rate": 3.3188279663475485e-06, "loss": 0.682, "step": 8114 }, { "epoch": 0.3363172945418376, "grad_norm": 0.43163424730300903, "learning_rate": 3.3186207468191808e-06, "loss": 0.7275, "step": 8115 }, { "epoch": 0.3363587384475113, "grad_norm": 0.4573400318622589, "learning_rate": 3.3184135272908117e-06, "loss": 0.7288, "step": 8116 }, { "epoch": 0.336400182353185, "grad_norm": 0.42940300703048706, "learning_rate": 3.318206307762444e-06, "loss": 0.7278, "step": 8117 }, { "epoch": 0.33644162625885865, "grad_norm": 0.4591856598854065, "learning_rate": 3.3179990882340758e-06, "loss": 0.7571, "step": 8118 }, { "epoch": 0.3364830701645323, "grad_norm": 0.41584473848342896, "learning_rate": 3.317791868705707e-06, "loss": 0.6931, "step": 8119 }, { "epoch": 0.33652451407020595, "grad_norm": 0.41130807995796204, "learning_rate": 3.317584649177339e-06, "loss": 0.6749, "step": 8120 }, { "epoch": 0.3365659579758796, "grad_norm": 0.3965443968772888, "learning_rate": 3.3173774296489703e-06, "loss": 0.7008, "step": 8121 }, { "epoch": 0.3366074018815533, "grad_norm": 0.3974188268184662, "learning_rate": 3.317170210120602e-06, "loss": 0.7258, "step": 8122 }, { "epoch": 0.336648845787227, "grad_norm": 0.39916959404945374, "learning_rate": 3.3169629905922335e-06, "loss": 0.6582, "step": 8123 }, { "epoch": 0.33669028969290066, "grad_norm": 0.3768806457519531, "learning_rate": 3.3167557710638654e-06, "loss": 0.6825, "step": 8124 }, { "epoch": 0.33673173359857433, "grad_norm": 0.3995022177696228, "learning_rate": 3.3165485515354967e-06, "loss": 0.6902, "step": 8125 }, { "epoch": 0.336773177504248, "grad_norm": 0.3937276005744934, "learning_rate": 3.3163413320071285e-06, "loss": 0.6937, "step": 8126 }, { "epoch": 0.3368146214099217, "grad_norm": 0.4300495684146881, "learning_rate": 3.3161341124787604e-06, "loss": 0.6649, "step": 8127 }, { "epoch": 0.33685606531559537, "grad_norm": 0.3997376263141632, "learning_rate": 3.3159268929503917e-06, "loss": 0.7019, 
"step": 8128 }, { "epoch": 0.336897509221269, "grad_norm": 0.41976574063301086, "learning_rate": 3.3157196734220236e-06, "loss": 0.6833, "step": 8129 }, { "epoch": 0.33693895312694266, "grad_norm": 0.43948033452033997, "learning_rate": 3.315512453893655e-06, "loss": 0.708, "step": 8130 }, { "epoch": 0.33698039703261634, "grad_norm": 0.41532713174819946, "learning_rate": 3.3153052343652867e-06, "loss": 0.7451, "step": 8131 }, { "epoch": 0.33702184093829, "grad_norm": 0.39756307005882263, "learning_rate": 3.315098014836918e-06, "loss": 0.6951, "step": 8132 }, { "epoch": 0.3370632848439637, "grad_norm": 0.3887263238430023, "learning_rate": 3.3148907953085504e-06, "loss": 0.6749, "step": 8133 }, { "epoch": 0.3371047287496374, "grad_norm": 0.472686231136322, "learning_rate": 3.314683575780182e-06, "loss": 0.7683, "step": 8134 }, { "epoch": 0.33714617265531105, "grad_norm": 0.3917160928249359, "learning_rate": 3.3144763562518136e-06, "loss": 0.6693, "step": 8135 }, { "epoch": 0.3371876165609847, "grad_norm": 0.41691985726356506, "learning_rate": 3.3142691367234454e-06, "loss": 0.6761, "step": 8136 }, { "epoch": 0.3372290604666584, "grad_norm": 0.4337606132030487, "learning_rate": 3.3140619171950768e-06, "loss": 0.6863, "step": 8137 }, { "epoch": 0.337270504372332, "grad_norm": 0.46069976687431335, "learning_rate": 3.3138546976667086e-06, "loss": 0.6646, "step": 8138 }, { "epoch": 0.3373119482780057, "grad_norm": 0.43618836998939514, "learning_rate": 3.31364747813834e-06, "loss": 0.7468, "step": 8139 }, { "epoch": 0.3373533921836794, "grad_norm": 0.4228168725967407, "learning_rate": 3.3134402586099718e-06, "loss": 0.7051, "step": 8140 }, { "epoch": 0.33739483608935306, "grad_norm": 0.3810685873031616, "learning_rate": 3.313233039081603e-06, "loss": 0.6849, "step": 8141 }, { "epoch": 0.33743627999502673, "grad_norm": 0.4250097870826721, "learning_rate": 3.313025819553235e-06, "loss": 0.6946, "step": 8142 }, { "epoch": 0.3374777239007004, "grad_norm": 0.4589986205101013, 
"learning_rate": 3.3128186000248668e-06, "loss": 0.7705, "step": 8143 }, { "epoch": 0.3375191678063741, "grad_norm": 0.4005773961544037, "learning_rate": 3.312611380496498e-06, "loss": 0.7175, "step": 8144 }, { "epoch": 0.33756061171204776, "grad_norm": 0.3853548765182495, "learning_rate": 3.31240416096813e-06, "loss": 0.7271, "step": 8145 }, { "epoch": 0.33760205561772144, "grad_norm": 0.45562827587127686, "learning_rate": 3.3121969414397613e-06, "loss": 0.7161, "step": 8146 }, { "epoch": 0.33764349952339506, "grad_norm": 0.42512333393096924, "learning_rate": 3.311989721911393e-06, "loss": 0.7346, "step": 8147 }, { "epoch": 0.33768494342906874, "grad_norm": 0.4211041033267975, "learning_rate": 3.3117825023830245e-06, "loss": 0.694, "step": 8148 }, { "epoch": 0.3377263873347424, "grad_norm": 0.4500575661659241, "learning_rate": 3.3115752828546568e-06, "loss": 0.731, "step": 8149 }, { "epoch": 0.3377678312404161, "grad_norm": 0.39083340764045715, "learning_rate": 3.3113680633262877e-06, "loss": 0.7092, "step": 8150 }, { "epoch": 0.33780927514608977, "grad_norm": 0.39939233660697937, "learning_rate": 3.31116084379792e-06, "loss": 0.7086, "step": 8151 }, { "epoch": 0.33785071905176345, "grad_norm": 0.4183955490589142, "learning_rate": 3.3109536242695518e-06, "loss": 0.6832, "step": 8152 }, { "epoch": 0.3378921629574371, "grad_norm": 0.3965153694152832, "learning_rate": 3.310746404741183e-06, "loss": 0.6741, "step": 8153 }, { "epoch": 0.3379336068631108, "grad_norm": 0.41451096534729004, "learning_rate": 3.310539185212815e-06, "loss": 0.7263, "step": 8154 }, { "epoch": 0.3379750507687844, "grad_norm": 0.3950093686580658, "learning_rate": 3.3103319656844464e-06, "loss": 0.6492, "step": 8155 }, { "epoch": 0.3380164946744581, "grad_norm": 0.38639557361602783, "learning_rate": 3.310124746156078e-06, "loss": 0.6812, "step": 8156 }, { "epoch": 0.3380579385801318, "grad_norm": 0.4128146469593048, "learning_rate": 3.3099175266277095e-06, "loss": 0.6797, "step": 8157 }, { 
"epoch": 0.33809938248580546, "grad_norm": 0.4176913797855377, "learning_rate": 3.3097103070993414e-06, "loss": 0.7368, "step": 8158 }, { "epoch": 0.33814082639147913, "grad_norm": 0.3906860947608948, "learning_rate": 3.3095030875709727e-06, "loss": 0.6829, "step": 8159 }, { "epoch": 0.3381822702971528, "grad_norm": 0.44545209407806396, "learning_rate": 3.3092958680426046e-06, "loss": 0.7727, "step": 8160 }, { "epoch": 0.3382237142028265, "grad_norm": 0.40641355514526367, "learning_rate": 3.3090886485142364e-06, "loss": 0.687, "step": 8161 }, { "epoch": 0.33826515810850016, "grad_norm": 0.44399794936180115, "learning_rate": 3.3088814289858677e-06, "loss": 0.7136, "step": 8162 }, { "epoch": 0.33830660201417384, "grad_norm": 0.4144401550292969, "learning_rate": 3.3086742094574996e-06, "loss": 0.6897, "step": 8163 }, { "epoch": 0.33834804591984746, "grad_norm": 0.425601989030838, "learning_rate": 3.308466989929131e-06, "loss": 0.6718, "step": 8164 }, { "epoch": 0.33838948982552114, "grad_norm": 0.4186052978038788, "learning_rate": 3.3082597704007628e-06, "loss": 0.7003, "step": 8165 }, { "epoch": 0.3384309337311948, "grad_norm": 0.4085485637187958, "learning_rate": 3.308052550872394e-06, "loss": 0.6952, "step": 8166 }, { "epoch": 0.3384723776368685, "grad_norm": 0.4444997012615204, "learning_rate": 3.3078453313440264e-06, "loss": 0.7903, "step": 8167 }, { "epoch": 0.33851382154254217, "grad_norm": 0.3986816704273224, "learning_rate": 3.3076381118156573e-06, "loss": 0.7078, "step": 8168 }, { "epoch": 0.33855526544821585, "grad_norm": 0.4308570325374603, "learning_rate": 3.3074308922872896e-06, "loss": 0.6919, "step": 8169 }, { "epoch": 0.3385967093538895, "grad_norm": 0.4258674383163452, "learning_rate": 3.3072236727589214e-06, "loss": 0.6978, "step": 8170 }, { "epoch": 0.3386381532595632, "grad_norm": 0.40323829650878906, "learning_rate": 3.3070164532305528e-06, "loss": 0.6868, "step": 8171 }, { "epoch": 0.3386795971652369, "grad_norm": 0.42620015144348145, 
"learning_rate": 3.3068092337021846e-06, "loss": 0.6639, "step": 8172 }, { "epoch": 0.3387210410709105, "grad_norm": 0.42764487862586975, "learning_rate": 3.306602014173816e-06, "loss": 0.6992, "step": 8173 }, { "epoch": 0.3387624849765842, "grad_norm": 0.421510249376297, "learning_rate": 3.3063947946454478e-06, "loss": 0.7146, "step": 8174 }, { "epoch": 0.33880392888225785, "grad_norm": 0.3898261785507202, "learning_rate": 3.306187575117079e-06, "loss": 0.6804, "step": 8175 }, { "epoch": 0.33884537278793153, "grad_norm": 0.39174729585647583, "learning_rate": 3.305980355588711e-06, "loss": 0.7266, "step": 8176 }, { "epoch": 0.3388868166936052, "grad_norm": 0.42321160435676575, "learning_rate": 3.3057731360603423e-06, "loss": 0.7103, "step": 8177 }, { "epoch": 0.3389282605992789, "grad_norm": 0.42934203147888184, "learning_rate": 3.305565916531974e-06, "loss": 0.6912, "step": 8178 }, { "epoch": 0.33896970450495256, "grad_norm": 0.39762037992477417, "learning_rate": 3.305358697003606e-06, "loss": 0.7605, "step": 8179 }, { "epoch": 0.33901114841062624, "grad_norm": 0.4020342230796814, "learning_rate": 3.3051514774752373e-06, "loss": 0.7334, "step": 8180 }, { "epoch": 0.33905259231629986, "grad_norm": 0.4472717344760895, "learning_rate": 3.304944257946869e-06, "loss": 0.6804, "step": 8181 }, { "epoch": 0.33909403622197354, "grad_norm": 0.44548308849334717, "learning_rate": 3.3047370384185005e-06, "loss": 0.6987, "step": 8182 }, { "epoch": 0.3391354801276472, "grad_norm": 0.4155973792076111, "learning_rate": 3.3045298188901328e-06, "loss": 0.6606, "step": 8183 }, { "epoch": 0.3391769240333209, "grad_norm": 0.3973948657512665, "learning_rate": 3.3043225993617637e-06, "loss": 0.6282, "step": 8184 }, { "epoch": 0.33921836793899457, "grad_norm": 0.4547775089740753, "learning_rate": 3.304115379833396e-06, "loss": 0.7273, "step": 8185 }, { "epoch": 0.33925981184466825, "grad_norm": 0.38149547576904297, "learning_rate": 3.3039081603050274e-06, "loss": 0.7378, "step": 8186 }, { 
"epoch": 0.3393012557503419, "grad_norm": 0.4121350347995758, "learning_rate": 3.303700940776659e-06, "loss": 0.6863, "step": 8187 }, { "epoch": 0.3393426996560156, "grad_norm": 0.39098861813545227, "learning_rate": 3.303493721248291e-06, "loss": 0.7151, "step": 8188 }, { "epoch": 0.3393841435616893, "grad_norm": 0.4331980049610138, "learning_rate": 3.3032865017199224e-06, "loss": 0.7578, "step": 8189 }, { "epoch": 0.3394255874673629, "grad_norm": 0.41539841890335083, "learning_rate": 3.303079282191554e-06, "loss": 0.7334, "step": 8190 }, { "epoch": 0.3394670313730366, "grad_norm": 0.4201783835887909, "learning_rate": 3.3028720626631856e-06, "loss": 0.6716, "step": 8191 }, { "epoch": 0.33950847527871025, "grad_norm": 0.43180710077285767, "learning_rate": 3.3026648431348174e-06, "loss": 0.7063, "step": 8192 }, { "epoch": 0.33954991918438393, "grad_norm": 0.43275007605552673, "learning_rate": 3.3024576236064487e-06, "loss": 0.7788, "step": 8193 }, { "epoch": 0.3395913630900576, "grad_norm": 0.4269428551197052, "learning_rate": 3.3022504040780806e-06, "loss": 0.6964, "step": 8194 }, { "epoch": 0.3396328069957313, "grad_norm": 0.4455830752849579, "learning_rate": 3.3020431845497124e-06, "loss": 0.7417, "step": 8195 }, { "epoch": 0.33967425090140496, "grad_norm": 0.41200563311576843, "learning_rate": 3.3018359650213438e-06, "loss": 0.6835, "step": 8196 }, { "epoch": 0.33971569480707864, "grad_norm": 0.4117705523967743, "learning_rate": 3.3016287454929756e-06, "loss": 0.688, "step": 8197 }, { "epoch": 0.3397571387127523, "grad_norm": 0.4359980821609497, "learning_rate": 3.301421525964607e-06, "loss": 0.7114, "step": 8198 }, { "epoch": 0.33979858261842594, "grad_norm": 0.4097535014152527, "learning_rate": 3.3012143064362388e-06, "loss": 0.7246, "step": 8199 }, { "epoch": 0.3398400265240996, "grad_norm": 0.3824756443500519, "learning_rate": 3.30100708690787e-06, "loss": 0.6653, "step": 8200 }, { "epoch": 0.3398814704297733, "grad_norm": 0.40654703974723816, 
"learning_rate": 3.3007998673795024e-06, "loss": 0.6749, "step": 8201 }, { "epoch": 0.33992291433544697, "grad_norm": 0.380788654088974, "learning_rate": 3.3005926478511333e-06, "loss": 0.717, "step": 8202 }, { "epoch": 0.33996435824112065, "grad_norm": 0.4020877182483673, "learning_rate": 3.3003854283227656e-06, "loss": 0.7227, "step": 8203 }, { "epoch": 0.3400058021467943, "grad_norm": 0.43380260467529297, "learning_rate": 3.3001782087943974e-06, "loss": 0.7212, "step": 8204 }, { "epoch": 0.340047246052468, "grad_norm": 0.40868085622787476, "learning_rate": 3.2999709892660288e-06, "loss": 0.688, "step": 8205 }, { "epoch": 0.3400886899581417, "grad_norm": 0.422498881816864, "learning_rate": 3.2997637697376606e-06, "loss": 0.7029, "step": 8206 }, { "epoch": 0.3401301338638153, "grad_norm": 0.4094950258731842, "learning_rate": 3.299556550209292e-06, "loss": 0.6577, "step": 8207 }, { "epoch": 0.340171577769489, "grad_norm": 0.42016324400901794, "learning_rate": 3.2993493306809238e-06, "loss": 0.6655, "step": 8208 }, { "epoch": 0.34021302167516265, "grad_norm": 0.45435062050819397, "learning_rate": 3.299142111152555e-06, "loss": 0.6927, "step": 8209 }, { "epoch": 0.34025446558083633, "grad_norm": 0.40360575914382935, "learning_rate": 3.298934891624187e-06, "loss": 0.7007, "step": 8210 }, { "epoch": 0.34029590948651, "grad_norm": 0.37984806299209595, "learning_rate": 3.2987276720958184e-06, "loss": 0.7148, "step": 8211 }, { "epoch": 0.3403373533921837, "grad_norm": 0.4794977009296417, "learning_rate": 3.29852045256745e-06, "loss": 0.731, "step": 8212 }, { "epoch": 0.34037879729785736, "grad_norm": 0.4282992482185364, "learning_rate": 3.298313233039082e-06, "loss": 0.6874, "step": 8213 }, { "epoch": 0.34042024120353104, "grad_norm": 0.4196062982082367, "learning_rate": 3.2981060135107134e-06, "loss": 0.7432, "step": 8214 }, { "epoch": 0.3404616851092047, "grad_norm": 0.3992667496204376, "learning_rate": 3.297898793982345e-06, "loss": 0.6899, "step": 8215 }, { "epoch": 
0.34050312901487834, "grad_norm": 0.40477341413497925, "learning_rate": 3.2976915744539765e-06, "loss": 0.6877, "step": 8216 }, { "epoch": 0.340544572920552, "grad_norm": 0.41400623321533203, "learning_rate": 3.2974843549256088e-06, "loss": 0.6278, "step": 8217 }, { "epoch": 0.3405860168262257, "grad_norm": 0.4152536988258362, "learning_rate": 3.2972771353972397e-06, "loss": 0.7126, "step": 8218 }, { "epoch": 0.34062746073189937, "grad_norm": 0.40164583921432495, "learning_rate": 3.297069915868872e-06, "loss": 0.6846, "step": 8219 }, { "epoch": 0.34066890463757304, "grad_norm": 0.39710527658462524, "learning_rate": 3.2968626963405034e-06, "loss": 0.6595, "step": 8220 }, { "epoch": 0.3407103485432467, "grad_norm": 0.43746107816696167, "learning_rate": 3.296655476812135e-06, "loss": 0.7063, "step": 8221 }, { "epoch": 0.3407517924489204, "grad_norm": 0.4980984330177307, "learning_rate": 3.296448257283767e-06, "loss": 0.7029, "step": 8222 }, { "epoch": 0.3407932363545941, "grad_norm": 0.4424000084400177, "learning_rate": 3.2962410377553984e-06, "loss": 0.7306, "step": 8223 }, { "epoch": 0.34083468026026775, "grad_norm": 0.4248712658882141, "learning_rate": 3.29603381822703e-06, "loss": 0.7373, "step": 8224 }, { "epoch": 0.3408761241659414, "grad_norm": 0.42607027292251587, "learning_rate": 3.2958265986986616e-06, "loss": 0.7131, "step": 8225 }, { "epoch": 0.34091756807161505, "grad_norm": 0.4254146218299866, "learning_rate": 3.2956193791702934e-06, "loss": 0.6804, "step": 8226 }, { "epoch": 0.34095901197728873, "grad_norm": 0.4113422930240631, "learning_rate": 3.2954121596419248e-06, "loss": 0.6729, "step": 8227 }, { "epoch": 0.3410004558829624, "grad_norm": 0.39332330226898193, "learning_rate": 3.2952049401135566e-06, "loss": 0.6873, "step": 8228 }, { "epoch": 0.3410418997886361, "grad_norm": 0.39834925532341003, "learning_rate": 3.294997720585188e-06, "loss": 0.7175, "step": 8229 }, { "epoch": 0.34108334369430976, "grad_norm": 0.40276190638542175, "learning_rate": 
3.2947905010568198e-06, "loss": 0.7305, "step": 8230 }, { "epoch": 0.34112478759998344, "grad_norm": 0.4111160635948181, "learning_rate": 3.2945832815284516e-06, "loss": 0.7031, "step": 8231 }, { "epoch": 0.3411662315056571, "grad_norm": 0.4187011420726776, "learning_rate": 3.294376062000083e-06, "loss": 0.7031, "step": 8232 }, { "epoch": 0.3412076754113308, "grad_norm": 0.4183242619037628, "learning_rate": 3.294168842471715e-06, "loss": 0.7898, "step": 8233 }, { "epoch": 0.3412491193170044, "grad_norm": 0.41397011280059814, "learning_rate": 3.293961622943346e-06, "loss": 0.7209, "step": 8234 }, { "epoch": 0.3412905632226781, "grad_norm": 0.39874985814094543, "learning_rate": 3.2937544034149784e-06, "loss": 0.6709, "step": 8235 }, { "epoch": 0.34133200712835177, "grad_norm": 0.4511902630329132, "learning_rate": 3.2935471838866093e-06, "loss": 0.7522, "step": 8236 }, { "epoch": 0.34137345103402544, "grad_norm": 0.4246155321598053, "learning_rate": 3.2933399643582416e-06, "loss": 0.7366, "step": 8237 }, { "epoch": 0.3414148949396991, "grad_norm": 0.40551620721817017, "learning_rate": 3.293132744829873e-06, "loss": 0.62, "step": 8238 }, { "epoch": 0.3414563388453728, "grad_norm": 0.397664338350296, "learning_rate": 3.2929255253015048e-06, "loss": 0.6785, "step": 8239 }, { "epoch": 0.3414977827510465, "grad_norm": 0.42914676666259766, "learning_rate": 3.2927183057731366e-06, "loss": 0.6448, "step": 8240 }, { "epoch": 0.34153922665672015, "grad_norm": 0.41293951869010925, "learning_rate": 3.292511086244768e-06, "loss": 0.7228, "step": 8241 }, { "epoch": 0.3415806705623938, "grad_norm": 0.393697589635849, "learning_rate": 3.2923038667163998e-06, "loss": 0.6726, "step": 8242 }, { "epoch": 0.34162211446806745, "grad_norm": 0.4054800868034363, "learning_rate": 3.292096647188031e-06, "loss": 0.6938, "step": 8243 }, { "epoch": 0.3416635583737411, "grad_norm": 0.4179021716117859, "learning_rate": 3.291889427659663e-06, "loss": 0.707, "step": 8244 }, { "epoch": 
0.3417050022794148, "grad_norm": 0.39516451954841614, "learning_rate": 3.2916822081312944e-06, "loss": 0.7258, "step": 8245 }, { "epoch": 0.3417464461850885, "grad_norm": 0.4341980814933777, "learning_rate": 3.291474988602926e-06, "loss": 0.7153, "step": 8246 }, { "epoch": 0.34178789009076216, "grad_norm": 0.42921265959739685, "learning_rate": 3.2912677690745576e-06, "loss": 0.7168, "step": 8247 }, { "epoch": 0.34182933399643584, "grad_norm": 0.4249199628829956, "learning_rate": 3.2910605495461894e-06, "loss": 0.6873, "step": 8248 }, { "epoch": 0.3418707779021095, "grad_norm": 0.407155841588974, "learning_rate": 3.290853330017821e-06, "loss": 0.7266, "step": 8249 }, { "epoch": 0.3419122218077832, "grad_norm": 0.4311218559741974, "learning_rate": 3.2906461104894526e-06, "loss": 0.7786, "step": 8250 }, { "epoch": 0.3419536657134568, "grad_norm": 0.4178151488304138, "learning_rate": 3.2904388909610848e-06, "loss": 0.7017, "step": 8251 }, { "epoch": 0.3419951096191305, "grad_norm": 0.4007262587547302, "learning_rate": 3.2902316714327157e-06, "loss": 0.6318, "step": 8252 }, { "epoch": 0.34203655352480417, "grad_norm": 0.43141937255859375, "learning_rate": 3.290024451904348e-06, "loss": 0.709, "step": 8253 }, { "epoch": 0.34207799743047784, "grad_norm": 0.3759288787841797, "learning_rate": 3.2898172323759794e-06, "loss": 0.6646, "step": 8254 }, { "epoch": 0.3421194413361515, "grad_norm": 0.4037187993526459, "learning_rate": 3.289610012847611e-06, "loss": 0.7047, "step": 8255 }, { "epoch": 0.3421608852418252, "grad_norm": 0.4008956849575043, "learning_rate": 3.289402793319243e-06, "loss": 0.6821, "step": 8256 }, { "epoch": 0.3422023291474989, "grad_norm": 0.4044203758239746, "learning_rate": 3.2891955737908744e-06, "loss": 0.699, "step": 8257 }, { "epoch": 0.34224377305317255, "grad_norm": 0.45174455642700195, "learning_rate": 3.288988354262506e-06, "loss": 0.8027, "step": 8258 }, { "epoch": 0.34228521695884623, "grad_norm": 0.41861510276794434, "learning_rate": 
3.2887811347341376e-06, "loss": 0.7006, "step": 8259 }, { "epoch": 0.34232666086451985, "grad_norm": 0.4350740313529968, "learning_rate": 3.2885739152057694e-06, "loss": 0.7085, "step": 8260 }, { "epoch": 0.3423681047701935, "grad_norm": 0.47519373893737793, "learning_rate": 3.2883666956774008e-06, "loss": 0.7324, "step": 8261 }, { "epoch": 0.3424095486758672, "grad_norm": 0.38128402829170227, "learning_rate": 3.2881594761490326e-06, "loss": 0.6436, "step": 8262 }, { "epoch": 0.3424509925815409, "grad_norm": 0.42504337430000305, "learning_rate": 3.287952256620664e-06, "loss": 0.6771, "step": 8263 }, { "epoch": 0.34249243648721456, "grad_norm": 0.4140465557575226, "learning_rate": 3.2877450370922958e-06, "loss": 0.6879, "step": 8264 }, { "epoch": 0.34253388039288823, "grad_norm": 0.39626458287239075, "learning_rate": 3.2875378175639276e-06, "loss": 0.6776, "step": 8265 }, { "epoch": 0.3425753242985619, "grad_norm": 0.4329952895641327, "learning_rate": 3.287330598035559e-06, "loss": 0.6667, "step": 8266 }, { "epoch": 0.3426167682042356, "grad_norm": 0.3829418122768402, "learning_rate": 3.287123378507191e-06, "loss": 0.6383, "step": 8267 }, { "epoch": 0.3426582121099092, "grad_norm": 0.3914972245693207, "learning_rate": 3.286916158978822e-06, "loss": 0.6609, "step": 8268 }, { "epoch": 0.3426996560155829, "grad_norm": 0.4153934419155121, "learning_rate": 3.2867089394504544e-06, "loss": 0.6263, "step": 8269 }, { "epoch": 0.34274109992125656, "grad_norm": 0.4246017634868622, "learning_rate": 3.2865017199220854e-06, "loss": 0.6985, "step": 8270 }, { "epoch": 0.34278254382693024, "grad_norm": 0.453042209148407, "learning_rate": 3.2862945003937176e-06, "loss": 0.7169, "step": 8271 }, { "epoch": 0.3428239877326039, "grad_norm": 0.4430462121963501, "learning_rate": 3.286087280865349e-06, "loss": 0.7234, "step": 8272 }, { "epoch": 0.3428654316382776, "grad_norm": 0.3992575705051422, "learning_rate": 3.2858800613369808e-06, "loss": 0.6934, "step": 8273 }, { "epoch": 
0.3429068755439513, "grad_norm": 0.4461498558521271, "learning_rate": 3.2856728418086126e-06, "loss": 0.6786, "step": 8274 }, { "epoch": 0.34294831944962495, "grad_norm": 0.4139283001422882, "learning_rate": 3.285465622280244e-06, "loss": 0.696, "step": 8275 }, { "epoch": 0.3429897633552986, "grad_norm": 0.41413089632987976, "learning_rate": 3.2852584027518758e-06, "loss": 0.7145, "step": 8276 }, { "epoch": 0.34303120726097225, "grad_norm": 0.4272351562976837, "learning_rate": 3.285051183223507e-06, "loss": 0.6809, "step": 8277 }, { "epoch": 0.3430726511666459, "grad_norm": 0.3883785903453827, "learning_rate": 3.284843963695139e-06, "loss": 0.6477, "step": 8278 }, { "epoch": 0.3431140950723196, "grad_norm": 0.4192541241645813, "learning_rate": 3.2846367441667704e-06, "loss": 0.7327, "step": 8279 }, { "epoch": 0.3431555389779933, "grad_norm": 0.3897496461868286, "learning_rate": 3.284429524638402e-06, "loss": 0.7375, "step": 8280 }, { "epoch": 0.34319698288366696, "grad_norm": 0.42063048481941223, "learning_rate": 3.2842223051100336e-06, "loss": 0.6785, "step": 8281 }, { "epoch": 0.34323842678934063, "grad_norm": 0.4189550280570984, "learning_rate": 3.2840150855816654e-06, "loss": 0.7202, "step": 8282 }, { "epoch": 0.3432798706950143, "grad_norm": 0.4480151832103729, "learning_rate": 3.283807866053297e-06, "loss": 0.7236, "step": 8283 }, { "epoch": 0.343321314600688, "grad_norm": 0.4498073160648346, "learning_rate": 3.2836006465249286e-06, "loss": 0.7314, "step": 8284 }, { "epoch": 0.34336275850636166, "grad_norm": 0.4089435935020447, "learning_rate": 3.283393426996561e-06, "loss": 0.7251, "step": 8285 }, { "epoch": 0.3434042024120353, "grad_norm": 0.37917083501815796, "learning_rate": 3.2831862074681918e-06, "loss": 0.7032, "step": 8286 }, { "epoch": 0.34344564631770896, "grad_norm": 0.43270036578178406, "learning_rate": 3.282978987939824e-06, "loss": 0.7083, "step": 8287 }, { "epoch": 0.34348709022338264, "grad_norm": 0.43415552377700806, "learning_rate": 
3.2827717684114554e-06, "loss": 0.6958, "step": 8288 }, { "epoch": 0.3435285341290563, "grad_norm": 0.4249191880226135, "learning_rate": 3.282564548883087e-06, "loss": 0.6906, "step": 8289 }, { "epoch": 0.34356997803473, "grad_norm": 0.44363370537757874, "learning_rate": 3.2823573293547186e-06, "loss": 0.7551, "step": 8290 }, { "epoch": 0.34361142194040367, "grad_norm": 0.4261104166507721, "learning_rate": 3.2821501098263504e-06, "loss": 0.7084, "step": 8291 }, { "epoch": 0.34365286584607735, "grad_norm": 0.3948059678077698, "learning_rate": 3.281942890297982e-06, "loss": 0.6882, "step": 8292 }, { "epoch": 0.343694309751751, "grad_norm": 0.4130396246910095, "learning_rate": 3.2817356707696136e-06, "loss": 0.7051, "step": 8293 }, { "epoch": 0.3437357536574247, "grad_norm": 0.39997172355651855, "learning_rate": 3.2815284512412454e-06, "loss": 0.6543, "step": 8294 }, { "epoch": 0.3437771975630983, "grad_norm": 0.420317679643631, "learning_rate": 3.2813212317128768e-06, "loss": 0.6746, "step": 8295 }, { "epoch": 0.343818641468772, "grad_norm": 0.3757094442844391, "learning_rate": 3.2811140121845086e-06, "loss": 0.6511, "step": 8296 }, { "epoch": 0.3438600853744457, "grad_norm": 0.4113697409629822, "learning_rate": 3.28090679265614e-06, "loss": 0.6956, "step": 8297 }, { "epoch": 0.34390152928011936, "grad_norm": 0.4157564043998718, "learning_rate": 3.2806995731277718e-06, "loss": 0.752, "step": 8298 }, { "epoch": 0.34394297318579303, "grad_norm": 0.4178808331489563, "learning_rate": 3.280492353599403e-06, "loss": 0.728, "step": 8299 }, { "epoch": 0.3439844170914667, "grad_norm": 0.46545225381851196, "learning_rate": 3.280285134071035e-06, "loss": 0.741, "step": 8300 }, { "epoch": 0.3440258609971404, "grad_norm": 0.42063915729522705, "learning_rate": 3.280077914542667e-06, "loss": 0.6644, "step": 8301 }, { "epoch": 0.34406730490281406, "grad_norm": 0.41480180621147156, "learning_rate": 3.279870695014298e-06, "loss": 0.717, "step": 8302 }, { "epoch": 0.3441087488084877, 
"grad_norm": 0.42868340015411377, "learning_rate": 3.2796634754859304e-06, "loss": 0.6947, "step": 8303 }, { "epoch": 0.34415019271416136, "grad_norm": 0.4138825237751007, "learning_rate": 3.2794562559575614e-06, "loss": 0.7395, "step": 8304 }, { "epoch": 0.34419163661983504, "grad_norm": 0.431780070066452, "learning_rate": 3.2792490364291936e-06, "loss": 0.6931, "step": 8305 }, { "epoch": 0.3442330805255087, "grad_norm": 0.4359157085418701, "learning_rate": 3.279041816900825e-06, "loss": 0.6797, "step": 8306 }, { "epoch": 0.3442745244311824, "grad_norm": 0.4280223548412323, "learning_rate": 3.2788345973724568e-06, "loss": 0.6849, "step": 8307 }, { "epoch": 0.34431596833685607, "grad_norm": 0.4720728397369385, "learning_rate": 3.278627377844088e-06, "loss": 0.7041, "step": 8308 }, { "epoch": 0.34435741224252975, "grad_norm": 0.405681312084198, "learning_rate": 3.27842015831572e-06, "loss": 0.6765, "step": 8309 }, { "epoch": 0.3443988561482034, "grad_norm": 0.40840592980384827, "learning_rate": 3.2782129387873518e-06, "loss": 0.6729, "step": 8310 }, { "epoch": 0.3444403000538771, "grad_norm": 0.42388543486595154, "learning_rate": 3.278005719258983e-06, "loss": 0.6259, "step": 8311 }, { "epoch": 0.3444817439595507, "grad_norm": 0.39172250032424927, "learning_rate": 3.277798499730615e-06, "loss": 0.6824, "step": 8312 }, { "epoch": 0.3445231878652244, "grad_norm": 0.4206143617630005, "learning_rate": 3.2775912802022464e-06, "loss": 0.6844, "step": 8313 }, { "epoch": 0.3445646317708981, "grad_norm": 0.44540008902549744, "learning_rate": 3.277384060673878e-06, "loss": 0.6616, "step": 8314 }, { "epoch": 0.34460607567657175, "grad_norm": 0.3946239948272705, "learning_rate": 3.2771768411455096e-06, "loss": 0.6799, "step": 8315 }, { "epoch": 0.34464751958224543, "grad_norm": 0.390486478805542, "learning_rate": 3.2769696216171414e-06, "loss": 0.7371, "step": 8316 }, { "epoch": 0.3446889634879191, "grad_norm": 0.41588276624679565, "learning_rate": 3.276762402088773e-06, 
"loss": 0.707, "step": 8317 }, { "epoch": 0.3447304073935928, "grad_norm": 0.3903566002845764, "learning_rate": 3.2765551825604046e-06, "loss": 0.6761, "step": 8318 }, { "epoch": 0.34477185129926646, "grad_norm": 0.44057101011276245, "learning_rate": 3.276347963032037e-06, "loss": 0.7183, "step": 8319 }, { "epoch": 0.34481329520494014, "grad_norm": 0.4107791781425476, "learning_rate": 3.2761407435036678e-06, "loss": 0.6859, "step": 8320 }, { "epoch": 0.34485473911061376, "grad_norm": 0.41709867119789124, "learning_rate": 3.2759335239753e-06, "loss": 0.7407, "step": 8321 }, { "epoch": 0.34489618301628744, "grad_norm": 0.4087441861629486, "learning_rate": 3.2757263044469314e-06, "loss": 0.6833, "step": 8322 }, { "epoch": 0.3449376269219611, "grad_norm": 0.40378430485725403, "learning_rate": 3.275519084918563e-06, "loss": 0.7603, "step": 8323 }, { "epoch": 0.3449790708276348, "grad_norm": 0.4217969477176666, "learning_rate": 3.2753118653901946e-06, "loss": 0.7312, "step": 8324 }, { "epoch": 0.34502051473330847, "grad_norm": 0.42874574661254883, "learning_rate": 3.2751046458618264e-06, "loss": 0.7067, "step": 8325 }, { "epoch": 0.34506195863898215, "grad_norm": 0.44342219829559326, "learning_rate": 3.274897426333458e-06, "loss": 0.752, "step": 8326 }, { "epoch": 0.3451034025446558, "grad_norm": 0.3970843553543091, "learning_rate": 3.2746902068050896e-06, "loss": 0.6833, "step": 8327 }, { "epoch": 0.3451448464503295, "grad_norm": 0.42490053176879883, "learning_rate": 3.2744829872767214e-06, "loss": 0.7639, "step": 8328 }, { "epoch": 0.3451862903560031, "grad_norm": 0.42897534370422363, "learning_rate": 3.2742757677483528e-06, "loss": 0.6715, "step": 8329 }, { "epoch": 0.3452277342616768, "grad_norm": 0.357544869184494, "learning_rate": 3.2740685482199846e-06, "loss": 0.6862, "step": 8330 }, { "epoch": 0.3452691781673505, "grad_norm": 0.4055792987346649, "learning_rate": 3.273861328691616e-06, "loss": 0.696, "step": 8331 }, { "epoch": 0.34531062207302415, "grad_norm": 
0.421024352312088, "learning_rate": 3.2736541091632478e-06, "loss": 0.708, "step": 8332 }, { "epoch": 0.34535206597869783, "grad_norm": 0.40851712226867676, "learning_rate": 3.273446889634879e-06, "loss": 0.699, "step": 8333 }, { "epoch": 0.3453935098843715, "grad_norm": 0.4174291789531708, "learning_rate": 3.273239670106511e-06, "loss": 0.6897, "step": 8334 }, { "epoch": 0.3454349537900452, "grad_norm": 0.3878898322582245, "learning_rate": 3.273032450578143e-06, "loss": 0.7041, "step": 8335 }, { "epoch": 0.34547639769571886, "grad_norm": 0.43906232714653015, "learning_rate": 3.272825231049774e-06, "loss": 0.6943, "step": 8336 }, { "epoch": 0.34551784160139254, "grad_norm": 0.3986448049545288, "learning_rate": 3.2726180115214064e-06, "loss": 0.6846, "step": 8337 }, { "epoch": 0.34555928550706616, "grad_norm": 0.41709455847740173, "learning_rate": 3.2724107919930374e-06, "loss": 0.6897, "step": 8338 }, { "epoch": 0.34560072941273984, "grad_norm": 0.4260007441043854, "learning_rate": 3.2722035724646696e-06, "loss": 0.7209, "step": 8339 }, { "epoch": 0.3456421733184135, "grad_norm": 0.41488856077194214, "learning_rate": 3.271996352936301e-06, "loss": 0.675, "step": 8340 }, { "epoch": 0.3456836172240872, "grad_norm": 0.43570229411125183, "learning_rate": 3.271789133407933e-06, "loss": 0.7344, "step": 8341 }, { "epoch": 0.34572506112976087, "grad_norm": 0.420254647731781, "learning_rate": 3.271581913879564e-06, "loss": 0.6814, "step": 8342 }, { "epoch": 0.34576650503543455, "grad_norm": 0.40473467111587524, "learning_rate": 3.271374694351196e-06, "loss": 0.6948, "step": 8343 }, { "epoch": 0.3458079489411082, "grad_norm": 0.3946726620197296, "learning_rate": 3.271167474822828e-06, "loss": 0.6949, "step": 8344 }, { "epoch": 0.3458493928467819, "grad_norm": 0.44714558124542236, "learning_rate": 3.270960255294459e-06, "loss": 0.7278, "step": 8345 }, { "epoch": 0.3458908367524556, "grad_norm": 0.41146141290664673, "learning_rate": 3.270753035766091e-06, "loss": 0.7119, 
"step": 8346 }, { "epoch": 0.3459322806581292, "grad_norm": 0.4135577082633972, "learning_rate": 3.2705458162377224e-06, "loss": 0.7122, "step": 8347 }, { "epoch": 0.3459737245638029, "grad_norm": 0.4159775376319885, "learning_rate": 3.270338596709354e-06, "loss": 0.719, "step": 8348 }, { "epoch": 0.34601516846947655, "grad_norm": 0.43465274572372437, "learning_rate": 3.2701313771809856e-06, "loss": 0.7042, "step": 8349 }, { "epoch": 0.34605661237515023, "grad_norm": 0.4419252872467041, "learning_rate": 3.2699241576526174e-06, "loss": 0.7295, "step": 8350 }, { "epoch": 0.3460980562808239, "grad_norm": 0.40720805525779724, "learning_rate": 3.2697169381242488e-06, "loss": 0.7052, "step": 8351 }, { "epoch": 0.3461395001864976, "grad_norm": 0.4039510488510132, "learning_rate": 3.2695097185958806e-06, "loss": 0.7197, "step": 8352 }, { "epoch": 0.34618094409217126, "grad_norm": 0.3667064607143402, "learning_rate": 3.269302499067513e-06, "loss": 0.6331, "step": 8353 }, { "epoch": 0.34622238799784494, "grad_norm": 0.4098520576953888, "learning_rate": 3.2690952795391438e-06, "loss": 0.7036, "step": 8354 }, { "epoch": 0.3462638319035186, "grad_norm": 0.39566749334335327, "learning_rate": 3.268888060010776e-06, "loss": 0.7028, "step": 8355 }, { "epoch": 0.34630527580919224, "grad_norm": 0.4372621178627014, "learning_rate": 3.2686808404824074e-06, "loss": 0.7126, "step": 8356 }, { "epoch": 0.3463467197148659, "grad_norm": 0.41523173451423645, "learning_rate": 3.268473620954039e-06, "loss": 0.6899, "step": 8357 }, { "epoch": 0.3463881636205396, "grad_norm": 0.3875006139278412, "learning_rate": 3.2682664014256706e-06, "loss": 0.6699, "step": 8358 }, { "epoch": 0.34642960752621327, "grad_norm": 0.4124395251274109, "learning_rate": 3.2680591818973024e-06, "loss": 0.6887, "step": 8359 }, { "epoch": 0.34647105143188694, "grad_norm": 0.422855019569397, "learning_rate": 3.2678519623689338e-06, "loss": 0.7189, "step": 8360 }, { "epoch": 0.3465124953375606, "grad_norm": 
0.4258534014225006, "learning_rate": 3.2676447428405656e-06, "loss": 0.7263, "step": 8361 }, { "epoch": 0.3465539392432343, "grad_norm": 0.40111246705055237, "learning_rate": 3.2674375233121974e-06, "loss": 0.7114, "step": 8362 }, { "epoch": 0.346595383148908, "grad_norm": 0.38914307951927185, "learning_rate": 3.2672303037838288e-06, "loss": 0.6735, "step": 8363 }, { "epoch": 0.3466368270545816, "grad_norm": 0.40925362706184387, "learning_rate": 3.2670230842554606e-06, "loss": 0.6815, "step": 8364 }, { "epoch": 0.3466782709602553, "grad_norm": 0.4222832918167114, "learning_rate": 3.266815864727092e-06, "loss": 0.6906, "step": 8365 }, { "epoch": 0.34671971486592895, "grad_norm": 0.43050020933151245, "learning_rate": 3.2666086451987238e-06, "loss": 0.6782, "step": 8366 }, { "epoch": 0.34676115877160263, "grad_norm": 0.43644094467163086, "learning_rate": 3.266401425670355e-06, "loss": 0.7517, "step": 8367 }, { "epoch": 0.3468026026772763, "grad_norm": 0.38347160816192627, "learning_rate": 3.266194206141987e-06, "loss": 0.6982, "step": 8368 }, { "epoch": 0.34684404658295, "grad_norm": 0.4532240331172943, "learning_rate": 3.2659869866136184e-06, "loss": 0.7197, "step": 8369 }, { "epoch": 0.34688549048862366, "grad_norm": 0.41144469380378723, "learning_rate": 3.26577976708525e-06, "loss": 0.6802, "step": 8370 }, { "epoch": 0.34692693439429734, "grad_norm": 0.4099407494068146, "learning_rate": 3.2655725475568824e-06, "loss": 0.7197, "step": 8371 }, { "epoch": 0.346968378299971, "grad_norm": 0.4614093005657196, "learning_rate": 3.265365328028514e-06, "loss": 0.7495, "step": 8372 }, { "epoch": 0.34700982220564464, "grad_norm": 0.41772952675819397, "learning_rate": 3.2651581085001456e-06, "loss": 0.7375, "step": 8373 }, { "epoch": 0.3470512661113183, "grad_norm": 0.401076078414917, "learning_rate": 3.264950888971777e-06, "loss": 0.614, "step": 8374 }, { "epoch": 0.347092710016992, "grad_norm": 0.41139674186706543, "learning_rate": 3.264743669443409e-06, "loss": 0.7246, 
"step": 8375 }, { "epoch": 0.34713415392266567, "grad_norm": 0.40465858578681946, "learning_rate": 3.26453644991504e-06, "loss": 0.696, "step": 8376 }, { "epoch": 0.34717559782833934, "grad_norm": 0.41757339239120483, "learning_rate": 3.264329230386672e-06, "loss": 0.7158, "step": 8377 }, { "epoch": 0.347217041734013, "grad_norm": 0.4280861020088196, "learning_rate": 3.264122010858304e-06, "loss": 0.708, "step": 8378 }, { "epoch": 0.3472584856396867, "grad_norm": 0.42357343435287476, "learning_rate": 3.263914791329935e-06, "loss": 0.7068, "step": 8379 }, { "epoch": 0.3472999295453604, "grad_norm": 0.3917806148529053, "learning_rate": 3.263707571801567e-06, "loss": 0.6782, "step": 8380 }, { "epoch": 0.34734137345103405, "grad_norm": 0.4386560320854187, "learning_rate": 3.2635003522731984e-06, "loss": 0.7183, "step": 8381 }, { "epoch": 0.3473828173567077, "grad_norm": 0.4434758126735687, "learning_rate": 3.26329313274483e-06, "loss": 0.7498, "step": 8382 }, { "epoch": 0.34742426126238135, "grad_norm": 0.4160711169242859, "learning_rate": 3.2630859132164616e-06, "loss": 0.6963, "step": 8383 }, { "epoch": 0.34746570516805503, "grad_norm": 0.4238876402378082, "learning_rate": 3.2628786936880934e-06, "loss": 0.7671, "step": 8384 }, { "epoch": 0.3475071490737287, "grad_norm": 0.4074348509311676, "learning_rate": 3.2626714741597248e-06, "loss": 0.74, "step": 8385 }, { "epoch": 0.3475485929794024, "grad_norm": 0.4182678163051605, "learning_rate": 3.2624642546313566e-06, "loss": 0.7551, "step": 8386 }, { "epoch": 0.34759003688507606, "grad_norm": 0.40148499608039856, "learning_rate": 3.262257035102989e-06, "loss": 0.7255, "step": 8387 }, { "epoch": 0.34763148079074974, "grad_norm": 0.39570698142051697, "learning_rate": 3.2620498155746198e-06, "loss": 0.6349, "step": 8388 }, { "epoch": 0.3476729246964234, "grad_norm": 0.44137296080589294, "learning_rate": 3.261842596046252e-06, "loss": 0.66, "step": 8389 }, { "epoch": 0.34771436860209703, "grad_norm": 0.38619688153266907, 
"learning_rate": 3.2616353765178834e-06, "loss": 0.6779, "step": 8390 }, { "epoch": 0.3477558125077707, "grad_norm": 0.3878011405467987, "learning_rate": 3.261428156989515e-06, "loss": 0.6719, "step": 8391 }, { "epoch": 0.3477972564134444, "grad_norm": 0.4060506522655487, "learning_rate": 3.2612209374611466e-06, "loss": 0.6801, "step": 8392 }, { "epoch": 0.34783870031911807, "grad_norm": 0.47176098823547363, "learning_rate": 3.2610137179327784e-06, "loss": 0.7344, "step": 8393 }, { "epoch": 0.34788014422479174, "grad_norm": 0.4307069778442383, "learning_rate": 3.2608064984044098e-06, "loss": 0.6584, "step": 8394 }, { "epoch": 0.3479215881304654, "grad_norm": 0.39372867345809937, "learning_rate": 3.2605992788760416e-06, "loss": 0.7014, "step": 8395 }, { "epoch": 0.3479630320361391, "grad_norm": 0.42050257325172424, "learning_rate": 3.2603920593476734e-06, "loss": 0.666, "step": 8396 }, { "epoch": 0.3480044759418128, "grad_norm": 0.3794766068458557, "learning_rate": 3.2601848398193048e-06, "loss": 0.6598, "step": 8397 }, { "epoch": 0.34804591984748645, "grad_norm": 0.39259037375450134, "learning_rate": 3.2599776202909366e-06, "loss": 0.6631, "step": 8398 }, { "epoch": 0.3480873637531601, "grad_norm": 0.3963391184806824, "learning_rate": 3.259770400762568e-06, "loss": 0.6823, "step": 8399 }, { "epoch": 0.34812880765883375, "grad_norm": 0.4069724678993225, "learning_rate": 3.2595631812342e-06, "loss": 0.7026, "step": 8400 }, { "epoch": 0.3481702515645074, "grad_norm": 0.41394898295402527, "learning_rate": 3.259355961705831e-06, "loss": 0.7729, "step": 8401 }, { "epoch": 0.3482116954701811, "grad_norm": 0.4288797080516815, "learning_rate": 3.259148742177463e-06, "loss": 0.6653, "step": 8402 }, { "epoch": 0.3482531393758548, "grad_norm": 0.401050865650177, "learning_rate": 3.2589415226490944e-06, "loss": 0.6594, "step": 8403 }, { "epoch": 0.34829458328152846, "grad_norm": 0.40262600779533386, "learning_rate": 3.258734303120726e-06, "loss": 0.6937, "step": 8404 }, { 
"epoch": 0.34833602718720214, "grad_norm": 0.3903612792491913, "learning_rate": 3.2585270835923584e-06, "loss": 0.6422, "step": 8405 }, { "epoch": 0.3483774710928758, "grad_norm": 0.44177308678627014, "learning_rate": 3.25831986406399e-06, "loss": 0.71, "step": 8406 }, { "epoch": 0.3484189149985495, "grad_norm": 0.4801948368549347, "learning_rate": 3.2581126445356216e-06, "loss": 0.6855, "step": 8407 }, { "epoch": 0.3484603589042231, "grad_norm": 0.4117943048477173, "learning_rate": 3.257905425007253e-06, "loss": 0.6697, "step": 8408 }, { "epoch": 0.3485018028098968, "grad_norm": 0.40243151783943176, "learning_rate": 3.257698205478885e-06, "loss": 0.7283, "step": 8409 }, { "epoch": 0.34854324671557046, "grad_norm": 0.4275542199611664, "learning_rate": 3.257490985950516e-06, "loss": 0.7394, "step": 8410 }, { "epoch": 0.34858469062124414, "grad_norm": 0.4015789330005646, "learning_rate": 3.257283766422148e-06, "loss": 0.6936, "step": 8411 }, { "epoch": 0.3486261345269178, "grad_norm": 0.4187641143798828, "learning_rate": 3.2570765468937794e-06, "loss": 0.6703, "step": 8412 }, { "epoch": 0.3486675784325915, "grad_norm": 0.43096086382865906, "learning_rate": 3.256869327365411e-06, "loss": 0.7397, "step": 8413 }, { "epoch": 0.3487090223382652, "grad_norm": 0.39312028884887695, "learning_rate": 3.256662107837043e-06, "loss": 0.7458, "step": 8414 }, { "epoch": 0.34875046624393885, "grad_norm": 0.44121694564819336, "learning_rate": 3.2564548883086744e-06, "loss": 0.6812, "step": 8415 }, { "epoch": 0.34879191014961247, "grad_norm": 0.42621079087257385, "learning_rate": 3.256247668780306e-06, "loss": 0.6949, "step": 8416 }, { "epoch": 0.34883335405528615, "grad_norm": 0.4094623029232025, "learning_rate": 3.2560404492519376e-06, "loss": 0.7236, "step": 8417 }, { "epoch": 0.3488747979609598, "grad_norm": 0.42307811975479126, "learning_rate": 3.2558332297235694e-06, "loss": 0.7197, "step": 8418 }, { "epoch": 0.3489162418666335, "grad_norm": 0.4493318796157837, "learning_rate": 
3.2556260101952008e-06, "loss": 0.7439, "step": 8419 }, { "epoch": 0.3489576857723072, "grad_norm": 0.4237443804740906, "learning_rate": 3.2554187906668326e-06, "loss": 0.7102, "step": 8420 }, { "epoch": 0.34899912967798086, "grad_norm": 0.43112456798553467, "learning_rate": 3.255211571138464e-06, "loss": 0.7421, "step": 8421 }, { "epoch": 0.34904057358365453, "grad_norm": 0.42541006207466125, "learning_rate": 3.2550043516100958e-06, "loss": 0.7278, "step": 8422 }, { "epoch": 0.3490820174893282, "grad_norm": 0.3866768777370453, "learning_rate": 3.254797132081728e-06, "loss": 0.6809, "step": 8423 }, { "epoch": 0.3491234613950019, "grad_norm": 0.4028589725494385, "learning_rate": 3.2545899125533594e-06, "loss": 0.6862, "step": 8424 }, { "epoch": 0.3491649053006755, "grad_norm": 0.4218546748161316, "learning_rate": 3.254382693024991e-06, "loss": 0.6948, "step": 8425 }, { "epoch": 0.3492063492063492, "grad_norm": 0.43646878004074097, "learning_rate": 3.2541754734966226e-06, "loss": 0.7109, "step": 8426 }, { "epoch": 0.34924779311202286, "grad_norm": 0.39836540818214417, "learning_rate": 3.2539682539682544e-06, "loss": 0.7147, "step": 8427 }, { "epoch": 0.34928923701769654, "grad_norm": 0.40244823694229126, "learning_rate": 3.2537610344398858e-06, "loss": 0.6931, "step": 8428 }, { "epoch": 0.3493306809233702, "grad_norm": 0.39114394783973694, "learning_rate": 3.2535538149115176e-06, "loss": 0.7086, "step": 8429 }, { "epoch": 0.3493721248290439, "grad_norm": 0.4192892014980316, "learning_rate": 3.253346595383149e-06, "loss": 0.7026, "step": 8430 }, { "epoch": 0.3494135687347176, "grad_norm": 0.40353110432624817, "learning_rate": 3.253139375854781e-06, "loss": 0.7236, "step": 8431 }, { "epoch": 0.34945501264039125, "grad_norm": 0.42545658349990845, "learning_rate": 3.2529321563264126e-06, "loss": 0.7097, "step": 8432 }, { "epoch": 0.3494964565460649, "grad_norm": 0.4859357178211212, "learning_rate": 3.252724936798044e-06, "loss": 0.7271, "step": 8433 }, { "epoch": 
0.34953790045173855, "grad_norm": 0.398715078830719, "learning_rate": 3.252517717269676e-06, "loss": 0.6731, "step": 8434 }, { "epoch": 0.3495793443574122, "grad_norm": 0.40515193343162537, "learning_rate": 3.252310497741307e-06, "loss": 0.6179, "step": 8435 }, { "epoch": 0.3496207882630859, "grad_norm": 0.39331522583961487, "learning_rate": 3.252103278212939e-06, "loss": 0.6698, "step": 8436 }, { "epoch": 0.3496622321687596, "grad_norm": 0.4430435299873352, "learning_rate": 3.2518960586845704e-06, "loss": 0.7278, "step": 8437 }, { "epoch": 0.34970367607443326, "grad_norm": 0.4872400462627411, "learning_rate": 3.251688839156202e-06, "loss": 0.7754, "step": 8438 }, { "epoch": 0.34974511998010693, "grad_norm": 0.40427038073539734, "learning_rate": 3.2514816196278344e-06, "loss": 0.7258, "step": 8439 }, { "epoch": 0.3497865638857806, "grad_norm": 0.403890997171402, "learning_rate": 3.251274400099466e-06, "loss": 0.6477, "step": 8440 }, { "epoch": 0.3498280077914543, "grad_norm": 0.3872579038143158, "learning_rate": 3.2510671805710976e-06, "loss": 0.6703, "step": 8441 }, { "epoch": 0.34986945169712796, "grad_norm": 0.37973129749298096, "learning_rate": 3.250859961042729e-06, "loss": 0.6924, "step": 8442 }, { "epoch": 0.3499108956028016, "grad_norm": 0.42740708589553833, "learning_rate": 3.250652741514361e-06, "loss": 0.7212, "step": 8443 }, { "epoch": 0.34995233950847526, "grad_norm": 0.4297677278518677, "learning_rate": 3.250445521985992e-06, "loss": 0.72, "step": 8444 }, { "epoch": 0.34999378341414894, "grad_norm": 0.4436231553554535, "learning_rate": 3.250238302457624e-06, "loss": 0.7188, "step": 8445 }, { "epoch": 0.3500352273198226, "grad_norm": 0.4219163656234741, "learning_rate": 3.2500310829292554e-06, "loss": 0.7209, "step": 8446 }, { "epoch": 0.3500766712254963, "grad_norm": 0.4114866256713867, "learning_rate": 3.249823863400887e-06, "loss": 0.666, "step": 8447 }, { "epoch": 0.35011811513116997, "grad_norm": 0.4007662534713745, "learning_rate": 
3.249616643872519e-06, "loss": 0.7061, "step": 8448 }, { "epoch": 0.35015955903684365, "grad_norm": 0.41682374477386475, "learning_rate": 3.2494094243441504e-06, "loss": 0.6858, "step": 8449 }, { "epoch": 0.3502010029425173, "grad_norm": 0.4100598394870758, "learning_rate": 3.249202204815782e-06, "loss": 0.718, "step": 8450 }, { "epoch": 0.35024244684819095, "grad_norm": 0.38997456431388855, "learning_rate": 3.2489949852874136e-06, "loss": 0.6973, "step": 8451 }, { "epoch": 0.3502838907538646, "grad_norm": 0.38735389709472656, "learning_rate": 3.2487877657590454e-06, "loss": 0.6819, "step": 8452 }, { "epoch": 0.3503253346595383, "grad_norm": 0.42380228638648987, "learning_rate": 3.2485805462306768e-06, "loss": 0.7572, "step": 8453 }, { "epoch": 0.350366778565212, "grad_norm": 0.40912526845932007, "learning_rate": 3.2483733267023086e-06, "loss": 0.6731, "step": 8454 }, { "epoch": 0.35040822247088566, "grad_norm": 0.39777782559394836, "learning_rate": 3.24816610717394e-06, "loss": 0.6802, "step": 8455 }, { "epoch": 0.35044966637655933, "grad_norm": 0.40776360034942627, "learning_rate": 3.2479588876455718e-06, "loss": 0.6731, "step": 8456 }, { "epoch": 0.350491110282233, "grad_norm": 0.43636733293533325, "learning_rate": 3.247751668117204e-06, "loss": 0.6462, "step": 8457 }, { "epoch": 0.3505325541879067, "grad_norm": 0.4054887890815735, "learning_rate": 3.2475444485888354e-06, "loss": 0.7383, "step": 8458 }, { "epoch": 0.35057399809358036, "grad_norm": 0.41566285490989685, "learning_rate": 3.2473372290604672e-06, "loss": 0.7163, "step": 8459 }, { "epoch": 0.350615441999254, "grad_norm": 0.4094327390193939, "learning_rate": 3.2471300095320986e-06, "loss": 0.7261, "step": 8460 }, { "epoch": 0.35065688590492766, "grad_norm": 0.40990570187568665, "learning_rate": 3.2469227900037304e-06, "loss": 0.657, "step": 8461 }, { "epoch": 0.35069832981060134, "grad_norm": 0.42081236839294434, "learning_rate": 3.246715570475362e-06, "loss": 0.7111, "step": 8462 }, { "epoch": 
0.350739773716275, "grad_norm": 0.425610214471817, "learning_rate": 3.2465083509469936e-06, "loss": 0.6892, "step": 8463 }, { "epoch": 0.3507812176219487, "grad_norm": 0.4749346673488617, "learning_rate": 3.246301131418625e-06, "loss": 0.6796, "step": 8464 }, { "epoch": 0.35082266152762237, "grad_norm": 0.3954388201236725, "learning_rate": 3.246093911890257e-06, "loss": 0.6506, "step": 8465 }, { "epoch": 0.35086410543329605, "grad_norm": 0.4021048843860626, "learning_rate": 3.2458866923618886e-06, "loss": 0.71, "step": 8466 }, { "epoch": 0.3509055493389697, "grad_norm": 0.41527876257896423, "learning_rate": 3.24567947283352e-06, "loss": 0.7135, "step": 8467 }, { "epoch": 0.3509469932446434, "grad_norm": 0.4124913811683655, "learning_rate": 3.245472253305152e-06, "loss": 0.7153, "step": 8468 }, { "epoch": 0.350988437150317, "grad_norm": 0.40956151485443115, "learning_rate": 3.245265033776783e-06, "loss": 0.708, "step": 8469 }, { "epoch": 0.3510298810559907, "grad_norm": 0.39759552478790283, "learning_rate": 3.245057814248415e-06, "loss": 0.6559, "step": 8470 }, { "epoch": 0.3510713249616644, "grad_norm": 0.40804463624954224, "learning_rate": 3.2448505947200464e-06, "loss": 0.7258, "step": 8471 }, { "epoch": 0.35111276886733805, "grad_norm": 0.3893631100654602, "learning_rate": 3.244643375191678e-06, "loss": 0.7445, "step": 8472 }, { "epoch": 0.35115421277301173, "grad_norm": 0.4003143012523651, "learning_rate": 3.2444361556633096e-06, "loss": 0.6803, "step": 8473 }, { "epoch": 0.3511956566786854, "grad_norm": 0.373654842376709, "learning_rate": 3.244228936134942e-06, "loss": 0.6555, "step": 8474 }, { "epoch": 0.3512371005843591, "grad_norm": 0.4565506875514984, "learning_rate": 3.2440217166065736e-06, "loss": 0.6962, "step": 8475 }, { "epoch": 0.35127854449003276, "grad_norm": 0.41194650530815125, "learning_rate": 3.243814497078205e-06, "loss": 0.7195, "step": 8476 }, { "epoch": 0.3513199883957064, "grad_norm": 0.40757957100868225, "learning_rate": 
3.243607277549837e-06, "loss": 0.717, "step": 8477 }, { "epoch": 0.35136143230138006, "grad_norm": 0.4242447018623352, "learning_rate": 3.243400058021468e-06, "loss": 0.7292, "step": 8478 }, { "epoch": 0.35140287620705374, "grad_norm": 0.3864867687225342, "learning_rate": 3.2431928384931e-06, "loss": 0.7109, "step": 8479 }, { "epoch": 0.3514443201127274, "grad_norm": 0.44120100140571594, "learning_rate": 3.2429856189647314e-06, "loss": 0.6929, "step": 8480 }, { "epoch": 0.3514857640184011, "grad_norm": 0.43264901638031006, "learning_rate": 3.242778399436363e-06, "loss": 0.744, "step": 8481 }, { "epoch": 0.35152720792407477, "grad_norm": 0.4202939569950104, "learning_rate": 3.2425711799079946e-06, "loss": 0.6807, "step": 8482 }, { "epoch": 0.35156865182974845, "grad_norm": 0.3774070143699646, "learning_rate": 3.2423639603796264e-06, "loss": 0.6539, "step": 8483 }, { "epoch": 0.3516100957354221, "grad_norm": 0.37859976291656494, "learning_rate": 3.242156740851258e-06, "loss": 0.6604, "step": 8484 }, { "epoch": 0.3516515396410958, "grad_norm": 0.40818437933921814, "learning_rate": 3.2419495213228896e-06, "loss": 0.7036, "step": 8485 }, { "epoch": 0.3516929835467694, "grad_norm": 0.4266854524612427, "learning_rate": 3.2417423017945214e-06, "loss": 0.6805, "step": 8486 }, { "epoch": 0.3517344274524431, "grad_norm": 0.41126519441604614, "learning_rate": 3.241535082266153e-06, "loss": 0.6772, "step": 8487 }, { "epoch": 0.3517758713581168, "grad_norm": 0.4264627695083618, "learning_rate": 3.2413278627377846e-06, "loss": 0.7246, "step": 8488 }, { "epoch": 0.35181731526379045, "grad_norm": 0.43510106205940247, "learning_rate": 3.241120643209416e-06, "loss": 0.6871, "step": 8489 }, { "epoch": 0.35185875916946413, "grad_norm": 0.4071009159088135, "learning_rate": 3.240913423681048e-06, "loss": 0.7305, "step": 8490 }, { "epoch": 0.3519002030751378, "grad_norm": 0.4376145899295807, "learning_rate": 3.24070620415268e-06, "loss": 0.7239, "step": 8491 }, { "epoch": 
0.3519416469808115, "grad_norm": 0.39137589931488037, "learning_rate": 3.2404989846243114e-06, "loss": 0.6686, "step": 8492 }, { "epoch": 0.35198309088648516, "grad_norm": 0.4495477080345154, "learning_rate": 3.2402917650959432e-06, "loss": 0.738, "step": 8493 }, { "epoch": 0.35202453479215884, "grad_norm": 0.4391402304172516, "learning_rate": 3.2400845455675746e-06, "loss": 0.7415, "step": 8494 }, { "epoch": 0.35206597869783246, "grad_norm": 0.43204179406166077, "learning_rate": 3.2398773260392064e-06, "loss": 0.7378, "step": 8495 }, { "epoch": 0.35210742260350614, "grad_norm": 0.4314158260822296, "learning_rate": 3.239670106510838e-06, "loss": 0.7002, "step": 8496 }, { "epoch": 0.3521488665091798, "grad_norm": 0.40520891547203064, "learning_rate": 3.2394628869824696e-06, "loss": 0.7117, "step": 8497 }, { "epoch": 0.3521903104148535, "grad_norm": 0.41262534260749817, "learning_rate": 3.239255667454101e-06, "loss": 0.723, "step": 8498 }, { "epoch": 0.35223175432052717, "grad_norm": 0.3972526490688324, "learning_rate": 3.239048447925733e-06, "loss": 0.7563, "step": 8499 }, { "epoch": 0.35227319822620085, "grad_norm": 0.41168370842933655, "learning_rate": 3.2388412283973646e-06, "loss": 0.7366, "step": 8500 }, { "epoch": 0.3523146421318745, "grad_norm": 0.3810071349143982, "learning_rate": 3.238634008868996e-06, "loss": 0.6591, "step": 8501 }, { "epoch": 0.3523560860375482, "grad_norm": 0.38791337609291077, "learning_rate": 3.238426789340628e-06, "loss": 0.6993, "step": 8502 }, { "epoch": 0.3523975299432219, "grad_norm": 0.42793065309524536, "learning_rate": 3.238219569812259e-06, "loss": 0.6914, "step": 8503 }, { "epoch": 0.3524389738488955, "grad_norm": 0.4107297360897064, "learning_rate": 3.238012350283891e-06, "loss": 0.7444, "step": 8504 }, { "epoch": 0.3524804177545692, "grad_norm": 0.3796500265598297, "learning_rate": 3.2378051307555224e-06, "loss": 0.6554, "step": 8505 }, { "epoch": 0.35252186166024285, "grad_norm": 0.4484458863735199, "learning_rate": 
3.237597911227154e-06, "loss": 0.7281, "step": 8506 }, { "epoch": 0.35256330556591653, "grad_norm": 0.42883116006851196, "learning_rate": 3.2373906916987856e-06, "loss": 0.7, "step": 8507 }, { "epoch": 0.3526047494715902, "grad_norm": 0.3968334197998047, "learning_rate": 3.237183472170418e-06, "loss": 0.7134, "step": 8508 }, { "epoch": 0.3526461933772639, "grad_norm": 0.4121285378932953, "learning_rate": 3.2369762526420496e-06, "loss": 0.6968, "step": 8509 }, { "epoch": 0.35268763728293756, "grad_norm": 0.39721494913101196, "learning_rate": 3.236769033113681e-06, "loss": 0.6797, "step": 8510 }, { "epoch": 0.35272908118861124, "grad_norm": 0.4493896961212158, "learning_rate": 3.236561813585313e-06, "loss": 0.6765, "step": 8511 }, { "epoch": 0.35277052509428486, "grad_norm": 0.4610176682472229, "learning_rate": 3.236354594056944e-06, "loss": 0.6783, "step": 8512 }, { "epoch": 0.35281196899995854, "grad_norm": 0.41171789169311523, "learning_rate": 3.236147374528576e-06, "loss": 0.7322, "step": 8513 }, { "epoch": 0.3528534129056322, "grad_norm": 0.4150131940841675, "learning_rate": 3.2359401550002074e-06, "loss": 0.6993, "step": 8514 }, { "epoch": 0.3528948568113059, "grad_norm": 0.41514599323272705, "learning_rate": 3.235732935471839e-06, "loss": 0.7449, "step": 8515 }, { "epoch": 0.35293630071697957, "grad_norm": 0.5728110074996948, "learning_rate": 3.2355257159434706e-06, "loss": 0.7488, "step": 8516 }, { "epoch": 0.35297774462265324, "grad_norm": 0.3947862982749939, "learning_rate": 3.2353184964151024e-06, "loss": 0.678, "step": 8517 }, { "epoch": 0.3530191885283269, "grad_norm": 0.4125240743160248, "learning_rate": 3.2351112768867342e-06, "loss": 0.6694, "step": 8518 }, { "epoch": 0.3530606324340006, "grad_norm": 0.437519371509552, "learning_rate": 3.2349040573583656e-06, "loss": 0.6956, "step": 8519 }, { "epoch": 0.3531020763396743, "grad_norm": 0.42778247594833374, "learning_rate": 3.2346968378299974e-06, "loss": 0.7063, "step": 8520 }, { "epoch": 
0.3531435202453479, "grad_norm": 0.40842971205711365, "learning_rate": 3.234489618301629e-06, "loss": 0.7034, "step": 8521 }, { "epoch": 0.3531849641510216, "grad_norm": 0.4150451421737671, "learning_rate": 3.2342823987732606e-06, "loss": 0.6895, "step": 8522 }, { "epoch": 0.35322640805669525, "grad_norm": 0.41860222816467285, "learning_rate": 3.234075179244892e-06, "loss": 0.7441, "step": 8523 }, { "epoch": 0.35326785196236893, "grad_norm": 0.4254886209964752, "learning_rate": 3.233867959716524e-06, "loss": 0.689, "step": 8524 }, { "epoch": 0.3533092958680426, "grad_norm": 0.3855765163898468, "learning_rate": 3.233660740188155e-06, "loss": 0.6509, "step": 8525 }, { "epoch": 0.3533507397737163, "grad_norm": 0.4651561677455902, "learning_rate": 3.2334535206597874e-06, "loss": 0.7357, "step": 8526 }, { "epoch": 0.35339218367938996, "grad_norm": 0.39483535289764404, "learning_rate": 3.2332463011314192e-06, "loss": 0.6572, "step": 8527 }, { "epoch": 0.35343362758506364, "grad_norm": 0.4444069564342499, "learning_rate": 3.2330390816030506e-06, "loss": 0.7341, "step": 8528 }, { "epoch": 0.3534750714907373, "grad_norm": 0.3630324602127075, "learning_rate": 3.2328318620746824e-06, "loss": 0.6625, "step": 8529 }, { "epoch": 0.35351651539641094, "grad_norm": 0.38817235827445984, "learning_rate": 3.232624642546314e-06, "loss": 0.6934, "step": 8530 }, { "epoch": 0.3535579593020846, "grad_norm": 0.4378013610839844, "learning_rate": 3.2324174230179456e-06, "loss": 0.7349, "step": 8531 }, { "epoch": 0.3535994032077583, "grad_norm": 0.43015584349632263, "learning_rate": 3.232210203489577e-06, "loss": 0.7133, "step": 8532 }, { "epoch": 0.35364084711343197, "grad_norm": 0.40005186200141907, "learning_rate": 3.232002983961209e-06, "loss": 0.6703, "step": 8533 }, { "epoch": 0.35368229101910564, "grad_norm": 0.4355786442756653, "learning_rate": 3.23179576443284e-06, "loss": 0.6956, "step": 8534 }, { "epoch": 0.3537237349247793, "grad_norm": 0.43644124269485474, "learning_rate": 
3.231588544904472e-06, "loss": 0.7417, "step": 8535 }, { "epoch": 0.353765178830453, "grad_norm": 0.39708253741264343, "learning_rate": 3.231381325376104e-06, "loss": 0.7185, "step": 8536 }, { "epoch": 0.3538066227361267, "grad_norm": 0.4361204504966736, "learning_rate": 3.231174105847735e-06, "loss": 0.6833, "step": 8537 }, { "epoch": 0.3538480666418003, "grad_norm": 0.41886967420578003, "learning_rate": 3.230966886319367e-06, "loss": 0.662, "step": 8538 }, { "epoch": 0.353889510547474, "grad_norm": 0.4427701532840729, "learning_rate": 3.2307596667909984e-06, "loss": 0.7168, "step": 8539 }, { "epoch": 0.35393095445314765, "grad_norm": 0.39517053961753845, "learning_rate": 3.23055244726263e-06, "loss": 0.7024, "step": 8540 }, { "epoch": 0.3539723983588213, "grad_norm": 0.45205774903297424, "learning_rate": 3.2303452277342616e-06, "loss": 0.7, "step": 8541 }, { "epoch": 0.354013842264495, "grad_norm": 0.4129032492637634, "learning_rate": 3.230138008205894e-06, "loss": 0.6774, "step": 8542 }, { "epoch": 0.3540552861701687, "grad_norm": 0.4169149100780487, "learning_rate": 3.2299307886775248e-06, "loss": 0.7366, "step": 8543 }, { "epoch": 0.35409673007584236, "grad_norm": 0.4418199360370636, "learning_rate": 3.229723569149157e-06, "loss": 0.7131, "step": 8544 }, { "epoch": 0.35413817398151604, "grad_norm": 0.40951967239379883, "learning_rate": 3.229516349620789e-06, "loss": 0.7048, "step": 8545 }, { "epoch": 0.3541796178871897, "grad_norm": 0.4091774821281433, "learning_rate": 3.2293091300924202e-06, "loss": 0.6365, "step": 8546 }, { "epoch": 0.35422106179286333, "grad_norm": 0.4138348400592804, "learning_rate": 3.229101910564052e-06, "loss": 0.7162, "step": 8547 }, { "epoch": 0.354262505698537, "grad_norm": 0.47510600090026855, "learning_rate": 3.2288946910356834e-06, "loss": 0.6571, "step": 8548 }, { "epoch": 0.3543039496042107, "grad_norm": 0.42789509892463684, "learning_rate": 3.2286874715073152e-06, "loss": 0.7278, "step": 8549 }, { "epoch": 0.35434539350988437, 
"grad_norm": 0.4559313952922821, "learning_rate": 3.2284802519789466e-06, "loss": 0.7485, "step": 8550 }, { "epoch": 0.35438683741555804, "grad_norm": 0.38186684250831604, "learning_rate": 3.2282730324505784e-06, "loss": 0.661, "step": 8551 }, { "epoch": 0.3544282813212317, "grad_norm": 0.41352972388267517, "learning_rate": 3.2280658129222102e-06, "loss": 0.7067, "step": 8552 }, { "epoch": 0.3544697252269054, "grad_norm": 0.40542006492614746, "learning_rate": 3.2278585933938416e-06, "loss": 0.6514, "step": 8553 }, { "epoch": 0.3545111691325791, "grad_norm": 0.43535369634628296, "learning_rate": 3.2276513738654734e-06, "loss": 0.7267, "step": 8554 }, { "epoch": 0.35455261303825275, "grad_norm": 0.4038558006286621, "learning_rate": 3.227444154337105e-06, "loss": 0.6693, "step": 8555 }, { "epoch": 0.3545940569439264, "grad_norm": 0.4128554165363312, "learning_rate": 3.2272369348087366e-06, "loss": 0.6991, "step": 8556 }, { "epoch": 0.35463550084960005, "grad_norm": 0.41730573773384094, "learning_rate": 3.227029715280368e-06, "loss": 0.7166, "step": 8557 }, { "epoch": 0.3546769447552737, "grad_norm": 0.3999996781349182, "learning_rate": 3.2268224957520002e-06, "loss": 0.6047, "step": 8558 }, { "epoch": 0.3547183886609474, "grad_norm": 0.4316307604312897, "learning_rate": 3.226615276223631e-06, "loss": 0.7075, "step": 8559 }, { "epoch": 0.3547598325666211, "grad_norm": 0.4348883330821991, "learning_rate": 3.2264080566952634e-06, "loss": 0.6794, "step": 8560 }, { "epoch": 0.35480127647229476, "grad_norm": 0.3705008029937744, "learning_rate": 3.2262008371668952e-06, "loss": 0.6492, "step": 8561 }, { "epoch": 0.35484272037796843, "grad_norm": 0.4168749451637268, "learning_rate": 3.2259936176385266e-06, "loss": 0.7024, "step": 8562 }, { "epoch": 0.3548841642836421, "grad_norm": 0.4245136082172394, "learning_rate": 3.2257863981101584e-06, "loss": 0.7092, "step": 8563 }, { "epoch": 0.3549256081893158, "grad_norm": 0.4210855960845947, "learning_rate": 3.22557917858179e-06, 
"loss": 0.6637, "step": 8564 }, { "epoch": 0.3549670520949894, "grad_norm": 0.4014700949192047, "learning_rate": 3.2253719590534216e-06, "loss": 0.7012, "step": 8565 }, { "epoch": 0.3550084960006631, "grad_norm": 0.4253336191177368, "learning_rate": 3.225164739525053e-06, "loss": 0.7317, "step": 8566 }, { "epoch": 0.35504993990633676, "grad_norm": 0.4420667290687561, "learning_rate": 3.224957519996685e-06, "loss": 0.7163, "step": 8567 }, { "epoch": 0.35509138381201044, "grad_norm": 0.503119170665741, "learning_rate": 3.224750300468316e-06, "loss": 0.7354, "step": 8568 }, { "epoch": 0.3551328277176841, "grad_norm": 0.4166845977306366, "learning_rate": 3.224543080939948e-06, "loss": 0.7067, "step": 8569 }, { "epoch": 0.3551742716233578, "grad_norm": 0.4471542239189148, "learning_rate": 3.22433586141158e-06, "loss": 0.7554, "step": 8570 }, { "epoch": 0.3552157155290315, "grad_norm": 0.40410467982292175, "learning_rate": 3.224128641883211e-06, "loss": 0.697, "step": 8571 }, { "epoch": 0.35525715943470515, "grad_norm": 0.3820880055427551, "learning_rate": 3.223921422354843e-06, "loss": 0.7217, "step": 8572 }, { "epoch": 0.35529860334037877, "grad_norm": 0.40566423535346985, "learning_rate": 3.2237142028264744e-06, "loss": 0.7223, "step": 8573 }, { "epoch": 0.35534004724605245, "grad_norm": 0.4551563858985901, "learning_rate": 3.223506983298106e-06, "loss": 0.6537, "step": 8574 }, { "epoch": 0.3553814911517261, "grad_norm": 0.380825400352478, "learning_rate": 3.2232997637697376e-06, "loss": 0.6946, "step": 8575 }, { "epoch": 0.3554229350573998, "grad_norm": 0.43815889954566956, "learning_rate": 3.22309254424137e-06, "loss": 0.7397, "step": 8576 }, { "epoch": 0.3554643789630735, "grad_norm": 0.37048688530921936, "learning_rate": 3.222885324713001e-06, "loss": 0.665, "step": 8577 }, { "epoch": 0.35550582286874716, "grad_norm": 0.4148445725440979, "learning_rate": 3.222678105184633e-06, "loss": 0.7197, "step": 8578 }, { "epoch": 0.35554726677442083, "grad_norm": 
0.3876015841960907, "learning_rate": 3.222470885656265e-06, "loss": 0.663, "step": 8579 }, { "epoch": 0.3555887106800945, "grad_norm": 0.4169434905052185, "learning_rate": 3.2222636661278962e-06, "loss": 0.6793, "step": 8580 }, { "epoch": 0.3556301545857682, "grad_norm": 0.4030241072177887, "learning_rate": 3.222056446599528e-06, "loss": 0.687, "step": 8581 }, { "epoch": 0.3556715984914418, "grad_norm": 0.4049224555492401, "learning_rate": 3.2218492270711594e-06, "loss": 0.7068, "step": 8582 }, { "epoch": 0.3557130423971155, "grad_norm": 0.40973329544067383, "learning_rate": 3.2216420075427912e-06, "loss": 0.6455, "step": 8583 }, { "epoch": 0.35575448630278916, "grad_norm": 0.4468746483325958, "learning_rate": 3.2214347880144226e-06, "loss": 0.6919, "step": 8584 }, { "epoch": 0.35579593020846284, "grad_norm": 0.4454256594181061, "learning_rate": 3.2212275684860544e-06, "loss": 0.7124, "step": 8585 }, { "epoch": 0.3558373741141365, "grad_norm": 0.42078959941864014, "learning_rate": 3.221020348957686e-06, "loss": 0.6946, "step": 8586 }, { "epoch": 0.3558788180198102, "grad_norm": 0.41023966670036316, "learning_rate": 3.2208131294293176e-06, "loss": 0.6919, "step": 8587 }, { "epoch": 0.35592026192548387, "grad_norm": 0.41594240069389343, "learning_rate": 3.2206059099009494e-06, "loss": 0.6804, "step": 8588 }, { "epoch": 0.35596170583115755, "grad_norm": 0.45461198687553406, "learning_rate": 3.220398690372581e-06, "loss": 0.718, "step": 8589 }, { "epoch": 0.3560031497368312, "grad_norm": 0.4051576256752014, "learning_rate": 3.2201914708442126e-06, "loss": 0.6826, "step": 8590 }, { "epoch": 0.35604459364250485, "grad_norm": 0.42591792345046997, "learning_rate": 3.219984251315844e-06, "loss": 0.7386, "step": 8591 }, { "epoch": 0.3560860375481785, "grad_norm": 0.4453498423099518, "learning_rate": 3.2197770317874762e-06, "loss": 0.6726, "step": 8592 }, { "epoch": 0.3561274814538522, "grad_norm": 0.42274242639541626, "learning_rate": 3.219569812259107e-06, "loss": 0.7446, 
"step": 8593 }, { "epoch": 0.3561689253595259, "grad_norm": 0.4164092242717743, "learning_rate": 3.2193625927307394e-06, "loss": 0.748, "step": 8594 }, { "epoch": 0.35621036926519956, "grad_norm": 0.4614359736442566, "learning_rate": 3.2191553732023704e-06, "loss": 0.6766, "step": 8595 }, { "epoch": 0.35625181317087323, "grad_norm": 0.386618435382843, "learning_rate": 3.2189481536740026e-06, "loss": 0.701, "step": 8596 }, { "epoch": 0.3562932570765469, "grad_norm": 0.43639281392097473, "learning_rate": 3.2187409341456344e-06, "loss": 0.719, "step": 8597 }, { "epoch": 0.3563347009822206, "grad_norm": 0.37950024008750916, "learning_rate": 3.218533714617266e-06, "loss": 0.6694, "step": 8598 }, { "epoch": 0.3563761448878942, "grad_norm": 0.412626713514328, "learning_rate": 3.2183264950888976e-06, "loss": 0.7018, "step": 8599 }, { "epoch": 0.3564175887935679, "grad_norm": 0.4036887288093567, "learning_rate": 3.218119275560529e-06, "loss": 0.6736, "step": 8600 }, { "epoch": 0.35645903269924156, "grad_norm": 0.46518674492836, "learning_rate": 3.217912056032161e-06, "loss": 0.7415, "step": 8601 }, { "epoch": 0.35650047660491524, "grad_norm": 0.4361576437950134, "learning_rate": 3.217704836503792e-06, "loss": 0.748, "step": 8602 }, { "epoch": 0.3565419205105889, "grad_norm": 0.3992605209350586, "learning_rate": 3.217497616975424e-06, "loss": 0.7231, "step": 8603 }, { "epoch": 0.3565833644162626, "grad_norm": 0.40660595893859863, "learning_rate": 3.2172903974470554e-06, "loss": 0.7028, "step": 8604 }, { "epoch": 0.35662480832193627, "grad_norm": 0.41146934032440186, "learning_rate": 3.2170831779186872e-06, "loss": 0.6912, "step": 8605 }, { "epoch": 0.35666625222760995, "grad_norm": 0.40343713760375977, "learning_rate": 3.216875958390319e-06, "loss": 0.7017, "step": 8606 }, { "epoch": 0.3567076961332836, "grad_norm": 0.399984747171402, "learning_rate": 3.2166687388619504e-06, "loss": 0.6969, "step": 8607 }, { "epoch": 0.35674914003895725, "grad_norm": 0.3853943347930908, 
"learning_rate": 3.2164615193335822e-06, "loss": 0.6526, "step": 8608 }, { "epoch": 0.3567905839446309, "grad_norm": 0.4272754490375519, "learning_rate": 3.2162542998052136e-06, "loss": 0.7002, "step": 8609 }, { "epoch": 0.3568320278503046, "grad_norm": 0.4493042826652527, "learning_rate": 3.216047080276846e-06, "loss": 0.7954, "step": 8610 }, { "epoch": 0.3568734717559783, "grad_norm": 0.43379130959510803, "learning_rate": 3.215839860748477e-06, "loss": 0.7742, "step": 8611 }, { "epoch": 0.35691491566165195, "grad_norm": 0.40505853295326233, "learning_rate": 3.215632641220109e-06, "loss": 0.6909, "step": 8612 }, { "epoch": 0.35695635956732563, "grad_norm": 0.3881370425224304, "learning_rate": 3.215425421691741e-06, "loss": 0.6785, "step": 8613 }, { "epoch": 0.3569978034729993, "grad_norm": 0.38113144040107727, "learning_rate": 3.2152182021633722e-06, "loss": 0.6445, "step": 8614 }, { "epoch": 0.357039247378673, "grad_norm": 0.4128870964050293, "learning_rate": 3.215010982635004e-06, "loss": 0.7576, "step": 8615 }, { "epoch": 0.35708069128434666, "grad_norm": 0.4382694363594055, "learning_rate": 3.2148037631066354e-06, "loss": 0.7031, "step": 8616 }, { "epoch": 0.3571221351900203, "grad_norm": 0.4418732821941376, "learning_rate": 3.2145965435782672e-06, "loss": 0.7463, "step": 8617 }, { "epoch": 0.35716357909569396, "grad_norm": 0.4117302894592285, "learning_rate": 3.2143893240498986e-06, "loss": 0.7251, "step": 8618 }, { "epoch": 0.35720502300136764, "grad_norm": 0.4213348627090454, "learning_rate": 3.2141821045215304e-06, "loss": 0.7144, "step": 8619 }, { "epoch": 0.3572464669070413, "grad_norm": 0.43918853998184204, "learning_rate": 3.213974884993162e-06, "loss": 0.7439, "step": 8620 }, { "epoch": 0.357287910812715, "grad_norm": 0.43314146995544434, "learning_rate": 3.2137676654647936e-06, "loss": 0.691, "step": 8621 }, { "epoch": 0.35732935471838867, "grad_norm": 0.3981378674507141, "learning_rate": 3.2135604459364254e-06, "loss": 0.7205, "step": 8622 }, { 
"epoch": 0.35737079862406235, "grad_norm": 0.40885764360427856, "learning_rate": 3.213353226408057e-06, "loss": 0.6951, "step": 8623 }, { "epoch": 0.357412242529736, "grad_norm": 0.4133684039115906, "learning_rate": 3.2131460068796886e-06, "loss": 0.6925, "step": 8624 }, { "epoch": 0.35745368643540965, "grad_norm": 0.45643529295921326, "learning_rate": 3.21293878735132e-06, "loss": 0.7441, "step": 8625 }, { "epoch": 0.3574951303410833, "grad_norm": 0.42970994114875793, "learning_rate": 3.2127315678229522e-06, "loss": 0.6926, "step": 8626 }, { "epoch": 0.357536574246757, "grad_norm": 0.3958750069141388, "learning_rate": 3.212524348294583e-06, "loss": 0.6519, "step": 8627 }, { "epoch": 0.3575780181524307, "grad_norm": 0.4271100163459778, "learning_rate": 3.2123171287662154e-06, "loss": 0.7026, "step": 8628 }, { "epoch": 0.35761946205810435, "grad_norm": 0.4285823106765747, "learning_rate": 3.2121099092378464e-06, "loss": 0.6956, "step": 8629 }, { "epoch": 0.35766090596377803, "grad_norm": 0.41392239928245544, "learning_rate": 3.2119026897094786e-06, "loss": 0.7029, "step": 8630 }, { "epoch": 0.3577023498694517, "grad_norm": 0.4328571856021881, "learning_rate": 3.2116954701811104e-06, "loss": 0.705, "step": 8631 }, { "epoch": 0.3577437937751254, "grad_norm": 0.40659859776496887, "learning_rate": 3.211488250652742e-06, "loss": 0.6848, "step": 8632 }, { "epoch": 0.35778523768079906, "grad_norm": 0.4014585614204407, "learning_rate": 3.2112810311243736e-06, "loss": 0.6593, "step": 8633 }, { "epoch": 0.3578266815864727, "grad_norm": 0.374635249376297, "learning_rate": 3.211073811596005e-06, "loss": 0.6284, "step": 8634 }, { "epoch": 0.35786812549214636, "grad_norm": 0.4336332082748413, "learning_rate": 3.210866592067637e-06, "loss": 0.7231, "step": 8635 }, { "epoch": 0.35790956939782004, "grad_norm": 0.39702433347702026, "learning_rate": 3.2106593725392682e-06, "loss": 0.6583, "step": 8636 }, { "epoch": 0.3579510133034937, "grad_norm": 0.4217739999294281, "learning_rate": 
3.2104521530109e-06, "loss": 0.7622, "step": 8637 }, { "epoch": 0.3579924572091674, "grad_norm": 0.411190390586853, "learning_rate": 3.2102449334825314e-06, "loss": 0.718, "step": 8638 }, { "epoch": 0.35803390111484107, "grad_norm": 0.40480706095695496, "learning_rate": 3.2100377139541632e-06, "loss": 0.7252, "step": 8639 }, { "epoch": 0.35807534502051475, "grad_norm": 0.40707555413246155, "learning_rate": 3.209830494425795e-06, "loss": 0.678, "step": 8640 }, { "epoch": 0.3581167889261884, "grad_norm": 0.4020068049430847, "learning_rate": 3.2096232748974264e-06, "loss": 0.7224, "step": 8641 }, { "epoch": 0.3581582328318621, "grad_norm": 0.41944706439971924, "learning_rate": 3.2094160553690582e-06, "loss": 0.6989, "step": 8642 }, { "epoch": 0.3581996767375357, "grad_norm": 0.41154971718788147, "learning_rate": 3.2092088358406896e-06, "loss": 0.7004, "step": 8643 }, { "epoch": 0.3582411206432094, "grad_norm": 0.4606510102748871, "learning_rate": 3.209001616312322e-06, "loss": 0.7495, "step": 8644 }, { "epoch": 0.3582825645488831, "grad_norm": 0.44434985518455505, "learning_rate": 3.208794396783953e-06, "loss": 0.668, "step": 8645 }, { "epoch": 0.35832400845455675, "grad_norm": 0.4028402864933014, "learning_rate": 3.208587177255585e-06, "loss": 0.7634, "step": 8646 }, { "epoch": 0.35836545236023043, "grad_norm": 0.4163174629211426, "learning_rate": 3.2083799577272164e-06, "loss": 0.7288, "step": 8647 }, { "epoch": 0.3584068962659041, "grad_norm": 0.39894750714302063, "learning_rate": 3.2081727381988482e-06, "loss": 0.7269, "step": 8648 }, { "epoch": 0.3584483401715778, "grad_norm": 0.4147360622882843, "learning_rate": 3.20796551867048e-06, "loss": 0.708, "step": 8649 }, { "epoch": 0.35848978407725146, "grad_norm": 0.45082521438598633, "learning_rate": 3.2077582991421114e-06, "loss": 0.7551, "step": 8650 }, { "epoch": 0.35853122798292514, "grad_norm": 0.4069189727306366, "learning_rate": 3.2075510796137432e-06, "loss": 0.6669, "step": 8651 }, { "epoch": 
0.35857267188859876, "grad_norm": 0.43804097175598145, "learning_rate": 3.2073438600853746e-06, "loss": 0.7057, "step": 8652 }, { "epoch": 0.35861411579427244, "grad_norm": 0.41555774211883545, "learning_rate": 3.2071366405570064e-06, "loss": 0.709, "step": 8653 }, { "epoch": 0.3586555596999461, "grad_norm": 0.4156549870967865, "learning_rate": 3.206929421028638e-06, "loss": 0.6732, "step": 8654 }, { "epoch": 0.3586970036056198, "grad_norm": 0.38702520728111267, "learning_rate": 3.2067222015002696e-06, "loss": 0.6552, "step": 8655 }, { "epoch": 0.35873844751129347, "grad_norm": 0.418766587972641, "learning_rate": 3.206514981971901e-06, "loss": 0.7734, "step": 8656 }, { "epoch": 0.35877989141696714, "grad_norm": 0.43494096398353577, "learning_rate": 3.206307762443533e-06, "loss": 0.7148, "step": 8657 }, { "epoch": 0.3588213353226408, "grad_norm": 0.3983537256717682, "learning_rate": 3.2061005429151646e-06, "loss": 0.6417, "step": 8658 }, { "epoch": 0.3588627792283145, "grad_norm": 0.4036007821559906, "learning_rate": 3.205893323386796e-06, "loss": 0.6461, "step": 8659 }, { "epoch": 0.3589042231339881, "grad_norm": 0.3809744417667389, "learning_rate": 3.2056861038584282e-06, "loss": 0.6978, "step": 8660 }, { "epoch": 0.3589456670396618, "grad_norm": 0.4268876016139984, "learning_rate": 3.205478884330059e-06, "loss": 0.7219, "step": 8661 }, { "epoch": 0.3589871109453355, "grad_norm": 0.4100329577922821, "learning_rate": 3.2052716648016914e-06, "loss": 0.7072, "step": 8662 }, { "epoch": 0.35902855485100915, "grad_norm": 0.42140743136405945, "learning_rate": 3.2050644452733224e-06, "loss": 0.7166, "step": 8663 }, { "epoch": 0.35906999875668283, "grad_norm": 0.4297003746032715, "learning_rate": 3.2048572257449546e-06, "loss": 0.7456, "step": 8664 }, { "epoch": 0.3591114426623565, "grad_norm": 0.39545580744743347, "learning_rate": 3.204650006216586e-06, "loss": 0.6914, "step": 8665 }, { "epoch": 0.3591528865680302, "grad_norm": 0.4164530336856842, "learning_rate": 
3.204442786688218e-06, "loss": 0.6655, "step": 8666 }, { "epoch": 0.35919433047370386, "grad_norm": 0.40562406182289124, "learning_rate": 3.2042355671598496e-06, "loss": 0.687, "step": 8667 }, { "epoch": 0.35923577437937754, "grad_norm": 0.43582430481910706, "learning_rate": 3.204028347631481e-06, "loss": 0.7136, "step": 8668 }, { "epoch": 0.35927721828505116, "grad_norm": 0.4351360499858856, "learning_rate": 3.203821128103113e-06, "loss": 0.7544, "step": 8669 }, { "epoch": 0.35931866219072484, "grad_norm": 0.3984261453151703, "learning_rate": 3.2036139085747442e-06, "loss": 0.6794, "step": 8670 }, { "epoch": 0.3593601060963985, "grad_norm": 0.4076157510280609, "learning_rate": 3.203406689046376e-06, "loss": 0.718, "step": 8671 }, { "epoch": 0.3594015500020722, "grad_norm": 0.40390744805336, "learning_rate": 3.2031994695180074e-06, "loss": 0.6804, "step": 8672 }, { "epoch": 0.35944299390774587, "grad_norm": 0.39340588450431824, "learning_rate": 3.2029922499896392e-06, "loss": 0.696, "step": 8673 }, { "epoch": 0.35948443781341954, "grad_norm": 0.43475353717803955, "learning_rate": 3.202785030461271e-06, "loss": 0.6855, "step": 8674 }, { "epoch": 0.3595258817190932, "grad_norm": 0.48821792006492615, "learning_rate": 3.2025778109329024e-06, "loss": 0.7532, "step": 8675 }, { "epoch": 0.3595673256247669, "grad_norm": 0.4022676944732666, "learning_rate": 3.2023705914045342e-06, "loss": 0.6978, "step": 8676 }, { "epoch": 0.3596087695304406, "grad_norm": 0.37009915709495544, "learning_rate": 3.2021633718761656e-06, "loss": 0.7012, "step": 8677 }, { "epoch": 0.3596502134361142, "grad_norm": 0.4057188034057617, "learning_rate": 3.201956152347798e-06, "loss": 0.7129, "step": 8678 }, { "epoch": 0.3596916573417879, "grad_norm": 0.4646468758583069, "learning_rate": 3.201748932819429e-06, "loss": 0.7102, "step": 8679 }, { "epoch": 0.35973310124746155, "grad_norm": 0.4313954710960388, "learning_rate": 3.201541713291061e-06, "loss": 0.733, "step": 8680 }, { "epoch": 
0.35977454515313523, "grad_norm": 0.4056831896305084, "learning_rate": 3.2013344937626924e-06, "loss": 0.6432, "step": 8681 }, { "epoch": 0.3598159890588089, "grad_norm": 0.3931620121002197, "learning_rate": 3.2011272742343242e-06, "loss": 0.6626, "step": 8682 }, { "epoch": 0.3598574329644826, "grad_norm": 0.41706568002700806, "learning_rate": 3.200920054705956e-06, "loss": 0.7039, "step": 8683 }, { "epoch": 0.35989887687015626, "grad_norm": 0.42725566029548645, "learning_rate": 3.2007128351775874e-06, "loss": 0.6671, "step": 8684 }, { "epoch": 0.35994032077582994, "grad_norm": 0.41366472840309143, "learning_rate": 3.2005056156492192e-06, "loss": 0.7019, "step": 8685 }, { "epoch": 0.35998176468150356, "grad_norm": 0.4153638780117035, "learning_rate": 3.2002983961208506e-06, "loss": 0.7222, "step": 8686 }, { "epoch": 0.36002320858717723, "grad_norm": 0.41646304726600647, "learning_rate": 3.2000911765924824e-06, "loss": 0.7517, "step": 8687 }, { "epoch": 0.3600646524928509, "grad_norm": 0.3975159227848053, "learning_rate": 3.199883957064114e-06, "loss": 0.7152, "step": 8688 }, { "epoch": 0.3601060963985246, "grad_norm": 0.41991302371025085, "learning_rate": 3.1996767375357456e-06, "loss": 0.7251, "step": 8689 }, { "epoch": 0.36014754030419827, "grad_norm": 0.44278642535209656, "learning_rate": 3.199469518007377e-06, "loss": 0.6681, "step": 8690 }, { "epoch": 0.36018898420987194, "grad_norm": 0.4114539325237274, "learning_rate": 3.199262298479009e-06, "loss": 0.6626, "step": 8691 }, { "epoch": 0.3602304281155456, "grad_norm": 0.41678017377853394, "learning_rate": 3.1990550789506406e-06, "loss": 0.6844, "step": 8692 }, { "epoch": 0.3602718720212193, "grad_norm": 0.42319753766059875, "learning_rate": 3.198847859422272e-06, "loss": 0.7671, "step": 8693 }, { "epoch": 0.360313315926893, "grad_norm": 0.4015193283557892, "learning_rate": 3.1986406398939043e-06, "loss": 0.7136, "step": 8694 }, { "epoch": 0.3603547598325666, "grad_norm": 0.39908382296562195, "learning_rate": 
3.1984334203655352e-06, "loss": 0.7009, "step": 8695 }, { "epoch": 0.3603962037382403, "grad_norm": 0.3875944912433624, "learning_rate": 3.1982262008371674e-06, "loss": 0.6591, "step": 8696 }, { "epoch": 0.36043764764391395, "grad_norm": 0.4470199644565582, "learning_rate": 3.1980189813087984e-06, "loss": 0.74, "step": 8697 }, { "epoch": 0.3604790915495876, "grad_norm": 0.40394559502601624, "learning_rate": 3.1978117617804306e-06, "loss": 0.6561, "step": 8698 }, { "epoch": 0.3605205354552613, "grad_norm": 0.40429189801216125, "learning_rate": 3.197604542252062e-06, "loss": 0.7131, "step": 8699 }, { "epoch": 0.360561979360935, "grad_norm": 0.39882224798202515, "learning_rate": 3.197397322723694e-06, "loss": 0.6637, "step": 8700 }, { "epoch": 0.36060342326660866, "grad_norm": 0.38423794507980347, "learning_rate": 3.1971901031953256e-06, "loss": 0.6968, "step": 8701 }, { "epoch": 0.36064486717228234, "grad_norm": 0.39836516976356506, "learning_rate": 3.196982883666957e-06, "loss": 0.7012, "step": 8702 }, { "epoch": 0.360686311077956, "grad_norm": 0.4412648677825928, "learning_rate": 3.196775664138589e-06, "loss": 0.7405, "step": 8703 }, { "epoch": 0.36072775498362963, "grad_norm": 0.4252113699913025, "learning_rate": 3.1965684446102202e-06, "loss": 0.7109, "step": 8704 }, { "epoch": 0.3607691988893033, "grad_norm": 0.459979772567749, "learning_rate": 3.196361225081852e-06, "loss": 0.699, "step": 8705 }, { "epoch": 0.360810642794977, "grad_norm": 0.39731279015541077, "learning_rate": 3.1961540055534834e-06, "loss": 0.7068, "step": 8706 }, { "epoch": 0.36085208670065066, "grad_norm": 0.41199877858161926, "learning_rate": 3.1959467860251152e-06, "loss": 0.6902, "step": 8707 }, { "epoch": 0.36089353060632434, "grad_norm": 0.3849584460258484, "learning_rate": 3.1957395664967466e-06, "loss": 0.65, "step": 8708 }, { "epoch": 0.360934974511998, "grad_norm": 0.3979337513446808, "learning_rate": 3.1955323469683784e-06, "loss": 0.6581, "step": 8709 }, { "epoch": 
0.3609764184176717, "grad_norm": 0.4135061800479889, "learning_rate": 3.1953251274400102e-06, "loss": 0.6871, "step": 8710 }, { "epoch": 0.3610178623233454, "grad_norm": 0.3670703172683716, "learning_rate": 3.1951179079116416e-06, "loss": 0.6415, "step": 8711 }, { "epoch": 0.36105930622901905, "grad_norm": 0.4223484694957733, "learning_rate": 3.194910688383274e-06, "loss": 0.6906, "step": 8712 }, { "epoch": 0.36110075013469267, "grad_norm": 0.4482652246952057, "learning_rate": 3.194703468854905e-06, "loss": 0.6763, "step": 8713 }, { "epoch": 0.36114219404036635, "grad_norm": 0.4217328727245331, "learning_rate": 3.194496249326537e-06, "loss": 0.7723, "step": 8714 }, { "epoch": 0.36118363794604, "grad_norm": 0.3905523717403412, "learning_rate": 3.1942890297981684e-06, "loss": 0.6614, "step": 8715 }, { "epoch": 0.3612250818517137, "grad_norm": 0.3970358967781067, "learning_rate": 3.1940818102698002e-06, "loss": 0.7126, "step": 8716 }, { "epoch": 0.3612665257573874, "grad_norm": 0.3950396776199341, "learning_rate": 3.1938745907414316e-06, "loss": 0.6947, "step": 8717 }, { "epoch": 0.36130796966306106, "grad_norm": 0.4351550340652466, "learning_rate": 3.1936673712130634e-06, "loss": 0.6982, "step": 8718 }, { "epoch": 0.36134941356873473, "grad_norm": 0.4230193793773651, "learning_rate": 3.1934601516846952e-06, "loss": 0.6946, "step": 8719 }, { "epoch": 0.3613908574744084, "grad_norm": 0.4132295846939087, "learning_rate": 3.1932529321563266e-06, "loss": 0.6943, "step": 8720 }, { "epoch": 0.36143230138008203, "grad_norm": 0.3997012674808502, "learning_rate": 3.1930457126279584e-06, "loss": 0.6906, "step": 8721 }, { "epoch": 0.3614737452857557, "grad_norm": 0.4293724596500397, "learning_rate": 3.19283849309959e-06, "loss": 0.6619, "step": 8722 }, { "epoch": 0.3615151891914294, "grad_norm": 0.41585633158683777, "learning_rate": 3.1926312735712216e-06, "loss": 0.6763, "step": 8723 }, { "epoch": 0.36155663309710306, "grad_norm": 0.3797011375427246, "learning_rate": 
3.192424054042853e-06, "loss": 0.6694, "step": 8724 }, { "epoch": 0.36159807700277674, "grad_norm": 0.4137294888496399, "learning_rate": 3.192216834514485e-06, "loss": 0.6443, "step": 8725 }, { "epoch": 0.3616395209084504, "grad_norm": 0.38571393489837646, "learning_rate": 3.1920096149861162e-06, "loss": 0.6699, "step": 8726 }, { "epoch": 0.3616809648141241, "grad_norm": 0.46915021538734436, "learning_rate": 3.191802395457748e-06, "loss": 0.7356, "step": 8727 }, { "epoch": 0.36172240871979777, "grad_norm": 0.42679134011268616, "learning_rate": 3.1915951759293803e-06, "loss": 0.6858, "step": 8728 }, { "epoch": 0.36176385262547145, "grad_norm": 0.44958364963531494, "learning_rate": 3.1913879564010112e-06, "loss": 0.6925, "step": 8729 }, { "epoch": 0.36180529653114507, "grad_norm": 0.47745299339294434, "learning_rate": 3.1911807368726435e-06, "loss": 0.76, "step": 8730 }, { "epoch": 0.36184674043681875, "grad_norm": 0.4011189341545105, "learning_rate": 3.190973517344275e-06, "loss": 0.6897, "step": 8731 }, { "epoch": 0.3618881843424924, "grad_norm": 0.44328612089157104, "learning_rate": 3.1907662978159066e-06, "loss": 0.7223, "step": 8732 }, { "epoch": 0.3619296282481661, "grad_norm": 0.4299856126308441, "learning_rate": 3.190559078287538e-06, "loss": 0.679, "step": 8733 }, { "epoch": 0.3619710721538398, "grad_norm": 0.4424896538257599, "learning_rate": 3.19035185875917e-06, "loss": 0.714, "step": 8734 }, { "epoch": 0.36201251605951346, "grad_norm": 0.4129107892513275, "learning_rate": 3.1901446392308017e-06, "loss": 0.7917, "step": 8735 }, { "epoch": 0.36205395996518713, "grad_norm": 0.3794506788253784, "learning_rate": 3.189937419702433e-06, "loss": 0.684, "step": 8736 }, { "epoch": 0.3620954038708608, "grad_norm": 0.38367196917533875, "learning_rate": 3.189730200174065e-06, "loss": 0.6549, "step": 8737 }, { "epoch": 0.3621368477765345, "grad_norm": 0.38777387142181396, "learning_rate": 3.1895229806456962e-06, "loss": 0.6519, "step": 8738 }, { "epoch": 
0.3621782916822081, "grad_norm": 0.39142030477523804, "learning_rate": 3.189315761117328e-06, "loss": 0.7188, "step": 8739 }, { "epoch": 0.3622197355878818, "grad_norm": 0.38989362120628357, "learning_rate": 3.1891085415889594e-06, "loss": 0.6436, "step": 8740 }, { "epoch": 0.36226117949355546, "grad_norm": 0.3616463243961334, "learning_rate": 3.1889013220605912e-06, "loss": 0.6604, "step": 8741 }, { "epoch": 0.36230262339922914, "grad_norm": 0.3997287452220917, "learning_rate": 3.1886941025322226e-06, "loss": 0.6497, "step": 8742 }, { "epoch": 0.3623440673049028, "grad_norm": 0.43689417839050293, "learning_rate": 3.1884868830038544e-06, "loss": 0.7478, "step": 8743 }, { "epoch": 0.3623855112105765, "grad_norm": 0.3861250579357147, "learning_rate": 3.1882796634754867e-06, "loss": 0.728, "step": 8744 }, { "epoch": 0.36242695511625017, "grad_norm": 0.4277266263961792, "learning_rate": 3.1880724439471176e-06, "loss": 0.7107, "step": 8745 }, { "epoch": 0.36246839902192385, "grad_norm": 0.41886186599731445, "learning_rate": 3.18786522441875e-06, "loss": 0.6541, "step": 8746 }, { "epoch": 0.36250984292759747, "grad_norm": 0.40659788250923157, "learning_rate": 3.187658004890381e-06, "loss": 0.6239, "step": 8747 }, { "epoch": 0.36255128683327115, "grad_norm": 0.4191431701183319, "learning_rate": 3.187450785362013e-06, "loss": 0.73, "step": 8748 }, { "epoch": 0.3625927307389448, "grad_norm": 0.38583990931510925, "learning_rate": 3.1872435658336444e-06, "loss": 0.6482, "step": 8749 }, { "epoch": 0.3626341746446185, "grad_norm": 0.4314139187335968, "learning_rate": 3.1870363463052762e-06, "loss": 0.7511, "step": 8750 }, { "epoch": 0.3626756185502922, "grad_norm": 0.46310287714004517, "learning_rate": 3.1868291267769076e-06, "loss": 0.7014, "step": 8751 }, { "epoch": 0.36271706245596586, "grad_norm": 0.39220088720321655, "learning_rate": 3.1866219072485394e-06, "loss": 0.701, "step": 8752 }, { "epoch": 0.36275850636163953, "grad_norm": 0.3864094018936157, "learning_rate": 
3.1864146877201713e-06, "loss": 0.7006, "step": 8753 }, { "epoch": 0.3627999502673132, "grad_norm": 0.4193985164165497, "learning_rate": 3.1862074681918026e-06, "loss": 0.6805, "step": 8754 }, { "epoch": 0.3628413941729869, "grad_norm": 0.400595098733902, "learning_rate": 3.1860002486634344e-06, "loss": 0.7012, "step": 8755 }, { "epoch": 0.3628828380786605, "grad_norm": 0.3874930143356323, "learning_rate": 3.185793029135066e-06, "loss": 0.687, "step": 8756 }, { "epoch": 0.3629242819843342, "grad_norm": 0.4066002368927002, "learning_rate": 3.1855858096066976e-06, "loss": 0.6914, "step": 8757 }, { "epoch": 0.36296572589000786, "grad_norm": 0.43422219157218933, "learning_rate": 3.185378590078329e-06, "loss": 0.7583, "step": 8758 }, { "epoch": 0.36300716979568154, "grad_norm": 0.408501535654068, "learning_rate": 3.185171370549961e-06, "loss": 0.7239, "step": 8759 }, { "epoch": 0.3630486137013552, "grad_norm": 0.39842933416366577, "learning_rate": 3.1849641510215922e-06, "loss": 0.7192, "step": 8760 }, { "epoch": 0.3630900576070289, "grad_norm": 0.38772615790367126, "learning_rate": 3.184756931493224e-06, "loss": 0.6636, "step": 8761 }, { "epoch": 0.36313150151270257, "grad_norm": 0.4501410722732544, "learning_rate": 3.1845497119648563e-06, "loss": 0.7351, "step": 8762 }, { "epoch": 0.36317294541837625, "grad_norm": 0.41095712780952454, "learning_rate": 3.1843424924364872e-06, "loss": 0.6346, "step": 8763 }, { "epoch": 0.3632143893240499, "grad_norm": 0.43201744556427, "learning_rate": 3.1841352729081195e-06, "loss": 0.7205, "step": 8764 }, { "epoch": 0.36325583322972355, "grad_norm": 0.4419722259044647, "learning_rate": 3.183928053379751e-06, "loss": 0.7068, "step": 8765 }, { "epoch": 0.3632972771353972, "grad_norm": 0.3976384699344635, "learning_rate": 3.1837208338513827e-06, "loss": 0.6809, "step": 8766 }, { "epoch": 0.3633387210410709, "grad_norm": 0.4190293848514557, "learning_rate": 3.183513614323014e-06, "loss": 0.6963, "step": 8767 }, { "epoch": 
0.3633801649467446, "grad_norm": 0.4072466194629669, "learning_rate": 3.183306394794646e-06, "loss": 0.6639, "step": 8768 }, { "epoch": 0.36342160885241825, "grad_norm": 0.44544410705566406, "learning_rate": 3.1830991752662772e-06, "loss": 0.733, "step": 8769 }, { "epoch": 0.36346305275809193, "grad_norm": 0.4002475142478943, "learning_rate": 3.182891955737909e-06, "loss": 0.708, "step": 8770 }, { "epoch": 0.3635044966637656, "grad_norm": 0.4335148334503174, "learning_rate": 3.182684736209541e-06, "loss": 0.6892, "step": 8771 }, { "epoch": 0.3635459405694393, "grad_norm": 0.4222176671028137, "learning_rate": 3.1824775166811722e-06, "loss": 0.7067, "step": 8772 }, { "epoch": 0.3635873844751129, "grad_norm": 0.3817721903324127, "learning_rate": 3.182270297152804e-06, "loss": 0.6665, "step": 8773 }, { "epoch": 0.3636288283807866, "grad_norm": 0.3962269723415375, "learning_rate": 3.1820630776244354e-06, "loss": 0.6609, "step": 8774 }, { "epoch": 0.36367027228646026, "grad_norm": 0.43797460198402405, "learning_rate": 3.1818558580960672e-06, "loss": 0.6808, "step": 8775 }, { "epoch": 0.36371171619213394, "grad_norm": 0.4102444052696228, "learning_rate": 3.1816486385676986e-06, "loss": 0.6425, "step": 8776 }, { "epoch": 0.3637531600978076, "grad_norm": 0.42108339071273804, "learning_rate": 3.1814414190393304e-06, "loss": 0.7212, "step": 8777 }, { "epoch": 0.3637946040034813, "grad_norm": 0.4083385467529297, "learning_rate": 3.181234199510962e-06, "loss": 0.6426, "step": 8778 }, { "epoch": 0.36383604790915497, "grad_norm": 0.4140072166919708, "learning_rate": 3.1810269799825936e-06, "loss": 0.7356, "step": 8779 }, { "epoch": 0.36387749181482865, "grad_norm": 0.43164199590682983, "learning_rate": 3.180819760454226e-06, "loss": 0.6614, "step": 8780 }, { "epoch": 0.3639189357205023, "grad_norm": 0.4276571273803711, "learning_rate": 3.180612540925857e-06, "loss": 0.686, "step": 8781 }, { "epoch": 0.36396037962617594, "grad_norm": 0.4281634986400604, "learning_rate": 
3.180405321397489e-06, "loss": 0.7313, "step": 8782 }, { "epoch": 0.3640018235318496, "grad_norm": 0.4151059687137604, "learning_rate": 3.1801981018691204e-06, "loss": 0.7006, "step": 8783 }, { "epoch": 0.3640432674375233, "grad_norm": 0.42673927545547485, "learning_rate": 3.1799908823407523e-06, "loss": 0.7039, "step": 8784 }, { "epoch": 0.364084711343197, "grad_norm": 0.41177552938461304, "learning_rate": 3.1797836628123836e-06, "loss": 0.6797, "step": 8785 }, { "epoch": 0.36412615524887065, "grad_norm": 0.38462021946907043, "learning_rate": 3.1795764432840154e-06, "loss": 0.6368, "step": 8786 }, { "epoch": 0.36416759915454433, "grad_norm": 0.39418941736221313, "learning_rate": 3.179369223755647e-06, "loss": 0.6567, "step": 8787 }, { "epoch": 0.364209043060218, "grad_norm": 0.46565166115760803, "learning_rate": 3.1791620042272786e-06, "loss": 0.7227, "step": 8788 }, { "epoch": 0.3642504869658917, "grad_norm": 0.3941397964954376, "learning_rate": 3.1789547846989105e-06, "loss": 0.6953, "step": 8789 }, { "epoch": 0.36429193087156536, "grad_norm": 0.39258432388305664, "learning_rate": 3.178747565170542e-06, "loss": 0.677, "step": 8790 }, { "epoch": 0.364333374777239, "grad_norm": 0.39421311020851135, "learning_rate": 3.1785403456421736e-06, "loss": 0.7083, "step": 8791 }, { "epoch": 0.36437481868291266, "grad_norm": 0.4141696095466614, "learning_rate": 3.178333126113805e-06, "loss": 0.7153, "step": 8792 }, { "epoch": 0.36441626258858634, "grad_norm": 0.42644450068473816, "learning_rate": 3.178125906585437e-06, "loss": 0.6473, "step": 8793 }, { "epoch": 0.36445770649426, "grad_norm": 0.4411250352859497, "learning_rate": 3.1779186870570682e-06, "loss": 0.6887, "step": 8794 }, { "epoch": 0.3644991503999337, "grad_norm": 0.3941774070262909, "learning_rate": 3.1777114675287e-06, "loss": 0.6901, "step": 8795 }, { "epoch": 0.36454059430560737, "grad_norm": 0.39575690031051636, "learning_rate": 3.1775042480003323e-06, "loss": 0.667, "step": 8796 }, { "epoch": 
0.36458203821128105, "grad_norm": 0.4060383141040802, "learning_rate": 3.1772970284719632e-06, "loss": 0.7465, "step": 8797 }, { "epoch": 0.3646234821169547, "grad_norm": 0.4219093918800354, "learning_rate": 3.1770898089435955e-06, "loss": 0.7051, "step": 8798 }, { "epoch": 0.3646649260226284, "grad_norm": 0.3760469853878021, "learning_rate": 3.176882589415227e-06, "loss": 0.7008, "step": 8799 }, { "epoch": 0.364706369928302, "grad_norm": 0.40530356764793396, "learning_rate": 3.1766753698868587e-06, "loss": 0.6921, "step": 8800 }, { "epoch": 0.3647478138339757, "grad_norm": 0.42373690009117126, "learning_rate": 3.17646815035849e-06, "loss": 0.7507, "step": 8801 }, { "epoch": 0.3647892577396494, "grad_norm": 0.3819704055786133, "learning_rate": 3.176260930830122e-06, "loss": 0.6948, "step": 8802 }, { "epoch": 0.36483070164532305, "grad_norm": 0.43700453639030457, "learning_rate": 3.1760537113017532e-06, "loss": 0.6687, "step": 8803 }, { "epoch": 0.36487214555099673, "grad_norm": 0.4160009026527405, "learning_rate": 3.175846491773385e-06, "loss": 0.6682, "step": 8804 }, { "epoch": 0.3649135894566704, "grad_norm": 0.42203670740127563, "learning_rate": 3.175639272245017e-06, "loss": 0.7375, "step": 8805 }, { "epoch": 0.3649550333623441, "grad_norm": 0.39568477869033813, "learning_rate": 3.1754320527166482e-06, "loss": 0.6873, "step": 8806 }, { "epoch": 0.36499647726801776, "grad_norm": 0.4489230513572693, "learning_rate": 3.17522483318828e-06, "loss": 0.7803, "step": 8807 }, { "epoch": 0.3650379211736914, "grad_norm": 0.41700926423072815, "learning_rate": 3.1750176136599114e-06, "loss": 0.7126, "step": 8808 }, { "epoch": 0.36507936507936506, "grad_norm": 0.41486063599586487, "learning_rate": 3.1748103941315432e-06, "loss": 0.6799, "step": 8809 }, { "epoch": 0.36512080898503874, "grad_norm": 0.4383470118045807, "learning_rate": 3.1746031746031746e-06, "loss": 0.7456, "step": 8810 }, { "epoch": 0.3651622528907124, "grad_norm": 0.43047183752059937, "learning_rate": 
3.1743959550748064e-06, "loss": 0.689, "step": 8811 }, { "epoch": 0.3652036967963861, "grad_norm": 0.4064778983592987, "learning_rate": 3.174188735546438e-06, "loss": 0.7117, "step": 8812 }, { "epoch": 0.36524514070205977, "grad_norm": 0.4036770462989807, "learning_rate": 3.1739815160180696e-06, "loss": 0.6799, "step": 8813 }, { "epoch": 0.36528658460773344, "grad_norm": 0.4272207021713257, "learning_rate": 3.173774296489702e-06, "loss": 0.743, "step": 8814 }, { "epoch": 0.3653280285134071, "grad_norm": 0.4056776165962219, "learning_rate": 3.173567076961333e-06, "loss": 0.7356, "step": 8815 }, { "epoch": 0.3653694724190808, "grad_norm": 0.3883136212825775, "learning_rate": 3.173359857432965e-06, "loss": 0.684, "step": 8816 }, { "epoch": 0.3654109163247544, "grad_norm": 0.38638612627983093, "learning_rate": 3.1731526379045965e-06, "loss": 0.6984, "step": 8817 }, { "epoch": 0.3654523602304281, "grad_norm": 0.4543929100036621, "learning_rate": 3.1729454183762283e-06, "loss": 0.8047, "step": 8818 }, { "epoch": 0.3654938041361018, "grad_norm": 0.41637372970581055, "learning_rate": 3.1727381988478596e-06, "loss": 0.658, "step": 8819 }, { "epoch": 0.36553524804177545, "grad_norm": 0.4713640511035919, "learning_rate": 3.1725309793194915e-06, "loss": 0.754, "step": 8820 }, { "epoch": 0.36557669194744913, "grad_norm": 0.42243054509162903, "learning_rate": 3.172323759791123e-06, "loss": 0.7238, "step": 8821 }, { "epoch": 0.3656181358531228, "grad_norm": 0.42175623774528503, "learning_rate": 3.1721165402627547e-06, "loss": 0.6578, "step": 8822 }, { "epoch": 0.3656595797587965, "grad_norm": 0.44663870334625244, "learning_rate": 3.1719093207343865e-06, "loss": 0.7307, "step": 8823 }, { "epoch": 0.36570102366447016, "grad_norm": 0.40727609395980835, "learning_rate": 3.171702101206018e-06, "loss": 0.6915, "step": 8824 }, { "epoch": 0.36574246757014384, "grad_norm": 0.4353761076927185, "learning_rate": 3.1714948816776497e-06, "loss": 0.7288, "step": 8825 }, { "epoch": 
0.36578391147581746, "grad_norm": 0.42904141545295715, "learning_rate": 3.171287662149281e-06, "loss": 0.6238, "step": 8826 }, { "epoch": 0.36582535538149114, "grad_norm": 0.4006114602088928, "learning_rate": 3.171080442620913e-06, "loss": 0.722, "step": 8827 }, { "epoch": 0.3658667992871648, "grad_norm": 0.39533430337905884, "learning_rate": 3.1708732230925442e-06, "loss": 0.7078, "step": 8828 }, { "epoch": 0.3659082431928385, "grad_norm": 0.3962975740432739, "learning_rate": 3.170666003564176e-06, "loss": 0.6937, "step": 8829 }, { "epoch": 0.36594968709851217, "grad_norm": 0.4026281237602234, "learning_rate": 3.1704587840358074e-06, "loss": 0.6426, "step": 8830 }, { "epoch": 0.36599113100418584, "grad_norm": 0.39985892176628113, "learning_rate": 3.1702515645074392e-06, "loss": 0.6353, "step": 8831 }, { "epoch": 0.3660325749098595, "grad_norm": 0.40309709310531616, "learning_rate": 3.1700443449790715e-06, "loss": 0.6909, "step": 8832 }, { "epoch": 0.3660740188155332, "grad_norm": 0.38339272141456604, "learning_rate": 3.169837125450703e-06, "loss": 0.7322, "step": 8833 }, { "epoch": 0.3661154627212068, "grad_norm": 0.44847530126571655, "learning_rate": 3.1696299059223347e-06, "loss": 0.6941, "step": 8834 }, { "epoch": 0.3661569066268805, "grad_norm": 0.4052031636238098, "learning_rate": 3.169422686393966e-06, "loss": 0.6294, "step": 8835 }, { "epoch": 0.3661983505325542, "grad_norm": 0.40281644463539124, "learning_rate": 3.169215466865598e-06, "loss": 0.7249, "step": 8836 }, { "epoch": 0.36623979443822785, "grad_norm": 0.39112725853919983, "learning_rate": 3.1690082473372292e-06, "loss": 0.7258, "step": 8837 }, { "epoch": 0.3662812383439015, "grad_norm": 0.4365958869457245, "learning_rate": 3.168801027808861e-06, "loss": 0.7205, "step": 8838 }, { "epoch": 0.3663226822495752, "grad_norm": 0.40034088492393494, "learning_rate": 3.1685938082804924e-06, "loss": 0.7083, "step": 8839 }, { "epoch": 0.3663641261552489, "grad_norm": 0.440376877784729, "learning_rate": 
3.1683865887521243e-06, "loss": 0.6729, "step": 8840 }, { "epoch": 0.36640557006092256, "grad_norm": 0.4406425952911377, "learning_rate": 3.168179369223756e-06, "loss": 0.6744, "step": 8841 }, { "epoch": 0.36644701396659624, "grad_norm": 0.3999883234500885, "learning_rate": 3.1679721496953874e-06, "loss": 0.6677, "step": 8842 }, { "epoch": 0.36648845787226986, "grad_norm": 0.43860435485839844, "learning_rate": 3.1677649301670193e-06, "loss": 0.7212, "step": 8843 }, { "epoch": 0.36652990177794353, "grad_norm": 0.43907806277275085, "learning_rate": 3.1675577106386506e-06, "loss": 0.6941, "step": 8844 }, { "epoch": 0.3665713456836172, "grad_norm": 0.4345446825027466, "learning_rate": 3.1673504911102824e-06, "loss": 0.7002, "step": 8845 }, { "epoch": 0.3666127895892909, "grad_norm": 0.4227834641933441, "learning_rate": 3.167143271581914e-06, "loss": 0.7002, "step": 8846 }, { "epoch": 0.36665423349496457, "grad_norm": 0.3927697539329529, "learning_rate": 3.1669360520535456e-06, "loss": 0.668, "step": 8847 }, { "epoch": 0.36669567740063824, "grad_norm": 0.4078225791454315, "learning_rate": 3.166728832525177e-06, "loss": 0.7678, "step": 8848 }, { "epoch": 0.3667371213063119, "grad_norm": 0.4158647060394287, "learning_rate": 3.166521612996809e-06, "loss": 0.741, "step": 8849 }, { "epoch": 0.3667785652119856, "grad_norm": 0.37287968397140503, "learning_rate": 3.166314393468441e-06, "loss": 0.6517, "step": 8850 }, { "epoch": 0.3668200091176593, "grad_norm": 0.39171770215034485, "learning_rate": 3.1661071739400725e-06, "loss": 0.6335, "step": 8851 }, { "epoch": 0.3668614530233329, "grad_norm": 0.45891904830932617, "learning_rate": 3.1658999544117043e-06, "loss": 0.7556, "step": 8852 }, { "epoch": 0.3669028969290066, "grad_norm": 0.42991599440574646, "learning_rate": 3.1656927348833357e-06, "loss": 0.6675, "step": 8853 }, { "epoch": 0.36694434083468025, "grad_norm": 0.41664254665374756, "learning_rate": 3.1654855153549675e-06, "loss": 0.7004, "step": 8854 }, { "epoch": 
0.3669857847403539, "grad_norm": 0.43574804067611694, "learning_rate": 3.165278295826599e-06, "loss": 0.7046, "step": 8855 }, { "epoch": 0.3670272286460276, "grad_norm": 0.41894927620887756, "learning_rate": 3.1650710762982307e-06, "loss": 0.7124, "step": 8856 }, { "epoch": 0.3670686725517013, "grad_norm": 0.3938758075237274, "learning_rate": 3.1648638567698625e-06, "loss": 0.688, "step": 8857 }, { "epoch": 0.36711011645737496, "grad_norm": 0.4141032099723816, "learning_rate": 3.164656637241494e-06, "loss": 0.7043, "step": 8858 }, { "epoch": 0.36715156036304863, "grad_norm": 0.4135150611400604, "learning_rate": 3.1644494177131257e-06, "loss": 0.7212, "step": 8859 }, { "epoch": 0.3671930042687223, "grad_norm": 0.4092787504196167, "learning_rate": 3.164242198184757e-06, "loss": 0.6729, "step": 8860 }, { "epoch": 0.36723444817439593, "grad_norm": 0.4133279323577881, "learning_rate": 3.164034978656389e-06, "loss": 0.7372, "step": 8861 }, { "epoch": 0.3672758920800696, "grad_norm": 0.38868510723114014, "learning_rate": 3.1638277591280202e-06, "loss": 0.6643, "step": 8862 }, { "epoch": 0.3673173359857433, "grad_norm": 0.424174040555954, "learning_rate": 3.163620539599652e-06, "loss": 0.7004, "step": 8863 }, { "epoch": 0.36735877989141696, "grad_norm": 0.42038533091545105, "learning_rate": 3.1634133200712834e-06, "loss": 0.7422, "step": 8864 }, { "epoch": 0.36740022379709064, "grad_norm": 0.3929160535335541, "learning_rate": 3.1632061005429152e-06, "loss": 0.6885, "step": 8865 }, { "epoch": 0.3674416677027643, "grad_norm": 0.41162413358688354, "learning_rate": 3.1629988810145475e-06, "loss": 0.7068, "step": 8866 }, { "epoch": 0.367483111608438, "grad_norm": 0.3875158429145813, "learning_rate": 3.162791661486179e-06, "loss": 0.7146, "step": 8867 }, { "epoch": 0.3675245555141117, "grad_norm": 0.4303911030292511, "learning_rate": 3.1625844419578107e-06, "loss": 0.6968, "step": 8868 }, { "epoch": 0.3675659994197853, "grad_norm": 0.37923362851142883, "learning_rate": 
3.162377222429442e-06, "loss": 0.6641, "step": 8869 }, { "epoch": 0.36760744332545897, "grad_norm": 0.3993144631385803, "learning_rate": 3.162170002901074e-06, "loss": 0.731, "step": 8870 }, { "epoch": 0.36764888723113265, "grad_norm": 0.40559154748916626, "learning_rate": 3.1619627833727053e-06, "loss": 0.7188, "step": 8871 }, { "epoch": 0.3676903311368063, "grad_norm": 0.40088963508605957, "learning_rate": 3.161755563844337e-06, "loss": 0.6777, "step": 8872 }, { "epoch": 0.36773177504248, "grad_norm": 0.4128590226173401, "learning_rate": 3.1615483443159684e-06, "loss": 0.6943, "step": 8873 }, { "epoch": 0.3677732189481537, "grad_norm": 0.4353982210159302, "learning_rate": 3.1613411247876003e-06, "loss": 0.7417, "step": 8874 }, { "epoch": 0.36781466285382736, "grad_norm": 0.4161624610424042, "learning_rate": 3.161133905259232e-06, "loss": 0.6355, "step": 8875 }, { "epoch": 0.36785610675950103, "grad_norm": 0.41683724522590637, "learning_rate": 3.1609266857308635e-06, "loss": 0.7651, "step": 8876 }, { "epoch": 0.3678975506651747, "grad_norm": 0.44149574637413025, "learning_rate": 3.1607194662024953e-06, "loss": 0.7206, "step": 8877 }, { "epoch": 0.36793899457084833, "grad_norm": 0.36669236421585083, "learning_rate": 3.1605122466741266e-06, "loss": 0.6589, "step": 8878 }, { "epoch": 0.367980438476522, "grad_norm": 0.42263463139533997, "learning_rate": 3.1603050271457585e-06, "loss": 0.697, "step": 8879 }, { "epoch": 0.3680218823821957, "grad_norm": 0.39665165543556213, "learning_rate": 3.16009780761739e-06, "loss": 0.6768, "step": 8880 }, { "epoch": 0.36806332628786936, "grad_norm": 0.4220811426639557, "learning_rate": 3.1598905880890217e-06, "loss": 0.7172, "step": 8881 }, { "epoch": 0.36810477019354304, "grad_norm": 0.4566970765590668, "learning_rate": 3.159683368560653e-06, "loss": 0.7004, "step": 8882 }, { "epoch": 0.3681462140992167, "grad_norm": 0.42729124426841736, "learning_rate": 3.159476149032285e-06, "loss": 0.702, "step": 8883 }, { "epoch": 
0.3681876580048904, "grad_norm": 0.39075806736946106, "learning_rate": 3.159268929503917e-06, "loss": 0.7207, "step": 8884 }, { "epoch": 0.36822910191056407, "grad_norm": 0.40214312076568604, "learning_rate": 3.1590617099755485e-06, "loss": 0.7302, "step": 8885 }, { "epoch": 0.36827054581623775, "grad_norm": 0.3944583833217621, "learning_rate": 3.1588544904471803e-06, "loss": 0.7052, "step": 8886 }, { "epoch": 0.36831198972191137, "grad_norm": 0.4106513559818268, "learning_rate": 3.1586472709188117e-06, "loss": 0.7422, "step": 8887 }, { "epoch": 0.36835343362758505, "grad_norm": 0.41970962285995483, "learning_rate": 3.1584400513904435e-06, "loss": 0.7058, "step": 8888 }, { "epoch": 0.3683948775332587, "grad_norm": 0.3840161859989166, "learning_rate": 3.158232831862075e-06, "loss": 0.6943, "step": 8889 }, { "epoch": 0.3684363214389324, "grad_norm": 0.4291200339794159, "learning_rate": 3.1580256123337067e-06, "loss": 0.7437, "step": 8890 }, { "epoch": 0.3684777653446061, "grad_norm": 0.42865267395973206, "learning_rate": 3.157818392805338e-06, "loss": 0.7101, "step": 8891 }, { "epoch": 0.36851920925027976, "grad_norm": 0.4136693775653839, "learning_rate": 3.15761117327697e-06, "loss": 0.7096, "step": 8892 }, { "epoch": 0.36856065315595343, "grad_norm": 0.4085708558559418, "learning_rate": 3.1574039537486017e-06, "loss": 0.6813, "step": 8893 }, { "epoch": 0.3686020970616271, "grad_norm": 0.3818548619747162, "learning_rate": 3.157196734220233e-06, "loss": 0.6488, "step": 8894 }, { "epoch": 0.36864354096730073, "grad_norm": 0.415693998336792, "learning_rate": 3.156989514691865e-06, "loss": 0.6547, "step": 8895 }, { "epoch": 0.3686849848729744, "grad_norm": 0.38231855630874634, "learning_rate": 3.1567822951634962e-06, "loss": 0.6954, "step": 8896 }, { "epoch": 0.3687264287786481, "grad_norm": 0.4177792966365814, "learning_rate": 3.156575075635128e-06, "loss": 0.7069, "step": 8897 }, { "epoch": 0.36876787268432176, "grad_norm": 0.41496288776397705, "learning_rate": 
3.1563678561067594e-06, "loss": 0.6851, "step": 8898 }, { "epoch": 0.36880931658999544, "grad_norm": 0.38058000802993774, "learning_rate": 3.1561606365783913e-06, "loss": 0.6969, "step": 8899 }, { "epoch": 0.3688507604956691, "grad_norm": 0.4469859004020691, "learning_rate": 3.1559534170500226e-06, "loss": 0.7708, "step": 8900 }, { "epoch": 0.3688922044013428, "grad_norm": 0.4495726525783539, "learning_rate": 3.155746197521655e-06, "loss": 0.6992, "step": 8901 }, { "epoch": 0.36893364830701647, "grad_norm": 0.44434696435928345, "learning_rate": 3.1555389779932867e-06, "loss": 0.6707, "step": 8902 }, { "epoch": 0.36897509221269015, "grad_norm": 0.3963526487350464, "learning_rate": 3.155331758464918e-06, "loss": 0.6733, "step": 8903 }, { "epoch": 0.36901653611836377, "grad_norm": 0.5381492376327515, "learning_rate": 3.15512453893655e-06, "loss": 0.7307, "step": 8904 }, { "epoch": 0.36905798002403745, "grad_norm": 0.4107595682144165, "learning_rate": 3.1549173194081813e-06, "loss": 0.6766, "step": 8905 }, { "epoch": 0.3690994239297111, "grad_norm": 0.3853423595428467, "learning_rate": 3.154710099879813e-06, "loss": 0.71, "step": 8906 }, { "epoch": 0.3691408678353848, "grad_norm": 0.44488653540611267, "learning_rate": 3.1545028803514445e-06, "loss": 0.7334, "step": 8907 }, { "epoch": 0.3691823117410585, "grad_norm": 0.42180588841438293, "learning_rate": 3.1542956608230763e-06, "loss": 0.646, "step": 8908 }, { "epoch": 0.36922375564673215, "grad_norm": 0.4292296767234802, "learning_rate": 3.154088441294708e-06, "loss": 0.6484, "step": 8909 }, { "epoch": 0.36926519955240583, "grad_norm": 0.43941107392311096, "learning_rate": 3.1538812217663395e-06, "loss": 0.6658, "step": 8910 }, { "epoch": 0.3693066434580795, "grad_norm": 0.4024118483066559, "learning_rate": 3.1536740022379713e-06, "loss": 0.7249, "step": 8911 }, { "epoch": 0.3693480873637532, "grad_norm": 0.39081859588623047, "learning_rate": 3.1534667827096027e-06, "loss": 0.6953, "step": 8912 }, { "epoch": 
0.3693895312694268, "grad_norm": 0.4163510203361511, "learning_rate": 3.1532595631812345e-06, "loss": 0.7219, "step": 8913 }, { "epoch": 0.3694309751751005, "grad_norm": 0.38302725553512573, "learning_rate": 3.153052343652866e-06, "loss": 0.687, "step": 8914 }, { "epoch": 0.36947241908077416, "grad_norm": 0.422585129737854, "learning_rate": 3.1528451241244977e-06, "loss": 0.6946, "step": 8915 }, { "epoch": 0.36951386298644784, "grad_norm": 0.4248790442943573, "learning_rate": 3.152637904596129e-06, "loss": 0.7201, "step": 8916 }, { "epoch": 0.3695553068921215, "grad_norm": 0.3838922381401062, "learning_rate": 3.1524306850677613e-06, "loss": 0.6603, "step": 8917 }, { "epoch": 0.3695967507977952, "grad_norm": 0.4402380585670471, "learning_rate": 3.152223465539393e-06, "loss": 0.7576, "step": 8918 }, { "epoch": 0.36963819470346887, "grad_norm": 0.4050540626049042, "learning_rate": 3.1520162460110245e-06, "loss": 0.6968, "step": 8919 }, { "epoch": 0.36967963860914255, "grad_norm": 0.4492555856704712, "learning_rate": 3.1518090264826563e-06, "loss": 0.6975, "step": 8920 }, { "epoch": 0.3697210825148162, "grad_norm": 0.40193501114845276, "learning_rate": 3.1516018069542877e-06, "loss": 0.7595, "step": 8921 }, { "epoch": 0.36976252642048985, "grad_norm": 0.43714073300361633, "learning_rate": 3.1513945874259195e-06, "loss": 0.7666, "step": 8922 }, { "epoch": 0.3698039703261635, "grad_norm": 0.43069228529930115, "learning_rate": 3.151187367897551e-06, "loss": 0.6665, "step": 8923 }, { "epoch": 0.3698454142318372, "grad_norm": 0.44574522972106934, "learning_rate": 3.1509801483691827e-06, "loss": 0.7229, "step": 8924 }, { "epoch": 0.3698868581375109, "grad_norm": 0.4311257302761078, "learning_rate": 3.150772928840814e-06, "loss": 0.6721, "step": 8925 }, { "epoch": 0.36992830204318455, "grad_norm": 0.41908079385757446, "learning_rate": 3.150565709312446e-06, "loss": 0.697, "step": 8926 }, { "epoch": 0.36996974594885823, "grad_norm": 0.4305960536003113, "learning_rate": 
3.1503584897840777e-06, "loss": 0.754, "step": 8927 }, { "epoch": 0.3700111898545319, "grad_norm": 0.40490633249282837, "learning_rate": 3.150151270255709e-06, "loss": 0.6755, "step": 8928 }, { "epoch": 0.3700526337602056, "grad_norm": 0.3791857361793518, "learning_rate": 3.149944050727341e-06, "loss": 0.6689, "step": 8929 }, { "epoch": 0.3700940776658792, "grad_norm": 0.4229313135147095, "learning_rate": 3.1497368311989723e-06, "loss": 0.712, "step": 8930 }, { "epoch": 0.3701355215715529, "grad_norm": 0.43068942427635193, "learning_rate": 3.149529611670604e-06, "loss": 0.7083, "step": 8931 }, { "epoch": 0.37017696547722656, "grad_norm": 0.41415274143218994, "learning_rate": 3.1493223921422354e-06, "loss": 0.6925, "step": 8932 }, { "epoch": 0.37021840938290024, "grad_norm": 0.45212414860725403, "learning_rate": 3.1491151726138673e-06, "loss": 0.6646, "step": 8933 }, { "epoch": 0.3702598532885739, "grad_norm": 0.4104657769203186, "learning_rate": 3.1489079530854986e-06, "loss": 0.6316, "step": 8934 }, { "epoch": 0.3703012971942476, "grad_norm": 0.413814902305603, "learning_rate": 3.148700733557131e-06, "loss": 0.7446, "step": 8935 }, { "epoch": 0.37034274109992127, "grad_norm": 0.3918263614177704, "learning_rate": 3.1484935140287627e-06, "loss": 0.6893, "step": 8936 }, { "epoch": 0.37038418500559495, "grad_norm": 0.40154901146888733, "learning_rate": 3.148286294500394e-06, "loss": 0.6958, "step": 8937 }, { "epoch": 0.3704256289112686, "grad_norm": 0.44967883825302124, "learning_rate": 3.148079074972026e-06, "loss": 0.7539, "step": 8938 }, { "epoch": 0.37046707281694224, "grad_norm": 0.37649351358413696, "learning_rate": 3.1478718554436573e-06, "loss": 0.6957, "step": 8939 }, { "epoch": 0.3705085167226159, "grad_norm": 0.41725289821624756, "learning_rate": 3.147664635915289e-06, "loss": 0.689, "step": 8940 }, { "epoch": 0.3705499606282896, "grad_norm": 0.4126502275466919, "learning_rate": 3.1474574163869205e-06, "loss": 0.719, "step": 8941 }, { "epoch": 
0.3705914045339633, "grad_norm": 0.4377695918083191, "learning_rate": 3.1472501968585523e-06, "loss": 0.739, "step": 8942 }, { "epoch": 0.37063284843963695, "grad_norm": 0.38311824202537537, "learning_rate": 3.1470429773301837e-06, "loss": 0.6895, "step": 8943 }, { "epoch": 0.37067429234531063, "grad_norm": 0.40987834334373474, "learning_rate": 3.1468357578018155e-06, "loss": 0.7278, "step": 8944 }, { "epoch": 0.3707157362509843, "grad_norm": 0.39136096835136414, "learning_rate": 3.1466285382734473e-06, "loss": 0.729, "step": 8945 }, { "epoch": 0.370757180156658, "grad_norm": 0.42354851961135864, "learning_rate": 3.1464213187450787e-06, "loss": 0.6946, "step": 8946 }, { "epoch": 0.37079862406233166, "grad_norm": 0.43437498807907104, "learning_rate": 3.1462140992167105e-06, "loss": 0.6929, "step": 8947 }, { "epoch": 0.3708400679680053, "grad_norm": 0.39896121621131897, "learning_rate": 3.146006879688342e-06, "loss": 0.7188, "step": 8948 }, { "epoch": 0.37088151187367896, "grad_norm": 0.3920890688896179, "learning_rate": 3.1457996601599737e-06, "loss": 0.6741, "step": 8949 }, { "epoch": 0.37092295577935264, "grad_norm": 0.4017693102359772, "learning_rate": 3.145592440631605e-06, "loss": 0.7036, "step": 8950 }, { "epoch": 0.3709643996850263, "grad_norm": 0.4487254321575165, "learning_rate": 3.1453852211032373e-06, "loss": 0.6853, "step": 8951 }, { "epoch": 0.3710058435907, "grad_norm": 0.41148504614830017, "learning_rate": 3.1451780015748682e-06, "loss": 0.6652, "step": 8952 }, { "epoch": 0.37104728749637367, "grad_norm": 0.40088164806365967, "learning_rate": 3.1449707820465005e-06, "loss": 0.7283, "step": 8953 }, { "epoch": 0.37108873140204734, "grad_norm": 0.39436858892440796, "learning_rate": 3.1447635625181323e-06, "loss": 0.6843, "step": 8954 }, { "epoch": 0.371130175307721, "grad_norm": 0.4027118384838104, "learning_rate": 3.1445563429897637e-06, "loss": 0.7324, "step": 8955 }, { "epoch": 0.37117161921339464, "grad_norm": 0.42992323637008667, "learning_rate": 
3.1443491234613955e-06, "loss": 0.7239, "step": 8956 }, { "epoch": 0.3712130631190683, "grad_norm": 0.45619744062423706, "learning_rate": 3.144141903933027e-06, "loss": 0.7329, "step": 8957 }, { "epoch": 0.371254507024742, "grad_norm": 0.41691407561302185, "learning_rate": 3.1439346844046587e-06, "loss": 0.6731, "step": 8958 }, { "epoch": 0.3712959509304157, "grad_norm": 0.41546350717544556, "learning_rate": 3.14372746487629e-06, "loss": 0.6968, "step": 8959 }, { "epoch": 0.37133739483608935, "grad_norm": 0.44289126992225647, "learning_rate": 3.143520245347922e-06, "loss": 0.7498, "step": 8960 }, { "epoch": 0.37137883874176303, "grad_norm": 0.40682074427604675, "learning_rate": 3.1433130258195533e-06, "loss": 0.7139, "step": 8961 }, { "epoch": 0.3714202826474367, "grad_norm": 0.3976593613624573, "learning_rate": 3.143105806291185e-06, "loss": 0.678, "step": 8962 }, { "epoch": 0.3714617265531104, "grad_norm": 0.44312337040901184, "learning_rate": 3.142898586762817e-06, "loss": 0.6653, "step": 8963 }, { "epoch": 0.37150317045878406, "grad_norm": 0.44188857078552246, "learning_rate": 3.1426913672344483e-06, "loss": 0.7041, "step": 8964 }, { "epoch": 0.3715446143644577, "grad_norm": 0.4210462272167206, "learning_rate": 3.14248414770608e-06, "loss": 0.6686, "step": 8965 }, { "epoch": 0.37158605827013136, "grad_norm": 0.43581393361091614, "learning_rate": 3.1422769281777115e-06, "loss": 0.6913, "step": 8966 }, { "epoch": 0.37162750217580504, "grad_norm": 0.4365854263305664, "learning_rate": 3.1420697086493433e-06, "loss": 0.7068, "step": 8967 }, { "epoch": 0.3716689460814787, "grad_norm": 0.40731990337371826, "learning_rate": 3.1418624891209746e-06, "loss": 0.6987, "step": 8968 }, { "epoch": 0.3717103899871524, "grad_norm": 0.437568724155426, "learning_rate": 3.141655269592607e-06, "loss": 0.7211, "step": 8969 }, { "epoch": 0.37175183389282607, "grad_norm": 0.4120711088180542, "learning_rate": 3.1414480500642387e-06, "loss": 0.7236, "step": 8970 }, { "epoch": 
0.37179327779849974, "grad_norm": 0.42031511664390564, "learning_rate": 3.14124083053587e-06, "loss": 0.7183, "step": 8971 }, { "epoch": 0.3718347217041734, "grad_norm": 0.4255237877368927, "learning_rate": 3.141033611007502e-06, "loss": 0.744, "step": 8972 }, { "epoch": 0.3718761656098471, "grad_norm": 0.429678738117218, "learning_rate": 3.1408263914791333e-06, "loss": 0.7146, "step": 8973 }, { "epoch": 0.3719176095155207, "grad_norm": 0.4080464839935303, "learning_rate": 3.140619171950765e-06, "loss": 0.6656, "step": 8974 }, { "epoch": 0.3719590534211944, "grad_norm": 0.40447452664375305, "learning_rate": 3.1404119524223965e-06, "loss": 0.6836, "step": 8975 }, { "epoch": 0.3720004973268681, "grad_norm": 0.4139817953109741, "learning_rate": 3.1402047328940283e-06, "loss": 0.6909, "step": 8976 }, { "epoch": 0.37204194123254175, "grad_norm": 0.4272252321243286, "learning_rate": 3.1399975133656597e-06, "loss": 0.7529, "step": 8977 }, { "epoch": 0.37208338513821543, "grad_norm": 0.40480709075927734, "learning_rate": 3.1397902938372915e-06, "loss": 0.6465, "step": 8978 }, { "epoch": 0.3721248290438891, "grad_norm": 0.4349636435508728, "learning_rate": 3.1395830743089233e-06, "loss": 0.7134, "step": 8979 }, { "epoch": 0.3721662729495628, "grad_norm": 0.3812727630138397, "learning_rate": 3.1393758547805547e-06, "loss": 0.6343, "step": 8980 }, { "epoch": 0.37220771685523646, "grad_norm": 0.4279041886329651, "learning_rate": 3.1391686352521865e-06, "loss": 0.7083, "step": 8981 }, { "epoch": 0.3722491607609101, "grad_norm": 0.43525397777557373, "learning_rate": 3.138961415723818e-06, "loss": 0.7056, "step": 8982 }, { "epoch": 0.37229060466658376, "grad_norm": 0.41355806589126587, "learning_rate": 3.1387541961954497e-06, "loss": 0.7041, "step": 8983 }, { "epoch": 0.37233204857225743, "grad_norm": 0.4455762505531311, "learning_rate": 3.138546976667081e-06, "loss": 0.7184, "step": 8984 }, { "epoch": 0.3723734924779311, "grad_norm": 0.40540534257888794, "learning_rate": 
3.1383397571387133e-06, "loss": 0.6278, "step": 8985 }, { "epoch": 0.3724149363836048, "grad_norm": 0.4474935233592987, "learning_rate": 3.1381325376103442e-06, "loss": 0.7366, "step": 8986 }, { "epoch": 0.37245638028927847, "grad_norm": 0.50785893201828, "learning_rate": 3.1379253180819765e-06, "loss": 0.8015, "step": 8987 }, { "epoch": 0.37249782419495214, "grad_norm": 0.44993510842323303, "learning_rate": 3.1377180985536083e-06, "loss": 0.7634, "step": 8988 }, { "epoch": 0.3725392681006258, "grad_norm": 0.44682446122169495, "learning_rate": 3.1375108790252397e-06, "loss": 0.7126, "step": 8989 }, { "epoch": 0.3725807120062995, "grad_norm": 0.4277103841304779, "learning_rate": 3.1373036594968715e-06, "loss": 0.7437, "step": 8990 }, { "epoch": 0.3726221559119731, "grad_norm": 0.387192964553833, "learning_rate": 3.137096439968503e-06, "loss": 0.681, "step": 8991 }, { "epoch": 0.3726635998176468, "grad_norm": 0.4618108868598938, "learning_rate": 3.1368892204401347e-06, "loss": 0.7266, "step": 8992 }, { "epoch": 0.3727050437233205, "grad_norm": 0.3832768499851227, "learning_rate": 3.136682000911766e-06, "loss": 0.731, "step": 8993 }, { "epoch": 0.37274648762899415, "grad_norm": 0.4146955907344818, "learning_rate": 3.136474781383398e-06, "loss": 0.7415, "step": 8994 }, { "epoch": 0.3727879315346678, "grad_norm": 0.4216214716434479, "learning_rate": 3.1362675618550293e-06, "loss": 0.7126, "step": 8995 }, { "epoch": 0.3728293754403415, "grad_norm": 0.4020933508872986, "learning_rate": 3.136060342326661e-06, "loss": 0.6865, "step": 8996 }, { "epoch": 0.3728708193460152, "grad_norm": 0.44254156947135925, "learning_rate": 3.135853122798293e-06, "loss": 0.7759, "step": 8997 }, { "epoch": 0.37291226325168886, "grad_norm": 0.3965075612068176, "learning_rate": 3.1356459032699243e-06, "loss": 0.7112, "step": 8998 }, { "epoch": 0.37295370715736254, "grad_norm": 0.4085024893283844, "learning_rate": 3.135438683741556e-06, "loss": 0.7026, "step": 8999 }, { "epoch": 
0.37299515106303616, "grad_norm": 0.3847168982028961, "learning_rate": 3.1352314642131875e-06, "loss": 0.6571, "step": 9000 }, { "epoch": 0.37303659496870983, "grad_norm": 0.4005201458930969, "learning_rate": 3.1350242446848193e-06, "loss": 0.6799, "step": 9001 }, { "epoch": 0.3730780388743835, "grad_norm": 0.38419410586357117, "learning_rate": 3.1348170251564507e-06, "loss": 0.6427, "step": 9002 }, { "epoch": 0.3731194827800572, "grad_norm": 0.4034765362739563, "learning_rate": 3.134609805628083e-06, "loss": 0.7407, "step": 9003 }, { "epoch": 0.37316092668573086, "grad_norm": 0.42471420764923096, "learning_rate": 3.134402586099714e-06, "loss": 0.6909, "step": 9004 }, { "epoch": 0.37320237059140454, "grad_norm": 0.3986184000968933, "learning_rate": 3.134195366571346e-06, "loss": 0.6698, "step": 9005 }, { "epoch": 0.3732438144970782, "grad_norm": 0.3673023581504822, "learning_rate": 3.133988147042978e-06, "loss": 0.6846, "step": 9006 }, { "epoch": 0.3732852584027519, "grad_norm": 0.4386202096939087, "learning_rate": 3.1337809275146093e-06, "loss": 0.7034, "step": 9007 }, { "epoch": 0.3733267023084256, "grad_norm": 0.42089977860450745, "learning_rate": 3.133573707986241e-06, "loss": 0.7126, "step": 9008 }, { "epoch": 0.3733681462140992, "grad_norm": 0.42322877049446106, "learning_rate": 3.1333664884578725e-06, "loss": 0.7217, "step": 9009 }, { "epoch": 0.37340959011977287, "grad_norm": 0.4157218933105469, "learning_rate": 3.1331592689295043e-06, "loss": 0.6995, "step": 9010 }, { "epoch": 0.37345103402544655, "grad_norm": 0.4435606598854065, "learning_rate": 3.1329520494011357e-06, "loss": 0.7151, "step": 9011 }, { "epoch": 0.3734924779311202, "grad_norm": 0.42205163836479187, "learning_rate": 3.1327448298727675e-06, "loss": 0.7203, "step": 9012 }, { "epoch": 0.3735339218367939, "grad_norm": 0.4289886951446533, "learning_rate": 3.132537610344399e-06, "loss": 0.7288, "step": 9013 }, { "epoch": 0.3735753657424676, "grad_norm": 0.44541287422180176, "learning_rate": 
3.1323303908160307e-06, "loss": 0.6755, "step": 9014 }, { "epoch": 0.37361680964814126, "grad_norm": 0.42502719163894653, "learning_rate": 3.1321231712876625e-06, "loss": 0.6941, "step": 9015 }, { "epoch": 0.37365825355381493, "grad_norm": 0.418663889169693, "learning_rate": 3.131915951759294e-06, "loss": 0.7354, "step": 9016 }, { "epoch": 0.37369969745948856, "grad_norm": 0.37722355127334595, "learning_rate": 3.1317087322309257e-06, "loss": 0.7615, "step": 9017 }, { "epoch": 0.37374114136516223, "grad_norm": 0.4065714180469513, "learning_rate": 3.131501512702557e-06, "loss": 0.6987, "step": 9018 }, { "epoch": 0.3737825852708359, "grad_norm": 0.37011241912841797, "learning_rate": 3.1312942931741893e-06, "loss": 0.6885, "step": 9019 }, { "epoch": 0.3738240291765096, "grad_norm": 0.39469170570373535, "learning_rate": 3.1310870736458203e-06, "loss": 0.6821, "step": 9020 }, { "epoch": 0.37386547308218326, "grad_norm": 0.4192628264427185, "learning_rate": 3.1308798541174525e-06, "loss": 0.6526, "step": 9021 }, { "epoch": 0.37390691698785694, "grad_norm": 0.44984716176986694, "learning_rate": 3.1306726345890835e-06, "loss": 0.7432, "step": 9022 }, { "epoch": 0.3739483608935306, "grad_norm": 0.4296228289604187, "learning_rate": 3.1304654150607157e-06, "loss": 0.6796, "step": 9023 }, { "epoch": 0.3739898047992043, "grad_norm": 0.40176934003829956, "learning_rate": 3.1302581955323475e-06, "loss": 0.6724, "step": 9024 }, { "epoch": 0.37403124870487797, "grad_norm": 0.580371618270874, "learning_rate": 3.130050976003979e-06, "loss": 0.7333, "step": 9025 }, { "epoch": 0.3740726926105516, "grad_norm": 0.40399980545043945, "learning_rate": 3.1298437564756107e-06, "loss": 0.6982, "step": 9026 }, { "epoch": 0.37411413651622527, "grad_norm": 0.47145065665245056, "learning_rate": 3.129636536947242e-06, "loss": 0.8025, "step": 9027 }, { "epoch": 0.37415558042189895, "grad_norm": 0.4262302815914154, "learning_rate": 3.129429317418874e-06, "loss": 0.7062, "step": 9028 }, { "epoch": 
0.3741970243275726, "grad_norm": 0.40217873454093933, "learning_rate": 3.1292220978905053e-06, "loss": 0.7089, "step": 9029 }, { "epoch": 0.3742384682332463, "grad_norm": 0.41054466366767883, "learning_rate": 3.129014878362137e-06, "loss": 0.6703, "step": 9030 }, { "epoch": 0.37427991213892, "grad_norm": 0.42367956042289734, "learning_rate": 3.128807658833769e-06, "loss": 0.771, "step": 9031 }, { "epoch": 0.37432135604459366, "grad_norm": 0.41689419746398926, "learning_rate": 3.1286004393054003e-06, "loss": 0.6696, "step": 9032 }, { "epoch": 0.37436279995026733, "grad_norm": 0.4569893777370453, "learning_rate": 3.128393219777032e-06, "loss": 0.7959, "step": 9033 }, { "epoch": 0.374404243855941, "grad_norm": 0.4198036789894104, "learning_rate": 3.1281860002486635e-06, "loss": 0.6592, "step": 9034 }, { "epoch": 0.37444568776161463, "grad_norm": 0.4172281324863434, "learning_rate": 3.1279787807202953e-06, "loss": 0.6841, "step": 9035 }, { "epoch": 0.3744871316672883, "grad_norm": 0.41084471344947815, "learning_rate": 3.1277715611919267e-06, "loss": 0.7244, "step": 9036 }, { "epoch": 0.374528575572962, "grad_norm": 0.41479793190956116, "learning_rate": 3.127564341663559e-06, "loss": 0.6646, "step": 9037 }, { "epoch": 0.37457001947863566, "grad_norm": 0.36531442403793335, "learning_rate": 3.12735712213519e-06, "loss": 0.7004, "step": 9038 }, { "epoch": 0.37461146338430934, "grad_norm": 0.39806339144706726, "learning_rate": 3.127149902606822e-06, "loss": 0.6904, "step": 9039 }, { "epoch": 0.374652907289983, "grad_norm": 0.3861761689186096, "learning_rate": 3.126942683078454e-06, "loss": 0.6917, "step": 9040 }, { "epoch": 0.3746943511956567, "grad_norm": 0.4125358462333679, "learning_rate": 3.1267354635500853e-06, "loss": 0.7041, "step": 9041 }, { "epoch": 0.37473579510133037, "grad_norm": 0.3887813687324524, "learning_rate": 3.126528244021717e-06, "loss": 0.6926, "step": 9042 }, { "epoch": 0.374777239007004, "grad_norm": 0.41943198442459106, "learning_rate": 
3.1263210244933485e-06, "loss": 0.6904, "step": 9043 }, { "epoch": 0.37481868291267767, "grad_norm": 0.40313640236854553, "learning_rate": 3.1261138049649803e-06, "loss": 0.6785, "step": 9044 }, { "epoch": 0.37486012681835135, "grad_norm": 0.42146387696266174, "learning_rate": 3.1259065854366117e-06, "loss": 0.6936, "step": 9045 }, { "epoch": 0.374901570724025, "grad_norm": 0.39755356311798096, "learning_rate": 3.1256993659082435e-06, "loss": 0.731, "step": 9046 }, { "epoch": 0.3749430146296987, "grad_norm": 0.3908047378063202, "learning_rate": 3.125492146379875e-06, "loss": 0.684, "step": 9047 }, { "epoch": 0.3749844585353724, "grad_norm": 0.4660780429840088, "learning_rate": 3.1252849268515067e-06, "loss": 0.7664, "step": 9048 }, { "epoch": 0.37502590244104606, "grad_norm": 0.3982694745063782, "learning_rate": 3.1250777073231385e-06, "loss": 0.6882, "step": 9049 }, { "epoch": 0.37506734634671973, "grad_norm": 0.3845243752002716, "learning_rate": 3.12487048779477e-06, "loss": 0.6697, "step": 9050 }, { "epoch": 0.3751087902523934, "grad_norm": 0.40967074036598206, "learning_rate": 3.1246632682664017e-06, "loss": 0.7224, "step": 9051 }, { "epoch": 0.37515023415806703, "grad_norm": 0.45407307147979736, "learning_rate": 3.124456048738033e-06, "loss": 0.6774, "step": 9052 }, { "epoch": 0.3751916780637407, "grad_norm": 0.36823195219039917, "learning_rate": 3.1242488292096653e-06, "loss": 0.6522, "step": 9053 }, { "epoch": 0.3752331219694144, "grad_norm": 0.3910224735736847, "learning_rate": 3.1240416096812963e-06, "loss": 0.7097, "step": 9054 }, { "epoch": 0.37527456587508806, "grad_norm": 0.4064963459968567, "learning_rate": 3.1238343901529285e-06, "loss": 0.6512, "step": 9055 }, { "epoch": 0.37531600978076174, "grad_norm": 0.3889906704425812, "learning_rate": 3.12362717062456e-06, "loss": 0.6978, "step": 9056 }, { "epoch": 0.3753574536864354, "grad_norm": 0.3777136206626892, "learning_rate": 3.1234199510961917e-06, "loss": 0.6639, "step": 9057 }, { "epoch": 
0.3753988975921091, "grad_norm": 0.39697420597076416, "learning_rate": 3.1232127315678235e-06, "loss": 0.666, "step": 9058 }, { "epoch": 0.37544034149778277, "grad_norm": 0.41608989238739014, "learning_rate": 3.123005512039455e-06, "loss": 0.762, "step": 9059 }, { "epoch": 0.37548178540345645, "grad_norm": 0.4434530436992645, "learning_rate": 3.1227982925110867e-06, "loss": 0.7117, "step": 9060 }, { "epoch": 0.37552322930913007, "grad_norm": 0.4080751836299896, "learning_rate": 3.122591072982718e-06, "loss": 0.7278, "step": 9061 }, { "epoch": 0.37556467321480375, "grad_norm": 0.4407501816749573, "learning_rate": 3.12238385345435e-06, "loss": 0.7041, "step": 9062 }, { "epoch": 0.3756061171204774, "grad_norm": 0.4107012450695038, "learning_rate": 3.1221766339259813e-06, "loss": 0.7275, "step": 9063 }, { "epoch": 0.3756475610261511, "grad_norm": 0.402629554271698, "learning_rate": 3.121969414397613e-06, "loss": 0.7366, "step": 9064 }, { "epoch": 0.3756890049318248, "grad_norm": 0.4513614773750305, "learning_rate": 3.1217621948692445e-06, "loss": 0.7582, "step": 9065 }, { "epoch": 0.37573044883749845, "grad_norm": 0.4121415615081787, "learning_rate": 3.1215549753408763e-06, "loss": 0.7017, "step": 9066 }, { "epoch": 0.37577189274317213, "grad_norm": 0.4087435305118561, "learning_rate": 3.121347755812508e-06, "loss": 0.6853, "step": 9067 }, { "epoch": 0.3758133366488458, "grad_norm": 0.42026081681251526, "learning_rate": 3.1211405362841395e-06, "loss": 0.6765, "step": 9068 }, { "epoch": 0.3758547805545195, "grad_norm": 0.4215841293334961, "learning_rate": 3.1209333167557713e-06, "loss": 0.7017, "step": 9069 }, { "epoch": 0.3758962244601931, "grad_norm": 0.42803895473480225, "learning_rate": 3.1207260972274027e-06, "loss": 0.7014, "step": 9070 }, { "epoch": 0.3759376683658668, "grad_norm": 0.3890891671180725, "learning_rate": 3.120518877699035e-06, "loss": 0.6689, "step": 9071 }, { "epoch": 0.37597911227154046, "grad_norm": 0.3961104154586792, "learning_rate": 
3.120311658170666e-06, "loss": 0.6892, "step": 9072 }, { "epoch": 0.37602055617721414, "grad_norm": 0.41163522005081177, "learning_rate": 3.120104438642298e-06, "loss": 0.7423, "step": 9073 }, { "epoch": 0.3760620000828878, "grad_norm": 0.3853738605976105, "learning_rate": 3.1198972191139295e-06, "loss": 0.6439, "step": 9074 }, { "epoch": 0.3761034439885615, "grad_norm": 0.43779686093330383, "learning_rate": 3.1196899995855613e-06, "loss": 0.7368, "step": 9075 }, { "epoch": 0.37614488789423517, "grad_norm": 0.38855960965156555, "learning_rate": 3.119482780057193e-06, "loss": 0.6627, "step": 9076 }, { "epoch": 0.37618633179990885, "grad_norm": 0.4142540395259857, "learning_rate": 3.1192755605288245e-06, "loss": 0.6985, "step": 9077 }, { "epoch": 0.37622777570558247, "grad_norm": 0.4327195882797241, "learning_rate": 3.1190683410004563e-06, "loss": 0.6885, "step": 9078 }, { "epoch": 0.37626921961125614, "grad_norm": 0.4253315329551697, "learning_rate": 3.1188611214720877e-06, "loss": 0.6682, "step": 9079 }, { "epoch": 0.3763106635169298, "grad_norm": 0.42176294326782227, "learning_rate": 3.1186539019437195e-06, "loss": 0.7368, "step": 9080 }, { "epoch": 0.3763521074226035, "grad_norm": 0.43868452310562134, "learning_rate": 3.118446682415351e-06, "loss": 0.6815, "step": 9081 }, { "epoch": 0.3763935513282772, "grad_norm": 0.42157405614852905, "learning_rate": 3.1182394628869827e-06, "loss": 0.7676, "step": 9082 }, { "epoch": 0.37643499523395085, "grad_norm": 0.4277568757534027, "learning_rate": 3.118032243358614e-06, "loss": 0.6919, "step": 9083 }, { "epoch": 0.37647643913962453, "grad_norm": 0.42543360590934753, "learning_rate": 3.117825023830246e-06, "loss": 0.6975, "step": 9084 }, { "epoch": 0.3765178830452982, "grad_norm": 0.38533830642700195, "learning_rate": 3.1176178043018777e-06, "loss": 0.6488, "step": 9085 }, { "epoch": 0.3765593269509719, "grad_norm": 0.4000158905982971, "learning_rate": 3.117410584773509e-06, "loss": 0.6829, "step": 9086 }, { "epoch": 
0.3766007708566455, "grad_norm": 0.4085211753845215, "learning_rate": 3.1172033652451413e-06, "loss": 0.6871, "step": 9087 }, { "epoch": 0.3766422147623192, "grad_norm": 0.41819900274276733, "learning_rate": 3.1169961457167723e-06, "loss": 0.679, "step": 9088 }, { "epoch": 0.37668365866799286, "grad_norm": 0.37898722290992737, "learning_rate": 3.1167889261884045e-06, "loss": 0.6273, "step": 9089 }, { "epoch": 0.37672510257366654, "grad_norm": 0.3954674303531647, "learning_rate": 3.116581706660036e-06, "loss": 0.6678, "step": 9090 }, { "epoch": 0.3767665464793402, "grad_norm": 0.4099103510379791, "learning_rate": 3.1163744871316677e-06, "loss": 0.674, "step": 9091 }, { "epoch": 0.3768079903850139, "grad_norm": 0.4313313961029053, "learning_rate": 3.1161672676032995e-06, "loss": 0.693, "step": 9092 }, { "epoch": 0.37684943429068757, "grad_norm": 0.4157784879207611, "learning_rate": 3.115960048074931e-06, "loss": 0.7014, "step": 9093 }, { "epoch": 0.37689087819636125, "grad_norm": 0.38549521565437317, "learning_rate": 3.1157528285465627e-06, "loss": 0.7111, "step": 9094 }, { "epoch": 0.3769323221020349, "grad_norm": 0.4178113341331482, "learning_rate": 3.115545609018194e-06, "loss": 0.7021, "step": 9095 }, { "epoch": 0.37697376600770854, "grad_norm": 0.4093519151210785, "learning_rate": 3.115338389489826e-06, "loss": 0.6753, "step": 9096 }, { "epoch": 0.3770152099133822, "grad_norm": 0.46477174758911133, "learning_rate": 3.1151311699614573e-06, "loss": 0.7068, "step": 9097 }, { "epoch": 0.3770566538190559, "grad_norm": 0.431284636259079, "learning_rate": 3.114923950433089e-06, "loss": 0.7107, "step": 9098 }, { "epoch": 0.3770980977247296, "grad_norm": 0.4034000635147095, "learning_rate": 3.1147167309047205e-06, "loss": 0.6477, "step": 9099 }, { "epoch": 0.37713954163040325, "grad_norm": 0.40878334641456604, "learning_rate": 3.1145095113763523e-06, "loss": 0.7117, "step": 9100 }, { "epoch": 0.37718098553607693, "grad_norm": 0.41085487604141235, "learning_rate": 
3.114302291847984e-06, "loss": 0.7378, "step": 9101 }, { "epoch": 0.3772224294417506, "grad_norm": 0.40366291999816895, "learning_rate": 3.1140950723196155e-06, "loss": 0.7061, "step": 9102 }, { "epoch": 0.3772638733474243, "grad_norm": 0.39071395993232727, "learning_rate": 3.1138878527912477e-06, "loss": 0.6768, "step": 9103 }, { "epoch": 0.3773053172530979, "grad_norm": 0.4179964065551758, "learning_rate": 3.1136806332628787e-06, "loss": 0.7324, "step": 9104 }, { "epoch": 0.3773467611587716, "grad_norm": 0.42246708273887634, "learning_rate": 3.113473413734511e-06, "loss": 0.7065, "step": 9105 }, { "epoch": 0.37738820506444526, "grad_norm": 0.4216391444206238, "learning_rate": 3.113266194206142e-06, "loss": 0.74, "step": 9106 }, { "epoch": 0.37742964897011894, "grad_norm": 0.40119192004203796, "learning_rate": 3.113058974677774e-06, "loss": 0.6599, "step": 9107 }, { "epoch": 0.3774710928757926, "grad_norm": 0.39321181178092957, "learning_rate": 3.1128517551494055e-06, "loss": 0.703, "step": 9108 }, { "epoch": 0.3775125367814663, "grad_norm": 0.39705103635787964, "learning_rate": 3.1126445356210373e-06, "loss": 0.71, "step": 9109 }, { "epoch": 0.37755398068713997, "grad_norm": 0.3890209197998047, "learning_rate": 3.112437316092669e-06, "loss": 0.7162, "step": 9110 }, { "epoch": 0.37759542459281364, "grad_norm": 0.410936564207077, "learning_rate": 3.1122300965643005e-06, "loss": 0.6965, "step": 9111 }, { "epoch": 0.3776368684984873, "grad_norm": 0.4227541387081146, "learning_rate": 3.1120228770359323e-06, "loss": 0.7002, "step": 9112 }, { "epoch": 0.37767831240416094, "grad_norm": 0.41310614347457886, "learning_rate": 3.1118156575075637e-06, "loss": 0.7417, "step": 9113 }, { "epoch": 0.3777197563098346, "grad_norm": 0.39821621775627136, "learning_rate": 3.1116084379791955e-06, "loss": 0.6428, "step": 9114 }, { "epoch": 0.3777612002155083, "grad_norm": 0.3745613992214203, "learning_rate": 3.111401218450827e-06, "loss": 0.6638, "step": 9115 }, { "epoch": 
0.377802644121182, "grad_norm": 0.3949117362499237, "learning_rate": 3.1111939989224587e-06, "loss": 0.671, "step": 9116 }, { "epoch": 0.37784408802685565, "grad_norm": 0.4343055486679077, "learning_rate": 3.11098677939409e-06, "loss": 0.6991, "step": 9117 }, { "epoch": 0.37788553193252933, "grad_norm": 0.37441444396972656, "learning_rate": 3.110779559865722e-06, "loss": 0.6653, "step": 9118 }, { "epoch": 0.377926975838203, "grad_norm": 0.41116878390312195, "learning_rate": 3.1105723403373537e-06, "loss": 0.6334, "step": 9119 }, { "epoch": 0.3779684197438767, "grad_norm": 0.4206935167312622, "learning_rate": 3.110365120808985e-06, "loss": 0.7507, "step": 9120 }, { "epoch": 0.37800986364955036, "grad_norm": 0.5626989006996155, "learning_rate": 3.1101579012806173e-06, "loss": 0.7302, "step": 9121 }, { "epoch": 0.378051307555224, "grad_norm": 0.44472408294677734, "learning_rate": 3.1099506817522483e-06, "loss": 0.7343, "step": 9122 }, { "epoch": 0.37809275146089766, "grad_norm": 0.43195515871047974, "learning_rate": 3.1097434622238805e-06, "loss": 0.7478, "step": 9123 }, { "epoch": 0.37813419536657134, "grad_norm": 0.3992812931537628, "learning_rate": 3.109536242695512e-06, "loss": 0.699, "step": 9124 }, { "epoch": 0.378175639272245, "grad_norm": 0.3875252604484558, "learning_rate": 3.1093290231671437e-06, "loss": 0.6951, "step": 9125 }, { "epoch": 0.3782170831779187, "grad_norm": 0.43861815333366394, "learning_rate": 3.109121803638775e-06, "loss": 0.6458, "step": 9126 }, { "epoch": 0.37825852708359237, "grad_norm": 0.38638928532600403, "learning_rate": 3.108914584110407e-06, "loss": 0.6531, "step": 9127 }, { "epoch": 0.37829997098926604, "grad_norm": 0.42017653584480286, "learning_rate": 3.1087073645820387e-06, "loss": 0.7073, "step": 9128 }, { "epoch": 0.3783414148949397, "grad_norm": 0.40330740809440613, "learning_rate": 3.10850014505367e-06, "loss": 0.7257, "step": 9129 }, { "epoch": 0.3783828588006134, "grad_norm": 0.442602276802063, "learning_rate": 
3.108292925525302e-06, "loss": 0.7546, "step": 9130 }, { "epoch": 0.378424302706287, "grad_norm": 0.43920981884002686, "learning_rate": 3.1080857059969333e-06, "loss": 0.6884, "step": 9131 }, { "epoch": 0.3784657466119607, "grad_norm": 0.4300270676612854, "learning_rate": 3.107878486468565e-06, "loss": 0.678, "step": 9132 }, { "epoch": 0.3785071905176344, "grad_norm": 0.40002313256263733, "learning_rate": 3.1076712669401965e-06, "loss": 0.6848, "step": 9133 }, { "epoch": 0.37854863442330805, "grad_norm": 0.4457443654537201, "learning_rate": 3.1074640474118283e-06, "loss": 0.7006, "step": 9134 }, { "epoch": 0.3785900783289817, "grad_norm": 0.4265090525150299, "learning_rate": 3.1072568278834597e-06, "loss": 0.7227, "step": 9135 }, { "epoch": 0.3786315222346554, "grad_norm": 0.39798253774642944, "learning_rate": 3.1070496083550915e-06, "loss": 0.6573, "step": 9136 }, { "epoch": 0.3786729661403291, "grad_norm": 0.42372873425483704, "learning_rate": 3.1068423888267237e-06, "loss": 0.7654, "step": 9137 }, { "epoch": 0.37871441004600276, "grad_norm": 0.42440658807754517, "learning_rate": 3.1066351692983547e-06, "loss": 0.7156, "step": 9138 }, { "epoch": 0.3787558539516764, "grad_norm": 0.4401095509529114, "learning_rate": 3.106427949769987e-06, "loss": 0.6403, "step": 9139 }, { "epoch": 0.37879729785735006, "grad_norm": 0.4060695469379425, "learning_rate": 3.106220730241618e-06, "loss": 0.6844, "step": 9140 }, { "epoch": 0.37883874176302373, "grad_norm": 0.4003825783729553, "learning_rate": 3.10601351071325e-06, "loss": 0.6892, "step": 9141 }, { "epoch": 0.3788801856686974, "grad_norm": 0.403488427400589, "learning_rate": 3.1058062911848815e-06, "loss": 0.6914, "step": 9142 }, { "epoch": 0.3789216295743711, "grad_norm": 0.40524524450302124, "learning_rate": 3.1055990716565133e-06, "loss": 0.6655, "step": 9143 }, { "epoch": 0.37896307348004477, "grad_norm": 0.42342856526374817, "learning_rate": 3.1053918521281447e-06, "loss": 0.738, "step": 9144 }, { "epoch": 
0.37900451738571844, "grad_norm": 0.4484630823135376, "learning_rate": 3.1051846325997765e-06, "loss": 0.7518, "step": 9145 }, { "epoch": 0.3790459612913921, "grad_norm": 0.4479706287384033, "learning_rate": 3.1049774130714083e-06, "loss": 0.7754, "step": 9146 }, { "epoch": 0.3790874051970658, "grad_norm": 0.3792915344238281, "learning_rate": 3.1047701935430397e-06, "loss": 0.6783, "step": 9147 }, { "epoch": 0.3791288491027394, "grad_norm": 0.4135064482688904, "learning_rate": 3.1045629740146715e-06, "loss": 0.6904, "step": 9148 }, { "epoch": 0.3791702930084131, "grad_norm": 0.4051499366760254, "learning_rate": 3.104355754486303e-06, "loss": 0.7429, "step": 9149 }, { "epoch": 0.37921173691408677, "grad_norm": 0.4056798815727234, "learning_rate": 3.1041485349579347e-06, "loss": 0.6238, "step": 9150 }, { "epoch": 0.37925318081976045, "grad_norm": 0.40963971614837646, "learning_rate": 3.103941315429566e-06, "loss": 0.6687, "step": 9151 }, { "epoch": 0.3792946247254341, "grad_norm": 0.4232853651046753, "learning_rate": 3.103734095901198e-06, "loss": 0.7424, "step": 9152 }, { "epoch": 0.3793360686311078, "grad_norm": 0.44976383447647095, "learning_rate": 3.1035268763728297e-06, "loss": 0.8, "step": 9153 }, { "epoch": 0.3793775125367815, "grad_norm": 0.3789380192756653, "learning_rate": 3.103319656844461e-06, "loss": 0.688, "step": 9154 }, { "epoch": 0.37941895644245516, "grad_norm": 0.4339161813259125, "learning_rate": 3.1031124373160933e-06, "loss": 0.7151, "step": 9155 }, { "epoch": 0.37946040034812883, "grad_norm": 0.3916315734386444, "learning_rate": 3.1029052177877243e-06, "loss": 0.6511, "step": 9156 }, { "epoch": 0.37950184425380246, "grad_norm": 0.42720597982406616, "learning_rate": 3.1026979982593565e-06, "loss": 0.6603, "step": 9157 }, { "epoch": 0.37954328815947613, "grad_norm": 0.4029950797557831, "learning_rate": 3.102490778730988e-06, "loss": 0.666, "step": 9158 }, { "epoch": 0.3795847320651498, "grad_norm": 0.41230687499046326, "learning_rate": 
3.1022835592026197e-06, "loss": 0.6763, "step": 9159 }, { "epoch": 0.3796261759708235, "grad_norm": 0.4137158989906311, "learning_rate": 3.102076339674251e-06, "loss": 0.6852, "step": 9160 }, { "epoch": 0.37966761987649716, "grad_norm": 0.3936541676521301, "learning_rate": 3.101869120145883e-06, "loss": 0.684, "step": 9161 }, { "epoch": 0.37970906378217084, "grad_norm": 0.4540264904499054, "learning_rate": 3.1016619006175147e-06, "loss": 0.6782, "step": 9162 }, { "epoch": 0.3797505076878445, "grad_norm": 0.4015125036239624, "learning_rate": 3.101454681089146e-06, "loss": 0.7256, "step": 9163 }, { "epoch": 0.3797919515935182, "grad_norm": 0.43519648909568787, "learning_rate": 3.101247461560778e-06, "loss": 0.7952, "step": 9164 }, { "epoch": 0.3798333954991918, "grad_norm": 0.43334752321243286, "learning_rate": 3.1010402420324093e-06, "loss": 0.7122, "step": 9165 }, { "epoch": 0.3798748394048655, "grad_norm": 0.38858523964881897, "learning_rate": 3.100833022504041e-06, "loss": 0.6384, "step": 9166 }, { "epoch": 0.37991628331053917, "grad_norm": 0.4041828215122223, "learning_rate": 3.1006258029756725e-06, "loss": 0.6899, "step": 9167 }, { "epoch": 0.37995772721621285, "grad_norm": 0.4047773778438568, "learning_rate": 3.1004185834473043e-06, "loss": 0.73, "step": 9168 }, { "epoch": 0.3799991711218865, "grad_norm": 0.41954758763313293, "learning_rate": 3.1002113639189357e-06, "loss": 0.7112, "step": 9169 }, { "epoch": 0.3800406150275602, "grad_norm": 0.40262526273727417, "learning_rate": 3.1000041443905675e-06, "loss": 0.7061, "step": 9170 }, { "epoch": 0.3800820589332339, "grad_norm": 0.4172843098640442, "learning_rate": 3.0997969248621997e-06, "loss": 0.6473, "step": 9171 }, { "epoch": 0.38012350283890756, "grad_norm": 0.39049574732780457, "learning_rate": 3.0995897053338307e-06, "loss": 0.6387, "step": 9172 }, { "epoch": 0.38016494674458123, "grad_norm": 0.4358871877193451, "learning_rate": 3.099382485805463e-06, "loss": 0.6462, "step": 9173 }, { "epoch": 
0.38020639065025486, "grad_norm": 0.41856205463409424, "learning_rate": 3.099175266277094e-06, "loss": 0.7256, "step": 9174 }, { "epoch": 0.38024783455592853, "grad_norm": 0.4508495330810547, "learning_rate": 3.098968046748726e-06, "loss": 0.7565, "step": 9175 }, { "epoch": 0.3802892784616022, "grad_norm": 0.4072088599205017, "learning_rate": 3.0987608272203575e-06, "loss": 0.6831, "step": 9176 }, { "epoch": 0.3803307223672759, "grad_norm": 0.437509149312973, "learning_rate": 3.0985536076919893e-06, "loss": 0.7272, "step": 9177 }, { "epoch": 0.38037216627294956, "grad_norm": 0.3919816017150879, "learning_rate": 3.0983463881636207e-06, "loss": 0.658, "step": 9178 }, { "epoch": 0.38041361017862324, "grad_norm": 0.41366225481033325, "learning_rate": 3.0981391686352525e-06, "loss": 0.6792, "step": 9179 }, { "epoch": 0.3804550540842969, "grad_norm": 0.3940708637237549, "learning_rate": 3.0979319491068843e-06, "loss": 0.6787, "step": 9180 }, { "epoch": 0.3804964979899706, "grad_norm": 0.37057119607925415, "learning_rate": 3.0977247295785157e-06, "loss": 0.6355, "step": 9181 }, { "epoch": 0.38053794189564427, "grad_norm": 0.4065597653388977, "learning_rate": 3.0975175100501475e-06, "loss": 0.6897, "step": 9182 }, { "epoch": 0.3805793858013179, "grad_norm": 0.41408172249794006, "learning_rate": 3.097310290521779e-06, "loss": 0.7371, "step": 9183 }, { "epoch": 0.38062082970699157, "grad_norm": 0.4121535122394562, "learning_rate": 3.0971030709934107e-06, "loss": 0.6934, "step": 9184 }, { "epoch": 0.38066227361266525, "grad_norm": 0.39426106214523315, "learning_rate": 3.096895851465042e-06, "loss": 0.6777, "step": 9185 }, { "epoch": 0.3807037175183389, "grad_norm": 0.398344486951828, "learning_rate": 3.096688631936674e-06, "loss": 0.7153, "step": 9186 }, { "epoch": 0.3807451614240126, "grad_norm": 0.4373199939727783, "learning_rate": 3.0964814124083053e-06, "loss": 0.7561, "step": 9187 }, { "epoch": 0.3807866053296863, "grad_norm": 0.38807064294815063, "learning_rate": 
3.096274192879937e-06, "loss": 0.7073, "step": 9188 }, { "epoch": 0.38082804923535996, "grad_norm": 0.41266536712646484, "learning_rate": 3.0960669733515693e-06, "loss": 0.6698, "step": 9189 }, { "epoch": 0.38086949314103363, "grad_norm": 0.451168417930603, "learning_rate": 3.0958597538232003e-06, "loss": 0.7844, "step": 9190 }, { "epoch": 0.38091093704670725, "grad_norm": 0.41523683071136475, "learning_rate": 3.0956525342948325e-06, "loss": 0.6826, "step": 9191 }, { "epoch": 0.38095238095238093, "grad_norm": 0.4772147536277771, "learning_rate": 3.095445314766464e-06, "loss": 0.723, "step": 9192 }, { "epoch": 0.3809938248580546, "grad_norm": 0.3959874212741852, "learning_rate": 3.0952380952380957e-06, "loss": 0.6663, "step": 9193 }, { "epoch": 0.3810352687637283, "grad_norm": 0.4092823266983032, "learning_rate": 3.095030875709727e-06, "loss": 0.674, "step": 9194 }, { "epoch": 0.38107671266940196, "grad_norm": 0.38881716132164, "learning_rate": 3.094823656181359e-06, "loss": 0.6752, "step": 9195 }, { "epoch": 0.38111815657507564, "grad_norm": 0.41186651587486267, "learning_rate": 3.0946164366529903e-06, "loss": 0.6913, "step": 9196 }, { "epoch": 0.3811596004807493, "grad_norm": 0.38933441042900085, "learning_rate": 3.094409217124622e-06, "loss": 0.6606, "step": 9197 }, { "epoch": 0.381201044386423, "grad_norm": 0.46014562249183655, "learning_rate": 3.094201997596254e-06, "loss": 0.7891, "step": 9198 }, { "epoch": 0.38124248829209667, "grad_norm": 0.43342795968055725, "learning_rate": 3.0939947780678853e-06, "loss": 0.7053, "step": 9199 }, { "epoch": 0.3812839321977703, "grad_norm": 0.3876701593399048, "learning_rate": 3.093787558539517e-06, "loss": 0.717, "step": 9200 }, { "epoch": 0.38132537610344397, "grad_norm": 0.42016223073005676, "learning_rate": 3.0935803390111485e-06, "loss": 0.6826, "step": 9201 }, { "epoch": 0.38136682000911765, "grad_norm": 0.3845817446708679, "learning_rate": 3.0933731194827803e-06, "loss": 0.6818, "step": 9202 }, { "epoch": 
0.3814082639147913, "grad_norm": 0.42730796337127686, "learning_rate": 3.0931658999544117e-06, "loss": 0.6796, "step": 9203 }, { "epoch": 0.381449707820465, "grad_norm": 0.39858508110046387, "learning_rate": 3.0929586804260435e-06, "loss": 0.6812, "step": 9204 }, { "epoch": 0.3814911517261387, "grad_norm": 0.41656821966171265, "learning_rate": 3.092751460897675e-06, "loss": 0.6733, "step": 9205 }, { "epoch": 0.38153259563181235, "grad_norm": 0.4087032377719879, "learning_rate": 3.0925442413693067e-06, "loss": 0.6782, "step": 9206 }, { "epoch": 0.38157403953748603, "grad_norm": 0.4199017286300659, "learning_rate": 3.092337021840939e-06, "loss": 0.6802, "step": 9207 }, { "epoch": 0.3816154834431597, "grad_norm": 0.4345358908176422, "learning_rate": 3.09212980231257e-06, "loss": 0.7092, "step": 9208 }, { "epoch": 0.38165692734883333, "grad_norm": 0.4565702974796295, "learning_rate": 3.091922582784202e-06, "loss": 0.6724, "step": 9209 }, { "epoch": 0.381698371254507, "grad_norm": 0.40829774737358093, "learning_rate": 3.0917153632558335e-06, "loss": 0.7113, "step": 9210 }, { "epoch": 0.3817398151601807, "grad_norm": 0.3997948467731476, "learning_rate": 3.0915081437274653e-06, "loss": 0.6882, "step": 9211 }, { "epoch": 0.38178125906585436, "grad_norm": 0.39756909012794495, "learning_rate": 3.0913009241990967e-06, "loss": 0.6965, "step": 9212 }, { "epoch": 0.38182270297152804, "grad_norm": 0.42741507291793823, "learning_rate": 3.0910937046707285e-06, "loss": 0.7021, "step": 9213 }, { "epoch": 0.3818641468772017, "grad_norm": 0.41837838292121887, "learning_rate": 3.0908864851423603e-06, "loss": 0.6982, "step": 9214 }, { "epoch": 0.3819055907828754, "grad_norm": 0.3680271506309509, "learning_rate": 3.0906792656139917e-06, "loss": 0.6803, "step": 9215 }, { "epoch": 0.38194703468854907, "grad_norm": 0.47932693362236023, "learning_rate": 3.0904720460856235e-06, "loss": 0.7446, "step": 9216 }, { "epoch": 0.38198847859422275, "grad_norm": 0.5347210764884949, "learning_rate": 
3.090264826557255e-06, "loss": 0.7231, "step": 9217 }, { "epoch": 0.38202992249989637, "grad_norm": 0.39970850944519043, "learning_rate": 3.0900576070288867e-06, "loss": 0.7344, "step": 9218 }, { "epoch": 0.38207136640557005, "grad_norm": 0.4219069182872772, "learning_rate": 3.089850387500518e-06, "loss": 0.6705, "step": 9219 }, { "epoch": 0.3821128103112437, "grad_norm": 0.40626099705696106, "learning_rate": 3.08964316797215e-06, "loss": 0.7156, "step": 9220 }, { "epoch": 0.3821542542169174, "grad_norm": 0.37770986557006836, "learning_rate": 3.0894359484437813e-06, "loss": 0.6879, "step": 9221 }, { "epoch": 0.3821956981225911, "grad_norm": 0.36803457140922546, "learning_rate": 3.089228728915413e-06, "loss": 0.6553, "step": 9222 }, { "epoch": 0.38223714202826475, "grad_norm": 0.423929363489151, "learning_rate": 3.0890215093870453e-06, "loss": 0.6608, "step": 9223 }, { "epoch": 0.38227858593393843, "grad_norm": 0.3884040415287018, "learning_rate": 3.0888142898586763e-06, "loss": 0.7126, "step": 9224 }, { "epoch": 0.3823200298396121, "grad_norm": 0.4234277307987213, "learning_rate": 3.0886070703303085e-06, "loss": 0.6619, "step": 9225 }, { "epoch": 0.38236147374528573, "grad_norm": 0.4217928946018219, "learning_rate": 3.08839985080194e-06, "loss": 0.7173, "step": 9226 }, { "epoch": 0.3824029176509594, "grad_norm": 0.40673696994781494, "learning_rate": 3.0881926312735717e-06, "loss": 0.6736, "step": 9227 }, { "epoch": 0.3824443615566331, "grad_norm": 0.41197019815444946, "learning_rate": 3.087985411745203e-06, "loss": 0.6749, "step": 9228 }, { "epoch": 0.38248580546230676, "grad_norm": 0.4270392954349518, "learning_rate": 3.087778192216835e-06, "loss": 0.7128, "step": 9229 }, { "epoch": 0.38252724936798044, "grad_norm": 0.409603476524353, "learning_rate": 3.0875709726884663e-06, "loss": 0.693, "step": 9230 }, { "epoch": 0.3825686932736541, "grad_norm": 0.44367167353630066, "learning_rate": 3.087363753160098e-06, "loss": 0.7283, "step": 9231 }, { "epoch": 
0.3826101371793278, "grad_norm": 0.39005792140960693, "learning_rate": 3.08715653363173e-06, "loss": 0.6313, "step": 9232 }, { "epoch": 0.38265158108500147, "grad_norm": 0.43114331364631653, "learning_rate": 3.0869493141033613e-06, "loss": 0.6875, "step": 9233 }, { "epoch": 0.38269302499067515, "grad_norm": 0.42688754200935364, "learning_rate": 3.086742094574993e-06, "loss": 0.7119, "step": 9234 }, { "epoch": 0.38273446889634877, "grad_norm": 0.4607378840446472, "learning_rate": 3.0865348750466245e-06, "loss": 0.7593, "step": 9235 }, { "epoch": 0.38277591280202244, "grad_norm": 0.4205532670021057, "learning_rate": 3.0863276555182563e-06, "loss": 0.7437, "step": 9236 }, { "epoch": 0.3828173567076961, "grad_norm": 0.3958719074726105, "learning_rate": 3.0861204359898877e-06, "loss": 0.7041, "step": 9237 }, { "epoch": 0.3828588006133698, "grad_norm": 0.41612759232521057, "learning_rate": 3.0859132164615195e-06, "loss": 0.6743, "step": 9238 }, { "epoch": 0.3829002445190435, "grad_norm": 0.4589601755142212, "learning_rate": 3.085705996933151e-06, "loss": 0.7297, "step": 9239 }, { "epoch": 0.38294168842471715, "grad_norm": 0.45214006304740906, "learning_rate": 3.0854987774047827e-06, "loss": 0.7485, "step": 9240 }, { "epoch": 0.38298313233039083, "grad_norm": 0.40532243251800537, "learning_rate": 3.085291557876415e-06, "loss": 0.6909, "step": 9241 }, { "epoch": 0.3830245762360645, "grad_norm": 0.3932921588420868, "learning_rate": 3.085084338348046e-06, "loss": 0.687, "step": 9242 }, { "epoch": 0.3830660201417382, "grad_norm": 0.40643736720085144, "learning_rate": 3.084877118819678e-06, "loss": 0.6965, "step": 9243 }, { "epoch": 0.3831074640474118, "grad_norm": 0.449027419090271, "learning_rate": 3.0846698992913095e-06, "loss": 0.6666, "step": 9244 }, { "epoch": 0.3831489079530855, "grad_norm": 0.3998550474643707, "learning_rate": 3.0844626797629413e-06, "loss": 0.701, "step": 9245 }, { "epoch": 0.38319035185875916, "grad_norm": 0.41012856364250183, "learning_rate": 
3.0842554602345727e-06, "loss": 0.6414, "step": 9246 }, { "epoch": 0.38323179576443284, "grad_norm": 0.41098618507385254, "learning_rate": 3.0840482407062045e-06, "loss": 0.6624, "step": 9247 }, { "epoch": 0.3832732396701065, "grad_norm": 0.46740084886550903, "learning_rate": 3.083841021177836e-06, "loss": 0.7201, "step": 9248 }, { "epoch": 0.3833146835757802, "grad_norm": 0.4086371064186096, "learning_rate": 3.0836338016494677e-06, "loss": 0.666, "step": 9249 }, { "epoch": 0.38335612748145387, "grad_norm": 0.3859868049621582, "learning_rate": 3.0834265821210995e-06, "loss": 0.6307, "step": 9250 }, { "epoch": 0.38339757138712754, "grad_norm": 0.4064895808696747, "learning_rate": 3.083219362592731e-06, "loss": 0.7031, "step": 9251 }, { "epoch": 0.38343901529280117, "grad_norm": 0.39568623900413513, "learning_rate": 3.0830121430643627e-06, "loss": 0.6993, "step": 9252 }, { "epoch": 0.38348045919847484, "grad_norm": 0.3965052664279938, "learning_rate": 3.082804923535994e-06, "loss": 0.6677, "step": 9253 }, { "epoch": 0.3835219031041485, "grad_norm": 0.3991289734840393, "learning_rate": 3.082597704007626e-06, "loss": 0.6921, "step": 9254 }, { "epoch": 0.3835633470098222, "grad_norm": 0.4161168038845062, "learning_rate": 3.0823904844792573e-06, "loss": 0.6987, "step": 9255 }, { "epoch": 0.3836047909154959, "grad_norm": 0.3837801218032837, "learning_rate": 3.082183264950889e-06, "loss": 0.7041, "step": 9256 }, { "epoch": 0.38364623482116955, "grad_norm": 0.3896898627281189, "learning_rate": 3.0819760454225205e-06, "loss": 0.7351, "step": 9257 }, { "epoch": 0.38368767872684323, "grad_norm": 0.3969760239124298, "learning_rate": 3.0817688258941523e-06, "loss": 0.6835, "step": 9258 }, { "epoch": 0.3837291226325169, "grad_norm": 0.4045853614807129, "learning_rate": 3.0815616063657845e-06, "loss": 0.666, "step": 9259 }, { "epoch": 0.3837705665381906, "grad_norm": 0.4116005599498749, "learning_rate": 3.081354386837416e-06, "loss": 0.6964, "step": 9260 }, { "epoch": 
0.3838120104438642, "grad_norm": 0.3917616009712219, "learning_rate": 3.0811471673090477e-06, "loss": 0.6885, "step": 9261 }, { "epoch": 0.3838534543495379, "grad_norm": 0.41883930563926697, "learning_rate": 3.080939947780679e-06, "loss": 0.6703, "step": 9262 }, { "epoch": 0.38389489825521156, "grad_norm": 0.3914545178413391, "learning_rate": 3.080732728252311e-06, "loss": 0.7054, "step": 9263 }, { "epoch": 0.38393634216088524, "grad_norm": 0.4153364896774292, "learning_rate": 3.0805255087239423e-06, "loss": 0.7294, "step": 9264 }, { "epoch": 0.3839777860665589, "grad_norm": 0.4503365159034729, "learning_rate": 3.080318289195574e-06, "loss": 0.7317, "step": 9265 }, { "epoch": 0.3840192299722326, "grad_norm": 0.38273900747299194, "learning_rate": 3.080111069667206e-06, "loss": 0.7111, "step": 9266 }, { "epoch": 0.38406067387790627, "grad_norm": 0.39001408219337463, "learning_rate": 3.0799038501388373e-06, "loss": 0.7439, "step": 9267 }, { "epoch": 0.38410211778357994, "grad_norm": 0.4026496112346649, "learning_rate": 3.079696630610469e-06, "loss": 0.708, "step": 9268 }, { "epoch": 0.3841435616892536, "grad_norm": 0.42652666568756104, "learning_rate": 3.0794894110821005e-06, "loss": 0.7251, "step": 9269 }, { "epoch": 0.38418500559492724, "grad_norm": 0.40743565559387207, "learning_rate": 3.0792821915537323e-06, "loss": 0.7258, "step": 9270 }, { "epoch": 0.3842264495006009, "grad_norm": 0.42058518528938293, "learning_rate": 3.0790749720253637e-06, "loss": 0.6884, "step": 9271 }, { "epoch": 0.3842678934062746, "grad_norm": 0.4118545949459076, "learning_rate": 3.0788677524969955e-06, "loss": 0.6855, "step": 9272 }, { "epoch": 0.3843093373119483, "grad_norm": 0.4530578553676605, "learning_rate": 3.078660532968627e-06, "loss": 0.6875, "step": 9273 }, { "epoch": 0.38435078121762195, "grad_norm": 0.42843106389045715, "learning_rate": 3.0784533134402587e-06, "loss": 0.7013, "step": 9274 }, { "epoch": 0.3843922251232956, "grad_norm": 0.45375871658325195, "learning_rate": 
3.078246093911891e-06, "loss": 0.7146, "step": 9275 }, { "epoch": 0.3844336690289693, "grad_norm": 0.39169248938560486, "learning_rate": 3.0780388743835223e-06, "loss": 0.674, "step": 9276 }, { "epoch": 0.384475112934643, "grad_norm": 0.47202402353286743, "learning_rate": 3.077831654855154e-06, "loss": 0.729, "step": 9277 }, { "epoch": 0.38451655684031666, "grad_norm": 0.47285759449005127, "learning_rate": 3.0776244353267855e-06, "loss": 0.7554, "step": 9278 }, { "epoch": 0.3845580007459903, "grad_norm": 0.39169469475746155, "learning_rate": 3.0774172157984173e-06, "loss": 0.6794, "step": 9279 }, { "epoch": 0.38459944465166396, "grad_norm": 0.41335371136665344, "learning_rate": 3.0772099962700487e-06, "loss": 0.7026, "step": 9280 }, { "epoch": 0.38464088855733763, "grad_norm": 0.5122267603874207, "learning_rate": 3.0770027767416805e-06, "loss": 0.7996, "step": 9281 }, { "epoch": 0.3846823324630113, "grad_norm": 0.42029279470443726, "learning_rate": 3.076795557213312e-06, "loss": 0.7041, "step": 9282 }, { "epoch": 0.384723776368685, "grad_norm": 0.38627052307128906, "learning_rate": 3.0765883376849437e-06, "loss": 0.718, "step": 9283 }, { "epoch": 0.38476522027435867, "grad_norm": 0.4266931712627411, "learning_rate": 3.0763811181565755e-06, "loss": 0.6802, "step": 9284 }, { "epoch": 0.38480666418003234, "grad_norm": 0.4115358293056488, "learning_rate": 3.076173898628207e-06, "loss": 0.6675, "step": 9285 }, { "epoch": 0.384848108085706, "grad_norm": 0.42079469561576843, "learning_rate": 3.0759666790998387e-06, "loss": 0.6609, "step": 9286 }, { "epoch": 0.38488955199137964, "grad_norm": 0.4105003774166107, "learning_rate": 3.07575945957147e-06, "loss": 0.6641, "step": 9287 }, { "epoch": 0.3849309958970533, "grad_norm": 0.44400209188461304, "learning_rate": 3.075552240043102e-06, "loss": 0.6768, "step": 9288 }, { "epoch": 0.384972439802727, "grad_norm": 0.40054166316986084, "learning_rate": 3.0753450205147333e-06, "loss": 0.6992, "step": 9289 }, { "epoch": 
0.3850138837084007, "grad_norm": 0.3749813139438629, "learning_rate": 3.075137800986365e-06, "loss": 0.686, "step": 9290 }, { "epoch": 0.38505532761407435, "grad_norm": 0.4300565719604492, "learning_rate": 3.0749305814579965e-06, "loss": 0.7028, "step": 9291 }, { "epoch": 0.385096771519748, "grad_norm": 0.4431581497192383, "learning_rate": 3.0747233619296283e-06, "loss": 0.7161, "step": 9292 }, { "epoch": 0.3851382154254217, "grad_norm": 0.47178760170936584, "learning_rate": 3.0745161424012605e-06, "loss": 0.6978, "step": 9293 }, { "epoch": 0.3851796593310954, "grad_norm": 0.39275801181793213, "learning_rate": 3.074308922872892e-06, "loss": 0.7205, "step": 9294 }, { "epoch": 0.38522110323676906, "grad_norm": 0.41197440028190613, "learning_rate": 3.0741017033445237e-06, "loss": 0.6871, "step": 9295 }, { "epoch": 0.3852625471424427, "grad_norm": 0.3886524438858032, "learning_rate": 3.073894483816155e-06, "loss": 0.7612, "step": 9296 }, { "epoch": 0.38530399104811636, "grad_norm": 0.44530779123306274, "learning_rate": 3.073687264287787e-06, "loss": 0.7716, "step": 9297 }, { "epoch": 0.38534543495379003, "grad_norm": 0.3905171751976013, "learning_rate": 3.0734800447594183e-06, "loss": 0.6514, "step": 9298 }, { "epoch": 0.3853868788594637, "grad_norm": 0.4377938508987427, "learning_rate": 3.07327282523105e-06, "loss": 0.7163, "step": 9299 }, { "epoch": 0.3854283227651374, "grad_norm": 0.4164804220199585, "learning_rate": 3.0730656057026815e-06, "loss": 0.6943, "step": 9300 }, { "epoch": 0.38546976667081106, "grad_norm": 0.3963632583618164, "learning_rate": 3.0728583861743133e-06, "loss": 0.7355, "step": 9301 }, { "epoch": 0.38551121057648474, "grad_norm": 0.42046797275543213, "learning_rate": 3.072651166645945e-06, "loss": 0.6958, "step": 9302 }, { "epoch": 0.3855526544821584, "grad_norm": 0.4171767830848694, "learning_rate": 3.0724439471175765e-06, "loss": 0.6887, "step": 9303 }, { "epoch": 0.3855940983878321, "grad_norm": 0.39495649933815, "learning_rate": 
3.0722367275892083e-06, "loss": 0.7014, "step": 9304 }, { "epoch": 0.3856355422935057, "grad_norm": 0.41342851519584656, "learning_rate": 3.0720295080608397e-06, "loss": 0.688, "step": 9305 }, { "epoch": 0.3856769861991794, "grad_norm": 0.3821893036365509, "learning_rate": 3.0718222885324715e-06, "loss": 0.681, "step": 9306 }, { "epoch": 0.38571843010485307, "grad_norm": 0.4187532663345337, "learning_rate": 3.071615069004103e-06, "loss": 0.6915, "step": 9307 }, { "epoch": 0.38575987401052675, "grad_norm": 0.40716350078582764, "learning_rate": 3.0714078494757347e-06, "loss": 0.6821, "step": 9308 }, { "epoch": 0.3858013179162004, "grad_norm": 0.3995026648044586, "learning_rate": 3.071200629947366e-06, "loss": 0.6769, "step": 9309 }, { "epoch": 0.3858427618218741, "grad_norm": 0.39887571334838867, "learning_rate": 3.0709934104189983e-06, "loss": 0.6995, "step": 9310 }, { "epoch": 0.3858842057275478, "grad_norm": 0.4234156608581543, "learning_rate": 3.07078619089063e-06, "loss": 0.7073, "step": 9311 }, { "epoch": 0.38592564963322146, "grad_norm": 0.42823249101638794, "learning_rate": 3.0705789713622615e-06, "loss": 0.7744, "step": 9312 }, { "epoch": 0.3859670935388951, "grad_norm": 0.4211607575416565, "learning_rate": 3.0703717518338933e-06, "loss": 0.7649, "step": 9313 }, { "epoch": 0.38600853744456876, "grad_norm": 0.4031028747558594, "learning_rate": 3.0701645323055247e-06, "loss": 0.6672, "step": 9314 }, { "epoch": 0.38604998135024243, "grad_norm": 0.43502265214920044, "learning_rate": 3.0699573127771565e-06, "loss": 0.7006, "step": 9315 }, { "epoch": 0.3860914252559161, "grad_norm": 0.38088634610176086, "learning_rate": 3.069750093248788e-06, "loss": 0.7067, "step": 9316 }, { "epoch": 0.3861328691615898, "grad_norm": 0.3934900760650635, "learning_rate": 3.0695428737204197e-06, "loss": 0.7119, "step": 9317 }, { "epoch": 0.38617431306726346, "grad_norm": 0.42896568775177, "learning_rate": 3.069335654192051e-06, "loss": 0.7524, "step": 9318 }, { "epoch": 
0.38621575697293714, "grad_norm": 0.4162254333496094, "learning_rate": 3.069128434663683e-06, "loss": 0.7139, "step": 9319 }, { "epoch": 0.3862572008786108, "grad_norm": 0.41852423548698425, "learning_rate": 3.0689212151353147e-06, "loss": 0.7051, "step": 9320 }, { "epoch": 0.3862986447842845, "grad_norm": 0.4078312814235687, "learning_rate": 3.068713995606946e-06, "loss": 0.757, "step": 9321 }, { "epoch": 0.3863400886899581, "grad_norm": 0.4128841161727905, "learning_rate": 3.068506776078578e-06, "loss": 0.7336, "step": 9322 }, { "epoch": 0.3863815325956318, "grad_norm": 0.4214540719985962, "learning_rate": 3.0682995565502093e-06, "loss": 0.697, "step": 9323 }, { "epoch": 0.38642297650130547, "grad_norm": 0.5146307349205017, "learning_rate": 3.068092337021841e-06, "loss": 0.6804, "step": 9324 }, { "epoch": 0.38646442040697915, "grad_norm": 0.40413782000541687, "learning_rate": 3.0678851174934725e-06, "loss": 0.686, "step": 9325 }, { "epoch": 0.3865058643126528, "grad_norm": 0.4414723217487335, "learning_rate": 3.0676778979651043e-06, "loss": 0.7346, "step": 9326 }, { "epoch": 0.3865473082183265, "grad_norm": 0.41432738304138184, "learning_rate": 3.0674706784367365e-06, "loss": 0.7241, "step": 9327 }, { "epoch": 0.3865887521240002, "grad_norm": 0.42227959632873535, "learning_rate": 3.067263458908368e-06, "loss": 0.7246, "step": 9328 }, { "epoch": 0.38663019602967386, "grad_norm": 0.4370814859867096, "learning_rate": 3.0670562393799997e-06, "loss": 0.7522, "step": 9329 }, { "epoch": 0.38667163993534753, "grad_norm": 0.4009360074996948, "learning_rate": 3.066849019851631e-06, "loss": 0.672, "step": 9330 }, { "epoch": 0.38671308384102115, "grad_norm": 0.3789265751838684, "learning_rate": 3.066641800323263e-06, "loss": 0.6594, "step": 9331 }, { "epoch": 0.38675452774669483, "grad_norm": 0.43974459171295166, "learning_rate": 3.0664345807948943e-06, "loss": 0.7202, "step": 9332 }, { "epoch": 0.3867959716523685, "grad_norm": 0.41657185554504395, "learning_rate": 
3.066227361266526e-06, "loss": 0.7183, "step": 9333 }, { "epoch": 0.3868374155580422, "grad_norm": 0.4277246296405792, "learning_rate": 3.0660201417381575e-06, "loss": 0.6992, "step": 9334 }, { "epoch": 0.38687885946371586, "grad_norm": 0.39929816126823425, "learning_rate": 3.0658129222097893e-06, "loss": 0.6129, "step": 9335 }, { "epoch": 0.38692030336938954, "grad_norm": 0.3911859691143036, "learning_rate": 3.065605702681421e-06, "loss": 0.6646, "step": 9336 }, { "epoch": 0.3869617472750632, "grad_norm": 0.42812100052833557, "learning_rate": 3.0653984831530525e-06, "loss": 0.7207, "step": 9337 }, { "epoch": 0.3870031911807369, "grad_norm": 0.4009913504123688, "learning_rate": 3.0651912636246843e-06, "loss": 0.6772, "step": 9338 }, { "epoch": 0.3870446350864105, "grad_norm": 0.4087466895580292, "learning_rate": 3.0649840440963157e-06, "loss": 0.6508, "step": 9339 }, { "epoch": 0.3870860789920842, "grad_norm": 0.40619394183158875, "learning_rate": 3.0647768245679475e-06, "loss": 0.6945, "step": 9340 }, { "epoch": 0.38712752289775787, "grad_norm": 0.43778157234191895, "learning_rate": 3.064569605039579e-06, "loss": 0.651, "step": 9341 }, { "epoch": 0.38716896680343155, "grad_norm": 0.45212897658348083, "learning_rate": 3.0643623855112107e-06, "loss": 0.7206, "step": 9342 }, { "epoch": 0.3872104107091052, "grad_norm": 0.3748480975627899, "learning_rate": 3.064155165982842e-06, "loss": 0.6938, "step": 9343 }, { "epoch": 0.3872518546147789, "grad_norm": 0.4283382296562195, "learning_rate": 3.0639479464544743e-06, "loss": 0.6681, "step": 9344 }, { "epoch": 0.3872932985204526, "grad_norm": 0.431734561920166, "learning_rate": 3.063740726926106e-06, "loss": 0.6798, "step": 9345 }, { "epoch": 0.38733474242612626, "grad_norm": 0.3874494731426239, "learning_rate": 3.0635335073977375e-06, "loss": 0.6772, "step": 9346 }, { "epoch": 0.38737618633179993, "grad_norm": 0.4058365523815155, "learning_rate": 3.0633262878693693e-06, "loss": 0.6943, "step": 9347 }, { "epoch": 
0.38741763023747355, "grad_norm": 0.43409669399261475, "learning_rate": 3.0631190683410007e-06, "loss": 0.6814, "step": 9348 }, { "epoch": 0.38745907414314723, "grad_norm": 0.44051212072372437, "learning_rate": 3.0629118488126325e-06, "loss": 0.7214, "step": 9349 }, { "epoch": 0.3875005180488209, "grad_norm": 0.4288006126880646, "learning_rate": 3.062704629284264e-06, "loss": 0.6978, "step": 9350 }, { "epoch": 0.3875419619544946, "grad_norm": 0.4143591523170471, "learning_rate": 3.0624974097558957e-06, "loss": 0.6953, "step": 9351 }, { "epoch": 0.38758340586016826, "grad_norm": 0.3979547619819641, "learning_rate": 3.062290190227527e-06, "loss": 0.6562, "step": 9352 }, { "epoch": 0.38762484976584194, "grad_norm": 0.40608513355255127, "learning_rate": 3.062082970699159e-06, "loss": 0.6945, "step": 9353 }, { "epoch": 0.3876662936715156, "grad_norm": 0.4167138338088989, "learning_rate": 3.0618757511707907e-06, "loss": 0.65, "step": 9354 }, { "epoch": 0.3877077375771893, "grad_norm": 0.4133530855178833, "learning_rate": 3.061668531642422e-06, "loss": 0.6798, "step": 9355 }, { "epoch": 0.38774918148286297, "grad_norm": 0.43597152829170227, "learning_rate": 3.061461312114054e-06, "loss": 0.6968, "step": 9356 }, { "epoch": 0.3877906253885366, "grad_norm": 0.4552972912788391, "learning_rate": 3.0612540925856853e-06, "loss": 0.7023, "step": 9357 }, { "epoch": 0.38783206929421027, "grad_norm": 0.45744043588638306, "learning_rate": 3.061046873057317e-06, "loss": 0.7249, "step": 9358 }, { "epoch": 0.38787351319988395, "grad_norm": 0.38768458366394043, "learning_rate": 3.0608396535289485e-06, "loss": 0.6654, "step": 9359 }, { "epoch": 0.3879149571055576, "grad_norm": 0.44565173983573914, "learning_rate": 3.0606324340005803e-06, "loss": 0.7227, "step": 9360 }, { "epoch": 0.3879564010112313, "grad_norm": 0.4418325424194336, "learning_rate": 3.0604252144722117e-06, "loss": 0.6963, "step": 9361 }, { "epoch": 0.387997844916905, "grad_norm": 0.4202401340007782, "learning_rate": 
3.060217994943844e-06, "loss": 0.7322, "step": 9362 }, { "epoch": 0.38803928882257865, "grad_norm": 0.4099680185317993, "learning_rate": 3.0600107754154757e-06, "loss": 0.7207, "step": 9363 }, { "epoch": 0.38808073272825233, "grad_norm": 0.42437639832496643, "learning_rate": 3.059803555887107e-06, "loss": 0.7019, "step": 9364 }, { "epoch": 0.388122176633926, "grad_norm": 0.4297334551811218, "learning_rate": 3.059596336358739e-06, "loss": 0.6953, "step": 9365 }, { "epoch": 0.38816362053959963, "grad_norm": 0.3909590542316437, "learning_rate": 3.0593891168303703e-06, "loss": 0.6721, "step": 9366 }, { "epoch": 0.3882050644452733, "grad_norm": 0.3923332989215851, "learning_rate": 3.059181897302002e-06, "loss": 0.6709, "step": 9367 }, { "epoch": 0.388246508350947, "grad_norm": 0.4225355386734009, "learning_rate": 3.0589746777736335e-06, "loss": 0.6786, "step": 9368 }, { "epoch": 0.38828795225662066, "grad_norm": 0.40075549483299255, "learning_rate": 3.0587674582452653e-06, "loss": 0.7092, "step": 9369 }, { "epoch": 0.38832939616229434, "grad_norm": 0.395334392786026, "learning_rate": 3.0585602387168967e-06, "loss": 0.7083, "step": 9370 }, { "epoch": 0.388370840067968, "grad_norm": 0.4742256999015808, "learning_rate": 3.0583530191885285e-06, "loss": 0.7449, "step": 9371 }, { "epoch": 0.3884122839736417, "grad_norm": 0.440755695104599, "learning_rate": 3.0581457996601603e-06, "loss": 0.7703, "step": 9372 }, { "epoch": 0.38845372787931537, "grad_norm": 0.39307084679603577, "learning_rate": 3.0579385801317917e-06, "loss": 0.7002, "step": 9373 }, { "epoch": 0.388495171784989, "grad_norm": 0.3954385817050934, "learning_rate": 3.0577313606034235e-06, "loss": 0.7085, "step": 9374 }, { "epoch": 0.38853661569066267, "grad_norm": 0.3915391266345978, "learning_rate": 3.057524141075055e-06, "loss": 0.6803, "step": 9375 }, { "epoch": 0.38857805959633634, "grad_norm": 0.42253270745277405, "learning_rate": 3.0573169215466867e-06, "loss": 0.7141, "step": 9376 }, { "epoch": 
0.38861950350201, "grad_norm": 0.43275704979896545, "learning_rate": 3.057109702018318e-06, "loss": 0.7012, "step": 9377 }, { "epoch": 0.3886609474076837, "grad_norm": 0.48151740431785583, "learning_rate": 3.0569024824899503e-06, "loss": 0.7449, "step": 9378 }, { "epoch": 0.3887023913133574, "grad_norm": 0.42160600423812866, "learning_rate": 3.0566952629615813e-06, "loss": 0.6963, "step": 9379 }, { "epoch": 0.38874383521903105, "grad_norm": 0.4075644314289093, "learning_rate": 3.0564880434332135e-06, "loss": 0.6858, "step": 9380 }, { "epoch": 0.38878527912470473, "grad_norm": 0.43163609504699707, "learning_rate": 3.0562808239048453e-06, "loss": 0.6967, "step": 9381 }, { "epoch": 0.3888267230303784, "grad_norm": 0.4000757336616516, "learning_rate": 3.0560736043764767e-06, "loss": 0.6614, "step": 9382 }, { "epoch": 0.38886816693605203, "grad_norm": 0.41001972556114197, "learning_rate": 3.0558663848481085e-06, "loss": 0.675, "step": 9383 }, { "epoch": 0.3889096108417257, "grad_norm": 0.4375309646129608, "learning_rate": 3.05565916531974e-06, "loss": 0.718, "step": 9384 }, { "epoch": 0.3889510547473994, "grad_norm": 0.47871777415275574, "learning_rate": 3.0554519457913717e-06, "loss": 0.7468, "step": 9385 }, { "epoch": 0.38899249865307306, "grad_norm": 0.4396761953830719, "learning_rate": 3.055244726263003e-06, "loss": 0.6819, "step": 9386 }, { "epoch": 0.38903394255874674, "grad_norm": 0.43953385949134827, "learning_rate": 3.055037506734635e-06, "loss": 0.6902, "step": 9387 }, { "epoch": 0.3890753864644204, "grad_norm": 0.42892351746559143, "learning_rate": 3.0548302872062667e-06, "loss": 0.7288, "step": 9388 }, { "epoch": 0.3891168303700941, "grad_norm": 0.4042990803718567, "learning_rate": 3.054623067677898e-06, "loss": 0.6667, "step": 9389 }, { "epoch": 0.38915827427576777, "grad_norm": 0.39697572588920593, "learning_rate": 3.05441584814953e-06, "loss": 0.6851, "step": 9390 }, { "epoch": 0.38919971818144145, "grad_norm": 0.3929368853569031, "learning_rate": 
3.0542086286211613e-06, "loss": 0.6702, "step": 9391 }, { "epoch": 0.38924116208711507, "grad_norm": 0.4014310836791992, "learning_rate": 3.054001409092793e-06, "loss": 0.6858, "step": 9392 }, { "epoch": 0.38928260599278874, "grad_norm": 0.408161997795105, "learning_rate": 3.0537941895644245e-06, "loss": 0.6667, "step": 9393 }, { "epoch": 0.3893240498984624, "grad_norm": 0.40276798605918884, "learning_rate": 3.0535869700360563e-06, "loss": 0.6556, "step": 9394 }, { "epoch": 0.3893654938041361, "grad_norm": 0.43274080753326416, "learning_rate": 3.0533797505076877e-06, "loss": 0.7205, "step": 9395 }, { "epoch": 0.3894069377098098, "grad_norm": 0.4182130992412567, "learning_rate": 3.05317253097932e-06, "loss": 0.7208, "step": 9396 }, { "epoch": 0.38944838161548345, "grad_norm": 0.4255117177963257, "learning_rate": 3.0529653114509517e-06, "loss": 0.7148, "step": 9397 }, { "epoch": 0.38948982552115713, "grad_norm": 0.40732160210609436, "learning_rate": 3.052758091922583e-06, "loss": 0.6854, "step": 9398 }, { "epoch": 0.3895312694268308, "grad_norm": 0.4692497253417969, "learning_rate": 3.052550872394215e-06, "loss": 0.7415, "step": 9399 }, { "epoch": 0.38957271333250443, "grad_norm": 0.4151582717895508, "learning_rate": 3.0523436528658463e-06, "loss": 0.722, "step": 9400 }, { "epoch": 0.3896141572381781, "grad_norm": 0.41852709650993347, "learning_rate": 3.052136433337478e-06, "loss": 0.7573, "step": 9401 }, { "epoch": 0.3896556011438518, "grad_norm": 0.42075392603874207, "learning_rate": 3.0519292138091095e-06, "loss": 0.7275, "step": 9402 }, { "epoch": 0.38969704504952546, "grad_norm": 0.4144735336303711, "learning_rate": 3.0517219942807413e-06, "loss": 0.7144, "step": 9403 }, { "epoch": 0.38973848895519914, "grad_norm": 0.3844081461429596, "learning_rate": 3.0515147747523727e-06, "loss": 0.6539, "step": 9404 }, { "epoch": 0.3897799328608728, "grad_norm": 0.4397536516189575, "learning_rate": 3.0513075552240045e-06, "loss": 0.6885, "step": 9405 }, { "epoch": 
0.3898213767665465, "grad_norm": 0.3842150568962097, "learning_rate": 3.0511003356956363e-06, "loss": 0.6689, "step": 9406 }, { "epoch": 0.38986282067222017, "grad_norm": 0.4209131896495819, "learning_rate": 3.0508931161672677e-06, "loss": 0.7139, "step": 9407 }, { "epoch": 0.38990426457789384, "grad_norm": 0.3816525638103485, "learning_rate": 3.0506858966388995e-06, "loss": 0.7064, "step": 9408 }, { "epoch": 0.38994570848356747, "grad_norm": 0.43802395462989807, "learning_rate": 3.050478677110531e-06, "loss": 0.6863, "step": 9409 }, { "epoch": 0.38998715238924114, "grad_norm": 0.40442538261413574, "learning_rate": 3.0502714575821627e-06, "loss": 0.6554, "step": 9410 }, { "epoch": 0.3900285962949148, "grad_norm": 0.41163915395736694, "learning_rate": 3.050064238053794e-06, "loss": 0.6738, "step": 9411 }, { "epoch": 0.3900700402005885, "grad_norm": 0.42640355229377747, "learning_rate": 3.0498570185254263e-06, "loss": 0.7239, "step": 9412 }, { "epoch": 0.3901114841062622, "grad_norm": 0.41415268182754517, "learning_rate": 3.0496497989970573e-06, "loss": 0.704, "step": 9413 }, { "epoch": 0.39015292801193585, "grad_norm": 0.4178018271923065, "learning_rate": 3.0494425794686895e-06, "loss": 0.6835, "step": 9414 }, { "epoch": 0.39019437191760953, "grad_norm": 0.42922040820121765, "learning_rate": 3.0492353599403214e-06, "loss": 0.7175, "step": 9415 }, { "epoch": 0.3902358158232832, "grad_norm": 0.40202462673187256, "learning_rate": 3.0490281404119527e-06, "loss": 0.7068, "step": 9416 }, { "epoch": 0.3902772597289569, "grad_norm": 0.4150748550891876, "learning_rate": 3.0488209208835845e-06, "loss": 0.7247, "step": 9417 }, { "epoch": 0.3903187036346305, "grad_norm": 0.3927296996116638, "learning_rate": 3.048613701355216e-06, "loss": 0.6863, "step": 9418 }, { "epoch": 0.3903601475403042, "grad_norm": 0.42835643887519836, "learning_rate": 3.0484064818268477e-06, "loss": 0.6763, "step": 9419 }, { "epoch": 0.39040159144597786, "grad_norm": 0.39233919978141785, "learning_rate": 
3.048199262298479e-06, "loss": 0.6855, "step": 9420 }, { "epoch": 0.39044303535165154, "grad_norm": 0.4676865041255951, "learning_rate": 3.047992042770111e-06, "loss": 0.698, "step": 9421 }, { "epoch": 0.3904844792573252, "grad_norm": 0.43749746680259705, "learning_rate": 3.0477848232417423e-06, "loss": 0.7002, "step": 9422 }, { "epoch": 0.3905259231629989, "grad_norm": 0.4187760353088379, "learning_rate": 3.047577603713374e-06, "loss": 0.7021, "step": 9423 }, { "epoch": 0.39056736706867257, "grad_norm": 0.40054646134376526, "learning_rate": 3.047370384185006e-06, "loss": 0.7048, "step": 9424 }, { "epoch": 0.39060881097434624, "grad_norm": 0.3844476044178009, "learning_rate": 3.0471631646566373e-06, "loss": 0.6562, "step": 9425 }, { "epoch": 0.3906502548800199, "grad_norm": 0.4560074806213379, "learning_rate": 3.046955945128269e-06, "loss": 0.7402, "step": 9426 }, { "epoch": 0.39069169878569354, "grad_norm": 0.422397255897522, "learning_rate": 3.0467487255999005e-06, "loss": 0.697, "step": 9427 }, { "epoch": 0.3907331426913672, "grad_norm": 0.401538610458374, "learning_rate": 3.0465415060715323e-06, "loss": 0.7356, "step": 9428 }, { "epoch": 0.3907745865970409, "grad_norm": 0.38236844539642334, "learning_rate": 3.0463342865431637e-06, "loss": 0.6874, "step": 9429 }, { "epoch": 0.3908160305027146, "grad_norm": 0.40216246247291565, "learning_rate": 3.046127067014796e-06, "loss": 0.6558, "step": 9430 }, { "epoch": 0.39085747440838825, "grad_norm": 0.40618130564689636, "learning_rate": 3.045919847486427e-06, "loss": 0.6813, "step": 9431 }, { "epoch": 0.3908989183140619, "grad_norm": 0.3617019057273865, "learning_rate": 3.045712627958059e-06, "loss": 0.6788, "step": 9432 }, { "epoch": 0.3909403622197356, "grad_norm": 0.40182480216026306, "learning_rate": 3.045505408429691e-06, "loss": 0.6317, "step": 9433 }, { "epoch": 0.3909818061254093, "grad_norm": 0.40691280364990234, "learning_rate": 3.0452981889013223e-06, "loss": 0.7026, "step": 9434 }, { "epoch": 
0.3910232500310829, "grad_norm": 0.3901875913143158, "learning_rate": 3.045090969372954e-06, "loss": 0.6703, "step": 9435 }, { "epoch": 0.3910646939367566, "grad_norm": 0.37969711422920227, "learning_rate": 3.0448837498445855e-06, "loss": 0.6105, "step": 9436 }, { "epoch": 0.39110613784243026, "grad_norm": 0.4128420054912567, "learning_rate": 3.0446765303162173e-06, "loss": 0.7029, "step": 9437 }, { "epoch": 0.39114758174810393, "grad_norm": 0.40657541155815125, "learning_rate": 3.0444693107878487e-06, "loss": 0.6719, "step": 9438 }, { "epoch": 0.3911890256537776, "grad_norm": 0.3873860538005829, "learning_rate": 3.0442620912594805e-06, "loss": 0.6594, "step": 9439 }, { "epoch": 0.3912304695594513, "grad_norm": 0.43893229961395264, "learning_rate": 3.044054871731112e-06, "loss": 0.6809, "step": 9440 }, { "epoch": 0.39127191346512497, "grad_norm": 0.43747207522392273, "learning_rate": 3.0438476522027437e-06, "loss": 0.6719, "step": 9441 }, { "epoch": 0.39131335737079864, "grad_norm": 0.4196889102458954, "learning_rate": 3.0436404326743755e-06, "loss": 0.7444, "step": 9442 }, { "epoch": 0.3913548012764723, "grad_norm": 0.375454306602478, "learning_rate": 3.043433213146007e-06, "loss": 0.6641, "step": 9443 }, { "epoch": 0.39139624518214594, "grad_norm": 0.4401792883872986, "learning_rate": 3.0432259936176387e-06, "loss": 0.7008, "step": 9444 }, { "epoch": 0.3914376890878196, "grad_norm": 0.3918755054473877, "learning_rate": 3.04301877408927e-06, "loss": 0.6759, "step": 9445 }, { "epoch": 0.3914791329934933, "grad_norm": 0.4113456606864929, "learning_rate": 3.0428115545609024e-06, "loss": 0.7659, "step": 9446 }, { "epoch": 0.39152057689916697, "grad_norm": 0.4200038015842438, "learning_rate": 3.0426043350325333e-06, "loss": 0.7122, "step": 9447 }, { "epoch": 0.39156202080484065, "grad_norm": 0.42115336656570435, "learning_rate": 3.0423971155041655e-06, "loss": 0.6528, "step": 9448 }, { "epoch": 0.3916034647105143, "grad_norm": 0.3892390727996826, "learning_rate": 
3.0421898959757974e-06, "loss": 0.6572, "step": 9449 }, { "epoch": 0.391644908616188, "grad_norm": 0.4148411154747009, "learning_rate": 3.0419826764474287e-06, "loss": 0.686, "step": 9450 }, { "epoch": 0.3916863525218617, "grad_norm": 0.3985777199268341, "learning_rate": 3.0417754569190606e-06, "loss": 0.6809, "step": 9451 }, { "epoch": 0.39172779642753536, "grad_norm": 0.43128225207328796, "learning_rate": 3.041568237390692e-06, "loss": 0.6885, "step": 9452 }, { "epoch": 0.391769240333209, "grad_norm": 0.40669140219688416, "learning_rate": 3.0413610178623237e-06, "loss": 0.7219, "step": 9453 }, { "epoch": 0.39181068423888266, "grad_norm": 0.3818778395652771, "learning_rate": 3.041153798333955e-06, "loss": 0.7009, "step": 9454 }, { "epoch": 0.39185212814455633, "grad_norm": 0.4299337863922119, "learning_rate": 3.040946578805587e-06, "loss": 0.7102, "step": 9455 }, { "epoch": 0.39189357205023, "grad_norm": 0.4290864169597626, "learning_rate": 3.0407393592772183e-06, "loss": 0.6941, "step": 9456 }, { "epoch": 0.3919350159559037, "grad_norm": 0.40424028038978577, "learning_rate": 3.04053213974885e-06, "loss": 0.6664, "step": 9457 }, { "epoch": 0.39197645986157736, "grad_norm": 0.4697413146495819, "learning_rate": 3.040324920220482e-06, "loss": 0.7214, "step": 9458 }, { "epoch": 0.39201790376725104, "grad_norm": 0.370175838470459, "learning_rate": 3.0401177006921133e-06, "loss": 0.7195, "step": 9459 }, { "epoch": 0.3920593476729247, "grad_norm": 0.44440561532974243, "learning_rate": 3.039910481163745e-06, "loss": 0.7089, "step": 9460 }, { "epoch": 0.39210079157859834, "grad_norm": 0.40980175137519836, "learning_rate": 3.0397032616353765e-06, "loss": 0.6456, "step": 9461 }, { "epoch": 0.392142235484272, "grad_norm": 0.3858970105648041, "learning_rate": 3.0394960421070088e-06, "loss": 0.6976, "step": 9462 }, { "epoch": 0.3921836793899457, "grad_norm": 0.4140380024909973, "learning_rate": 3.0392888225786397e-06, "loss": 0.7024, "step": 9463 }, { "epoch": 
0.39222512329561937, "grad_norm": 0.43206754326820374, "learning_rate": 3.039081603050272e-06, "loss": 0.6665, "step": 9464 }, { "epoch": 0.39226656720129305, "grad_norm": 0.4300233721733093, "learning_rate": 3.038874383521903e-06, "loss": 0.6993, "step": 9465 }, { "epoch": 0.3923080111069667, "grad_norm": 0.38279590010643005, "learning_rate": 3.038667163993535e-06, "loss": 0.7249, "step": 9466 }, { "epoch": 0.3923494550126404, "grad_norm": 0.39058512449264526, "learning_rate": 3.038459944465167e-06, "loss": 0.6982, "step": 9467 }, { "epoch": 0.3923908989183141, "grad_norm": 0.42312824726104736, "learning_rate": 3.0382527249367983e-06, "loss": 0.7031, "step": 9468 }, { "epoch": 0.39243234282398776, "grad_norm": 0.3944706320762634, "learning_rate": 3.03804550540843e-06, "loss": 0.7324, "step": 9469 }, { "epoch": 0.3924737867296614, "grad_norm": 0.43116700649261475, "learning_rate": 3.0378382858800615e-06, "loss": 0.7305, "step": 9470 }, { "epoch": 0.39251523063533506, "grad_norm": 0.4367968440055847, "learning_rate": 3.0376310663516933e-06, "loss": 0.7554, "step": 9471 }, { "epoch": 0.39255667454100873, "grad_norm": 0.4336731433868408, "learning_rate": 3.0374238468233247e-06, "loss": 0.7664, "step": 9472 }, { "epoch": 0.3925981184466824, "grad_norm": 0.4240306615829468, "learning_rate": 3.0372166272949565e-06, "loss": 0.6698, "step": 9473 }, { "epoch": 0.3926395623523561, "grad_norm": 0.41871407628059387, "learning_rate": 3.037009407766588e-06, "loss": 0.6815, "step": 9474 }, { "epoch": 0.39268100625802976, "grad_norm": 0.405497282743454, "learning_rate": 3.0368021882382197e-06, "loss": 0.6941, "step": 9475 }, { "epoch": 0.39272245016370344, "grad_norm": 0.4034014344215393, "learning_rate": 3.0365949687098515e-06, "loss": 0.6526, "step": 9476 }, { "epoch": 0.3927638940693771, "grad_norm": 0.3778819143772125, "learning_rate": 3.036387749181483e-06, "loss": 0.6829, "step": 9477 }, { "epoch": 0.3928053379750508, "grad_norm": 0.39154472947120667, "learning_rate": 
3.0361805296531147e-06, "loss": 0.6809, "step": 9478 }, { "epoch": 0.3928467818807244, "grad_norm": 0.458553671836853, "learning_rate": 3.035973310124746e-06, "loss": 0.7729, "step": 9479 }, { "epoch": 0.3928882257863981, "grad_norm": 0.40662333369255066, "learning_rate": 3.0357660905963784e-06, "loss": 0.7083, "step": 9480 }, { "epoch": 0.39292966969207177, "grad_norm": 0.3842395842075348, "learning_rate": 3.0355588710680093e-06, "loss": 0.6417, "step": 9481 }, { "epoch": 0.39297111359774545, "grad_norm": 0.40851306915283203, "learning_rate": 3.0353516515396416e-06, "loss": 0.6973, "step": 9482 }, { "epoch": 0.3930125575034191, "grad_norm": 0.42207080125808716, "learning_rate": 3.035144432011273e-06, "loss": 0.7305, "step": 9483 }, { "epoch": 0.3930540014090928, "grad_norm": 0.3744085431098938, "learning_rate": 3.0349372124829047e-06, "loss": 0.6938, "step": 9484 }, { "epoch": 0.3930954453147665, "grad_norm": 0.3872545659542084, "learning_rate": 3.0347299929545366e-06, "loss": 0.6309, "step": 9485 }, { "epoch": 0.39313688922044016, "grad_norm": 0.4180368185043335, "learning_rate": 3.034522773426168e-06, "loss": 0.6697, "step": 9486 }, { "epoch": 0.39317833312611383, "grad_norm": 0.4022526741027832, "learning_rate": 3.0343155538977998e-06, "loss": 0.6858, "step": 9487 }, { "epoch": 0.39321977703178745, "grad_norm": 0.41308555006980896, "learning_rate": 3.034108334369431e-06, "loss": 0.679, "step": 9488 }, { "epoch": 0.39326122093746113, "grad_norm": 0.39015960693359375, "learning_rate": 3.033901114841063e-06, "loss": 0.687, "step": 9489 }, { "epoch": 0.3933026648431348, "grad_norm": 0.4057956337928772, "learning_rate": 3.0336938953126943e-06, "loss": 0.7466, "step": 9490 }, { "epoch": 0.3933441087488085, "grad_norm": 0.41703540086746216, "learning_rate": 3.033486675784326e-06, "loss": 0.6656, "step": 9491 }, { "epoch": 0.39338555265448216, "grad_norm": 0.4397125244140625, "learning_rate": 3.0332794562559575e-06, "loss": 0.7239, "step": 9492 }, { "epoch": 
0.39342699656015584, "grad_norm": 0.40248948335647583, "learning_rate": 3.0330722367275893e-06, "loss": 0.6904, "step": 9493 }, { "epoch": 0.3934684404658295, "grad_norm": 0.4140368103981018, "learning_rate": 3.032865017199221e-06, "loss": 0.7349, "step": 9494 }, { "epoch": 0.3935098843715032, "grad_norm": 0.4492703676223755, "learning_rate": 3.0326577976708525e-06, "loss": 0.7371, "step": 9495 }, { "epoch": 0.3935513282771768, "grad_norm": 0.3933362066745758, "learning_rate": 3.0324505781424848e-06, "loss": 0.6658, "step": 9496 }, { "epoch": 0.3935927721828505, "grad_norm": 0.4190264642238617, "learning_rate": 3.0322433586141157e-06, "loss": 0.6958, "step": 9497 }, { "epoch": 0.39363421608852417, "grad_norm": 0.4406547248363495, "learning_rate": 3.032036139085748e-06, "loss": 0.7687, "step": 9498 }, { "epoch": 0.39367565999419785, "grad_norm": 0.4731440246105194, "learning_rate": 3.031828919557379e-06, "loss": 0.7628, "step": 9499 }, { "epoch": 0.3937171038998715, "grad_norm": 0.4237787127494812, "learning_rate": 3.031621700029011e-06, "loss": 0.6875, "step": 9500 }, { "epoch": 0.3937585478055452, "grad_norm": 0.4177541732788086, "learning_rate": 3.0314144805006425e-06, "loss": 0.6465, "step": 9501 }, { "epoch": 0.3937999917112189, "grad_norm": 0.4523256719112396, "learning_rate": 3.0312072609722743e-06, "loss": 0.7993, "step": 9502 }, { "epoch": 0.39384143561689255, "grad_norm": 0.38543811440467834, "learning_rate": 3.031000041443906e-06, "loss": 0.6826, "step": 9503 }, { "epoch": 0.39388287952256623, "grad_norm": 0.41366326808929443, "learning_rate": 3.0307928219155375e-06, "loss": 0.6575, "step": 9504 }, { "epoch": 0.39392432342823985, "grad_norm": 0.4158187806606293, "learning_rate": 3.0305856023871694e-06, "loss": 0.7203, "step": 9505 }, { "epoch": 0.39396576733391353, "grad_norm": 0.4178304076194763, "learning_rate": 3.0303783828588007e-06, "loss": 0.7109, "step": 9506 }, { "epoch": 0.3940072112395872, "grad_norm": 0.4592568874359131, "learning_rate": 
3.0301711633304325e-06, "loss": 0.7341, "step": 9507 }, { "epoch": 0.3940486551452609, "grad_norm": 0.4132538437843323, "learning_rate": 3.029963943802064e-06, "loss": 0.6838, "step": 9508 }, { "epoch": 0.39409009905093456, "grad_norm": 0.4246460497379303, "learning_rate": 3.0297567242736957e-06, "loss": 0.7031, "step": 9509 }, { "epoch": 0.39413154295660824, "grad_norm": 0.40939202904701233, "learning_rate": 3.0295495047453276e-06, "loss": 0.6609, "step": 9510 }, { "epoch": 0.3941729868622819, "grad_norm": 0.40757328271865845, "learning_rate": 3.029342285216959e-06, "loss": 0.7036, "step": 9511 }, { "epoch": 0.3942144307679556, "grad_norm": 0.4184506833553314, "learning_rate": 3.0291350656885907e-06, "loss": 0.6727, "step": 9512 }, { "epoch": 0.39425587467362927, "grad_norm": 0.47774237394332886, "learning_rate": 3.028927846160222e-06, "loss": 0.7834, "step": 9513 }, { "epoch": 0.3942973185793029, "grad_norm": 0.45306167006492615, "learning_rate": 3.0287206266318544e-06, "loss": 0.745, "step": 9514 }, { "epoch": 0.39433876248497657, "grad_norm": 0.4820023477077484, "learning_rate": 3.0285134071034853e-06, "loss": 0.787, "step": 9515 }, { "epoch": 0.39438020639065025, "grad_norm": 0.4054049551486969, "learning_rate": 3.0283061875751176e-06, "loss": 0.6511, "step": 9516 }, { "epoch": 0.3944216502963239, "grad_norm": 0.39879316091537476, "learning_rate": 3.028098968046749e-06, "loss": 0.6914, "step": 9517 }, { "epoch": 0.3944630942019976, "grad_norm": 0.40437743067741394, "learning_rate": 3.0278917485183808e-06, "loss": 0.6873, "step": 9518 }, { "epoch": 0.3945045381076713, "grad_norm": 0.41723304986953735, "learning_rate": 3.0276845289900126e-06, "loss": 0.7087, "step": 9519 }, { "epoch": 0.39454598201334495, "grad_norm": 0.4016387164592743, "learning_rate": 3.027477309461644e-06, "loss": 0.6846, "step": 9520 }, { "epoch": 0.39458742591901863, "grad_norm": 0.38109537959098816, "learning_rate": 3.0272700899332758e-06, "loss": 0.7073, "step": 9521 }, { "epoch": 
0.39462886982469225, "grad_norm": 0.431786447763443, "learning_rate": 3.027062870404907e-06, "loss": 0.7078, "step": 9522 }, { "epoch": 0.39467031373036593, "grad_norm": 0.4154272675514221, "learning_rate": 3.026855650876539e-06, "loss": 0.6707, "step": 9523 }, { "epoch": 0.3947117576360396, "grad_norm": 0.45969444513320923, "learning_rate": 3.0266484313481703e-06, "loss": 0.7339, "step": 9524 }, { "epoch": 0.3947532015417133, "grad_norm": 0.43442776799201965, "learning_rate": 3.026441211819802e-06, "loss": 0.668, "step": 9525 }, { "epoch": 0.39479464544738696, "grad_norm": 0.4378511607646942, "learning_rate": 3.0262339922914335e-06, "loss": 0.7317, "step": 9526 }, { "epoch": 0.39483608935306064, "grad_norm": 0.41116365790367126, "learning_rate": 3.0260267727630653e-06, "loss": 0.7185, "step": 9527 }, { "epoch": 0.3948775332587343, "grad_norm": 0.42327770590782166, "learning_rate": 3.025819553234697e-06, "loss": 0.708, "step": 9528 }, { "epoch": 0.394918977164408, "grad_norm": 0.4089825451374054, "learning_rate": 3.0256123337063285e-06, "loss": 0.7256, "step": 9529 }, { "epoch": 0.39496042107008167, "grad_norm": 0.38536033034324646, "learning_rate": 3.0254051141779608e-06, "loss": 0.656, "step": 9530 }, { "epoch": 0.3950018649757553, "grad_norm": 0.4033794701099396, "learning_rate": 3.0251978946495917e-06, "loss": 0.6902, "step": 9531 }, { "epoch": 0.39504330888142897, "grad_norm": 0.40971848368644714, "learning_rate": 3.024990675121224e-06, "loss": 0.6523, "step": 9532 }, { "epoch": 0.39508475278710264, "grad_norm": 0.437128484249115, "learning_rate": 3.024783455592855e-06, "loss": 0.7283, "step": 9533 }, { "epoch": 0.3951261966927763, "grad_norm": 0.4298624098300934, "learning_rate": 3.024576236064487e-06, "loss": 0.7429, "step": 9534 }, { "epoch": 0.39516764059845, "grad_norm": 0.4121999740600586, "learning_rate": 3.0243690165361185e-06, "loss": 0.6997, "step": 9535 }, { "epoch": 0.3952090845041237, "grad_norm": 0.39793020486831665, "learning_rate": 
3.0241617970077504e-06, "loss": 0.6801, "step": 9536 }, { "epoch": 0.39525052840979735, "grad_norm": 0.4143294095993042, "learning_rate": 3.023954577479382e-06, "loss": 0.7744, "step": 9537 }, { "epoch": 0.39529197231547103, "grad_norm": 0.41114315390586853, "learning_rate": 3.0237473579510135e-06, "loss": 0.7107, "step": 9538 }, { "epoch": 0.3953334162211447, "grad_norm": 0.4264541566371918, "learning_rate": 3.0235401384226454e-06, "loss": 0.708, "step": 9539 }, { "epoch": 0.39537486012681833, "grad_norm": 0.4418397545814514, "learning_rate": 3.0233329188942767e-06, "loss": 0.702, "step": 9540 }, { "epoch": 0.395416304032492, "grad_norm": 0.42302101850509644, "learning_rate": 3.0231256993659086e-06, "loss": 0.6978, "step": 9541 }, { "epoch": 0.3954577479381657, "grad_norm": 0.437695175409317, "learning_rate": 3.02291847983754e-06, "loss": 0.7102, "step": 9542 }, { "epoch": 0.39549919184383936, "grad_norm": 0.4156973361968994, "learning_rate": 3.0227112603091717e-06, "loss": 0.6909, "step": 9543 }, { "epoch": 0.39554063574951304, "grad_norm": 0.4250630736351013, "learning_rate": 3.022504040780803e-06, "loss": 0.6746, "step": 9544 }, { "epoch": 0.3955820796551867, "grad_norm": 0.43384137749671936, "learning_rate": 3.022296821252435e-06, "loss": 0.7444, "step": 9545 }, { "epoch": 0.3956235235608604, "grad_norm": 0.38809671998023987, "learning_rate": 3.0220896017240668e-06, "loss": 0.6821, "step": 9546 }, { "epoch": 0.39566496746653407, "grad_norm": 0.42618754506111145, "learning_rate": 3.021882382195698e-06, "loss": 0.6899, "step": 9547 }, { "epoch": 0.3957064113722077, "grad_norm": 0.41692298650741577, "learning_rate": 3.0216751626673304e-06, "loss": 0.6802, "step": 9548 }, { "epoch": 0.39574785527788137, "grad_norm": 0.377252995967865, "learning_rate": 3.0214679431389613e-06, "loss": 0.6914, "step": 9549 }, { "epoch": 0.39578929918355504, "grad_norm": 0.426179975271225, "learning_rate": 3.0212607236105936e-06, "loss": 0.698, "step": 9550 }, { "epoch": 
0.3958307430892287, "grad_norm": 0.40155890583992004, "learning_rate": 3.021053504082225e-06, "loss": 0.6512, "step": 9551 }, { "epoch": 0.3958721869949024, "grad_norm": 0.4274677336215973, "learning_rate": 3.0208462845538568e-06, "loss": 0.6467, "step": 9552 }, { "epoch": 0.3959136309005761, "grad_norm": 0.4084275960922241, "learning_rate": 3.020639065025488e-06, "loss": 0.7053, "step": 9553 }, { "epoch": 0.39595507480624975, "grad_norm": 0.41739505529403687, "learning_rate": 3.02043184549712e-06, "loss": 0.6515, "step": 9554 }, { "epoch": 0.39599651871192343, "grad_norm": 0.40556082129478455, "learning_rate": 3.0202246259687518e-06, "loss": 0.7195, "step": 9555 }, { "epoch": 0.3960379626175971, "grad_norm": 0.39795422554016113, "learning_rate": 3.020017406440383e-06, "loss": 0.6766, "step": 9556 }, { "epoch": 0.3960794065232707, "grad_norm": 0.4124256372451782, "learning_rate": 3.019810186912015e-06, "loss": 0.7236, "step": 9557 }, { "epoch": 0.3961208504289444, "grad_norm": 0.3810274600982666, "learning_rate": 3.0196029673836463e-06, "loss": 0.6608, "step": 9558 }, { "epoch": 0.3961622943346181, "grad_norm": 0.425054669380188, "learning_rate": 3.019395747855278e-06, "loss": 0.7024, "step": 9559 }, { "epoch": 0.39620373824029176, "grad_norm": 0.37433454394340515, "learning_rate": 3.0191885283269095e-06, "loss": 0.6387, "step": 9560 }, { "epoch": 0.39624518214596544, "grad_norm": 0.404678076505661, "learning_rate": 3.0189813087985413e-06, "loss": 0.6754, "step": 9561 }, { "epoch": 0.3962866260516391, "grad_norm": 0.4291474223136902, "learning_rate": 3.0187740892701727e-06, "loss": 0.7246, "step": 9562 }, { "epoch": 0.3963280699573128, "grad_norm": 0.4419684112071991, "learning_rate": 3.0185668697418045e-06, "loss": 0.728, "step": 9563 }, { "epoch": 0.39636951386298647, "grad_norm": 0.40887850522994995, "learning_rate": 3.0183596502134368e-06, "loss": 0.6334, "step": 9564 }, { "epoch": 0.39641095776866014, "grad_norm": 0.38945356011390686, "learning_rate": 
3.0181524306850677e-06, "loss": 0.697, "step": 9565 }, { "epoch": 0.39645240167433377, "grad_norm": 0.40308645367622375, "learning_rate": 3.0179452111567e-06, "loss": 0.7693, "step": 9566 }, { "epoch": 0.39649384558000744, "grad_norm": 0.4002532362937927, "learning_rate": 3.017737991628331e-06, "loss": 0.6704, "step": 9567 }, { "epoch": 0.3965352894856811, "grad_norm": 0.4021584987640381, "learning_rate": 3.017530772099963e-06, "loss": 0.6865, "step": 9568 }, { "epoch": 0.3965767333913548, "grad_norm": 0.47005054354667664, "learning_rate": 3.0173235525715946e-06, "loss": 0.6838, "step": 9569 }, { "epoch": 0.3966181772970285, "grad_norm": 0.4345030188560486, "learning_rate": 3.0171163330432264e-06, "loss": 0.7421, "step": 9570 }, { "epoch": 0.39665962120270215, "grad_norm": 0.4128609001636505, "learning_rate": 3.016909113514858e-06, "loss": 0.6935, "step": 9571 }, { "epoch": 0.3967010651083758, "grad_norm": 0.4201023578643799, "learning_rate": 3.0167018939864896e-06, "loss": 0.7466, "step": 9572 }, { "epoch": 0.3967425090140495, "grad_norm": 0.39556774497032166, "learning_rate": 3.0164946744581214e-06, "loss": 0.6703, "step": 9573 }, { "epoch": 0.3967839529197232, "grad_norm": 0.42128506302833557, "learning_rate": 3.0162874549297528e-06, "loss": 0.6877, "step": 9574 }, { "epoch": 0.3968253968253968, "grad_norm": 0.43170836567878723, "learning_rate": 3.0160802354013846e-06, "loss": 0.7119, "step": 9575 }, { "epoch": 0.3968668407310705, "grad_norm": 0.4017358422279358, "learning_rate": 3.015873015873016e-06, "loss": 0.6826, "step": 9576 }, { "epoch": 0.39690828463674416, "grad_norm": 0.5477772951126099, "learning_rate": 3.0156657963446478e-06, "loss": 0.6798, "step": 9577 }, { "epoch": 0.39694972854241783, "grad_norm": 0.4157739281654358, "learning_rate": 3.015458576816279e-06, "loss": 0.7335, "step": 9578 }, { "epoch": 0.3969911724480915, "grad_norm": 0.4020553529262543, "learning_rate": 3.015251357287911e-06, "loss": 0.6973, "step": 9579 }, { "epoch": 
0.3970326163537652, "grad_norm": 0.42026329040527344, "learning_rate": 3.0150441377595428e-06, "loss": 0.6835, "step": 9580 }, { "epoch": 0.39707406025943887, "grad_norm": 0.38915637135505676, "learning_rate": 3.014836918231174e-06, "loss": 0.734, "step": 9581 }, { "epoch": 0.39711550416511254, "grad_norm": 0.39482253789901733, "learning_rate": 3.0146296987028064e-06, "loss": 0.7061, "step": 9582 }, { "epoch": 0.39715694807078616, "grad_norm": 0.4192039668560028, "learning_rate": 3.0144224791744373e-06, "loss": 0.6772, "step": 9583 }, { "epoch": 0.39719839197645984, "grad_norm": 0.3737242519855499, "learning_rate": 3.0142152596460696e-06, "loss": 0.6333, "step": 9584 }, { "epoch": 0.3972398358821335, "grad_norm": 0.4051286280155182, "learning_rate": 3.014008040117701e-06, "loss": 0.7433, "step": 9585 }, { "epoch": 0.3972812797878072, "grad_norm": 0.428166002035141, "learning_rate": 3.0138008205893328e-06, "loss": 0.7012, "step": 9586 }, { "epoch": 0.3973227236934809, "grad_norm": 0.41494107246398926, "learning_rate": 3.013593601060964e-06, "loss": 0.7216, "step": 9587 }, { "epoch": 0.39736416759915455, "grad_norm": 0.3880235552787781, "learning_rate": 3.013386381532596e-06, "loss": 0.7021, "step": 9588 }, { "epoch": 0.3974056115048282, "grad_norm": 0.41936543583869934, "learning_rate": 3.0131791620042278e-06, "loss": 0.6404, "step": 9589 }, { "epoch": 0.3974470554105019, "grad_norm": 0.4009304642677307, "learning_rate": 3.012971942475859e-06, "loss": 0.7158, "step": 9590 }, { "epoch": 0.3974884993161756, "grad_norm": 0.44974878430366516, "learning_rate": 3.012764722947491e-06, "loss": 0.7216, "step": 9591 }, { "epoch": 0.3975299432218492, "grad_norm": 0.37596216797828674, "learning_rate": 3.0125575034191224e-06, "loss": 0.7107, "step": 9592 }, { "epoch": 0.3975713871275229, "grad_norm": 0.38925701379776, "learning_rate": 3.012350283890754e-06, "loss": 0.6355, "step": 9593 }, { "epoch": 0.39761283103319656, "grad_norm": 0.4065350890159607, "learning_rate": 
3.0121430643623855e-06, "loss": 0.6947, "step": 9594 }, { "epoch": 0.39765427493887023, "grad_norm": 0.3931446373462677, "learning_rate": 3.0119358448340174e-06, "loss": 0.7021, "step": 9595 }, { "epoch": 0.3976957188445439, "grad_norm": 0.4282339811325073, "learning_rate": 3.0117286253056487e-06, "loss": 0.6649, "step": 9596 }, { "epoch": 0.3977371627502176, "grad_norm": 0.41863736510276794, "learning_rate": 3.0115214057772805e-06, "loss": 0.729, "step": 9597 }, { "epoch": 0.39777860665589126, "grad_norm": 0.4263836443424225, "learning_rate": 3.0113141862489128e-06, "loss": 0.7188, "step": 9598 }, { "epoch": 0.39782005056156494, "grad_norm": 0.44368159770965576, "learning_rate": 3.0111069667205437e-06, "loss": 0.7651, "step": 9599 }, { "epoch": 0.3978614944672386, "grad_norm": 0.3832560181617737, "learning_rate": 3.010899747192176e-06, "loss": 0.6891, "step": 9600 }, { "epoch": 0.39790293837291224, "grad_norm": 0.4258100986480713, "learning_rate": 3.0106925276638074e-06, "loss": 0.6936, "step": 9601 }, { "epoch": 0.3979443822785859, "grad_norm": 0.39879173040390015, "learning_rate": 3.010485308135439e-06, "loss": 0.6781, "step": 9602 }, { "epoch": 0.3979858261842596, "grad_norm": 0.3927847445011139, "learning_rate": 3.0102780886070706e-06, "loss": 0.688, "step": 9603 }, { "epoch": 0.39802727008993327, "grad_norm": 0.44034093618392944, "learning_rate": 3.0100708690787024e-06, "loss": 0.7589, "step": 9604 }, { "epoch": 0.39806871399560695, "grad_norm": 0.423814594745636, "learning_rate": 3.0098636495503338e-06, "loss": 0.7043, "step": 9605 }, { "epoch": 0.3981101579012806, "grad_norm": 0.4956913888454437, "learning_rate": 3.0096564300219656e-06, "loss": 0.7086, "step": 9606 }, { "epoch": 0.3981516018069543, "grad_norm": 0.39377278089523315, "learning_rate": 3.0094492104935974e-06, "loss": 0.6738, "step": 9607 }, { "epoch": 0.398193045712628, "grad_norm": 0.44385913014411926, "learning_rate": 3.0092419909652288e-06, "loss": 0.6451, "step": 9608 }, { "epoch": 
0.3982344896183016, "grad_norm": 0.4152202010154724, "learning_rate": 3.0090347714368606e-06, "loss": 0.7893, "step": 9609 }, { "epoch": 0.3982759335239753, "grad_norm": 0.4080865681171417, "learning_rate": 3.008827551908492e-06, "loss": 0.6689, "step": 9610 }, { "epoch": 0.39831737742964896, "grad_norm": 0.41871216893196106, "learning_rate": 3.0086203323801238e-06, "loss": 0.6625, "step": 9611 }, { "epoch": 0.39835882133532263, "grad_norm": 0.43666625022888184, "learning_rate": 3.008413112851755e-06, "loss": 0.7366, "step": 9612 }, { "epoch": 0.3984002652409963, "grad_norm": 0.4114949405193329, "learning_rate": 3.008205893323387e-06, "loss": 0.6797, "step": 9613 }, { "epoch": 0.39844170914667, "grad_norm": 0.41424646973609924, "learning_rate": 3.0079986737950183e-06, "loss": 0.7253, "step": 9614 }, { "epoch": 0.39848315305234366, "grad_norm": 0.40461185574531555, "learning_rate": 3.00779145426665e-06, "loss": 0.6953, "step": 9615 }, { "epoch": 0.39852459695801734, "grad_norm": 0.4125845432281494, "learning_rate": 3.0075842347382824e-06, "loss": 0.6825, "step": 9616 }, { "epoch": 0.398566040863691, "grad_norm": 0.4023011326789856, "learning_rate": 3.0073770152099133e-06, "loss": 0.7158, "step": 9617 }, { "epoch": 0.39860748476936464, "grad_norm": 0.3855823278427124, "learning_rate": 3.0071697956815456e-06, "loss": 0.6633, "step": 9618 }, { "epoch": 0.3986489286750383, "grad_norm": 0.397495836019516, "learning_rate": 3.006962576153177e-06, "loss": 0.645, "step": 9619 }, { "epoch": 0.398690372580712, "grad_norm": 0.42676928639411926, "learning_rate": 3.0067553566248088e-06, "loss": 0.6462, "step": 9620 }, { "epoch": 0.39873181648638567, "grad_norm": 0.4323368966579437, "learning_rate": 3.00654813709644e-06, "loss": 0.748, "step": 9621 }, { "epoch": 0.39877326039205935, "grad_norm": 0.40364721417427063, "learning_rate": 3.006340917568072e-06, "loss": 0.7197, "step": 9622 }, { "epoch": 0.398814704297733, "grad_norm": 0.40984681248664856, "learning_rate": 
3.0061336980397038e-06, "loss": 0.7123, "step": 9623 }, { "epoch": 0.3988561482034067, "grad_norm": 0.42342615127563477, "learning_rate": 3.005926478511335e-06, "loss": 0.6814, "step": 9624 }, { "epoch": 0.3988975921090804, "grad_norm": 0.4154376685619354, "learning_rate": 3.005719258982967e-06, "loss": 0.682, "step": 9625 }, { "epoch": 0.39893903601475406, "grad_norm": 0.40213608741760254, "learning_rate": 3.0055120394545984e-06, "loss": 0.7097, "step": 9626 }, { "epoch": 0.3989804799204277, "grad_norm": 0.43730056285858154, "learning_rate": 3.00530481992623e-06, "loss": 0.7336, "step": 9627 }, { "epoch": 0.39902192382610135, "grad_norm": 0.3922622799873352, "learning_rate": 3.0050976003978616e-06, "loss": 0.6943, "step": 9628 }, { "epoch": 0.39906336773177503, "grad_norm": 0.4234660267829895, "learning_rate": 3.0048903808694934e-06, "loss": 0.6409, "step": 9629 }, { "epoch": 0.3991048116374487, "grad_norm": 0.4117945432662964, "learning_rate": 3.0046831613411247e-06, "loss": 0.6968, "step": 9630 }, { "epoch": 0.3991462555431224, "grad_norm": 0.4291854798793793, "learning_rate": 3.0044759418127566e-06, "loss": 0.7083, "step": 9631 }, { "epoch": 0.39918769944879606, "grad_norm": 0.4228399097919464, "learning_rate": 3.0042687222843888e-06, "loss": 0.6851, "step": 9632 }, { "epoch": 0.39922914335446974, "grad_norm": 0.43061158061027527, "learning_rate": 3.0040615027560198e-06, "loss": 0.7168, "step": 9633 }, { "epoch": 0.3992705872601434, "grad_norm": 0.44774770736694336, "learning_rate": 3.003854283227652e-06, "loss": 0.7216, "step": 9634 }, { "epoch": 0.3993120311658171, "grad_norm": 0.4778788387775421, "learning_rate": 3.0036470636992834e-06, "loss": 0.7458, "step": 9635 }, { "epoch": 0.3993534750714907, "grad_norm": 0.405484676361084, "learning_rate": 3.003439844170915e-06, "loss": 0.6963, "step": 9636 }, { "epoch": 0.3993949189771644, "grad_norm": 0.4083288609981537, "learning_rate": 3.0032326246425466e-06, "loss": 0.667, "step": 9637 }, { "epoch": 
0.39943636288283807, "grad_norm": 0.4395633339881897, "learning_rate": 3.0030254051141784e-06, "loss": 0.731, "step": 9638 }, { "epoch": 0.39947780678851175, "grad_norm": 0.46842095255851746, "learning_rate": 3.0028181855858098e-06, "loss": 0.6823, "step": 9639 }, { "epoch": 0.3995192506941854, "grad_norm": 0.4304969012737274, "learning_rate": 3.0026109660574416e-06, "loss": 0.6257, "step": 9640 }, { "epoch": 0.3995606945998591, "grad_norm": 0.42316973209381104, "learning_rate": 3.0024037465290734e-06, "loss": 0.698, "step": 9641 }, { "epoch": 0.3996021385055328, "grad_norm": 0.42654502391815186, "learning_rate": 3.0021965270007048e-06, "loss": 0.7192, "step": 9642 }, { "epoch": 0.39964358241120645, "grad_norm": 0.45171642303466797, "learning_rate": 3.0019893074723366e-06, "loss": 0.7605, "step": 9643 }, { "epoch": 0.3996850263168801, "grad_norm": 0.43299993872642517, "learning_rate": 3.001782087943968e-06, "loss": 0.7301, "step": 9644 }, { "epoch": 0.39972647022255375, "grad_norm": 0.41101953387260437, "learning_rate": 3.0015748684155998e-06, "loss": 0.6953, "step": 9645 }, { "epoch": 0.39976791412822743, "grad_norm": 0.4370548129081726, "learning_rate": 3.001367648887231e-06, "loss": 0.7325, "step": 9646 }, { "epoch": 0.3998093580339011, "grad_norm": 0.4835684299468994, "learning_rate": 3.001160429358863e-06, "loss": 0.7172, "step": 9647 }, { "epoch": 0.3998508019395748, "grad_norm": 0.4168919622898102, "learning_rate": 3.0009532098304943e-06, "loss": 0.6801, "step": 9648 }, { "epoch": 0.39989224584524846, "grad_norm": 0.42068737745285034, "learning_rate": 3.000745990302126e-06, "loss": 0.7227, "step": 9649 }, { "epoch": 0.39993368975092214, "grad_norm": 0.426327109336853, "learning_rate": 3.0005387707737584e-06, "loss": 0.703, "step": 9650 }, { "epoch": 0.3999751336565958, "grad_norm": 0.4372767210006714, "learning_rate": 3.0003315512453894e-06, "loss": 0.7252, "step": 9651 }, { "epoch": 0.4000165775622695, "grad_norm": 0.39417994022369385, "learning_rate": 
3.0001243317170216e-06, "loss": 0.7095, "step": 9652 }, { "epoch": 0.4000580214679431, "grad_norm": 0.45287519693374634, "learning_rate": 2.999917112188653e-06, "loss": 0.7617, "step": 9653 }, { "epoch": 0.4000994653736168, "grad_norm": 0.39047256112098694, "learning_rate": 2.9997098926602848e-06, "loss": 0.6921, "step": 9654 }, { "epoch": 0.40014090927929047, "grad_norm": 0.43514031171798706, "learning_rate": 2.999502673131916e-06, "loss": 0.7384, "step": 9655 }, { "epoch": 0.40018235318496415, "grad_norm": 0.43791842460632324, "learning_rate": 2.999295453603548e-06, "loss": 0.6835, "step": 9656 }, { "epoch": 0.4002237970906378, "grad_norm": 0.365914523601532, "learning_rate": 2.9990882340751794e-06, "loss": 0.6929, "step": 9657 }, { "epoch": 0.4002652409963115, "grad_norm": 0.385794073343277, "learning_rate": 2.998881014546811e-06, "loss": 0.6487, "step": 9658 }, { "epoch": 0.4003066849019852, "grad_norm": 0.4497039020061493, "learning_rate": 2.998673795018443e-06, "loss": 0.6887, "step": 9659 }, { "epoch": 0.40034812880765885, "grad_norm": 0.3803664743900299, "learning_rate": 2.9984665754900744e-06, "loss": 0.667, "step": 9660 }, { "epoch": 0.40038957271333253, "grad_norm": 0.40676215291023254, "learning_rate": 2.998259355961706e-06, "loss": 0.7122, "step": 9661 }, { "epoch": 0.40043101661900615, "grad_norm": 0.5099861025810242, "learning_rate": 2.9980521364333376e-06, "loss": 0.7947, "step": 9662 }, { "epoch": 0.40047246052467983, "grad_norm": 0.4264516830444336, "learning_rate": 2.9978449169049694e-06, "loss": 0.6931, "step": 9663 }, { "epoch": 0.4005139044303535, "grad_norm": 0.40377357602119446, "learning_rate": 2.9976376973766008e-06, "loss": 0.7192, "step": 9664 }, { "epoch": 0.4005553483360272, "grad_norm": 0.40250930190086365, "learning_rate": 2.9974304778482326e-06, "loss": 0.6815, "step": 9665 }, { "epoch": 0.40059679224170086, "grad_norm": 0.4150809347629547, "learning_rate": 2.997223258319864e-06, "loss": 0.6731, "step": 9666 }, { "epoch": 
0.40063823614737454, "grad_norm": 0.3994804322719574, "learning_rate": 2.9970160387914958e-06, "loss": 0.6965, "step": 9667 }, { "epoch": 0.4006796800530482, "grad_norm": 0.4262216091156006, "learning_rate": 2.996808819263128e-06, "loss": 0.7173, "step": 9668 }, { "epoch": 0.4007211239587219, "grad_norm": 0.4071671962738037, "learning_rate": 2.9966015997347594e-06, "loss": 0.6593, "step": 9669 }, { "epoch": 0.4007625678643955, "grad_norm": 0.4282101094722748, "learning_rate": 2.996394380206391e-06, "loss": 0.6907, "step": 9670 }, { "epoch": 0.4008040117700692, "grad_norm": 0.3996165096759796, "learning_rate": 2.9961871606780226e-06, "loss": 0.7192, "step": 9671 }, { "epoch": 0.40084545567574287, "grad_norm": 0.4529103934764862, "learning_rate": 2.9959799411496544e-06, "loss": 0.7654, "step": 9672 }, { "epoch": 0.40088689958141654, "grad_norm": 0.3931501805782318, "learning_rate": 2.9957727216212858e-06, "loss": 0.6749, "step": 9673 }, { "epoch": 0.4009283434870902, "grad_norm": 0.42044511437416077, "learning_rate": 2.9955655020929176e-06, "loss": 0.7065, "step": 9674 }, { "epoch": 0.4009697873927639, "grad_norm": 0.4241681694984436, "learning_rate": 2.995358282564549e-06, "loss": 0.735, "step": 9675 }, { "epoch": 0.4010112312984376, "grad_norm": 0.37090548872947693, "learning_rate": 2.9951510630361808e-06, "loss": 0.6632, "step": 9676 }, { "epoch": 0.40105267520411125, "grad_norm": 0.41652169823646545, "learning_rate": 2.9949438435078126e-06, "loss": 0.7278, "step": 9677 }, { "epoch": 0.40109411910978493, "grad_norm": 0.3859569728374481, "learning_rate": 2.994736623979444e-06, "loss": 0.663, "step": 9678 }, { "epoch": 0.40113556301545855, "grad_norm": 0.40043866634368896, "learning_rate": 2.9945294044510758e-06, "loss": 0.668, "step": 9679 }, { "epoch": 0.40117700692113223, "grad_norm": 0.4198809266090393, "learning_rate": 2.994322184922707e-06, "loss": 0.6982, "step": 9680 }, { "epoch": 0.4012184508268059, "grad_norm": 0.3963892161846161, "learning_rate": 
2.994114965394339e-06, "loss": 0.7035, "step": 9681 }, { "epoch": 0.4012598947324796, "grad_norm": 0.40620893239974976, "learning_rate": 2.9939077458659704e-06, "loss": 0.7153, "step": 9682 }, { "epoch": 0.40130133863815326, "grad_norm": 0.409356951713562, "learning_rate": 2.993700526337602e-06, "loss": 0.691, "step": 9683 }, { "epoch": 0.40134278254382694, "grad_norm": 0.40759673714637756, "learning_rate": 2.9934933068092344e-06, "loss": 0.6593, "step": 9684 }, { "epoch": 0.4013842264495006, "grad_norm": 0.4362272620201111, "learning_rate": 2.9932860872808654e-06, "loss": 0.6971, "step": 9685 }, { "epoch": 0.4014256703551743, "grad_norm": 0.4030139744281769, "learning_rate": 2.9930788677524976e-06, "loss": 0.6505, "step": 9686 }, { "epoch": 0.40146711426084797, "grad_norm": 0.44515135884284973, "learning_rate": 2.992871648224129e-06, "loss": 0.7523, "step": 9687 }, { "epoch": 0.4015085581665216, "grad_norm": 0.38346153497695923, "learning_rate": 2.9926644286957608e-06, "loss": 0.6327, "step": 9688 }, { "epoch": 0.40155000207219527, "grad_norm": 0.41850608587265015, "learning_rate": 2.992457209167392e-06, "loss": 0.6801, "step": 9689 }, { "epoch": 0.40159144597786894, "grad_norm": 0.3929440975189209, "learning_rate": 2.992249989639024e-06, "loss": 0.7134, "step": 9690 }, { "epoch": 0.4016328898835426, "grad_norm": 0.4518125653266907, "learning_rate": 2.9920427701106554e-06, "loss": 0.7428, "step": 9691 }, { "epoch": 0.4016743337892163, "grad_norm": 0.4130164384841919, "learning_rate": 2.991835550582287e-06, "loss": 0.6401, "step": 9692 }, { "epoch": 0.40171577769489, "grad_norm": 0.43333011865615845, "learning_rate": 2.991628331053919e-06, "loss": 0.7095, "step": 9693 }, { "epoch": 0.40175722160056365, "grad_norm": 0.4257073402404785, "learning_rate": 2.9914211115255504e-06, "loss": 0.6899, "step": 9694 }, { "epoch": 0.40179866550623733, "grad_norm": 0.4309791326522827, "learning_rate": 2.991213891997182e-06, "loss": 0.644, "step": 9695 }, { "epoch": 
0.401840109411911, "grad_norm": 0.4238462746143341, "learning_rate": 2.9910066724688136e-06, "loss": 0.7166, "step": 9696 }, { "epoch": 0.4018815533175846, "grad_norm": 0.37391698360443115, "learning_rate": 2.9907994529404454e-06, "loss": 0.7034, "step": 9697 }, { "epoch": 0.4019229972232583, "grad_norm": 0.39448216557502747, "learning_rate": 2.9905922334120768e-06, "loss": 0.6835, "step": 9698 }, { "epoch": 0.401964441128932, "grad_norm": 0.4246854782104492, "learning_rate": 2.9903850138837086e-06, "loss": 0.6943, "step": 9699 }, { "epoch": 0.40200588503460566, "grad_norm": 0.47914642095565796, "learning_rate": 2.99017779435534e-06, "loss": 0.7328, "step": 9700 }, { "epoch": 0.40204732894027934, "grad_norm": 0.4108937382698059, "learning_rate": 2.9899705748269718e-06, "loss": 0.7351, "step": 9701 }, { "epoch": 0.402088772845953, "grad_norm": 0.47547468543052673, "learning_rate": 2.989763355298604e-06, "loss": 0.7439, "step": 9702 }, { "epoch": 0.4021302167516267, "grad_norm": 0.3836901783943176, "learning_rate": 2.9895561357702354e-06, "loss": 0.698, "step": 9703 }, { "epoch": 0.40217166065730037, "grad_norm": 0.4137960374355316, "learning_rate": 2.989348916241867e-06, "loss": 0.6604, "step": 9704 }, { "epoch": 0.402213104562974, "grad_norm": 0.4403729736804962, "learning_rate": 2.9891416967134986e-06, "loss": 0.7084, "step": 9705 }, { "epoch": 0.40225454846864767, "grad_norm": 0.38235464692115784, "learning_rate": 2.9889344771851304e-06, "loss": 0.6726, "step": 9706 }, { "epoch": 0.40229599237432134, "grad_norm": 0.4330444037914276, "learning_rate": 2.9887272576567618e-06, "loss": 0.689, "step": 9707 }, { "epoch": 0.402337436279995, "grad_norm": 0.41801586747169495, "learning_rate": 2.9885200381283936e-06, "loss": 0.6974, "step": 9708 }, { "epoch": 0.4023788801856687, "grad_norm": 0.411640465259552, "learning_rate": 2.988312818600025e-06, "loss": 0.6982, "step": 9709 }, { "epoch": 0.4024203240913424, "grad_norm": 0.45801541209220886, "learning_rate": 
2.9881055990716568e-06, "loss": 0.688, "step": 9710 }, { "epoch": 0.40246176799701605, "grad_norm": 0.4810921847820282, "learning_rate": 2.9878983795432886e-06, "loss": 0.7605, "step": 9711 }, { "epoch": 0.40250321190268973, "grad_norm": 0.4124468266963959, "learning_rate": 2.98769116001492e-06, "loss": 0.6692, "step": 9712 }, { "epoch": 0.4025446558083634, "grad_norm": 0.407736599445343, "learning_rate": 2.9874839404865518e-06, "loss": 0.7416, "step": 9713 }, { "epoch": 0.402586099714037, "grad_norm": 0.43104997277259827, "learning_rate": 2.987276720958183e-06, "loss": 0.6719, "step": 9714 }, { "epoch": 0.4026275436197107, "grad_norm": 0.3871555030345917, "learning_rate": 2.987069501429815e-06, "loss": 0.6791, "step": 9715 }, { "epoch": 0.4026689875253844, "grad_norm": 0.43013012409210205, "learning_rate": 2.9868622819014464e-06, "loss": 0.7112, "step": 9716 }, { "epoch": 0.40271043143105806, "grad_norm": 0.390768826007843, "learning_rate": 2.986655062373078e-06, "loss": 0.7297, "step": 9717 }, { "epoch": 0.40275187533673174, "grad_norm": 0.41533708572387695, "learning_rate": 2.9864478428447096e-06, "loss": 0.6846, "step": 9718 }, { "epoch": 0.4027933192424054, "grad_norm": 0.4183333218097687, "learning_rate": 2.9862406233163414e-06, "loss": 0.66, "step": 9719 }, { "epoch": 0.4028347631480791, "grad_norm": 0.42538776993751526, "learning_rate": 2.9860334037879736e-06, "loss": 0.7168, "step": 9720 }, { "epoch": 0.40287620705375277, "grad_norm": 0.3932821750640869, "learning_rate": 2.985826184259605e-06, "loss": 0.7111, "step": 9721 }, { "epoch": 0.40291765095942644, "grad_norm": 0.40207764506340027, "learning_rate": 2.985618964731237e-06, "loss": 0.725, "step": 9722 }, { "epoch": 0.40295909486510006, "grad_norm": 0.3952674865722656, "learning_rate": 2.985411745202868e-06, "loss": 0.637, "step": 9723 }, { "epoch": 0.40300053877077374, "grad_norm": 0.3992679715156555, "learning_rate": 2.9852045256745e-06, "loss": 0.6704, "step": 9724 }, { "epoch": 0.4030419826764474, 
"grad_norm": 0.4539913535118103, "learning_rate": 2.9849973061461314e-06, "loss": 0.7115, "step": 9725 }, { "epoch": 0.4030834265821211, "grad_norm": 0.4348430037498474, "learning_rate": 2.984790086617763e-06, "loss": 0.7009, "step": 9726 }, { "epoch": 0.4031248704877948, "grad_norm": 0.4055812358856201, "learning_rate": 2.9845828670893946e-06, "loss": 0.6998, "step": 9727 }, { "epoch": 0.40316631439346845, "grad_norm": 0.4385034441947937, "learning_rate": 2.9843756475610264e-06, "loss": 0.7321, "step": 9728 }, { "epoch": 0.4032077582991421, "grad_norm": 0.44498491287231445, "learning_rate": 2.984168428032658e-06, "loss": 0.7411, "step": 9729 }, { "epoch": 0.4032492022048158, "grad_norm": 0.39831238985061646, "learning_rate": 2.9839612085042896e-06, "loss": 0.681, "step": 9730 }, { "epoch": 0.4032906461104894, "grad_norm": 0.4489978551864624, "learning_rate": 2.9837539889759214e-06, "loss": 0.7593, "step": 9731 }, { "epoch": 0.4033320900161631, "grad_norm": 0.43996408581733704, "learning_rate": 2.9835467694475528e-06, "loss": 0.7134, "step": 9732 }, { "epoch": 0.4033735339218368, "grad_norm": 0.4582103192806244, "learning_rate": 2.9833395499191846e-06, "loss": 0.7085, "step": 9733 }, { "epoch": 0.40341497782751046, "grad_norm": 0.4034340977668762, "learning_rate": 2.983132330390816e-06, "loss": 0.7085, "step": 9734 }, { "epoch": 0.40345642173318413, "grad_norm": 0.390758216381073, "learning_rate": 2.9829251108624478e-06, "loss": 0.6543, "step": 9735 }, { "epoch": 0.4034978656388578, "grad_norm": 0.4019414782524109, "learning_rate": 2.982717891334079e-06, "loss": 0.6099, "step": 9736 }, { "epoch": 0.4035393095445315, "grad_norm": 0.3674839735031128, "learning_rate": 2.9825106718057114e-06, "loss": 0.6615, "step": 9737 }, { "epoch": 0.40358075345020517, "grad_norm": 0.39651063084602356, "learning_rate": 2.982303452277343e-06, "loss": 0.7063, "step": 9738 }, { "epoch": 0.40362219735587884, "grad_norm": 0.45791327953338623, "learning_rate": 2.9820962327489746e-06, 
"loss": 0.7656, "step": 9739 }, { "epoch": 0.40366364126155246, "grad_norm": 0.4377143979072571, "learning_rate": 2.9818890132206064e-06, "loss": 0.6709, "step": 9740 }, { "epoch": 0.40370508516722614, "grad_norm": 0.4366690516471863, "learning_rate": 2.9816817936922378e-06, "loss": 0.7251, "step": 9741 }, { "epoch": 0.4037465290728998, "grad_norm": 0.45985424518585205, "learning_rate": 2.9814745741638696e-06, "loss": 0.7292, "step": 9742 }, { "epoch": 0.4037879729785735, "grad_norm": 0.41890010237693787, "learning_rate": 2.981267354635501e-06, "loss": 0.7183, "step": 9743 }, { "epoch": 0.40382941688424717, "grad_norm": 0.4206309914588928, "learning_rate": 2.9810601351071328e-06, "loss": 0.717, "step": 9744 }, { "epoch": 0.40387086078992085, "grad_norm": 0.40354251861572266, "learning_rate": 2.9808529155787646e-06, "loss": 0.6577, "step": 9745 }, { "epoch": 0.4039123046955945, "grad_norm": 0.41606658697128296, "learning_rate": 2.980645696050396e-06, "loss": 0.6763, "step": 9746 }, { "epoch": 0.4039537486012682, "grad_norm": 0.40557873249053955, "learning_rate": 2.9804384765220278e-06, "loss": 0.6495, "step": 9747 }, { "epoch": 0.4039951925069419, "grad_norm": 0.4139299988746643, "learning_rate": 2.980231256993659e-06, "loss": 0.6874, "step": 9748 }, { "epoch": 0.4040366364126155, "grad_norm": 0.4618210792541504, "learning_rate": 2.980024037465291e-06, "loss": 0.728, "step": 9749 }, { "epoch": 0.4040780803182892, "grad_norm": 0.4006025493144989, "learning_rate": 2.9798168179369224e-06, "loss": 0.6624, "step": 9750 }, { "epoch": 0.40411952422396286, "grad_norm": 0.41140055656433105, "learning_rate": 2.979609598408554e-06, "loss": 0.6853, "step": 9751 }, { "epoch": 0.40416096812963653, "grad_norm": 0.3993538022041321, "learning_rate": 2.9794023788801856e-06, "loss": 0.7245, "step": 9752 }, { "epoch": 0.4042024120353102, "grad_norm": 0.4154256284236908, "learning_rate": 2.9791951593518174e-06, "loss": 0.7375, "step": 9753 }, { "epoch": 0.4042438559409839, "grad_norm": 
0.38683274388313293, "learning_rate": 2.9789879398234496e-06, "loss": 0.6671, "step": 9754 }, { "epoch": 0.40428529984665756, "grad_norm": 0.398532509803772, "learning_rate": 2.978780720295081e-06, "loss": 0.7321, "step": 9755 }, { "epoch": 0.40432674375233124, "grad_norm": 0.38536715507507324, "learning_rate": 2.978573500766713e-06, "loss": 0.6814, "step": 9756 }, { "epoch": 0.40436818765800486, "grad_norm": 0.41558557748794556, "learning_rate": 2.978366281238344e-06, "loss": 0.6821, "step": 9757 }, { "epoch": 0.40440963156367854, "grad_norm": 0.39867663383483887, "learning_rate": 2.978159061709976e-06, "loss": 0.7092, "step": 9758 }, { "epoch": 0.4044510754693522, "grad_norm": 0.41024357080459595, "learning_rate": 2.9779518421816074e-06, "loss": 0.686, "step": 9759 }, { "epoch": 0.4044925193750259, "grad_norm": 0.3874228894710541, "learning_rate": 2.977744622653239e-06, "loss": 0.6919, "step": 9760 }, { "epoch": 0.40453396328069957, "grad_norm": 0.3987756371498108, "learning_rate": 2.9775374031248706e-06, "loss": 0.647, "step": 9761 }, { "epoch": 0.40457540718637325, "grad_norm": 0.384509414434433, "learning_rate": 2.9773301835965024e-06, "loss": 0.6512, "step": 9762 }, { "epoch": 0.4046168510920469, "grad_norm": 0.4130660891532898, "learning_rate": 2.977122964068134e-06, "loss": 0.6956, "step": 9763 }, { "epoch": 0.4046582949977206, "grad_norm": 0.41469961404800415, "learning_rate": 2.9769157445397656e-06, "loss": 0.6439, "step": 9764 }, { "epoch": 0.4046997389033943, "grad_norm": 0.43319326639175415, "learning_rate": 2.9767085250113974e-06, "loss": 0.7203, "step": 9765 }, { "epoch": 0.4047411828090679, "grad_norm": 0.42562583088874817, "learning_rate": 2.9765013054830288e-06, "loss": 0.7291, "step": 9766 }, { "epoch": 0.4047826267147416, "grad_norm": 0.3917831778526306, "learning_rate": 2.9762940859546606e-06, "loss": 0.6954, "step": 9767 }, { "epoch": 0.40482407062041526, "grad_norm": 0.4059503376483917, "learning_rate": 2.976086866426292e-06, "loss": 0.6973, 
"step": 9768 }, { "epoch": 0.40486551452608893, "grad_norm": 0.415391206741333, "learning_rate": 2.9758796468979238e-06, "loss": 0.7131, "step": 9769 }, { "epoch": 0.4049069584317626, "grad_norm": 0.4100469648838043, "learning_rate": 2.975672427369555e-06, "loss": 0.6978, "step": 9770 }, { "epoch": 0.4049484023374363, "grad_norm": 0.4291653335094452, "learning_rate": 2.9754652078411874e-06, "loss": 0.689, "step": 9771 }, { "epoch": 0.40498984624310996, "grad_norm": 0.4807572066783905, "learning_rate": 2.975257988312819e-06, "loss": 0.6653, "step": 9772 }, { "epoch": 0.40503129014878364, "grad_norm": 0.3876575529575348, "learning_rate": 2.9750507687844506e-06, "loss": 0.6962, "step": 9773 }, { "epoch": 0.4050727340544573, "grad_norm": 0.42250585556030273, "learning_rate": 2.9748435492560824e-06, "loss": 0.6829, "step": 9774 }, { "epoch": 0.40511417796013094, "grad_norm": 0.3840040862560272, "learning_rate": 2.9746363297277138e-06, "loss": 0.6383, "step": 9775 }, { "epoch": 0.4051556218658046, "grad_norm": 0.4058098793029785, "learning_rate": 2.9744291101993456e-06, "loss": 0.667, "step": 9776 }, { "epoch": 0.4051970657714783, "grad_norm": 0.4196680188179016, "learning_rate": 2.974221890670977e-06, "loss": 0.6843, "step": 9777 }, { "epoch": 0.40523850967715197, "grad_norm": 0.41006314754486084, "learning_rate": 2.9740146711426088e-06, "loss": 0.6853, "step": 9778 }, { "epoch": 0.40527995358282565, "grad_norm": 0.42810681462287903, "learning_rate": 2.97380745161424e-06, "loss": 0.7095, "step": 9779 }, { "epoch": 0.4053213974884993, "grad_norm": 0.3791421055793762, "learning_rate": 2.973600232085872e-06, "loss": 0.6659, "step": 9780 }, { "epoch": 0.405362841394173, "grad_norm": 0.4133743941783905, "learning_rate": 2.973393012557504e-06, "loss": 0.688, "step": 9781 }, { "epoch": 0.4054042852998467, "grad_norm": 0.39762014150619507, "learning_rate": 2.973185793029135e-06, "loss": 0.6975, "step": 9782 }, { "epoch": 0.40544572920552036, "grad_norm": 0.432325154542923, 
"learning_rate": 2.972978573500767e-06, "loss": 0.6964, "step": 9783 }, { "epoch": 0.405487173111194, "grad_norm": 0.3798847794532776, "learning_rate": 2.9727713539723984e-06, "loss": 0.6218, "step": 9784 }, { "epoch": 0.40552861701686765, "grad_norm": 0.40270090103149414, "learning_rate": 2.97256413444403e-06, "loss": 0.6697, "step": 9785 }, { "epoch": 0.40557006092254133, "grad_norm": 0.39199671149253845, "learning_rate": 2.9723569149156616e-06, "loss": 0.7109, "step": 9786 }, { "epoch": 0.405611504828215, "grad_norm": 0.3734724819660187, "learning_rate": 2.972149695387294e-06, "loss": 0.6576, "step": 9787 }, { "epoch": 0.4056529487338887, "grad_norm": 0.422929584980011, "learning_rate": 2.9719424758589248e-06, "loss": 0.7195, "step": 9788 }, { "epoch": 0.40569439263956236, "grad_norm": 0.3927742838859558, "learning_rate": 2.971735256330557e-06, "loss": 0.6973, "step": 9789 }, { "epoch": 0.40573583654523604, "grad_norm": 0.41834065318107605, "learning_rate": 2.971528036802189e-06, "loss": 0.7046, "step": 9790 }, { "epoch": 0.4057772804509097, "grad_norm": 0.40593087673187256, "learning_rate": 2.97132081727382e-06, "loss": 0.7168, "step": 9791 }, { "epoch": 0.40581872435658334, "grad_norm": 0.4382356107234955, "learning_rate": 2.971113597745452e-06, "loss": 0.7053, "step": 9792 }, { "epoch": 0.405860168262257, "grad_norm": 0.43486127257347107, "learning_rate": 2.9709063782170834e-06, "loss": 0.6655, "step": 9793 }, { "epoch": 0.4059016121679307, "grad_norm": 0.4384447932243347, "learning_rate": 2.970699158688715e-06, "loss": 0.6996, "step": 9794 }, { "epoch": 0.40594305607360437, "grad_norm": 0.37911680340766907, "learning_rate": 2.9704919391603466e-06, "loss": 0.6279, "step": 9795 }, { "epoch": 0.40598449997927805, "grad_norm": 0.4113192558288574, "learning_rate": 2.9702847196319784e-06, "loss": 0.7108, "step": 9796 }, { "epoch": 0.4060259438849517, "grad_norm": 0.3932170271873474, "learning_rate": 2.9700775001036098e-06, "loss": 0.6826, "step": 9797 }, { 
"epoch": 0.4060673877906254, "grad_norm": 0.4573267996311188, "learning_rate": 2.9698702805752416e-06, "loss": 0.7081, "step": 9798 }, { "epoch": 0.4061088316962991, "grad_norm": 0.38210076093673706, "learning_rate": 2.9696630610468734e-06, "loss": 0.6199, "step": 9799 }, { "epoch": 0.40615027560197275, "grad_norm": 0.43597087264060974, "learning_rate": 2.9694558415185048e-06, "loss": 0.7279, "step": 9800 }, { "epoch": 0.4061917195076464, "grad_norm": 0.42689868807792664, "learning_rate": 2.9692486219901366e-06, "loss": 0.7275, "step": 9801 }, { "epoch": 0.40623316341332005, "grad_norm": 0.4760863482952118, "learning_rate": 2.969041402461768e-06, "loss": 0.6757, "step": 9802 }, { "epoch": 0.40627460731899373, "grad_norm": 0.3733714818954468, "learning_rate": 2.9688341829333998e-06, "loss": 0.6475, "step": 9803 }, { "epoch": 0.4063160512246674, "grad_norm": 0.39665886759757996, "learning_rate": 2.968626963405031e-06, "loss": 0.6772, "step": 9804 }, { "epoch": 0.4063574951303411, "grad_norm": 0.44022735953330994, "learning_rate": 2.9684197438766634e-06, "loss": 0.6558, "step": 9805 }, { "epoch": 0.40639893903601476, "grad_norm": 0.41237711906433105, "learning_rate": 2.968212524348295e-06, "loss": 0.6951, "step": 9806 }, { "epoch": 0.40644038294168844, "grad_norm": 0.3845568299293518, "learning_rate": 2.9680053048199266e-06, "loss": 0.644, "step": 9807 }, { "epoch": 0.4064818268473621, "grad_norm": 0.3948551118373871, "learning_rate": 2.9677980852915584e-06, "loss": 0.6838, "step": 9808 }, { "epoch": 0.4065232707530358, "grad_norm": 0.4554884433746338, "learning_rate": 2.96759086576319e-06, "loss": 0.7437, "step": 9809 }, { "epoch": 0.4065647146587094, "grad_norm": 0.4134681522846222, "learning_rate": 2.9673836462348216e-06, "loss": 0.6843, "step": 9810 }, { "epoch": 0.4066061585643831, "grad_norm": 0.37341564893722534, "learning_rate": 2.967176426706453e-06, "loss": 0.6799, "step": 9811 }, { "epoch": 0.40664760247005677, "grad_norm": 0.40386343002319336, 
"learning_rate": 2.966969207178085e-06, "loss": 0.6948, "step": 9812 }, { "epoch": 0.40668904637573045, "grad_norm": 0.3902512192726135, "learning_rate": 2.966761987649716e-06, "loss": 0.6678, "step": 9813 }, { "epoch": 0.4067304902814041, "grad_norm": 0.4293665885925293, "learning_rate": 2.966554768121348e-06, "loss": 0.7361, "step": 9814 }, { "epoch": 0.4067719341870778, "grad_norm": 0.44609761238098145, "learning_rate": 2.96634754859298e-06, "loss": 0.7173, "step": 9815 }, { "epoch": 0.4068133780927515, "grad_norm": 0.43193814158439636, "learning_rate": 2.966140329064611e-06, "loss": 0.7697, "step": 9816 }, { "epoch": 0.40685482199842515, "grad_norm": 0.40452417731285095, "learning_rate": 2.965933109536243e-06, "loss": 0.6707, "step": 9817 }, { "epoch": 0.4068962659040988, "grad_norm": 0.41765305399894714, "learning_rate": 2.9657258900078744e-06, "loss": 0.7388, "step": 9818 }, { "epoch": 0.40693770980977245, "grad_norm": 0.4309346675872803, "learning_rate": 2.965518670479506e-06, "loss": 0.6675, "step": 9819 }, { "epoch": 0.40697915371544613, "grad_norm": 0.3872416019439697, "learning_rate": 2.9653114509511376e-06, "loss": 0.7013, "step": 9820 }, { "epoch": 0.4070205976211198, "grad_norm": 0.3716718256473541, "learning_rate": 2.96510423142277e-06, "loss": 0.6863, "step": 9821 }, { "epoch": 0.4070620415267935, "grad_norm": 0.41562730073928833, "learning_rate": 2.9648970118944008e-06, "loss": 0.7166, "step": 9822 }, { "epoch": 0.40710348543246716, "grad_norm": 0.4011440575122833, "learning_rate": 2.964689792366033e-06, "loss": 0.7153, "step": 9823 }, { "epoch": 0.40714492933814084, "grad_norm": 0.4375945031642914, "learning_rate": 2.964482572837665e-06, "loss": 0.7494, "step": 9824 }, { "epoch": 0.4071863732438145, "grad_norm": 0.4255179166793823, "learning_rate": 2.964275353309296e-06, "loss": 0.708, "step": 9825 }, { "epoch": 0.4072278171494882, "grad_norm": 0.40039414167404175, "learning_rate": 2.964068133780928e-06, "loss": 0.6779, "step": 9826 }, { "epoch": 
0.4072692610551618, "grad_norm": 0.4201604723930359, "learning_rate": 2.9638609142525594e-06, "loss": 0.6969, "step": 9827 }, { "epoch": 0.4073107049608355, "grad_norm": 0.4251379072666168, "learning_rate": 2.963653694724191e-06, "loss": 0.7157, "step": 9828 }, { "epoch": 0.40735214886650917, "grad_norm": 0.4018380641937256, "learning_rate": 2.9634464751958226e-06, "loss": 0.6987, "step": 9829 }, { "epoch": 0.40739359277218284, "grad_norm": 0.41972389817237854, "learning_rate": 2.9632392556674544e-06, "loss": 0.7166, "step": 9830 }, { "epoch": 0.4074350366778565, "grad_norm": 0.46676549315452576, "learning_rate": 2.9630320361390858e-06, "loss": 0.6982, "step": 9831 }, { "epoch": 0.4074764805835302, "grad_norm": 0.4330548942089081, "learning_rate": 2.9628248166107176e-06, "loss": 0.7063, "step": 9832 }, { "epoch": 0.4075179244892039, "grad_norm": 0.38913047313690186, "learning_rate": 2.9626175970823494e-06, "loss": 0.6644, "step": 9833 }, { "epoch": 0.40755936839487755, "grad_norm": 0.4217050075531006, "learning_rate": 2.9624103775539808e-06, "loss": 0.6458, "step": 9834 }, { "epoch": 0.40760081230055123, "grad_norm": 0.421800434589386, "learning_rate": 2.9622031580256126e-06, "loss": 0.6853, "step": 9835 }, { "epoch": 0.40764225620622485, "grad_norm": 0.38288357853889465, "learning_rate": 2.961995938497244e-06, "loss": 0.6719, "step": 9836 }, { "epoch": 0.40768370011189853, "grad_norm": 0.4018184244632721, "learning_rate": 2.9617887189688758e-06, "loss": 0.7068, "step": 9837 }, { "epoch": 0.4077251440175722, "grad_norm": 0.4016752243041992, "learning_rate": 2.961581499440507e-06, "loss": 0.7034, "step": 9838 }, { "epoch": 0.4077665879232459, "grad_norm": 0.4374559819698334, "learning_rate": 2.9613742799121394e-06, "loss": 0.7039, "step": 9839 }, { "epoch": 0.40780803182891956, "grad_norm": 0.3954083025455475, "learning_rate": 2.9611670603837704e-06, "loss": 0.6494, "step": 9840 }, { "epoch": 0.40784947573459324, "grad_norm": 0.3769851326942444, "learning_rate": 
2.9609598408554026e-06, "loss": 0.6768, "step": 9841 }, { "epoch": 0.4078909196402669, "grad_norm": 0.4377748370170593, "learning_rate": 2.9607526213270344e-06, "loss": 0.6736, "step": 9842 }, { "epoch": 0.4079323635459406, "grad_norm": 0.42467209696769714, "learning_rate": 2.960545401798666e-06, "loss": 0.6893, "step": 9843 }, { "epoch": 0.40797380745161427, "grad_norm": 0.4180726408958435, "learning_rate": 2.9603381822702976e-06, "loss": 0.6813, "step": 9844 }, { "epoch": 0.4080152513572879, "grad_norm": 0.3715086281299591, "learning_rate": 2.960130962741929e-06, "loss": 0.6371, "step": 9845 }, { "epoch": 0.40805669526296157, "grad_norm": 0.465617835521698, "learning_rate": 2.959923743213561e-06, "loss": 0.7207, "step": 9846 }, { "epoch": 0.40809813916863524, "grad_norm": 0.3955141007900238, "learning_rate": 2.959716523685192e-06, "loss": 0.6697, "step": 9847 }, { "epoch": 0.4081395830743089, "grad_norm": 0.40128272771835327, "learning_rate": 2.959509304156824e-06, "loss": 0.7043, "step": 9848 }, { "epoch": 0.4081810269799826, "grad_norm": 0.40640193223953247, "learning_rate": 2.9593020846284554e-06, "loss": 0.6284, "step": 9849 }, { "epoch": 0.4082224708856563, "grad_norm": 0.4107244312763214, "learning_rate": 2.959094865100087e-06, "loss": 0.6943, "step": 9850 }, { "epoch": 0.40826391479132995, "grad_norm": 0.4154118299484253, "learning_rate": 2.958887645571719e-06, "loss": 0.661, "step": 9851 }, { "epoch": 0.40830535869700363, "grad_norm": 0.39897677302360535, "learning_rate": 2.9586804260433504e-06, "loss": 0.6726, "step": 9852 }, { "epoch": 0.40834680260267725, "grad_norm": 0.40769824385643005, "learning_rate": 2.958473206514982e-06, "loss": 0.6705, "step": 9853 }, { "epoch": 0.4083882465083509, "grad_norm": 0.4450128972530365, "learning_rate": 2.9582659869866136e-06, "loss": 0.7395, "step": 9854 }, { "epoch": 0.4084296904140246, "grad_norm": 0.41259756684303284, "learning_rate": 2.958058767458246e-06, "loss": 0.7351, "step": 9855 }, { "epoch": 
0.4084711343196983, "grad_norm": 0.4322452247142792, "learning_rate": 2.9578515479298768e-06, "loss": 0.6863, "step": 9856 }, { "epoch": 0.40851257822537196, "grad_norm": 0.4103268086910248, "learning_rate": 2.957644328401509e-06, "loss": 0.6294, "step": 9857 }, { "epoch": 0.40855402213104564, "grad_norm": 0.4311973452568054, "learning_rate": 2.95743710887314e-06, "loss": 0.6741, "step": 9858 }, { "epoch": 0.4085954660367193, "grad_norm": 0.3895007073879242, "learning_rate": 2.957229889344772e-06, "loss": 0.708, "step": 9859 }, { "epoch": 0.408636909942393, "grad_norm": 0.41853252053260803, "learning_rate": 2.957022669816404e-06, "loss": 0.7349, "step": 9860 }, { "epoch": 0.40867835384806667, "grad_norm": 0.41222086548805237, "learning_rate": 2.9568154502880354e-06, "loss": 0.6895, "step": 9861 }, { "epoch": 0.4087197977537403, "grad_norm": 0.4088389277458191, "learning_rate": 2.956608230759667e-06, "loss": 0.6392, "step": 9862 }, { "epoch": 0.40876124165941397, "grad_norm": 0.44018176198005676, "learning_rate": 2.9564010112312986e-06, "loss": 0.7249, "step": 9863 }, { "epoch": 0.40880268556508764, "grad_norm": 0.46858349442481995, "learning_rate": 2.9561937917029304e-06, "loss": 0.7843, "step": 9864 }, { "epoch": 0.4088441294707613, "grad_norm": 0.4034697115421295, "learning_rate": 2.9559865721745618e-06, "loss": 0.6829, "step": 9865 }, { "epoch": 0.408885573376435, "grad_norm": 0.3867449462413788, "learning_rate": 2.9557793526461936e-06, "loss": 0.6399, "step": 9866 }, { "epoch": 0.4089270172821087, "grad_norm": 0.4132276177406311, "learning_rate": 2.9555721331178254e-06, "loss": 0.6216, "step": 9867 }, { "epoch": 0.40896846118778235, "grad_norm": 0.43972450494766235, "learning_rate": 2.955364913589457e-06, "loss": 0.6926, "step": 9868 }, { "epoch": 0.409009905093456, "grad_norm": 0.3830566108226776, "learning_rate": 2.9551576940610886e-06, "loss": 0.6716, "step": 9869 }, { "epoch": 0.4090513489991297, "grad_norm": 0.4093276560306549, "learning_rate": 
2.95495047453272e-06, "loss": 0.6647, "step": 9870 }, { "epoch": 0.4090927929048033, "grad_norm": 0.40445640683174133, "learning_rate": 2.954743255004352e-06, "loss": 0.6628, "step": 9871 }, { "epoch": 0.409134236810477, "grad_norm": 0.4183807969093323, "learning_rate": 2.954536035475983e-06, "loss": 0.6802, "step": 9872 }, { "epoch": 0.4091756807161507, "grad_norm": 0.40704983472824097, "learning_rate": 2.9543288159476154e-06, "loss": 0.7356, "step": 9873 }, { "epoch": 0.40921712462182436, "grad_norm": 0.3970617949962616, "learning_rate": 2.9541215964192464e-06, "loss": 0.7427, "step": 9874 }, { "epoch": 0.40925856852749803, "grad_norm": 0.42581823468208313, "learning_rate": 2.9539143768908786e-06, "loss": 0.7668, "step": 9875 }, { "epoch": 0.4093000124331717, "grad_norm": 0.3967044949531555, "learning_rate": 2.9537071573625104e-06, "loss": 0.678, "step": 9876 }, { "epoch": 0.4093414563388454, "grad_norm": 0.3935525417327881, "learning_rate": 2.953499937834142e-06, "loss": 0.679, "step": 9877 }, { "epoch": 0.40938290024451907, "grad_norm": 0.383810818195343, "learning_rate": 2.9532927183057736e-06, "loss": 0.6588, "step": 9878 }, { "epoch": 0.4094243441501927, "grad_norm": 0.40502607822418213, "learning_rate": 2.953085498777405e-06, "loss": 0.7495, "step": 9879 }, { "epoch": 0.40946578805586636, "grad_norm": 0.43088823556900024, "learning_rate": 2.952878279249037e-06, "loss": 0.6658, "step": 9880 }, { "epoch": 0.40950723196154004, "grad_norm": 0.4080725610256195, "learning_rate": 2.952671059720668e-06, "loss": 0.7075, "step": 9881 }, { "epoch": 0.4095486758672137, "grad_norm": 0.36060792207717896, "learning_rate": 2.9524638401923e-06, "loss": 0.6119, "step": 9882 }, { "epoch": 0.4095901197728874, "grad_norm": 0.40297508239746094, "learning_rate": 2.9522566206639314e-06, "loss": 0.6689, "step": 9883 }, { "epoch": 0.4096315636785611, "grad_norm": 0.4059356153011322, "learning_rate": 2.952049401135563e-06, "loss": 0.6775, "step": 9884 }, { "epoch": 
0.40967300758423475, "grad_norm": 0.43801403045654297, "learning_rate": 2.951842181607195e-06, "loss": 0.6689, "step": 9885 }, { "epoch": 0.4097144514899084, "grad_norm": 0.40025022625923157, "learning_rate": 2.9516349620788264e-06, "loss": 0.6647, "step": 9886 }, { "epoch": 0.4097558953955821, "grad_norm": 0.40739428997039795, "learning_rate": 2.951427742550458e-06, "loss": 0.7041, "step": 9887 }, { "epoch": 0.4097973393012557, "grad_norm": 0.4218859076499939, "learning_rate": 2.9512205230220896e-06, "loss": 0.7006, "step": 9888 }, { "epoch": 0.4098387832069294, "grad_norm": 0.4065995514392853, "learning_rate": 2.951013303493722e-06, "loss": 0.6674, "step": 9889 }, { "epoch": 0.4098802271126031, "grad_norm": 0.4310034215450287, "learning_rate": 2.9508060839653528e-06, "loss": 0.6708, "step": 9890 }, { "epoch": 0.40992167101827676, "grad_norm": 0.4128533601760864, "learning_rate": 2.950598864436985e-06, "loss": 0.6901, "step": 9891 }, { "epoch": 0.40996311492395043, "grad_norm": 0.4101572036743164, "learning_rate": 2.950391644908616e-06, "loss": 0.6492, "step": 9892 }, { "epoch": 0.4100045588296241, "grad_norm": 0.3957703709602356, "learning_rate": 2.950184425380248e-06, "loss": 0.6899, "step": 9893 }, { "epoch": 0.4100460027352978, "grad_norm": 0.3723556697368622, "learning_rate": 2.94997720585188e-06, "loss": 0.6715, "step": 9894 }, { "epoch": 0.41008744664097146, "grad_norm": 0.3845115303993225, "learning_rate": 2.9497699863235114e-06, "loss": 0.6562, "step": 9895 }, { "epoch": 0.41012889054664514, "grad_norm": 0.4314626455307007, "learning_rate": 2.949562766795143e-06, "loss": 0.7449, "step": 9896 }, { "epoch": 0.41017033445231876, "grad_norm": 0.43843644857406616, "learning_rate": 2.9493555472667746e-06, "loss": 0.6987, "step": 9897 }, { "epoch": 0.41021177835799244, "grad_norm": 0.4094424247741699, "learning_rate": 2.9491483277384064e-06, "loss": 0.6987, "step": 9898 }, { "epoch": 0.4102532222636661, "grad_norm": 0.4344218373298645, "learning_rate": 
2.948941108210038e-06, "loss": 0.6379, "step": 9899 }, { "epoch": 0.4102946661693398, "grad_norm": 0.41544845700263977, "learning_rate": 2.9487338886816696e-06, "loss": 0.6914, "step": 9900 }, { "epoch": 0.41033611007501347, "grad_norm": 0.3984636068344116, "learning_rate": 2.948526669153301e-06, "loss": 0.6799, "step": 9901 }, { "epoch": 0.41037755398068715, "grad_norm": 0.38979098200798035, "learning_rate": 2.948319449624933e-06, "loss": 0.7173, "step": 9902 }, { "epoch": 0.4104189978863608, "grad_norm": 0.41933101415634155, "learning_rate": 2.9481122300965646e-06, "loss": 0.6862, "step": 9903 }, { "epoch": 0.4104604417920345, "grad_norm": 0.41371187567710876, "learning_rate": 2.947905010568196e-06, "loss": 0.7002, "step": 9904 }, { "epoch": 0.4105018856977081, "grad_norm": 0.43397021293640137, "learning_rate": 2.947697791039828e-06, "loss": 0.637, "step": 9905 }, { "epoch": 0.4105433296033818, "grad_norm": 0.39864104986190796, "learning_rate": 2.947490571511459e-06, "loss": 0.6938, "step": 9906 }, { "epoch": 0.4105847735090555, "grad_norm": 0.40557411313056946, "learning_rate": 2.9472833519830914e-06, "loss": 0.6316, "step": 9907 }, { "epoch": 0.41062621741472916, "grad_norm": 0.4153580069541931, "learning_rate": 2.9470761324547224e-06, "loss": 0.7166, "step": 9908 }, { "epoch": 0.41066766132040283, "grad_norm": 0.40662312507629395, "learning_rate": 2.9468689129263546e-06, "loss": 0.6587, "step": 9909 }, { "epoch": 0.4107091052260765, "grad_norm": 0.38802796602249146, "learning_rate": 2.946661693397986e-06, "loss": 0.6758, "step": 9910 }, { "epoch": 0.4107505491317502, "grad_norm": 0.4412783980369568, "learning_rate": 2.946454473869618e-06, "loss": 0.7275, "step": 9911 }, { "epoch": 0.41079199303742386, "grad_norm": 0.371125191450119, "learning_rate": 2.9462472543412496e-06, "loss": 0.6433, "step": 9912 }, { "epoch": 0.41083343694309754, "grad_norm": 0.3777885437011719, "learning_rate": 2.946040034812881e-06, "loss": 0.645, "step": 9913 }, { "epoch": 
0.41087488084877116, "grad_norm": 0.393840491771698, "learning_rate": 2.945832815284513e-06, "loss": 0.6832, "step": 9914 }, { "epoch": 0.41091632475444484, "grad_norm": 0.39863622188568115, "learning_rate": 2.945625595756144e-06, "loss": 0.667, "step": 9915 }, { "epoch": 0.4109577686601185, "grad_norm": 0.39298927783966064, "learning_rate": 2.945418376227776e-06, "loss": 0.6853, "step": 9916 }, { "epoch": 0.4109992125657922, "grad_norm": 0.4325486421585083, "learning_rate": 2.9452111566994074e-06, "loss": 0.6512, "step": 9917 }, { "epoch": 0.41104065647146587, "grad_norm": 0.4007813632488251, "learning_rate": 2.945003937171039e-06, "loss": 0.6919, "step": 9918 }, { "epoch": 0.41108210037713955, "grad_norm": 0.43355464935302734, "learning_rate": 2.9447967176426706e-06, "loss": 0.7268, "step": 9919 }, { "epoch": 0.4111235442828132, "grad_norm": 0.3767062723636627, "learning_rate": 2.9445894981143024e-06, "loss": 0.6606, "step": 9920 }, { "epoch": 0.4111649881884869, "grad_norm": 0.4500974714756012, "learning_rate": 2.944382278585934e-06, "loss": 0.6826, "step": 9921 }, { "epoch": 0.4112064320941606, "grad_norm": 0.4071853458881378, "learning_rate": 2.9441750590575656e-06, "loss": 0.6824, "step": 9922 }, { "epoch": 0.4112478759998342, "grad_norm": 0.4739421010017395, "learning_rate": 2.943967839529198e-06, "loss": 0.7356, "step": 9923 }, { "epoch": 0.4112893199055079, "grad_norm": 0.4158670902252197, "learning_rate": 2.9437606200008288e-06, "loss": 0.6599, "step": 9924 }, { "epoch": 0.41133076381118155, "grad_norm": 0.40996891260147095, "learning_rate": 2.943553400472461e-06, "loss": 0.7335, "step": 9925 }, { "epoch": 0.41137220771685523, "grad_norm": 0.4265899956226349, "learning_rate": 2.943346180944092e-06, "loss": 0.751, "step": 9926 }, { "epoch": 0.4114136516225289, "grad_norm": 0.3901168704032898, "learning_rate": 2.9431389614157242e-06, "loss": 0.6665, "step": 9927 }, { "epoch": 0.4114550955282026, "grad_norm": 0.4476945400238037, "learning_rate": 
2.942931741887356e-06, "loss": 0.7175, "step": 9928 }, { "epoch": 0.41149653943387626, "grad_norm": 0.4273155927658081, "learning_rate": 2.9427245223589874e-06, "loss": 0.7363, "step": 9929 }, { "epoch": 0.41153798333954994, "grad_norm": 0.4015730619430542, "learning_rate": 2.9425173028306192e-06, "loss": 0.6642, "step": 9930 }, { "epoch": 0.4115794272452236, "grad_norm": 0.4184058606624603, "learning_rate": 2.9423100833022506e-06, "loss": 0.751, "step": 9931 }, { "epoch": 0.41162087115089724, "grad_norm": 0.4010770320892334, "learning_rate": 2.9421028637738824e-06, "loss": 0.7415, "step": 9932 }, { "epoch": 0.4116623150565709, "grad_norm": 0.4162043333053589, "learning_rate": 2.941895644245514e-06, "loss": 0.6912, "step": 9933 }, { "epoch": 0.4117037589622446, "grad_norm": 0.4069697856903076, "learning_rate": 2.9416884247171456e-06, "loss": 0.7273, "step": 9934 }, { "epoch": 0.41174520286791827, "grad_norm": 0.4305668771266937, "learning_rate": 2.941481205188777e-06, "loss": 0.6912, "step": 9935 }, { "epoch": 0.41178664677359195, "grad_norm": 0.4122347831726074, "learning_rate": 2.941273985660409e-06, "loss": 0.6635, "step": 9936 }, { "epoch": 0.4118280906792656, "grad_norm": 0.41147279739379883, "learning_rate": 2.9410667661320406e-06, "loss": 0.7229, "step": 9937 }, { "epoch": 0.4118695345849393, "grad_norm": 0.4109627902507782, "learning_rate": 2.940859546603672e-06, "loss": 0.7708, "step": 9938 }, { "epoch": 0.411910978490613, "grad_norm": 0.42420098185539246, "learning_rate": 2.940652327075304e-06, "loss": 0.7279, "step": 9939 }, { "epoch": 0.4119524223962866, "grad_norm": 0.4074649214744568, "learning_rate": 2.940445107546935e-06, "loss": 0.7277, "step": 9940 }, { "epoch": 0.4119938663019603, "grad_norm": 0.4052943289279938, "learning_rate": 2.9402378880185674e-06, "loss": 0.6741, "step": 9941 }, { "epoch": 0.41203531020763395, "grad_norm": 0.4130780100822449, "learning_rate": 2.9400306684901984e-06, "loss": 0.6853, "step": 9942 }, { "epoch": 
0.41207675411330763, "grad_norm": 0.40979427099227905, "learning_rate": 2.9398234489618306e-06, "loss": 0.6733, "step": 9943 }, { "epoch": 0.4121181980189813, "grad_norm": 0.41698014736175537, "learning_rate": 2.939616229433462e-06, "loss": 0.698, "step": 9944 }, { "epoch": 0.412159641924655, "grad_norm": 0.3715636134147644, "learning_rate": 2.939409009905094e-06, "loss": 0.6418, "step": 9945 }, { "epoch": 0.41220108583032866, "grad_norm": 0.4079609811306, "learning_rate": 2.9392017903767256e-06, "loss": 0.6791, "step": 9946 }, { "epoch": 0.41224252973600234, "grad_norm": 0.40526172518730164, "learning_rate": 2.938994570848357e-06, "loss": 0.6763, "step": 9947 }, { "epoch": 0.412283973641676, "grad_norm": 0.4079827070236206, "learning_rate": 2.938787351319989e-06, "loss": 0.7605, "step": 9948 }, { "epoch": 0.41232541754734964, "grad_norm": 0.43996623158454895, "learning_rate": 2.93858013179162e-06, "loss": 0.744, "step": 9949 }, { "epoch": 0.4123668614530233, "grad_norm": 0.4004502594470978, "learning_rate": 2.938372912263252e-06, "loss": 0.728, "step": 9950 }, { "epoch": 0.412408305358697, "grad_norm": 0.4523896276950836, "learning_rate": 2.9381656927348834e-06, "loss": 0.7148, "step": 9951 }, { "epoch": 0.41244974926437067, "grad_norm": 0.44025474786758423, "learning_rate": 2.937958473206515e-06, "loss": 0.7358, "step": 9952 }, { "epoch": 0.41249119317004435, "grad_norm": 0.39966803789138794, "learning_rate": 2.9377512536781466e-06, "loss": 0.6677, "step": 9953 }, { "epoch": 0.412532637075718, "grad_norm": 0.39091306924819946, "learning_rate": 2.9375440341497784e-06, "loss": 0.6733, "step": 9954 }, { "epoch": 0.4125740809813917, "grad_norm": 0.41826990246772766, "learning_rate": 2.93733681462141e-06, "loss": 0.6892, "step": 9955 }, { "epoch": 0.4126155248870654, "grad_norm": 0.4154479205608368, "learning_rate": 2.9371295950930416e-06, "loss": 0.7212, "step": 9956 }, { "epoch": 0.41265696879273905, "grad_norm": 0.46661099791526794, "learning_rate": 
2.936922375564674e-06, "loss": 0.793, "step": 9957 }, { "epoch": 0.4126984126984127, "grad_norm": 0.42165571451187134, "learning_rate": 2.936715156036305e-06, "loss": 0.703, "step": 9958 }, { "epoch": 0.41273985660408635, "grad_norm": 0.3981415331363678, "learning_rate": 2.936507936507937e-06, "loss": 0.6792, "step": 9959 }, { "epoch": 0.41278130050976003, "grad_norm": 0.43455350399017334, "learning_rate": 2.9363007169795684e-06, "loss": 0.733, "step": 9960 }, { "epoch": 0.4128227444154337, "grad_norm": 0.40184104442596436, "learning_rate": 2.9360934974512002e-06, "loss": 0.7087, "step": 9961 }, { "epoch": 0.4128641883211074, "grad_norm": 0.39984843134880066, "learning_rate": 2.9358862779228316e-06, "loss": 0.7166, "step": 9962 }, { "epoch": 0.41290563222678106, "grad_norm": 0.41432371735572815, "learning_rate": 2.9356790583944634e-06, "loss": 0.6831, "step": 9963 }, { "epoch": 0.41294707613245474, "grad_norm": 0.43040016293525696, "learning_rate": 2.9354718388660952e-06, "loss": 0.6819, "step": 9964 }, { "epoch": 0.4129885200381284, "grad_norm": 0.4298746585845947, "learning_rate": 2.9352646193377266e-06, "loss": 0.7622, "step": 9965 }, { "epoch": 0.41302996394380204, "grad_norm": 0.3972429633140564, "learning_rate": 2.9350573998093584e-06, "loss": 0.6382, "step": 9966 }, { "epoch": 0.4130714078494757, "grad_norm": 0.4548075497150421, "learning_rate": 2.93485018028099e-06, "loss": 0.6855, "step": 9967 }, { "epoch": 0.4131128517551494, "grad_norm": 0.3880721926689148, "learning_rate": 2.9346429607526216e-06, "loss": 0.6516, "step": 9968 }, { "epoch": 0.41315429566082307, "grad_norm": 0.41648736596107483, "learning_rate": 2.934435741224253e-06, "loss": 0.7219, "step": 9969 }, { "epoch": 0.41319573956649674, "grad_norm": 0.4189252257347107, "learning_rate": 2.934228521695885e-06, "loss": 0.6552, "step": 9970 }, { "epoch": 0.4132371834721704, "grad_norm": 0.3922632932662964, "learning_rate": 2.934021302167516e-06, "loss": 0.6396, "step": 9971 }, { "epoch": 
0.4132786273778441, "grad_norm": 0.4077017903327942, "learning_rate": 2.933814082639148e-06, "loss": 0.6536, "step": 9972 }, { "epoch": 0.4133200712835178, "grad_norm": 0.413200318813324, "learning_rate": 2.9336068631107802e-06, "loss": 0.7556, "step": 9973 }, { "epoch": 0.41336151518919145, "grad_norm": 0.4114530682563782, "learning_rate": 2.933399643582411e-06, "loss": 0.7104, "step": 9974 }, { "epoch": 0.4134029590948651, "grad_norm": 0.4124280512332916, "learning_rate": 2.9331924240540434e-06, "loss": 0.6719, "step": 9975 }, { "epoch": 0.41344440300053875, "grad_norm": 0.3713845908641815, "learning_rate": 2.9329852045256744e-06, "loss": 0.7056, "step": 9976 }, { "epoch": 0.41348584690621243, "grad_norm": 0.39752280712127686, "learning_rate": 2.9327779849973066e-06, "loss": 0.7122, "step": 9977 }, { "epoch": 0.4135272908118861, "grad_norm": 0.39454489946365356, "learning_rate": 2.932570765468938e-06, "loss": 0.7393, "step": 9978 }, { "epoch": 0.4135687347175598, "grad_norm": 0.4232577383518219, "learning_rate": 2.93236354594057e-06, "loss": 0.6969, "step": 9979 }, { "epoch": 0.41361017862323346, "grad_norm": 0.41344892978668213, "learning_rate": 2.932156326412201e-06, "loss": 0.6617, "step": 9980 }, { "epoch": 0.41365162252890714, "grad_norm": 0.3872958719730377, "learning_rate": 2.931949106883833e-06, "loss": 0.6481, "step": 9981 }, { "epoch": 0.4136930664345808, "grad_norm": 0.44460973143577576, "learning_rate": 2.931741887355465e-06, "loss": 0.7537, "step": 9982 }, { "epoch": 0.4137345103402545, "grad_norm": 0.42787396907806396, "learning_rate": 2.931534667827096e-06, "loss": 0.6687, "step": 9983 }, { "epoch": 0.4137759542459281, "grad_norm": 0.42375168204307556, "learning_rate": 2.931327448298728e-06, "loss": 0.679, "step": 9984 }, { "epoch": 0.4138173981516018, "grad_norm": 0.4151184558868408, "learning_rate": 2.9311202287703594e-06, "loss": 0.6755, "step": 9985 }, { "epoch": 0.41385884205727547, "grad_norm": 0.44001683592796326, "learning_rate": 
2.9309130092419912e-06, "loss": 0.6985, "step": 9986 }, { "epoch": 0.41390028596294914, "grad_norm": 0.42662665247917175, "learning_rate": 2.9307057897136226e-06, "loss": 0.6543, "step": 9987 }, { "epoch": 0.4139417298686228, "grad_norm": 0.42190679907798767, "learning_rate": 2.9304985701852544e-06, "loss": 0.7054, "step": 9988 }, { "epoch": 0.4139831737742965, "grad_norm": 0.3966737985610962, "learning_rate": 2.9302913506568862e-06, "loss": 0.7322, "step": 9989 }, { "epoch": 0.4140246176799702, "grad_norm": 0.4179009199142456, "learning_rate": 2.9300841311285176e-06, "loss": 0.6818, "step": 9990 }, { "epoch": 0.41406606158564385, "grad_norm": 0.43420839309692383, "learning_rate": 2.92987691160015e-06, "loss": 0.6742, "step": 9991 }, { "epoch": 0.41410750549131753, "grad_norm": 0.4090658724308014, "learning_rate": 2.929669692071781e-06, "loss": 0.7119, "step": 9992 }, { "epoch": 0.41414894939699115, "grad_norm": 0.4331103563308716, "learning_rate": 2.929462472543413e-06, "loss": 0.7126, "step": 9993 }, { "epoch": 0.4141903933026648, "grad_norm": 0.3838220536708832, "learning_rate": 2.9292552530150444e-06, "loss": 0.6747, "step": 9994 }, { "epoch": 0.4142318372083385, "grad_norm": 0.39047226309776306, "learning_rate": 2.9290480334866762e-06, "loss": 0.6993, "step": 9995 }, { "epoch": 0.4142732811140122, "grad_norm": 0.42338618636131287, "learning_rate": 2.9288408139583076e-06, "loss": 0.6611, "step": 9996 }, { "epoch": 0.41431472501968586, "grad_norm": 0.4173765778541565, "learning_rate": 2.9286335944299394e-06, "loss": 0.7207, "step": 9997 }, { "epoch": 0.41435616892535954, "grad_norm": 0.4694630801677704, "learning_rate": 2.9284263749015712e-06, "loss": 0.7632, "step": 9998 }, { "epoch": 0.4143976128310332, "grad_norm": 0.3794528841972351, "learning_rate": 2.9282191553732026e-06, "loss": 0.6506, "step": 9999 }, { "epoch": 0.4144390567367069, "grad_norm": 0.4125378727912903, "learning_rate": 2.9280119358448344e-06, "loss": 0.7036, "step": 10000 }, { "epoch": 
0.4144805006423805, "grad_norm": 0.479382187128067, "learning_rate": 2.927804716316466e-06, "loss": 0.7251, "step": 10001 }, { "epoch": 0.4145219445480542, "grad_norm": 0.43535634875297546, "learning_rate": 2.9275974967880976e-06, "loss": 0.7097, "step": 10002 }, { "epoch": 0.41456338845372787, "grad_norm": 0.41060003638267517, "learning_rate": 2.927390277259729e-06, "loss": 0.6906, "step": 10003 }, { "epoch": 0.41460483235940154, "grad_norm": 0.4216634929180145, "learning_rate": 2.927183057731361e-06, "loss": 0.6709, "step": 10004 }, { "epoch": 0.4146462762650752, "grad_norm": 0.44468292593955994, "learning_rate": 2.926975838202992e-06, "loss": 0.6991, "step": 10005 }, { "epoch": 0.4146877201707489, "grad_norm": 0.4194738268852234, "learning_rate": 2.926768618674624e-06, "loss": 0.7322, "step": 10006 }, { "epoch": 0.4147291640764226, "grad_norm": 0.4366951882839203, "learning_rate": 2.9265613991462562e-06, "loss": 0.7334, "step": 10007 }, { "epoch": 0.41477060798209625, "grad_norm": 0.42715564370155334, "learning_rate": 2.926354179617887e-06, "loss": 0.6815, "step": 10008 }, { "epoch": 0.41481205188776993, "grad_norm": 0.3871564269065857, "learning_rate": 2.9261469600895194e-06, "loss": 0.6904, "step": 10009 }, { "epoch": 0.41485349579344355, "grad_norm": 0.4171812832355499, "learning_rate": 2.9259397405611504e-06, "loss": 0.7393, "step": 10010 }, { "epoch": 0.4148949396991172, "grad_norm": 0.4639853537082672, "learning_rate": 2.9257325210327826e-06, "loss": 0.752, "step": 10011 }, { "epoch": 0.4149363836047909, "grad_norm": 0.438038170337677, "learning_rate": 2.925525301504414e-06, "loss": 0.7021, "step": 10012 }, { "epoch": 0.4149778275104646, "grad_norm": 0.3876878321170807, "learning_rate": 2.925318081976046e-06, "loss": 0.673, "step": 10013 }, { "epoch": 0.41501927141613826, "grad_norm": 0.39537274837493896, "learning_rate": 2.925110862447677e-06, "loss": 0.6532, "step": 10014 }, { "epoch": 0.41506071532181193, "grad_norm": 0.3916131556034088, 
"learning_rate": 2.924903642919309e-06, "loss": 0.6873, "step": 10015 }, { "epoch": 0.4151021592274856, "grad_norm": 0.5355849862098694, "learning_rate": 2.924696423390941e-06, "loss": 0.679, "step": 10016 }, { "epoch": 0.4151436031331593, "grad_norm": 0.44493067264556885, "learning_rate": 2.9244892038625722e-06, "loss": 0.7637, "step": 10017 }, { "epoch": 0.41518504703883297, "grad_norm": 0.43440911173820496, "learning_rate": 2.924281984334204e-06, "loss": 0.7151, "step": 10018 }, { "epoch": 0.4152264909445066, "grad_norm": 0.39024344086647034, "learning_rate": 2.9240747648058354e-06, "loss": 0.6465, "step": 10019 }, { "epoch": 0.41526793485018026, "grad_norm": 0.37725114822387695, "learning_rate": 2.9238675452774672e-06, "loss": 0.7029, "step": 10020 }, { "epoch": 0.41530937875585394, "grad_norm": 0.3950027525424957, "learning_rate": 2.9236603257490986e-06, "loss": 0.7231, "step": 10021 }, { "epoch": 0.4153508226615276, "grad_norm": 0.41263076663017273, "learning_rate": 2.9234531062207304e-06, "loss": 0.7034, "step": 10022 }, { "epoch": 0.4153922665672013, "grad_norm": 0.4066719114780426, "learning_rate": 2.923245886692362e-06, "loss": 0.6309, "step": 10023 }, { "epoch": 0.415433710472875, "grad_norm": 0.4050705134868622, "learning_rate": 2.9230386671639936e-06, "loss": 0.6836, "step": 10024 }, { "epoch": 0.41547515437854865, "grad_norm": 0.38551566004753113, "learning_rate": 2.922831447635626e-06, "loss": 0.6692, "step": 10025 }, { "epoch": 0.4155165982842223, "grad_norm": 0.39364781975746155, "learning_rate": 2.922624228107257e-06, "loss": 0.6399, "step": 10026 }, { "epoch": 0.41555804218989595, "grad_norm": 0.39220213890075684, "learning_rate": 2.922417008578889e-06, "loss": 0.7007, "step": 10027 }, { "epoch": 0.4155994860955696, "grad_norm": 0.4132976233959198, "learning_rate": 2.9222097890505204e-06, "loss": 0.7498, "step": 10028 }, { "epoch": 0.4156409300012433, "grad_norm": 0.42848655581474304, "learning_rate": 2.9220025695221522e-06, "loss": 0.6899, 
"step": 10029 }, { "epoch": 0.415682373906917, "grad_norm": 0.4121173620223999, "learning_rate": 2.9217953499937836e-06, "loss": 0.6914, "step": 10030 }, { "epoch": 0.41572381781259066, "grad_norm": 0.38896945118904114, "learning_rate": 2.9215881304654154e-06, "loss": 0.7085, "step": 10031 }, { "epoch": 0.41576526171826433, "grad_norm": 0.4360326826572418, "learning_rate": 2.921380910937047e-06, "loss": 0.719, "step": 10032 }, { "epoch": 0.415806705623938, "grad_norm": 0.43854182958602905, "learning_rate": 2.9211736914086786e-06, "loss": 0.7231, "step": 10033 }, { "epoch": 0.4158481495296117, "grad_norm": 0.42000943422317505, "learning_rate": 2.9209664718803104e-06, "loss": 0.6848, "step": 10034 }, { "epoch": 0.41588959343528537, "grad_norm": 0.44673657417297363, "learning_rate": 2.920759252351942e-06, "loss": 0.7191, "step": 10035 }, { "epoch": 0.415931037340959, "grad_norm": 0.4248892664909363, "learning_rate": 2.9205520328235736e-06, "loss": 0.7278, "step": 10036 }, { "epoch": 0.41597248124663266, "grad_norm": 0.4134279191493988, "learning_rate": 2.920344813295205e-06, "loss": 0.6899, "step": 10037 }, { "epoch": 0.41601392515230634, "grad_norm": 0.4307844340801239, "learning_rate": 2.920137593766837e-06, "loss": 0.7169, "step": 10038 }, { "epoch": 0.41605536905798, "grad_norm": 0.4218250811100006, "learning_rate": 2.919930374238468e-06, "loss": 0.7063, "step": 10039 }, { "epoch": 0.4160968129636537, "grad_norm": 0.4126237630844116, "learning_rate": 2.9197231547101e-06, "loss": 0.6775, "step": 10040 }, { "epoch": 0.41613825686932737, "grad_norm": 0.4284915626049042, "learning_rate": 2.9195159351817322e-06, "loss": 0.6892, "step": 10041 }, { "epoch": 0.41617970077500105, "grad_norm": 0.4493291676044464, "learning_rate": 2.919308715653363e-06, "loss": 0.707, "step": 10042 }, { "epoch": 0.4162211446806747, "grad_norm": 0.38464421033859253, "learning_rate": 2.9191014961249954e-06, "loss": 0.645, "step": 10043 }, { "epoch": 0.4162625885863484, "grad_norm": 
0.40138405561447144, "learning_rate": 2.9188942765966264e-06, "loss": 0.689, "step": 10044 }, { "epoch": 0.416304032492022, "grad_norm": 0.41934534907341003, "learning_rate": 2.9186870570682586e-06, "loss": 0.6815, "step": 10045 }, { "epoch": 0.4163454763976957, "grad_norm": 0.40376022458076477, "learning_rate": 2.91847983753989e-06, "loss": 0.7148, "step": 10046 }, { "epoch": 0.4163869203033694, "grad_norm": 0.4188496768474579, "learning_rate": 2.918272618011522e-06, "loss": 0.6438, "step": 10047 }, { "epoch": 0.41642836420904306, "grad_norm": 0.4080359637737274, "learning_rate": 2.9180653984831532e-06, "loss": 0.6993, "step": 10048 }, { "epoch": 0.41646980811471673, "grad_norm": 0.38120919466018677, "learning_rate": 2.917858178954785e-06, "loss": 0.6711, "step": 10049 }, { "epoch": 0.4165112520203904, "grad_norm": 0.41138529777526855, "learning_rate": 2.917650959426417e-06, "loss": 0.7163, "step": 10050 }, { "epoch": 0.4165526959260641, "grad_norm": 0.41981860995292664, "learning_rate": 2.9174437398980482e-06, "loss": 0.6647, "step": 10051 }, { "epoch": 0.41659413983173776, "grad_norm": 0.4471815228462219, "learning_rate": 2.91723652036968e-06, "loss": 0.6936, "step": 10052 }, { "epoch": 0.41663558373741144, "grad_norm": 0.3866643011569977, "learning_rate": 2.9170293008413114e-06, "loss": 0.6129, "step": 10053 }, { "epoch": 0.41667702764308506, "grad_norm": 0.3875521123409271, "learning_rate": 2.9168220813129432e-06, "loss": 0.6122, "step": 10054 }, { "epoch": 0.41671847154875874, "grad_norm": 0.41306057572364807, "learning_rate": 2.9166148617845746e-06, "loss": 0.6545, "step": 10055 }, { "epoch": 0.4167599154544324, "grad_norm": 0.41437965631484985, "learning_rate": 2.9164076422562064e-06, "loss": 0.6672, "step": 10056 }, { "epoch": 0.4168013593601061, "grad_norm": 0.395694762468338, "learning_rate": 2.916200422727838e-06, "loss": 0.6559, "step": 10057 }, { "epoch": 0.41684280326577977, "grad_norm": 0.4070577621459961, "learning_rate": 2.9159932031994696e-06, 
"loss": 0.6918, "step": 10058 }, { "epoch": 0.41688424717145345, "grad_norm": 0.43152493238449097, "learning_rate": 2.915785983671102e-06, "loss": 0.6803, "step": 10059 }, { "epoch": 0.4169256910771271, "grad_norm": 0.4536620080471039, "learning_rate": 2.915578764142733e-06, "loss": 0.7019, "step": 10060 }, { "epoch": 0.4169671349828008, "grad_norm": 0.4446908235549927, "learning_rate": 2.915371544614365e-06, "loss": 0.6952, "step": 10061 }, { "epoch": 0.4170085788884744, "grad_norm": 0.41655266284942627, "learning_rate": 2.9151643250859964e-06, "loss": 0.6783, "step": 10062 }, { "epoch": 0.4170500227941481, "grad_norm": 0.4246901869773865, "learning_rate": 2.9149571055576282e-06, "loss": 0.7532, "step": 10063 }, { "epoch": 0.4170914666998218, "grad_norm": 0.4338078796863556, "learning_rate": 2.9147498860292596e-06, "loss": 0.6992, "step": 10064 }, { "epoch": 0.41713291060549545, "grad_norm": 0.3923001289367676, "learning_rate": 2.9145426665008914e-06, "loss": 0.7367, "step": 10065 }, { "epoch": 0.41717435451116913, "grad_norm": 0.5137569308280945, "learning_rate": 2.914335446972523e-06, "loss": 0.7358, "step": 10066 }, { "epoch": 0.4172157984168428, "grad_norm": 0.3998924493789673, "learning_rate": 2.9141282274441546e-06, "loss": 0.6965, "step": 10067 }, { "epoch": 0.4172572423225165, "grad_norm": 0.40006181597709656, "learning_rate": 2.9139210079157864e-06, "loss": 0.7104, "step": 10068 }, { "epoch": 0.41729868622819016, "grad_norm": 0.4219914972782135, "learning_rate": 2.913713788387418e-06, "loss": 0.7056, "step": 10069 }, { "epoch": 0.41734013013386384, "grad_norm": 0.3953186571598053, "learning_rate": 2.9135065688590496e-06, "loss": 0.7297, "step": 10070 }, { "epoch": 0.41738157403953746, "grad_norm": 0.39155176281929016, "learning_rate": 2.913299349330681e-06, "loss": 0.6799, "step": 10071 }, { "epoch": 0.41742301794521114, "grad_norm": 0.3963681161403656, "learning_rate": 2.913092129802313e-06, "loss": 0.6285, "step": 10072 }, { "epoch": 0.4174644618508848, 
"grad_norm": 0.4096900522708893, "learning_rate": 2.912884910273944e-06, "loss": 0.6799, "step": 10073 }, { "epoch": 0.4175059057565585, "grad_norm": 0.40416139364242554, "learning_rate": 2.912677690745576e-06, "loss": 0.696, "step": 10074 }, { "epoch": 0.41754734966223217, "grad_norm": 0.444085955619812, "learning_rate": 2.9124704712172074e-06, "loss": 0.7034, "step": 10075 }, { "epoch": 0.41758879356790585, "grad_norm": 0.4189859628677368, "learning_rate": 2.9122632516888392e-06, "loss": 0.6899, "step": 10076 }, { "epoch": 0.4176302374735795, "grad_norm": 0.40397050976753235, "learning_rate": 2.9120560321604714e-06, "loss": 0.7283, "step": 10077 }, { "epoch": 0.4176716813792532, "grad_norm": 0.4646121859550476, "learning_rate": 2.9118488126321024e-06, "loss": 0.7408, "step": 10078 }, { "epoch": 0.4177131252849269, "grad_norm": 0.3946421146392822, "learning_rate": 2.9116415931037346e-06, "loss": 0.717, "step": 10079 }, { "epoch": 0.4177545691906005, "grad_norm": 0.42847490310668945, "learning_rate": 2.911434373575366e-06, "loss": 0.668, "step": 10080 }, { "epoch": 0.4177960130962742, "grad_norm": 0.4313769042491913, "learning_rate": 2.911227154046998e-06, "loss": 0.7117, "step": 10081 }, { "epoch": 0.41783745700194785, "grad_norm": 0.4347526431083679, "learning_rate": 2.9110199345186292e-06, "loss": 0.7015, "step": 10082 }, { "epoch": 0.41787890090762153, "grad_norm": 0.43561851978302, "learning_rate": 2.910812714990261e-06, "loss": 0.6584, "step": 10083 }, { "epoch": 0.4179203448132952, "grad_norm": 0.40543240308761597, "learning_rate": 2.9106054954618924e-06, "loss": 0.6255, "step": 10084 }, { "epoch": 0.4179617887189689, "grad_norm": 0.4019544720649719, "learning_rate": 2.9103982759335242e-06, "loss": 0.6289, "step": 10085 }, { "epoch": 0.41800323262464256, "grad_norm": 0.4045496881008148, "learning_rate": 2.910191056405156e-06, "loss": 0.6978, "step": 10086 }, { "epoch": 0.41804467653031624, "grad_norm": 0.4120318293571472, "learning_rate": 
2.9099838368767874e-06, "loss": 0.6687, "step": 10087 }, { "epoch": 0.41808612043598986, "grad_norm": 0.4046080410480499, "learning_rate": 2.9097766173484192e-06, "loss": 0.6638, "step": 10088 }, { "epoch": 0.41812756434166354, "grad_norm": 0.4070160388946533, "learning_rate": 2.9095693978200506e-06, "loss": 0.6783, "step": 10089 }, { "epoch": 0.4181690082473372, "grad_norm": 0.4111829698085785, "learning_rate": 2.9093621782916824e-06, "loss": 0.7019, "step": 10090 }, { "epoch": 0.4182104521530109, "grad_norm": 0.4016530215740204, "learning_rate": 2.909154958763314e-06, "loss": 0.6498, "step": 10091 }, { "epoch": 0.41825189605868457, "grad_norm": 0.3862515091896057, "learning_rate": 2.9089477392349456e-06, "loss": 0.6609, "step": 10092 }, { "epoch": 0.41829333996435825, "grad_norm": 0.4537737965583801, "learning_rate": 2.908740519706577e-06, "loss": 0.7473, "step": 10093 }, { "epoch": 0.4183347838700319, "grad_norm": 0.4077194631099701, "learning_rate": 2.908533300178209e-06, "loss": 0.7039, "step": 10094 }, { "epoch": 0.4183762277757056, "grad_norm": 0.42547231912612915, "learning_rate": 2.908326080649841e-06, "loss": 0.7688, "step": 10095 }, { "epoch": 0.4184176716813793, "grad_norm": 0.38500475883483887, "learning_rate": 2.9081188611214724e-06, "loss": 0.64, "step": 10096 }, { "epoch": 0.4184591155870529, "grad_norm": 0.4095021188259125, "learning_rate": 2.9079116415931042e-06, "loss": 0.6846, "step": 10097 }, { "epoch": 0.4185005594927266, "grad_norm": 0.4133777618408203, "learning_rate": 2.9077044220647356e-06, "loss": 0.6697, "step": 10098 }, { "epoch": 0.41854200339840025, "grad_norm": 0.4071449935436249, "learning_rate": 2.9074972025363674e-06, "loss": 0.6575, "step": 10099 }, { "epoch": 0.41858344730407393, "grad_norm": 0.40374892950057983, "learning_rate": 2.907289983007999e-06, "loss": 0.678, "step": 10100 }, { "epoch": 0.4186248912097476, "grad_norm": 0.4224300980567932, "learning_rate": 2.9070827634796306e-06, "loss": 0.7646, "step": 10101 }, { 
"epoch": 0.4186663351154213, "grad_norm": 0.424853652715683, "learning_rate": 2.9068755439512624e-06, "loss": 0.724, "step": 10102 }, { "epoch": 0.41870777902109496, "grad_norm": 0.3898879587650299, "learning_rate": 2.906668324422894e-06, "loss": 0.687, "step": 10103 }, { "epoch": 0.41874922292676864, "grad_norm": 0.42142367362976074, "learning_rate": 2.9064611048945256e-06, "loss": 0.7504, "step": 10104 }, { "epoch": 0.4187906668324423, "grad_norm": 0.39617568254470825, "learning_rate": 2.906253885366157e-06, "loss": 0.6882, "step": 10105 }, { "epoch": 0.41883211073811594, "grad_norm": 0.4162764549255371, "learning_rate": 2.906046665837789e-06, "loss": 0.6709, "step": 10106 }, { "epoch": 0.4188735546437896, "grad_norm": 0.45892590284347534, "learning_rate": 2.9058394463094202e-06, "loss": 0.6831, "step": 10107 }, { "epoch": 0.4189149985494633, "grad_norm": 0.4267515242099762, "learning_rate": 2.905632226781052e-06, "loss": 0.6858, "step": 10108 }, { "epoch": 0.41895644245513697, "grad_norm": 0.4441525340080261, "learning_rate": 2.9054250072526834e-06, "loss": 0.7308, "step": 10109 }, { "epoch": 0.41899788636081065, "grad_norm": 0.4077686071395874, "learning_rate": 2.9052177877243152e-06, "loss": 0.7095, "step": 10110 }, { "epoch": 0.4190393302664843, "grad_norm": 0.4197358191013336, "learning_rate": 2.9050105681959475e-06, "loss": 0.6647, "step": 10111 }, { "epoch": 0.419080774172158, "grad_norm": 0.4275217652320862, "learning_rate": 2.9048033486675784e-06, "loss": 0.72, "step": 10112 }, { "epoch": 0.4191222180778317, "grad_norm": 0.4126758873462677, "learning_rate": 2.9045961291392106e-06, "loss": 0.7151, "step": 10113 }, { "epoch": 0.4191636619835053, "grad_norm": 0.4319170117378235, "learning_rate": 2.904388909610842e-06, "loss": 0.7312, "step": 10114 }, { "epoch": 0.419205105889179, "grad_norm": 0.39468684792518616, "learning_rate": 2.904181690082474e-06, "loss": 0.6582, "step": 10115 }, { "epoch": 0.41924654979485265, "grad_norm": 0.4175708591938019, 
"learning_rate": 2.9039744705541052e-06, "loss": 0.6917, "step": 10116 }, { "epoch": 0.41928799370052633, "grad_norm": 0.4118190407752991, "learning_rate": 2.903767251025737e-06, "loss": 0.6755, "step": 10117 }, { "epoch": 0.4193294376062, "grad_norm": 0.4232187569141388, "learning_rate": 2.9035600314973684e-06, "loss": 0.6902, "step": 10118 }, { "epoch": 0.4193708815118737, "grad_norm": 0.41559267044067383, "learning_rate": 2.9033528119690002e-06, "loss": 0.7065, "step": 10119 }, { "epoch": 0.41941232541754736, "grad_norm": 0.40015387535095215, "learning_rate": 2.903145592440632e-06, "loss": 0.7622, "step": 10120 }, { "epoch": 0.41945376932322104, "grad_norm": 0.43168577551841736, "learning_rate": 2.9029383729122634e-06, "loss": 0.7197, "step": 10121 }, { "epoch": 0.4194952132288947, "grad_norm": 0.4424954950809479, "learning_rate": 2.9027311533838952e-06, "loss": 0.7102, "step": 10122 }, { "epoch": 0.41953665713456834, "grad_norm": 0.39829349517822266, "learning_rate": 2.9025239338555266e-06, "loss": 0.6477, "step": 10123 }, { "epoch": 0.419578101040242, "grad_norm": 0.4464055895805359, "learning_rate": 2.9023167143271584e-06, "loss": 0.7225, "step": 10124 }, { "epoch": 0.4196195449459157, "grad_norm": 0.44539156556129456, "learning_rate": 2.90210949479879e-06, "loss": 0.767, "step": 10125 }, { "epoch": 0.41966098885158937, "grad_norm": 0.41021695733070374, "learning_rate": 2.9019022752704216e-06, "loss": 0.6956, "step": 10126 }, { "epoch": 0.41970243275726304, "grad_norm": 0.42815688252449036, "learning_rate": 2.901695055742053e-06, "loss": 0.6829, "step": 10127 }, { "epoch": 0.4197438766629367, "grad_norm": 0.4615345895290375, "learning_rate": 2.901487836213685e-06, "loss": 0.7056, "step": 10128 }, { "epoch": 0.4197853205686104, "grad_norm": 0.4075317084789276, "learning_rate": 2.901280616685317e-06, "loss": 0.6912, "step": 10129 }, { "epoch": 0.4198267644742841, "grad_norm": 0.4276561141014099, "learning_rate": 2.9010733971569484e-06, "loss": 0.674, "step": 
10130 }, { "epoch": 0.41986820837995775, "grad_norm": 0.41672348976135254, "learning_rate": 2.9008661776285802e-06, "loss": 0.6958, "step": 10131 }, { "epoch": 0.4199096522856314, "grad_norm": 0.3936053216457367, "learning_rate": 2.9006589581002116e-06, "loss": 0.688, "step": 10132 }, { "epoch": 0.41995109619130505, "grad_norm": 0.4158685505390167, "learning_rate": 2.9004517385718434e-06, "loss": 0.6726, "step": 10133 }, { "epoch": 0.41999254009697873, "grad_norm": 0.41040393710136414, "learning_rate": 2.900244519043475e-06, "loss": 0.6804, "step": 10134 }, { "epoch": 0.4200339840026524, "grad_norm": 0.4078502058982849, "learning_rate": 2.9000372995151066e-06, "loss": 0.7456, "step": 10135 }, { "epoch": 0.4200754279083261, "grad_norm": 0.3787294030189514, "learning_rate": 2.899830079986738e-06, "loss": 0.6664, "step": 10136 }, { "epoch": 0.42011687181399976, "grad_norm": 0.4465276300907135, "learning_rate": 2.89962286045837e-06, "loss": 0.7246, "step": 10137 }, { "epoch": 0.42015831571967344, "grad_norm": 0.41634276509284973, "learning_rate": 2.8994156409300016e-06, "loss": 0.691, "step": 10138 }, { "epoch": 0.4201997596253471, "grad_norm": 0.41570356488227844, "learning_rate": 2.899208421401633e-06, "loss": 0.6851, "step": 10139 }, { "epoch": 0.4202412035310208, "grad_norm": 0.41125667095184326, "learning_rate": 2.899001201873265e-06, "loss": 0.6753, "step": 10140 }, { "epoch": 0.4202826474366944, "grad_norm": 0.43010514974594116, "learning_rate": 2.8987939823448962e-06, "loss": 0.7109, "step": 10141 }, { "epoch": 0.4203240913423681, "grad_norm": 0.4447622299194336, "learning_rate": 2.898586762816528e-06, "loss": 0.6852, "step": 10142 }, { "epoch": 0.42036553524804177, "grad_norm": 0.37587252259254456, "learning_rate": 2.8983795432881594e-06, "loss": 0.6855, "step": 10143 }, { "epoch": 0.42040697915371544, "grad_norm": 0.47261160612106323, "learning_rate": 2.8981723237597912e-06, "loss": 0.7773, "step": 10144 }, { "epoch": 0.4204484230593891, "grad_norm": 
0.39476269483566284, "learning_rate": 2.8979651042314226e-06, "loss": 0.6777, "step": 10145 }, { "epoch": 0.4204898669650628, "grad_norm": 0.42408743500709534, "learning_rate": 2.897757884703055e-06, "loss": 0.6736, "step": 10146 }, { "epoch": 0.4205313108707365, "grad_norm": 0.41875147819519043, "learning_rate": 2.8975506651746867e-06, "loss": 0.7395, "step": 10147 }, { "epoch": 0.42057275477641015, "grad_norm": 0.3818807303905487, "learning_rate": 2.897343445646318e-06, "loss": 0.6786, "step": 10148 }, { "epoch": 0.4206141986820838, "grad_norm": 0.39929676055908203, "learning_rate": 2.89713622611795e-06, "loss": 0.7102, "step": 10149 }, { "epoch": 0.42065564258775745, "grad_norm": 0.39484116435050964, "learning_rate": 2.8969290065895812e-06, "loss": 0.6819, "step": 10150 }, { "epoch": 0.4206970864934311, "grad_norm": 0.44920578598976135, "learning_rate": 2.896721787061213e-06, "loss": 0.7023, "step": 10151 }, { "epoch": 0.4207385303991048, "grad_norm": 0.48473748564720154, "learning_rate": 2.8965145675328444e-06, "loss": 0.8225, "step": 10152 }, { "epoch": 0.4207799743047785, "grad_norm": 0.3916252553462982, "learning_rate": 2.8963073480044762e-06, "loss": 0.6824, "step": 10153 }, { "epoch": 0.42082141821045216, "grad_norm": 0.4219130873680115, "learning_rate": 2.8961001284761076e-06, "loss": 0.7069, "step": 10154 }, { "epoch": 0.42086286211612584, "grad_norm": 0.37896451354026794, "learning_rate": 2.8958929089477394e-06, "loss": 0.6726, "step": 10155 }, { "epoch": 0.4209043060217995, "grad_norm": 0.432675302028656, "learning_rate": 2.8956856894193712e-06, "loss": 0.7346, "step": 10156 }, { "epoch": 0.4209457499274732, "grad_norm": 0.42689788341522217, "learning_rate": 2.8954784698910026e-06, "loss": 0.6852, "step": 10157 }, { "epoch": 0.4209871938331468, "grad_norm": 0.40822523832321167, "learning_rate": 2.8952712503626344e-06, "loss": 0.7198, "step": 10158 }, { "epoch": 0.4210286377388205, "grad_norm": 0.4555254578590393, "learning_rate": 2.895064030834266e-06, 
"loss": 0.6924, "step": 10159 }, { "epoch": 0.42107008164449417, "grad_norm": 0.43470826745033264, "learning_rate": 2.8948568113058976e-06, "loss": 0.7238, "step": 10160 }, { "epoch": 0.42111152555016784, "grad_norm": 0.37628692388534546, "learning_rate": 2.894649591777529e-06, "loss": 0.6229, "step": 10161 }, { "epoch": 0.4211529694558415, "grad_norm": 0.4038245677947998, "learning_rate": 2.894442372249161e-06, "loss": 0.7654, "step": 10162 }, { "epoch": 0.4211944133615152, "grad_norm": 0.4021161198616028, "learning_rate": 2.894235152720793e-06, "loss": 0.6816, "step": 10163 }, { "epoch": 0.4212358572671889, "grad_norm": 0.47103646397590637, "learning_rate": 2.8940279331924244e-06, "loss": 0.6826, "step": 10164 }, { "epoch": 0.42127730117286255, "grad_norm": 0.43137219548225403, "learning_rate": 2.8938207136640563e-06, "loss": 0.6509, "step": 10165 }, { "epoch": 0.4213187450785362, "grad_norm": 0.4132159352302551, "learning_rate": 2.8936134941356876e-06, "loss": 0.6533, "step": 10166 }, { "epoch": 0.42136018898420985, "grad_norm": 0.4073375463485718, "learning_rate": 2.8934062746073194e-06, "loss": 0.6733, "step": 10167 }, { "epoch": 0.4214016328898835, "grad_norm": 0.39615103602409363, "learning_rate": 2.893199055078951e-06, "loss": 0.6301, "step": 10168 }, { "epoch": 0.4214430767955572, "grad_norm": 0.4119548499584198, "learning_rate": 2.8929918355505826e-06, "loss": 0.7258, "step": 10169 }, { "epoch": 0.4214845207012309, "grad_norm": 0.3968375325202942, "learning_rate": 2.892784616022214e-06, "loss": 0.6851, "step": 10170 }, { "epoch": 0.42152596460690456, "grad_norm": 0.42875370383262634, "learning_rate": 2.892577396493846e-06, "loss": 0.7073, "step": 10171 }, { "epoch": 0.42156740851257823, "grad_norm": 0.40341463685035706, "learning_rate": 2.8923701769654776e-06, "loss": 0.7263, "step": 10172 }, { "epoch": 0.4216088524182519, "grad_norm": 0.40994176268577576, "learning_rate": 2.892162957437109e-06, "loss": 0.6838, "step": 10173 }, { "epoch": 
0.4216502963239256, "grad_norm": 0.3969689905643463, "learning_rate": 2.891955737908741e-06, "loss": 0.6956, "step": 10174 }, { "epoch": 0.4216917402295992, "grad_norm": 0.4012397229671478, "learning_rate": 2.8917485183803722e-06, "loss": 0.7178, "step": 10175 }, { "epoch": 0.4217331841352729, "grad_norm": 0.41515862941741943, "learning_rate": 2.891541298852004e-06, "loss": 0.6968, "step": 10176 }, { "epoch": 0.42177462804094656, "grad_norm": 0.4045315384864807, "learning_rate": 2.8913340793236354e-06, "loss": 0.637, "step": 10177 }, { "epoch": 0.42181607194662024, "grad_norm": 0.4065553843975067, "learning_rate": 2.8911268597952672e-06, "loss": 0.731, "step": 10178 }, { "epoch": 0.4218575158522939, "grad_norm": 0.38114413619041443, "learning_rate": 2.8909196402668986e-06, "loss": 0.6787, "step": 10179 }, { "epoch": 0.4218989597579676, "grad_norm": 0.4144408106803894, "learning_rate": 2.890712420738531e-06, "loss": 0.7239, "step": 10180 }, { "epoch": 0.4219404036636413, "grad_norm": 0.3942946791648865, "learning_rate": 2.8905052012101627e-06, "loss": 0.6406, "step": 10181 }, { "epoch": 0.42198184756931495, "grad_norm": 0.41020676493644714, "learning_rate": 2.890297981681794e-06, "loss": 0.6953, "step": 10182 }, { "epoch": 0.4220232914749886, "grad_norm": 0.4050442576408386, "learning_rate": 2.890090762153426e-06, "loss": 0.6769, "step": 10183 }, { "epoch": 0.42206473538066225, "grad_norm": 0.42163223028182983, "learning_rate": 2.8898835426250572e-06, "loss": 0.6957, "step": 10184 }, { "epoch": 0.4221061792863359, "grad_norm": 0.39362090826034546, "learning_rate": 2.889676323096689e-06, "loss": 0.6694, "step": 10185 }, { "epoch": 0.4221476231920096, "grad_norm": 0.38748443126678467, "learning_rate": 2.8894691035683204e-06, "loss": 0.6913, "step": 10186 }, { "epoch": 0.4221890670976833, "grad_norm": 0.46111661195755005, "learning_rate": 2.8892618840399522e-06, "loss": 0.7261, "step": 10187 }, { "epoch": 0.42223051100335696, "grad_norm": 0.4202723205089569, 
"learning_rate": 2.8890546645115836e-06, "loss": 0.7014, "step": 10188 }, { "epoch": 0.42227195490903063, "grad_norm": 0.40815672278404236, "learning_rate": 2.8888474449832154e-06, "loss": 0.7235, "step": 10189 }, { "epoch": 0.4223133988147043, "grad_norm": 0.44557347893714905, "learning_rate": 2.8886402254548472e-06, "loss": 0.7305, "step": 10190 }, { "epoch": 0.422354842720378, "grad_norm": 0.41423219442367554, "learning_rate": 2.8884330059264786e-06, "loss": 0.6337, "step": 10191 }, { "epoch": 0.42239628662605166, "grad_norm": 0.399832546710968, "learning_rate": 2.8882257863981104e-06, "loss": 0.6868, "step": 10192 }, { "epoch": 0.4224377305317253, "grad_norm": 0.3639524579048157, "learning_rate": 2.888018566869742e-06, "loss": 0.6476, "step": 10193 }, { "epoch": 0.42247917443739896, "grad_norm": 0.40864095091819763, "learning_rate": 2.8878113473413736e-06, "loss": 0.7261, "step": 10194 }, { "epoch": 0.42252061834307264, "grad_norm": 0.4145399034023285, "learning_rate": 2.887604127813005e-06, "loss": 0.6655, "step": 10195 }, { "epoch": 0.4225620622487463, "grad_norm": 0.4135926365852356, "learning_rate": 2.887396908284637e-06, "loss": 0.6855, "step": 10196 }, { "epoch": 0.42260350615442, "grad_norm": 0.42498481273651123, "learning_rate": 2.8871896887562682e-06, "loss": 0.7341, "step": 10197 }, { "epoch": 0.42264495006009367, "grad_norm": 0.4186903238296509, "learning_rate": 2.8869824692279005e-06, "loss": 0.7028, "step": 10198 }, { "epoch": 0.42268639396576735, "grad_norm": 0.4143699109554291, "learning_rate": 2.8867752496995323e-06, "loss": 0.78, "step": 10199 }, { "epoch": 0.422727837871441, "grad_norm": 0.41902509331703186, "learning_rate": 2.8865680301711636e-06, "loss": 0.6488, "step": 10200 }, { "epoch": 0.4227692817771147, "grad_norm": 0.396794855594635, "learning_rate": 2.8863608106427955e-06, "loss": 0.6522, "step": 10201 }, { "epoch": 0.4228107256827883, "grad_norm": 0.41808241605758667, "learning_rate": 2.886153591114427e-06, "loss": 0.6593, "step": 
10202 }, { "epoch": 0.422852169588462, "grad_norm": 0.3930683434009552, "learning_rate": 2.8859463715860587e-06, "loss": 0.7261, "step": 10203 }, { "epoch": 0.4228936134941357, "grad_norm": 0.41105759143829346, "learning_rate": 2.88573915205769e-06, "loss": 0.6797, "step": 10204 }, { "epoch": 0.42293505739980936, "grad_norm": 0.3948060870170593, "learning_rate": 2.885531932529322e-06, "loss": 0.6885, "step": 10205 }, { "epoch": 0.42297650130548303, "grad_norm": 0.4425502419471741, "learning_rate": 2.8853247130009532e-06, "loss": 0.6951, "step": 10206 }, { "epoch": 0.4230179452111567, "grad_norm": 0.441812127828598, "learning_rate": 2.885117493472585e-06, "loss": 0.6698, "step": 10207 }, { "epoch": 0.4230593891168304, "grad_norm": 0.41703009605407715, "learning_rate": 2.884910273944217e-06, "loss": 0.6709, "step": 10208 }, { "epoch": 0.42310083302250406, "grad_norm": 0.37838214635849, "learning_rate": 2.8847030544158482e-06, "loss": 0.6591, "step": 10209 }, { "epoch": 0.4231422769281777, "grad_norm": 0.4456915259361267, "learning_rate": 2.88449583488748e-06, "loss": 0.7206, "step": 10210 }, { "epoch": 0.42318372083385136, "grad_norm": 0.39847251772880554, "learning_rate": 2.8842886153591114e-06, "loss": 0.7074, "step": 10211 }, { "epoch": 0.42322516473952504, "grad_norm": 0.3975691497325897, "learning_rate": 2.8840813958307432e-06, "loss": 0.7183, "step": 10212 }, { "epoch": 0.4232666086451987, "grad_norm": 0.4412553906440735, "learning_rate": 2.8838741763023746e-06, "loss": 0.6831, "step": 10213 }, { "epoch": 0.4233080525508724, "grad_norm": 0.3839682638645172, "learning_rate": 2.883666956774007e-06, "loss": 0.6415, "step": 10214 }, { "epoch": 0.42334949645654607, "grad_norm": 0.4261326193809509, "learning_rate": 2.883459737245638e-06, "loss": 0.6592, "step": 10215 }, { "epoch": 0.42339094036221975, "grad_norm": 0.4193367063999176, "learning_rate": 2.88325251771727e-06, "loss": 0.7007, "step": 10216 }, { "epoch": 0.4234323842678934, "grad_norm": 0.4272805154323578, 
"learning_rate": 2.883045298188902e-06, "loss": 0.72, "step": 10217 }, { "epoch": 0.4234738281735671, "grad_norm": 0.4257102608680725, "learning_rate": 2.8828380786605332e-06, "loss": 0.7686, "step": 10218 }, { "epoch": 0.4235152720792407, "grad_norm": 0.40702876448631287, "learning_rate": 2.882630859132165e-06, "loss": 0.7122, "step": 10219 }, { "epoch": 0.4235567159849144, "grad_norm": 0.4129182994365692, "learning_rate": 2.8824236396037964e-06, "loss": 0.6683, "step": 10220 }, { "epoch": 0.4235981598905881, "grad_norm": 0.4500209391117096, "learning_rate": 2.8822164200754283e-06, "loss": 0.7084, "step": 10221 }, { "epoch": 0.42363960379626175, "grad_norm": 0.41604897379875183, "learning_rate": 2.8820092005470596e-06, "loss": 0.6832, "step": 10222 }, { "epoch": 0.42368104770193543, "grad_norm": 0.39230993390083313, "learning_rate": 2.8818019810186914e-06, "loss": 0.7218, "step": 10223 }, { "epoch": 0.4237224916076091, "grad_norm": 0.3749372959136963, "learning_rate": 2.8815947614903233e-06, "loss": 0.6801, "step": 10224 }, { "epoch": 0.4237639355132828, "grad_norm": 0.4048909842967987, "learning_rate": 2.8813875419619546e-06, "loss": 0.6174, "step": 10225 }, { "epoch": 0.42380537941895646, "grad_norm": 0.4248400330543518, "learning_rate": 2.8811803224335865e-06, "loss": 0.7142, "step": 10226 }, { "epoch": 0.42384682332463014, "grad_norm": 0.39567089080810547, "learning_rate": 2.880973102905218e-06, "loss": 0.7222, "step": 10227 }, { "epoch": 0.42388826723030376, "grad_norm": 0.41997814178466797, "learning_rate": 2.8807658833768496e-06, "loss": 0.7324, "step": 10228 }, { "epoch": 0.42392971113597744, "grad_norm": 0.3850817382335663, "learning_rate": 2.880558663848481e-06, "loss": 0.6646, "step": 10229 }, { "epoch": 0.4239711550416511, "grad_norm": 0.3971218466758728, "learning_rate": 2.880351444320113e-06, "loss": 0.6935, "step": 10230 }, { "epoch": 0.4240125989473248, "grad_norm": 0.39058709144592285, "learning_rate": 2.8801442247917442e-06, "loss": 0.6748, 
"step": 10231 }, { "epoch": 0.42405404285299847, "grad_norm": 0.4227049648761749, "learning_rate": 2.8799370052633765e-06, "loss": 0.6399, "step": 10232 }, { "epoch": 0.42409548675867215, "grad_norm": 0.42481720447540283, "learning_rate": 2.8797297857350083e-06, "loss": 0.6937, "step": 10233 }, { "epoch": 0.4241369306643458, "grad_norm": 0.39221954345703125, "learning_rate": 2.8795225662066397e-06, "loss": 0.6625, "step": 10234 }, { "epoch": 0.4241783745700195, "grad_norm": 0.4086552560329437, "learning_rate": 2.8793153466782715e-06, "loss": 0.7483, "step": 10235 }, { "epoch": 0.4242198184756931, "grad_norm": 0.41614705324172974, "learning_rate": 2.879108127149903e-06, "loss": 0.6652, "step": 10236 }, { "epoch": 0.4242612623813668, "grad_norm": 0.4312654733657837, "learning_rate": 2.8789009076215347e-06, "loss": 0.6996, "step": 10237 }, { "epoch": 0.4243027062870405, "grad_norm": 0.3950953781604767, "learning_rate": 2.878693688093166e-06, "loss": 0.6656, "step": 10238 }, { "epoch": 0.42434415019271415, "grad_norm": 0.3886812925338745, "learning_rate": 2.878486468564798e-06, "loss": 0.6171, "step": 10239 }, { "epoch": 0.42438559409838783, "grad_norm": 0.42413538694381714, "learning_rate": 2.8782792490364292e-06, "loss": 0.6663, "step": 10240 }, { "epoch": 0.4244270380040615, "grad_norm": 0.3936503827571869, "learning_rate": 2.878072029508061e-06, "loss": 0.7002, "step": 10241 }, { "epoch": 0.4244684819097352, "grad_norm": 0.384168803691864, "learning_rate": 2.877864809979693e-06, "loss": 0.6533, "step": 10242 }, { "epoch": 0.42450992581540886, "grad_norm": 0.4433716833591461, "learning_rate": 2.8776575904513242e-06, "loss": 0.7679, "step": 10243 }, { "epoch": 0.42455136972108254, "grad_norm": 0.42682769894599915, "learning_rate": 2.877450370922956e-06, "loss": 0.6594, "step": 10244 }, { "epoch": 0.42459281362675616, "grad_norm": 0.3907032012939453, "learning_rate": 2.8772431513945874e-06, "loss": 0.6868, "step": 10245 }, { "epoch": 0.42463425753242984, "grad_norm": 
0.39520198106765747, "learning_rate": 2.8770359318662192e-06, "loss": 0.6619, "step": 10246 }, { "epoch": 0.4246757014381035, "grad_norm": 0.3837250769138336, "learning_rate": 2.8768287123378506e-06, "loss": 0.6553, "step": 10247 }, { "epoch": 0.4247171453437772, "grad_norm": 0.42277634143829346, "learning_rate": 2.876621492809483e-06, "loss": 0.6958, "step": 10248 }, { "epoch": 0.42475858924945087, "grad_norm": 0.38644087314605713, "learning_rate": 2.876414273281114e-06, "loss": 0.6849, "step": 10249 }, { "epoch": 0.42480003315512455, "grad_norm": 0.3962564468383789, "learning_rate": 2.876207053752746e-06, "loss": 0.7198, "step": 10250 }, { "epoch": 0.4248414770607982, "grad_norm": 0.4509621560573578, "learning_rate": 2.875999834224378e-06, "loss": 0.7019, "step": 10251 }, { "epoch": 0.4248829209664719, "grad_norm": 0.4079335331916809, "learning_rate": 2.8757926146960093e-06, "loss": 0.6799, "step": 10252 }, { "epoch": 0.4249243648721456, "grad_norm": 0.41734832525253296, "learning_rate": 2.875585395167641e-06, "loss": 0.6185, "step": 10253 }, { "epoch": 0.4249658087778192, "grad_norm": 0.4063252806663513, "learning_rate": 2.8753781756392724e-06, "loss": 0.6694, "step": 10254 }, { "epoch": 0.4250072526834929, "grad_norm": 0.40629255771636963, "learning_rate": 2.8751709561109043e-06, "loss": 0.6873, "step": 10255 }, { "epoch": 0.42504869658916655, "grad_norm": 0.42590293288230896, "learning_rate": 2.8749637365825356e-06, "loss": 0.7593, "step": 10256 }, { "epoch": 0.42509014049484023, "grad_norm": 0.42158129811286926, "learning_rate": 2.8747565170541675e-06, "loss": 0.702, "step": 10257 }, { "epoch": 0.4251315844005139, "grad_norm": 0.3932560980319977, "learning_rate": 2.874549297525799e-06, "loss": 0.6526, "step": 10258 }, { "epoch": 0.4251730283061876, "grad_norm": 0.40466374158859253, "learning_rate": 2.8743420779974306e-06, "loss": 0.692, "step": 10259 }, { "epoch": 0.42521447221186126, "grad_norm": 0.4045518934726715, "learning_rate": 2.8741348584690625e-06, 
"loss": 0.6906, "step": 10260 }, { "epoch": 0.42525591611753494, "grad_norm": 0.40885889530181885, "learning_rate": 2.873927638940694e-06, "loss": 0.7017, "step": 10261 }, { "epoch": 0.4252973600232086, "grad_norm": 0.43215063214302063, "learning_rate": 2.8737204194123257e-06, "loss": 0.6454, "step": 10262 }, { "epoch": 0.42533880392888224, "grad_norm": 0.46865829825401306, "learning_rate": 2.873513199883957e-06, "loss": 0.743, "step": 10263 }, { "epoch": 0.4253802478345559, "grad_norm": 0.3963261544704437, "learning_rate": 2.873305980355589e-06, "loss": 0.6936, "step": 10264 }, { "epoch": 0.4254216917402296, "grad_norm": 0.39299359917640686, "learning_rate": 2.8730987608272202e-06, "loss": 0.6573, "step": 10265 }, { "epoch": 0.42546313564590327, "grad_norm": 0.40829357504844666, "learning_rate": 2.8728915412988525e-06, "loss": 0.6823, "step": 10266 }, { "epoch": 0.42550457955157694, "grad_norm": 0.4267268776893616, "learning_rate": 2.8726843217704834e-06, "loss": 0.7211, "step": 10267 }, { "epoch": 0.4255460234572506, "grad_norm": 0.43717238306999207, "learning_rate": 2.8724771022421157e-06, "loss": 0.7153, "step": 10268 }, { "epoch": 0.4255874673629243, "grad_norm": 0.38898205757141113, "learning_rate": 2.8722698827137475e-06, "loss": 0.6183, "step": 10269 }, { "epoch": 0.425628911268598, "grad_norm": 0.40572547912597656, "learning_rate": 2.872062663185379e-06, "loss": 0.7458, "step": 10270 }, { "epoch": 0.4256703551742716, "grad_norm": 0.4216849207878113, "learning_rate": 2.8718554436570107e-06, "loss": 0.7795, "step": 10271 }, { "epoch": 0.4257117990799453, "grad_norm": 0.407711386680603, "learning_rate": 2.871648224128642e-06, "loss": 0.7449, "step": 10272 }, { "epoch": 0.42575324298561895, "grad_norm": 0.37762150168418884, "learning_rate": 2.871441004600274e-06, "loss": 0.6973, "step": 10273 }, { "epoch": 0.42579468689129263, "grad_norm": 0.3813624083995819, "learning_rate": 2.8712337850719052e-06, "loss": 0.6504, "step": 10274 }, { "epoch": 
0.4258361307969663, "grad_norm": 0.4144291877746582, "learning_rate": 2.871026565543537e-06, "loss": 0.675, "step": 10275 }, { "epoch": 0.42587757470264, "grad_norm": 0.4193122088909149, "learning_rate": 2.8708193460151684e-06, "loss": 0.6975, "step": 10276 }, { "epoch": 0.42591901860831366, "grad_norm": 0.3827277719974518, "learning_rate": 2.8706121264868002e-06, "loss": 0.6722, "step": 10277 }, { "epoch": 0.42596046251398734, "grad_norm": 0.4192068576812744, "learning_rate": 2.870404906958432e-06, "loss": 0.7405, "step": 10278 }, { "epoch": 0.426001906419661, "grad_norm": 0.36999237537384033, "learning_rate": 2.8701976874300634e-06, "loss": 0.6798, "step": 10279 }, { "epoch": 0.42604335032533464, "grad_norm": 0.4163936376571655, "learning_rate": 2.8699904679016953e-06, "loss": 0.7681, "step": 10280 }, { "epoch": 0.4260847942310083, "grad_norm": 0.39415282011032104, "learning_rate": 2.8697832483733266e-06, "loss": 0.7061, "step": 10281 }, { "epoch": 0.426126238136682, "grad_norm": 0.4454629719257355, "learning_rate": 2.869576028844959e-06, "loss": 0.6639, "step": 10282 }, { "epoch": 0.42616768204235567, "grad_norm": 0.40792116522789, "learning_rate": 2.86936880931659e-06, "loss": 0.6849, "step": 10283 }, { "epoch": 0.42620912594802934, "grad_norm": 0.4383392333984375, "learning_rate": 2.869161589788222e-06, "loss": 0.7009, "step": 10284 }, { "epoch": 0.426250569853703, "grad_norm": 0.3914540708065033, "learning_rate": 2.868954370259854e-06, "loss": 0.653, "step": 10285 }, { "epoch": 0.4262920137593767, "grad_norm": 1.3565726280212402, "learning_rate": 2.8687471507314853e-06, "loss": 0.6313, "step": 10286 }, { "epoch": 0.4263334576650504, "grad_norm": 0.39975056052207947, "learning_rate": 2.868539931203117e-06, "loss": 0.7212, "step": 10287 }, { "epoch": 0.42637490157072405, "grad_norm": 0.3931676745414734, "learning_rate": 2.8683327116747485e-06, "loss": 0.7012, "step": 10288 }, { "epoch": 0.4264163454763977, "grad_norm": 0.4522618055343628, "learning_rate": 
2.8681254921463803e-06, "loss": 0.7581, "step": 10289 }, { "epoch": 0.42645778938207135, "grad_norm": 0.4114205539226532, "learning_rate": 2.8679182726180116e-06, "loss": 0.6775, "step": 10290 }, { "epoch": 0.426499233287745, "grad_norm": 0.4485328793525696, "learning_rate": 2.8677110530896435e-06, "loss": 0.7458, "step": 10291 }, { "epoch": 0.4265406771934187, "grad_norm": 0.45354917645454407, "learning_rate": 2.867503833561275e-06, "loss": 0.6826, "step": 10292 }, { "epoch": 0.4265821210990924, "grad_norm": 0.3831586241722107, "learning_rate": 2.8672966140329067e-06, "loss": 0.684, "step": 10293 }, { "epoch": 0.42662356500476606, "grad_norm": 0.4138661026954651, "learning_rate": 2.8670893945045385e-06, "loss": 0.7246, "step": 10294 }, { "epoch": 0.42666500891043974, "grad_norm": 0.4374277591705322, "learning_rate": 2.86688217497617e-06, "loss": 0.6868, "step": 10295 }, { "epoch": 0.4267064528161134, "grad_norm": 0.4301886558532715, "learning_rate": 2.8666749554478017e-06, "loss": 0.7124, "step": 10296 }, { "epoch": 0.42674789672178703, "grad_norm": 0.41223520040512085, "learning_rate": 2.866467735919433e-06, "loss": 0.6858, "step": 10297 }, { "epoch": 0.4267893406274607, "grad_norm": 0.39161986112594604, "learning_rate": 2.866260516391065e-06, "loss": 0.6907, "step": 10298 }, { "epoch": 0.4268307845331344, "grad_norm": 0.39950069785118103, "learning_rate": 2.8660532968626962e-06, "loss": 0.6638, "step": 10299 }, { "epoch": 0.42687222843880807, "grad_norm": 0.42116594314575195, "learning_rate": 2.8658460773343285e-06, "loss": 0.7405, "step": 10300 }, { "epoch": 0.42691367234448174, "grad_norm": 0.39591485261917114, "learning_rate": 2.8656388578059594e-06, "loss": 0.719, "step": 10301 }, { "epoch": 0.4269551162501554, "grad_norm": 0.42279744148254395, "learning_rate": 2.8654316382775917e-06, "loss": 0.7117, "step": 10302 }, { "epoch": 0.4269965601558291, "grad_norm": 0.4172136187553406, "learning_rate": 2.8652244187492235e-06, "loss": 0.7041, "step": 10303 }, { 
"epoch": 0.4270380040615028, "grad_norm": 0.44062983989715576, "learning_rate": 2.865017199220855e-06, "loss": 0.7278, "step": 10304 }, { "epoch": 0.42707944796717645, "grad_norm": 0.4178955852985382, "learning_rate": 2.8648099796924867e-06, "loss": 0.7178, "step": 10305 }, { "epoch": 0.4271208918728501, "grad_norm": 0.39628198742866516, "learning_rate": 2.864602760164118e-06, "loss": 0.71, "step": 10306 }, { "epoch": 0.42716233577852375, "grad_norm": 0.358176052570343, "learning_rate": 2.86439554063575e-06, "loss": 0.6376, "step": 10307 }, { "epoch": 0.4272037796841974, "grad_norm": 0.40806353092193604, "learning_rate": 2.8641883211073812e-06, "loss": 0.6686, "step": 10308 }, { "epoch": 0.4272452235898711, "grad_norm": 0.4033411145210266, "learning_rate": 2.863981101579013e-06, "loss": 0.7014, "step": 10309 }, { "epoch": 0.4272866674955448, "grad_norm": 0.4035930633544922, "learning_rate": 2.8637738820506444e-06, "loss": 0.6735, "step": 10310 }, { "epoch": 0.42732811140121846, "grad_norm": 0.38307318091392517, "learning_rate": 2.8635666625222763e-06, "loss": 0.6473, "step": 10311 }, { "epoch": 0.42736955530689213, "grad_norm": 0.4092990756034851, "learning_rate": 2.863359442993908e-06, "loss": 0.6892, "step": 10312 }, { "epoch": 0.4274109992125658, "grad_norm": 0.41131725907325745, "learning_rate": 2.8631522234655394e-06, "loss": 0.6963, "step": 10313 }, { "epoch": 0.4274524431182395, "grad_norm": 0.4274854362010956, "learning_rate": 2.8629450039371713e-06, "loss": 0.7202, "step": 10314 }, { "epoch": 0.4274938870239131, "grad_norm": 0.4018362760543823, "learning_rate": 2.8627377844088026e-06, "loss": 0.692, "step": 10315 }, { "epoch": 0.4275353309295868, "grad_norm": 0.38827648758888245, "learning_rate": 2.862530564880435e-06, "loss": 0.6609, "step": 10316 }, { "epoch": 0.42757677483526046, "grad_norm": 0.41456443071365356, "learning_rate": 2.862323345352066e-06, "loss": 0.7686, "step": 10317 }, { "epoch": 0.42761821874093414, "grad_norm": 0.45351704955101013, 
"learning_rate": 2.862116125823698e-06, "loss": 0.7185, "step": 10318 }, { "epoch": 0.4276596626466078, "grad_norm": 0.4366154670715332, "learning_rate": 2.8619089062953295e-06, "loss": 0.7505, "step": 10319 }, { "epoch": 0.4277011065522815, "grad_norm": 0.42559099197387695, "learning_rate": 2.8617016867669613e-06, "loss": 0.7468, "step": 10320 }, { "epoch": 0.4277425504579552, "grad_norm": 0.44532063603401184, "learning_rate": 2.861494467238593e-06, "loss": 0.7395, "step": 10321 }, { "epoch": 0.42778399436362885, "grad_norm": 0.3997159004211426, "learning_rate": 2.8612872477102245e-06, "loss": 0.6895, "step": 10322 }, { "epoch": 0.42782543826930247, "grad_norm": 0.39214181900024414, "learning_rate": 2.8610800281818563e-06, "loss": 0.6453, "step": 10323 }, { "epoch": 0.42786688217497615, "grad_norm": 0.4151853024959564, "learning_rate": 2.8608728086534877e-06, "loss": 0.7383, "step": 10324 }, { "epoch": 0.4279083260806498, "grad_norm": 0.4224843382835388, "learning_rate": 2.8606655891251195e-06, "loss": 0.744, "step": 10325 }, { "epoch": 0.4279497699863235, "grad_norm": 0.3751247227191925, "learning_rate": 2.860458369596751e-06, "loss": 0.6599, "step": 10326 }, { "epoch": 0.4279912138919972, "grad_norm": 0.4261116087436676, "learning_rate": 2.8602511500683827e-06, "loss": 0.7114, "step": 10327 }, { "epoch": 0.42803265779767086, "grad_norm": 0.36136800050735474, "learning_rate": 2.860043930540014e-06, "loss": 0.6694, "step": 10328 }, { "epoch": 0.42807410170334453, "grad_norm": 0.40281742811203003, "learning_rate": 2.859836711011646e-06, "loss": 0.7125, "step": 10329 }, { "epoch": 0.4281155456090182, "grad_norm": 0.4250551462173462, "learning_rate": 2.8596294914832777e-06, "loss": 0.7083, "step": 10330 }, { "epoch": 0.4281569895146919, "grad_norm": 0.4115763008594513, "learning_rate": 2.859422271954909e-06, "loss": 0.6924, "step": 10331 }, { "epoch": 0.4281984334203655, "grad_norm": 0.43523451685905457, "learning_rate": 2.8592150524265413e-06, "loss": 0.7241, 
"step": 10332 }, { "epoch": 0.4282398773260392, "grad_norm": 0.3943975865840912, "learning_rate": 2.8590078328981722e-06, "loss": 0.6353, "step": 10333 }, { "epoch": 0.42828132123171286, "grad_norm": 0.43322503566741943, "learning_rate": 2.8588006133698045e-06, "loss": 0.6724, "step": 10334 }, { "epoch": 0.42832276513738654, "grad_norm": 0.5009081363677979, "learning_rate": 2.8585933938414354e-06, "loss": 0.7222, "step": 10335 }, { "epoch": 0.4283642090430602, "grad_norm": 0.43241265416145325, "learning_rate": 2.8583861743130677e-06, "loss": 0.6643, "step": 10336 }, { "epoch": 0.4284056529487339, "grad_norm": 0.41388100385665894, "learning_rate": 2.858178954784699e-06, "loss": 0.7316, "step": 10337 }, { "epoch": 0.42844709685440757, "grad_norm": 0.403164267539978, "learning_rate": 2.857971735256331e-06, "loss": 0.6941, "step": 10338 }, { "epoch": 0.42848854076008125, "grad_norm": 0.4207041561603546, "learning_rate": 2.8577645157279627e-06, "loss": 0.6661, "step": 10339 }, { "epoch": 0.4285299846657549, "grad_norm": 0.41981980204582214, "learning_rate": 2.857557296199594e-06, "loss": 0.6494, "step": 10340 }, { "epoch": 0.42857142857142855, "grad_norm": 0.4165239632129669, "learning_rate": 2.857350076671226e-06, "loss": 0.6901, "step": 10341 }, { "epoch": 0.4286128724771022, "grad_norm": 0.4078351557254791, "learning_rate": 2.8571428571428573e-06, "loss": 0.688, "step": 10342 }, { "epoch": 0.4286543163827759, "grad_norm": 0.4032996594905853, "learning_rate": 2.856935637614489e-06, "loss": 0.7085, "step": 10343 }, { "epoch": 0.4286957602884496, "grad_norm": 0.40226784348487854, "learning_rate": 2.8567284180861205e-06, "loss": 0.694, "step": 10344 }, { "epoch": 0.42873720419412326, "grad_norm": 0.4037446081638336, "learning_rate": 2.8565211985577523e-06, "loss": 0.6671, "step": 10345 }, { "epoch": 0.42877864809979693, "grad_norm": 0.42036697268486023, "learning_rate": 2.856313979029384e-06, "loss": 0.6985, "step": 10346 }, { "epoch": 0.4288200920054706, "grad_norm": 
0.4235241711139679, "learning_rate": 2.8561067595010155e-06, "loss": 0.7065, "step": 10347 }, { "epoch": 0.4288615359111443, "grad_norm": 0.3868952691555023, "learning_rate": 2.8558995399726473e-06, "loss": 0.6929, "step": 10348 }, { "epoch": 0.42890297981681796, "grad_norm": 0.4019016921520233, "learning_rate": 2.8556923204442786e-06, "loss": 0.6971, "step": 10349 }, { "epoch": 0.4289444237224916, "grad_norm": 0.41189122200012207, "learning_rate": 2.855485100915911e-06, "loss": 0.7515, "step": 10350 }, { "epoch": 0.42898586762816526, "grad_norm": 0.43131476640701294, "learning_rate": 2.855277881387542e-06, "loss": 0.6801, "step": 10351 }, { "epoch": 0.42902731153383894, "grad_norm": 0.3869524300098419, "learning_rate": 2.855070661859174e-06, "loss": 0.7052, "step": 10352 }, { "epoch": 0.4290687554395126, "grad_norm": 0.4453834593296051, "learning_rate": 2.8548634423308055e-06, "loss": 0.6528, "step": 10353 }, { "epoch": 0.4291101993451863, "grad_norm": 0.41838037967681885, "learning_rate": 2.8546562228024373e-06, "loss": 0.6708, "step": 10354 }, { "epoch": 0.42915164325085997, "grad_norm": 0.409089595079422, "learning_rate": 2.854449003274069e-06, "loss": 0.7175, "step": 10355 }, { "epoch": 0.42919308715653365, "grad_norm": 0.44469740986824036, "learning_rate": 2.8542417837457005e-06, "loss": 0.7241, "step": 10356 }, { "epoch": 0.4292345310622073, "grad_norm": 0.3896887004375458, "learning_rate": 2.8540345642173323e-06, "loss": 0.6871, "step": 10357 }, { "epoch": 0.42927597496788095, "grad_norm": 0.37660789489746094, "learning_rate": 2.8538273446889637e-06, "loss": 0.6422, "step": 10358 }, { "epoch": 0.4293174188735546, "grad_norm": 0.3995661735534668, "learning_rate": 2.8536201251605955e-06, "loss": 0.6704, "step": 10359 }, { "epoch": 0.4293588627792283, "grad_norm": 0.45961079001426697, "learning_rate": 2.853412905632227e-06, "loss": 0.782, "step": 10360 }, { "epoch": 0.429400306684902, "grad_norm": 0.4425617754459381, "learning_rate": 2.8532056861038587e-06, 
"loss": 0.7329, "step": 10361 }, { "epoch": 0.42944175059057565, "grad_norm": 0.388287752866745, "learning_rate": 2.85299846657549e-06, "loss": 0.6654, "step": 10362 }, { "epoch": 0.42948319449624933, "grad_norm": 0.4254777133464813, "learning_rate": 2.852791247047122e-06, "loss": 0.6649, "step": 10363 }, { "epoch": 0.429524638401923, "grad_norm": 0.43070361018180847, "learning_rate": 2.8525840275187537e-06, "loss": 0.6343, "step": 10364 }, { "epoch": 0.4295660823075967, "grad_norm": 0.3828522264957428, "learning_rate": 2.852376807990385e-06, "loss": 0.6719, "step": 10365 }, { "epoch": 0.42960752621327036, "grad_norm": 0.4857997000217438, "learning_rate": 2.8521695884620173e-06, "loss": 0.781, "step": 10366 }, { "epoch": 0.429648970118944, "grad_norm": 0.4035874605178833, "learning_rate": 2.8519623689336482e-06, "loss": 0.6904, "step": 10367 }, { "epoch": 0.42969041402461766, "grad_norm": 0.41508448123931885, "learning_rate": 2.8517551494052805e-06, "loss": 0.6844, "step": 10368 }, { "epoch": 0.42973185793029134, "grad_norm": 0.3784126043319702, "learning_rate": 2.8515479298769114e-06, "loss": 0.6697, "step": 10369 }, { "epoch": 0.429773301835965, "grad_norm": 0.4123397469520569, "learning_rate": 2.8513407103485437e-06, "loss": 0.6909, "step": 10370 }, { "epoch": 0.4298147457416387, "grad_norm": 0.4014924168586731, "learning_rate": 2.851133490820175e-06, "loss": 0.6503, "step": 10371 }, { "epoch": 0.42985618964731237, "grad_norm": 0.3760715425014496, "learning_rate": 2.850926271291807e-06, "loss": 0.6814, "step": 10372 }, { "epoch": 0.42989763355298605, "grad_norm": 0.39710837602615356, "learning_rate": 2.8507190517634387e-06, "loss": 0.7124, "step": 10373 }, { "epoch": 0.4299390774586597, "grad_norm": 0.38073188066482544, "learning_rate": 2.85051183223507e-06, "loss": 0.7207, "step": 10374 }, { "epoch": 0.4299805213643334, "grad_norm": 0.39431479573249817, "learning_rate": 2.850304612706702e-06, "loss": 0.718, "step": 10375 }, { "epoch": 0.430021965270007, 
"grad_norm": 0.3984322249889374, "learning_rate": 2.8500973931783333e-06, "loss": 0.6854, "step": 10376 }, { "epoch": 0.4300634091756807, "grad_norm": 0.4100247919559479, "learning_rate": 2.849890173649965e-06, "loss": 0.7086, "step": 10377 }, { "epoch": 0.4301048530813544, "grad_norm": 0.40601712465286255, "learning_rate": 2.8496829541215965e-06, "loss": 0.6569, "step": 10378 }, { "epoch": 0.43014629698702805, "grad_norm": 0.39086848497390747, "learning_rate": 2.8494757345932283e-06, "loss": 0.7195, "step": 10379 }, { "epoch": 0.43018774089270173, "grad_norm": 0.41443049907684326, "learning_rate": 2.8492685150648597e-06, "loss": 0.6763, "step": 10380 }, { "epoch": 0.4302291847983754, "grad_norm": 0.41948074102401733, "learning_rate": 2.8490612955364915e-06, "loss": 0.7395, "step": 10381 }, { "epoch": 0.4302706287040491, "grad_norm": 0.4203872084617615, "learning_rate": 2.8488540760081233e-06, "loss": 0.6766, "step": 10382 }, { "epoch": 0.43031207260972276, "grad_norm": 0.41850173473358154, "learning_rate": 2.8486468564797547e-06, "loss": 0.6774, "step": 10383 }, { "epoch": 0.4303535165153964, "grad_norm": 0.4498981535434723, "learning_rate": 2.848439636951387e-06, "loss": 0.7539, "step": 10384 }, { "epoch": 0.43039496042107006, "grad_norm": 0.4484434127807617, "learning_rate": 2.848232417423018e-06, "loss": 0.7385, "step": 10385 }, { "epoch": 0.43043640432674374, "grad_norm": 0.41074103116989136, "learning_rate": 2.84802519789465e-06, "loss": 0.6959, "step": 10386 }, { "epoch": 0.4304778482324174, "grad_norm": 0.4210585355758667, "learning_rate": 2.8478179783662815e-06, "loss": 0.7473, "step": 10387 }, { "epoch": 0.4305192921380911, "grad_norm": 0.4294824004173279, "learning_rate": 2.8476107588379133e-06, "loss": 0.729, "step": 10388 }, { "epoch": 0.43056073604376477, "grad_norm": 0.401900976896286, "learning_rate": 2.8474035393095447e-06, "loss": 0.7144, "step": 10389 }, { "epoch": 0.43060217994943845, "grad_norm": 0.41022762656211853, "learning_rate": 
2.8471963197811765e-06, "loss": 0.6653, "step": 10390 }, { "epoch": 0.4306436238551121, "grad_norm": 0.45915380120277405, "learning_rate": 2.8469891002528083e-06, "loss": 0.7119, "step": 10391 }, { "epoch": 0.4306850677607858, "grad_norm": 0.4486527144908905, "learning_rate": 2.8467818807244397e-06, "loss": 0.7689, "step": 10392 }, { "epoch": 0.4307265116664594, "grad_norm": 0.4198085367679596, "learning_rate": 2.8465746611960715e-06, "loss": 0.6575, "step": 10393 }, { "epoch": 0.4307679555721331, "grad_norm": 0.4376690685749054, "learning_rate": 2.846367441667703e-06, "loss": 0.7307, "step": 10394 }, { "epoch": 0.4308093994778068, "grad_norm": 0.3829568028450012, "learning_rate": 2.8461602221393347e-06, "loss": 0.6804, "step": 10395 }, { "epoch": 0.43085084338348045, "grad_norm": 0.445376455783844, "learning_rate": 2.845953002610966e-06, "loss": 0.7283, "step": 10396 }, { "epoch": 0.43089228728915413, "grad_norm": 0.42636096477508545, "learning_rate": 2.845745783082598e-06, "loss": 0.7225, "step": 10397 }, { "epoch": 0.4309337311948278, "grad_norm": 0.3926697373390198, "learning_rate": 2.8455385635542297e-06, "loss": 0.6819, "step": 10398 }, { "epoch": 0.4309751751005015, "grad_norm": 0.383539080619812, "learning_rate": 2.845331344025861e-06, "loss": 0.6531, "step": 10399 }, { "epoch": 0.43101661900617516, "grad_norm": 0.3896278440952301, "learning_rate": 2.8451241244974933e-06, "loss": 0.686, "step": 10400 }, { "epoch": 0.43105806291184884, "grad_norm": 0.45750442147254944, "learning_rate": 2.8449169049691243e-06, "loss": 0.7634, "step": 10401 }, { "epoch": 0.43109950681752246, "grad_norm": 0.5514282584190369, "learning_rate": 2.8447096854407565e-06, "loss": 0.6888, "step": 10402 }, { "epoch": 0.43114095072319614, "grad_norm": 0.4909672141075134, "learning_rate": 2.8445024659123875e-06, "loss": 0.7512, "step": 10403 }, { "epoch": 0.4311823946288698, "grad_norm": 0.40350115299224854, "learning_rate": 2.8442952463840197e-06, "loss": 0.6833, "step": 10404 }, { 
"epoch": 0.4312238385345435, "grad_norm": 0.4067443311214447, "learning_rate": 2.844088026855651e-06, "loss": 0.6758, "step": 10405 }, { "epoch": 0.43126528244021717, "grad_norm": 0.4207201600074768, "learning_rate": 2.843880807327283e-06, "loss": 0.6987, "step": 10406 }, { "epoch": 0.43130672634589085, "grad_norm": 0.5194283723831177, "learning_rate": 2.8436735877989147e-06, "loss": 0.7173, "step": 10407 }, { "epoch": 0.4313481702515645, "grad_norm": 0.41775834560394287, "learning_rate": 2.843466368270546e-06, "loss": 0.6826, "step": 10408 }, { "epoch": 0.4313896141572382, "grad_norm": 0.4213453531265259, "learning_rate": 2.843259148742178e-06, "loss": 0.7417, "step": 10409 }, { "epoch": 0.4314310580629119, "grad_norm": 0.4384973645210266, "learning_rate": 2.8430519292138093e-06, "loss": 0.7024, "step": 10410 }, { "epoch": 0.4314725019685855, "grad_norm": 0.4062466323375702, "learning_rate": 2.842844709685441e-06, "loss": 0.7256, "step": 10411 }, { "epoch": 0.4315139458742592, "grad_norm": 0.45456886291503906, "learning_rate": 2.8426374901570725e-06, "loss": 0.7163, "step": 10412 }, { "epoch": 0.43155538977993285, "grad_norm": 0.3806185722351074, "learning_rate": 2.8424302706287043e-06, "loss": 0.6923, "step": 10413 }, { "epoch": 0.43159683368560653, "grad_norm": 0.3872608244419098, "learning_rate": 2.8422230511003357e-06, "loss": 0.6941, "step": 10414 }, { "epoch": 0.4316382775912802, "grad_norm": 0.3723561465740204, "learning_rate": 2.8420158315719675e-06, "loss": 0.6619, "step": 10415 }, { "epoch": 0.4316797214969539, "grad_norm": 0.41451072692871094, "learning_rate": 2.8418086120435993e-06, "loss": 0.6829, "step": 10416 }, { "epoch": 0.43172116540262756, "grad_norm": 0.3857555091381073, "learning_rate": 2.8416013925152307e-06, "loss": 0.6677, "step": 10417 }, { "epoch": 0.43176260930830124, "grad_norm": 0.42607617378234863, "learning_rate": 2.841394172986863e-06, "loss": 0.6768, "step": 10418 }, { "epoch": 0.43180405321397486, "grad_norm": 0.4787174463272095, 
"learning_rate": 2.841186953458494e-06, "loss": 0.7783, "step": 10419 }, { "epoch": 0.43184549711964854, "grad_norm": 0.453541100025177, "learning_rate": 2.840979733930126e-06, "loss": 0.7217, "step": 10420 }, { "epoch": 0.4318869410253222, "grad_norm": 0.41865450143814087, "learning_rate": 2.8407725144017575e-06, "loss": 0.7148, "step": 10421 }, { "epoch": 0.4319283849309959, "grad_norm": 0.4176312983036041, "learning_rate": 2.8405652948733893e-06, "loss": 0.6924, "step": 10422 }, { "epoch": 0.43196982883666957, "grad_norm": 0.40825653076171875, "learning_rate": 2.8403580753450207e-06, "loss": 0.6279, "step": 10423 }, { "epoch": 0.43201127274234324, "grad_norm": 0.4302830696105957, "learning_rate": 2.8401508558166525e-06, "loss": 0.6958, "step": 10424 }, { "epoch": 0.4320527166480169, "grad_norm": 0.42328062653541565, "learning_rate": 2.8399436362882843e-06, "loss": 0.7087, "step": 10425 }, { "epoch": 0.4320941605536906, "grad_norm": 0.4232436716556549, "learning_rate": 2.8397364167599157e-06, "loss": 0.7058, "step": 10426 }, { "epoch": 0.4321356044593643, "grad_norm": 0.4146150052547455, "learning_rate": 2.8395291972315475e-06, "loss": 0.7168, "step": 10427 }, { "epoch": 0.4321770483650379, "grad_norm": 0.39475908875465393, "learning_rate": 2.839321977703179e-06, "loss": 0.7042, "step": 10428 }, { "epoch": 0.4322184922707116, "grad_norm": 0.4133051931858063, "learning_rate": 2.8391147581748107e-06, "loss": 0.7473, "step": 10429 }, { "epoch": 0.43225993617638525, "grad_norm": 0.4265364408493042, "learning_rate": 2.838907538646442e-06, "loss": 0.7161, "step": 10430 }, { "epoch": 0.43230138008205893, "grad_norm": 0.47007066011428833, "learning_rate": 2.838700319118074e-06, "loss": 0.6924, "step": 10431 }, { "epoch": 0.4323428239877326, "grad_norm": 0.38882359862327576, "learning_rate": 2.8384930995897053e-06, "loss": 0.6256, "step": 10432 }, { "epoch": 0.4323842678934063, "grad_norm": 0.4489215910434723, "learning_rate": 2.838285880061337e-06, "loss": 0.7015, 
"step": 10433 }, { "epoch": 0.43242571179907996, "grad_norm": 0.42065873742103577, "learning_rate": 2.8380786605329693e-06, "loss": 0.74, "step": 10434 }, { "epoch": 0.43246715570475364, "grad_norm": 0.42804017663002014, "learning_rate": 2.8378714410046003e-06, "loss": 0.6963, "step": 10435 }, { "epoch": 0.4325085996104273, "grad_norm": 0.45132169127464294, "learning_rate": 2.8376642214762325e-06, "loss": 0.6914, "step": 10436 }, { "epoch": 0.43255004351610093, "grad_norm": 0.38495537638664246, "learning_rate": 2.8374570019478635e-06, "loss": 0.7031, "step": 10437 }, { "epoch": 0.4325914874217746, "grad_norm": 0.4353320002555847, "learning_rate": 2.8372497824194957e-06, "loss": 0.6973, "step": 10438 }, { "epoch": 0.4326329313274483, "grad_norm": 0.3904997408390045, "learning_rate": 2.837042562891127e-06, "loss": 0.6505, "step": 10439 }, { "epoch": 0.43267437523312197, "grad_norm": 0.41821181774139404, "learning_rate": 2.836835343362759e-06, "loss": 0.6642, "step": 10440 }, { "epoch": 0.43271581913879564, "grad_norm": 0.42200586199760437, "learning_rate": 2.8366281238343903e-06, "loss": 0.6707, "step": 10441 }, { "epoch": 0.4327572630444693, "grad_norm": 0.45376840233802795, "learning_rate": 2.836420904306022e-06, "loss": 0.7063, "step": 10442 }, { "epoch": 0.432798706950143, "grad_norm": 0.4244289696216583, "learning_rate": 2.836213684777654e-06, "loss": 0.6737, "step": 10443 }, { "epoch": 0.4328401508558167, "grad_norm": 0.4336882531642914, "learning_rate": 2.8360064652492853e-06, "loss": 0.7075, "step": 10444 }, { "epoch": 0.4328815947614903, "grad_norm": 0.4346788823604584, "learning_rate": 2.835799245720917e-06, "loss": 0.6704, "step": 10445 }, { "epoch": 0.432923038667164, "grad_norm": 0.4044596552848816, "learning_rate": 2.8355920261925485e-06, "loss": 0.6975, "step": 10446 }, { "epoch": 0.43296448257283765, "grad_norm": 0.39014607667922974, "learning_rate": 2.8353848066641803e-06, "loss": 0.6404, "step": 10447 }, { "epoch": 0.4330059264785113, "grad_norm": 
0.37580567598342896, "learning_rate": 2.8351775871358117e-06, "loss": 0.6658, "step": 10448 }, { "epoch": 0.433047370384185, "grad_norm": 0.42023444175720215, "learning_rate": 2.8349703676074435e-06, "loss": 0.7043, "step": 10449 }, { "epoch": 0.4330888142898587, "grad_norm": 0.41422587633132935, "learning_rate": 2.834763148079075e-06, "loss": 0.6907, "step": 10450 }, { "epoch": 0.43313025819553236, "grad_norm": 0.39521753787994385, "learning_rate": 2.8345559285507067e-06, "loss": 0.6328, "step": 10451 }, { "epoch": 0.43317170210120604, "grad_norm": 0.3909316062927246, "learning_rate": 2.834348709022339e-06, "loss": 0.6357, "step": 10452 }, { "epoch": 0.4332131460068797, "grad_norm": 0.4118797481060028, "learning_rate": 2.83414148949397e-06, "loss": 0.6495, "step": 10453 }, { "epoch": 0.43325458991255333, "grad_norm": 0.44610312581062317, "learning_rate": 2.833934269965602e-06, "loss": 0.7161, "step": 10454 }, { "epoch": 0.433296033818227, "grad_norm": 0.40244585275650024, "learning_rate": 2.8337270504372335e-06, "loss": 0.6704, "step": 10455 }, { "epoch": 0.4333374777239007, "grad_norm": 0.4081888496875763, "learning_rate": 2.8335198309088653e-06, "loss": 0.6914, "step": 10456 }, { "epoch": 0.43337892162957437, "grad_norm": 0.42663708329200745, "learning_rate": 2.8333126113804967e-06, "loss": 0.6818, "step": 10457 }, { "epoch": 0.43342036553524804, "grad_norm": 0.3722307085990906, "learning_rate": 2.8331053918521285e-06, "loss": 0.6589, "step": 10458 }, { "epoch": 0.4334618094409217, "grad_norm": 0.4462888240814209, "learning_rate": 2.8328981723237603e-06, "loss": 0.6964, "step": 10459 }, { "epoch": 0.4335032533465954, "grad_norm": 0.4314737319946289, "learning_rate": 2.8326909527953917e-06, "loss": 0.6882, "step": 10460 }, { "epoch": 0.4335446972522691, "grad_norm": 0.39039912819862366, "learning_rate": 2.8324837332670235e-06, "loss": 0.6716, "step": 10461 }, { "epoch": 0.43358614115794275, "grad_norm": 0.42251133918762207, "learning_rate": 2.832276513738655e-06, 
"loss": 0.7087, "step": 10462 }, { "epoch": 0.43362758506361637, "grad_norm": 0.36282774806022644, "learning_rate": 2.8320692942102867e-06, "loss": 0.6202, "step": 10463 }, { "epoch": 0.43366902896929005, "grad_norm": 0.42430540919303894, "learning_rate": 2.831862074681918e-06, "loss": 0.7068, "step": 10464 }, { "epoch": 0.4337104728749637, "grad_norm": 0.43808457255363464, "learning_rate": 2.83165485515355e-06, "loss": 0.687, "step": 10465 }, { "epoch": 0.4337519167806374, "grad_norm": 0.4150921106338501, "learning_rate": 2.8314476356251813e-06, "loss": 0.7036, "step": 10466 }, { "epoch": 0.4337933606863111, "grad_norm": 0.39958906173706055, "learning_rate": 2.831240416096813e-06, "loss": 0.6436, "step": 10467 }, { "epoch": 0.43383480459198476, "grad_norm": 0.37656062841415405, "learning_rate": 2.8310331965684453e-06, "loss": 0.6648, "step": 10468 }, { "epoch": 0.43387624849765843, "grad_norm": 0.399624228477478, "learning_rate": 2.8308259770400763e-06, "loss": 0.7317, "step": 10469 }, { "epoch": 0.4339176924033321, "grad_norm": 0.4033322036266327, "learning_rate": 2.8306187575117085e-06, "loss": 0.6682, "step": 10470 }, { "epoch": 0.43395913630900573, "grad_norm": 0.41484978795051575, "learning_rate": 2.8304115379833395e-06, "loss": 0.6672, "step": 10471 }, { "epoch": 0.4340005802146794, "grad_norm": 0.38175979256629944, "learning_rate": 2.8302043184549717e-06, "loss": 0.6749, "step": 10472 }, { "epoch": 0.4340420241203531, "grad_norm": 0.3942864239215851, "learning_rate": 2.829997098926603e-06, "loss": 0.6844, "step": 10473 }, { "epoch": 0.43408346802602676, "grad_norm": 0.38024571537971497, "learning_rate": 2.829789879398235e-06, "loss": 0.656, "step": 10474 }, { "epoch": 0.43412491193170044, "grad_norm": 0.3977097272872925, "learning_rate": 2.8295826598698663e-06, "loss": 0.6895, "step": 10475 }, { "epoch": 0.4341663558373741, "grad_norm": 0.39658039808273315, "learning_rate": 2.829375440341498e-06, "loss": 0.6581, "step": 10476 }, { "epoch": 
0.4342077997430478, "grad_norm": 0.4145636260509491, "learning_rate": 2.82916822081313e-06, "loss": 0.6465, "step": 10477 }, { "epoch": 0.4342492436487215, "grad_norm": 0.4419283866882324, "learning_rate": 2.8289610012847613e-06, "loss": 0.7417, "step": 10478 }, { "epoch": 0.43429068755439515, "grad_norm": 0.3772049844264984, "learning_rate": 2.828753781756393e-06, "loss": 0.6864, "step": 10479 }, { "epoch": 0.43433213146006877, "grad_norm": 0.3961716294288635, "learning_rate": 2.8285465622280245e-06, "loss": 0.6528, "step": 10480 }, { "epoch": 0.43437357536574245, "grad_norm": 0.43045318126678467, "learning_rate": 2.8283393426996563e-06, "loss": 0.6707, "step": 10481 }, { "epoch": 0.4344150192714161, "grad_norm": 0.42050227522850037, "learning_rate": 2.8281321231712877e-06, "loss": 0.7131, "step": 10482 }, { "epoch": 0.4344564631770898, "grad_norm": 0.39020708203315735, "learning_rate": 2.8279249036429195e-06, "loss": 0.6646, "step": 10483 }, { "epoch": 0.4344979070827635, "grad_norm": 0.432284414768219, "learning_rate": 2.827717684114551e-06, "loss": 0.6824, "step": 10484 }, { "epoch": 0.43453935098843716, "grad_norm": 0.396045058965683, "learning_rate": 2.8275104645861827e-06, "loss": 0.6782, "step": 10485 }, { "epoch": 0.43458079489411083, "grad_norm": 0.41037964820861816, "learning_rate": 2.827303245057815e-06, "loss": 0.7098, "step": 10486 }, { "epoch": 0.4346222387997845, "grad_norm": 0.38621383905410767, "learning_rate": 2.827096025529446e-06, "loss": 0.7354, "step": 10487 }, { "epoch": 0.4346636827054582, "grad_norm": 0.39662718772888184, "learning_rate": 2.826888806001078e-06, "loss": 0.6588, "step": 10488 }, { "epoch": 0.4347051266111318, "grad_norm": 0.41946738958358765, "learning_rate": 2.8266815864727095e-06, "loss": 0.6823, "step": 10489 }, { "epoch": 0.4347465705168055, "grad_norm": 0.433270663022995, "learning_rate": 2.8264743669443413e-06, "loss": 0.7177, "step": 10490 }, { "epoch": 0.43478801442247916, "grad_norm": 0.4116487503051758, 
"learning_rate": 2.8262671474159727e-06, "loss": 0.6906, "step": 10491 }, { "epoch": 0.43482945832815284, "grad_norm": 0.3993769586086273, "learning_rate": 2.8260599278876045e-06, "loss": 0.6345, "step": 10492 }, { "epoch": 0.4348709022338265, "grad_norm": 0.424850195646286, "learning_rate": 2.825852708359236e-06, "loss": 0.7167, "step": 10493 }, { "epoch": 0.4349123461395002, "grad_norm": 0.402822345495224, "learning_rate": 2.8256454888308677e-06, "loss": 0.7317, "step": 10494 }, { "epoch": 0.43495379004517387, "grad_norm": 0.4139024019241333, "learning_rate": 2.8254382693024995e-06, "loss": 0.6396, "step": 10495 }, { "epoch": 0.43499523395084755, "grad_norm": 0.43480122089385986, "learning_rate": 2.825231049774131e-06, "loss": 0.6902, "step": 10496 }, { "epoch": 0.4350366778565212, "grad_norm": 0.40440455079078674, "learning_rate": 2.8250238302457627e-06, "loss": 0.6963, "step": 10497 }, { "epoch": 0.43507812176219485, "grad_norm": 0.4423624873161316, "learning_rate": 2.824816610717394e-06, "loss": 0.6774, "step": 10498 }, { "epoch": 0.4351195656678685, "grad_norm": 0.40952593088150024, "learning_rate": 2.824609391189026e-06, "loss": 0.7397, "step": 10499 }, { "epoch": 0.4351610095735422, "grad_norm": 0.40536072850227356, "learning_rate": 2.8244021716606573e-06, "loss": 0.718, "step": 10500 }, { "epoch": 0.4352024534792159, "grad_norm": 0.4244198203086853, "learning_rate": 2.824194952132289e-06, "loss": 0.7441, "step": 10501 }, { "epoch": 0.43524389738488956, "grad_norm": 0.3927190601825714, "learning_rate": 2.8239877326039205e-06, "loss": 0.6692, "step": 10502 }, { "epoch": 0.43528534129056323, "grad_norm": 0.42186030745506287, "learning_rate": 2.8237805130755523e-06, "loss": 0.7111, "step": 10503 }, { "epoch": 0.4353267851962369, "grad_norm": 0.42222970724105835, "learning_rate": 2.8235732935471845e-06, "loss": 0.7449, "step": 10504 }, { "epoch": 0.4353682291019106, "grad_norm": 0.37797150015830994, "learning_rate": 2.823366074018816e-06, "loss": 0.6899, 
"step": 10505 }, { "epoch": 0.4354096730075842, "grad_norm": 0.42747077345848083, "learning_rate": 2.8231588544904477e-06, "loss": 0.663, "step": 10506 }, { "epoch": 0.4354511169132579, "grad_norm": 0.41385143995285034, "learning_rate": 2.822951634962079e-06, "loss": 0.7111, "step": 10507 }, { "epoch": 0.43549256081893156, "grad_norm": 0.4049971103668213, "learning_rate": 2.822744415433711e-06, "loss": 0.6587, "step": 10508 }, { "epoch": 0.43553400472460524, "grad_norm": 0.4067429304122925, "learning_rate": 2.8225371959053423e-06, "loss": 0.7156, "step": 10509 }, { "epoch": 0.4355754486302789, "grad_norm": 0.44558238983154297, "learning_rate": 2.822329976376974e-06, "loss": 0.6871, "step": 10510 }, { "epoch": 0.4356168925359526, "grad_norm": 0.45163431763648987, "learning_rate": 2.8221227568486055e-06, "loss": 0.7032, "step": 10511 }, { "epoch": 0.43565833644162627, "grad_norm": 0.39426249265670776, "learning_rate": 2.8219155373202373e-06, "loss": 0.7511, "step": 10512 }, { "epoch": 0.43569978034729995, "grad_norm": 0.39733532071113586, "learning_rate": 2.821708317791869e-06, "loss": 0.6276, "step": 10513 }, { "epoch": 0.4357412242529736, "grad_norm": 0.4135626256465912, "learning_rate": 2.8215010982635005e-06, "loss": 0.6664, "step": 10514 }, { "epoch": 0.43578266815864725, "grad_norm": 0.3550359606742859, "learning_rate": 2.8212938787351323e-06, "loss": 0.6376, "step": 10515 }, { "epoch": 0.4358241120643209, "grad_norm": 0.401189923286438, "learning_rate": 2.8210866592067637e-06, "loss": 0.6859, "step": 10516 }, { "epoch": 0.4358655559699946, "grad_norm": 0.41182178258895874, "learning_rate": 2.8208794396783955e-06, "loss": 0.6853, "step": 10517 }, { "epoch": 0.4359069998756683, "grad_norm": 0.42777812480926514, "learning_rate": 2.820672220150027e-06, "loss": 0.7268, "step": 10518 }, { "epoch": 0.43594844378134195, "grad_norm": 0.3913375735282898, "learning_rate": 2.8204650006216587e-06, "loss": 0.6448, "step": 10519 }, { "epoch": 0.43598988768701563, 
"grad_norm": 0.44535398483276367, "learning_rate": 2.820257781093291e-06, "loss": 0.6997, "step": 10520 }, { "epoch": 0.4360313315926893, "grad_norm": 0.4279990792274475, "learning_rate": 2.820050561564922e-06, "loss": 0.6793, "step": 10521 }, { "epoch": 0.436072775498363, "grad_norm": 0.3860864043235779, "learning_rate": 2.819843342036554e-06, "loss": 0.6599, "step": 10522 }, { "epoch": 0.43611421940403666, "grad_norm": 0.41974228620529175, "learning_rate": 2.8196361225081855e-06, "loss": 0.7134, "step": 10523 }, { "epoch": 0.4361556633097103, "grad_norm": 0.3703858554363251, "learning_rate": 2.8194289029798173e-06, "loss": 0.6343, "step": 10524 }, { "epoch": 0.43619710721538396, "grad_norm": 0.3997543752193451, "learning_rate": 2.8192216834514487e-06, "loss": 0.678, "step": 10525 }, { "epoch": 0.43623855112105764, "grad_norm": 0.43000444769859314, "learning_rate": 2.8190144639230805e-06, "loss": 0.6931, "step": 10526 }, { "epoch": 0.4362799950267313, "grad_norm": 0.4271995723247528, "learning_rate": 2.818807244394712e-06, "loss": 0.7231, "step": 10527 }, { "epoch": 0.436321438932405, "grad_norm": 0.41930752992630005, "learning_rate": 2.8186000248663437e-06, "loss": 0.699, "step": 10528 }, { "epoch": 0.43636288283807867, "grad_norm": 0.4193907380104065, "learning_rate": 2.8183928053379755e-06, "loss": 0.7102, "step": 10529 }, { "epoch": 0.43640432674375235, "grad_norm": 0.3911597728729248, "learning_rate": 2.818185585809607e-06, "loss": 0.6589, "step": 10530 }, { "epoch": 0.436445770649426, "grad_norm": 0.40989765524864197, "learning_rate": 2.8179783662812387e-06, "loss": 0.719, "step": 10531 }, { "epoch": 0.43648721455509965, "grad_norm": 0.4120161235332489, "learning_rate": 2.81777114675287e-06, "loss": 0.7134, "step": 10532 }, { "epoch": 0.4365286584607733, "grad_norm": 0.45054763555526733, "learning_rate": 2.817563927224502e-06, "loss": 0.7251, "step": 10533 }, { "epoch": 0.436570102366447, "grad_norm": 0.40333986282348633, "learning_rate": 
2.8173567076961333e-06, "loss": 0.6725, "step": 10534 }, { "epoch": 0.4366115462721207, "grad_norm": 0.46302980184555054, "learning_rate": 2.817149488167765e-06, "loss": 0.7195, "step": 10535 }, { "epoch": 0.43665299017779435, "grad_norm": 0.4543590247631073, "learning_rate": 2.8169422686393965e-06, "loss": 0.6653, "step": 10536 }, { "epoch": 0.43669443408346803, "grad_norm": 0.4041726291179657, "learning_rate": 2.8167350491110283e-06, "loss": 0.7134, "step": 10537 }, { "epoch": 0.4367358779891417, "grad_norm": 0.4104674160480499, "learning_rate": 2.8165278295826605e-06, "loss": 0.6461, "step": 10538 }, { "epoch": 0.4367773218948154, "grad_norm": 0.4113777279853821, "learning_rate": 2.816320610054292e-06, "loss": 0.6572, "step": 10539 }, { "epoch": 0.43681876580048906, "grad_norm": 0.41088980436325073, "learning_rate": 2.8161133905259237e-06, "loss": 0.6776, "step": 10540 }, { "epoch": 0.4368602097061627, "grad_norm": 0.4095016419887543, "learning_rate": 2.815906170997555e-06, "loss": 0.693, "step": 10541 }, { "epoch": 0.43690165361183636, "grad_norm": 0.4238571524620056, "learning_rate": 2.815698951469187e-06, "loss": 0.6946, "step": 10542 }, { "epoch": 0.43694309751751004, "grad_norm": 0.4106113910675049, "learning_rate": 2.8154917319408183e-06, "loss": 0.7159, "step": 10543 }, { "epoch": 0.4369845414231837, "grad_norm": 0.3952290117740631, "learning_rate": 2.81528451241245e-06, "loss": 0.696, "step": 10544 }, { "epoch": 0.4370259853288574, "grad_norm": 0.4206458330154419, "learning_rate": 2.8150772928840815e-06, "loss": 0.6693, "step": 10545 }, { "epoch": 0.43706742923453107, "grad_norm": 0.41807064414024353, "learning_rate": 2.8148700733557133e-06, "loss": 0.6824, "step": 10546 }, { "epoch": 0.43710887314020475, "grad_norm": 0.3907195031642914, "learning_rate": 2.814662853827345e-06, "loss": 0.6686, "step": 10547 }, { "epoch": 0.4371503170458784, "grad_norm": 0.39504575729370117, "learning_rate": 2.8144556342989765e-06, "loss": 0.6621, "step": 10548 }, { 
"epoch": 0.4371917609515521, "grad_norm": 0.375581294298172, "learning_rate": 2.8142484147706083e-06, "loss": 0.6389, "step": 10549 }, { "epoch": 0.4372332048572257, "grad_norm": 0.4173888564109802, "learning_rate": 2.8140411952422397e-06, "loss": 0.6973, "step": 10550 }, { "epoch": 0.4372746487628994, "grad_norm": 0.4885769486427307, "learning_rate": 2.8138339757138715e-06, "loss": 0.7322, "step": 10551 }, { "epoch": 0.4373160926685731, "grad_norm": 0.4213376045227051, "learning_rate": 2.813626756185503e-06, "loss": 0.7437, "step": 10552 }, { "epoch": 0.43735753657424675, "grad_norm": 0.40378329157829285, "learning_rate": 2.8134195366571347e-06, "loss": 0.6732, "step": 10553 }, { "epoch": 0.43739898047992043, "grad_norm": 0.40505045652389526, "learning_rate": 2.813212317128766e-06, "loss": 0.7404, "step": 10554 }, { "epoch": 0.4374404243855941, "grad_norm": 0.4088124930858612, "learning_rate": 2.813005097600398e-06, "loss": 0.7034, "step": 10555 }, { "epoch": 0.4374818682912678, "grad_norm": 0.38331568241119385, "learning_rate": 2.81279787807203e-06, "loss": 0.6522, "step": 10556 }, { "epoch": 0.43752331219694146, "grad_norm": 0.45342105627059937, "learning_rate": 2.8125906585436615e-06, "loss": 0.7258, "step": 10557 }, { "epoch": 0.43756475610261514, "grad_norm": 0.43386366963386536, "learning_rate": 2.8123834390152933e-06, "loss": 0.738, "step": 10558 }, { "epoch": 0.43760620000828876, "grad_norm": 0.403163343667984, "learning_rate": 2.8121762194869247e-06, "loss": 0.6968, "step": 10559 }, { "epoch": 0.43764764391396244, "grad_norm": 0.4145965874195099, "learning_rate": 2.8119689999585565e-06, "loss": 0.6768, "step": 10560 }, { "epoch": 0.4376890878196361, "grad_norm": 0.415755033493042, "learning_rate": 2.811761780430188e-06, "loss": 0.6965, "step": 10561 }, { "epoch": 0.4377305317253098, "grad_norm": 0.4121474325656891, "learning_rate": 2.8115545609018197e-06, "loss": 0.6301, "step": 10562 }, { "epoch": 0.43777197563098347, "grad_norm": 0.4487161338329315, 
"learning_rate": 2.811347341373451e-06, "loss": 0.6838, "step": 10563 }, { "epoch": 0.43781341953665714, "grad_norm": 0.4149439334869385, "learning_rate": 2.811140121845083e-06, "loss": 0.6677, "step": 10564 }, { "epoch": 0.4378548634423308, "grad_norm": 0.47265148162841797, "learning_rate": 2.8109329023167147e-06, "loss": 0.7224, "step": 10565 }, { "epoch": 0.4378963073480045, "grad_norm": 0.3953212797641754, "learning_rate": 2.810725682788346e-06, "loss": 0.696, "step": 10566 }, { "epoch": 0.4379377512536781, "grad_norm": 0.4550792872905731, "learning_rate": 2.810518463259978e-06, "loss": 0.6948, "step": 10567 }, { "epoch": 0.4379791951593518, "grad_norm": 0.4062862992286682, "learning_rate": 2.8103112437316093e-06, "loss": 0.6743, "step": 10568 }, { "epoch": 0.4380206390650255, "grad_norm": 0.42095720767974854, "learning_rate": 2.810104024203241e-06, "loss": 0.6003, "step": 10569 }, { "epoch": 0.43806208297069915, "grad_norm": 0.40781882405281067, "learning_rate": 2.8098968046748725e-06, "loss": 0.7029, "step": 10570 }, { "epoch": 0.43810352687637283, "grad_norm": 0.4319226145744324, "learning_rate": 2.8096895851465043e-06, "loss": 0.6348, "step": 10571 }, { "epoch": 0.4381449707820465, "grad_norm": 0.4180569052696228, "learning_rate": 2.8094823656181357e-06, "loss": 0.7301, "step": 10572 }, { "epoch": 0.4381864146877202, "grad_norm": 0.4475403130054474, "learning_rate": 2.809275146089768e-06, "loss": 0.7253, "step": 10573 }, { "epoch": 0.43822785859339386, "grad_norm": 0.44447505474090576, "learning_rate": 2.8090679265613997e-06, "loss": 0.7538, "step": 10574 }, { "epoch": 0.43826930249906754, "grad_norm": 0.3841472268104553, "learning_rate": 2.808860707033031e-06, "loss": 0.6349, "step": 10575 }, { "epoch": 0.43831074640474116, "grad_norm": 0.41565394401550293, "learning_rate": 2.808653487504663e-06, "loss": 0.6914, "step": 10576 }, { "epoch": 0.43835219031041484, "grad_norm": 0.37992823123931885, "learning_rate": 2.8084462679762943e-06, "loss": 0.6667, 
"step": 10577 }, { "epoch": 0.4383936342160885, "grad_norm": 0.4416390657424927, "learning_rate": 2.808239048447926e-06, "loss": 0.7446, "step": 10578 }, { "epoch": 0.4384350781217622, "grad_norm": 0.41148489713668823, "learning_rate": 2.8080318289195575e-06, "loss": 0.7119, "step": 10579 }, { "epoch": 0.43847652202743587, "grad_norm": 0.4190036356449127, "learning_rate": 2.8078246093911893e-06, "loss": 0.6683, "step": 10580 }, { "epoch": 0.43851796593310954, "grad_norm": 0.4412931203842163, "learning_rate": 2.807617389862821e-06, "loss": 0.6141, "step": 10581 }, { "epoch": 0.4385594098387832, "grad_norm": 0.41133633255958557, "learning_rate": 2.8074101703344525e-06, "loss": 0.7148, "step": 10582 }, { "epoch": 0.4386008537444569, "grad_norm": 0.4217776358127594, "learning_rate": 2.8072029508060843e-06, "loss": 0.7068, "step": 10583 }, { "epoch": 0.4386422976501306, "grad_norm": 0.4044494926929474, "learning_rate": 2.8069957312777157e-06, "loss": 0.6781, "step": 10584 }, { "epoch": 0.4386837415558042, "grad_norm": 0.4196532070636749, "learning_rate": 2.8067885117493475e-06, "loss": 0.6812, "step": 10585 }, { "epoch": 0.4387251854614779, "grad_norm": 0.40873807668685913, "learning_rate": 2.806581292220979e-06, "loss": 0.6827, "step": 10586 }, { "epoch": 0.43876662936715155, "grad_norm": 0.39048802852630615, "learning_rate": 2.8063740726926107e-06, "loss": 0.6289, "step": 10587 }, { "epoch": 0.4388080732728252, "grad_norm": 0.39452847838401794, "learning_rate": 2.806166853164242e-06, "loss": 0.7085, "step": 10588 }, { "epoch": 0.4388495171784989, "grad_norm": 0.4516887664794922, "learning_rate": 2.805959633635874e-06, "loss": 0.7197, "step": 10589 }, { "epoch": 0.4388909610841726, "grad_norm": 0.41256222128868103, "learning_rate": 2.805752414107506e-06, "loss": 0.7128, "step": 10590 }, { "epoch": 0.43893240498984626, "grad_norm": 0.4292495548725128, "learning_rate": 2.8055451945791375e-06, "loss": 0.7139, "step": 10591 }, { "epoch": 0.43897384889551994, "grad_norm": 
0.3860321342945099, "learning_rate": 2.8053379750507693e-06, "loss": 0.7107, "step": 10592 }, { "epoch": 0.43901529280119356, "grad_norm": 0.3742777407169342, "learning_rate": 2.8051307555224007e-06, "loss": 0.6594, "step": 10593 }, { "epoch": 0.43905673670686723, "grad_norm": 0.425051212310791, "learning_rate": 2.8049235359940325e-06, "loss": 0.6935, "step": 10594 }, { "epoch": 0.4390981806125409, "grad_norm": 0.4327065944671631, "learning_rate": 2.804716316465664e-06, "loss": 0.6772, "step": 10595 }, { "epoch": 0.4391396245182146, "grad_norm": 0.3994385600090027, "learning_rate": 2.8045090969372957e-06, "loss": 0.7175, "step": 10596 }, { "epoch": 0.43918106842388827, "grad_norm": 0.44643959403038025, "learning_rate": 2.804301877408927e-06, "loss": 0.7716, "step": 10597 }, { "epoch": 0.43922251232956194, "grad_norm": 0.40110304951667786, "learning_rate": 2.804094657880559e-06, "loss": 0.6873, "step": 10598 }, { "epoch": 0.4392639562352356, "grad_norm": 0.45577630400657654, "learning_rate": 2.8038874383521907e-06, "loss": 0.7759, "step": 10599 }, { "epoch": 0.4393054001409093, "grad_norm": 0.4214892089366913, "learning_rate": 2.803680218823822e-06, "loss": 0.6566, "step": 10600 }, { "epoch": 0.439346844046583, "grad_norm": 0.4173821806907654, "learning_rate": 2.803472999295454e-06, "loss": 0.6888, "step": 10601 }, { "epoch": 0.4393882879522566, "grad_norm": 0.40902063250541687, "learning_rate": 2.8032657797670853e-06, "loss": 0.7188, "step": 10602 }, { "epoch": 0.4394297318579303, "grad_norm": 0.4221217930316925, "learning_rate": 2.803058560238717e-06, "loss": 0.7158, "step": 10603 }, { "epoch": 0.43947117576360395, "grad_norm": 0.4052228331565857, "learning_rate": 2.8028513407103485e-06, "loss": 0.7188, "step": 10604 }, { "epoch": 0.4395126196692776, "grad_norm": 0.3986262381076813, "learning_rate": 2.8026441211819803e-06, "loss": 0.72, "step": 10605 }, { "epoch": 0.4395540635749513, "grad_norm": 0.42035117745399475, "learning_rate": 2.8024369016536117e-06, 
"loss": 0.6899, "step": 10606 }, { "epoch": 0.439595507480625, "grad_norm": 0.4032190442085266, "learning_rate": 2.802229682125244e-06, "loss": 0.7178, "step": 10607 }, { "epoch": 0.43963695138629866, "grad_norm": 0.4030471444129944, "learning_rate": 2.8020224625968757e-06, "loss": 0.6398, "step": 10608 }, { "epoch": 0.43967839529197233, "grad_norm": 0.44073107838630676, "learning_rate": 2.801815243068507e-06, "loss": 0.7056, "step": 10609 }, { "epoch": 0.439719839197646, "grad_norm": 0.3942532539367676, "learning_rate": 2.801608023540139e-06, "loss": 0.6741, "step": 10610 }, { "epoch": 0.43976128310331963, "grad_norm": 0.4088027775287628, "learning_rate": 2.8014008040117703e-06, "loss": 0.688, "step": 10611 }, { "epoch": 0.4398027270089933, "grad_norm": 0.4064330458641052, "learning_rate": 2.801193584483402e-06, "loss": 0.6643, "step": 10612 }, { "epoch": 0.439844170914667, "grad_norm": 0.47182697057724, "learning_rate": 2.8009863649550335e-06, "loss": 0.6802, "step": 10613 }, { "epoch": 0.43988561482034066, "grad_norm": 0.4067172408103943, "learning_rate": 2.8007791454266653e-06, "loss": 0.6436, "step": 10614 }, { "epoch": 0.43992705872601434, "grad_norm": 0.39407485723495483, "learning_rate": 2.8005719258982967e-06, "loss": 0.6866, "step": 10615 }, { "epoch": 0.439968502631688, "grad_norm": 0.4145001173019409, "learning_rate": 2.8003647063699285e-06, "loss": 0.6649, "step": 10616 }, { "epoch": 0.4400099465373617, "grad_norm": 0.40470680594444275, "learning_rate": 2.8001574868415603e-06, "loss": 0.6641, "step": 10617 }, { "epoch": 0.4400513904430354, "grad_norm": 0.4191378057003021, "learning_rate": 2.7999502673131917e-06, "loss": 0.6985, "step": 10618 }, { "epoch": 0.44009283434870905, "grad_norm": 0.42289814352989197, "learning_rate": 2.7997430477848235e-06, "loss": 0.7471, "step": 10619 }, { "epoch": 0.44013427825438267, "grad_norm": 0.398616224527359, "learning_rate": 2.799535828256455e-06, "loss": 0.693, "step": 10620 }, { "epoch": 0.44017572216005635, 
"grad_norm": 0.3996465802192688, "learning_rate": 2.7993286087280867e-06, "loss": 0.6963, "step": 10621 }, { "epoch": 0.44021716606573, "grad_norm": 0.39495009183883667, "learning_rate": 2.799121389199718e-06, "loss": 0.6992, "step": 10622 }, { "epoch": 0.4402586099714037, "grad_norm": 0.43602702021598816, "learning_rate": 2.79891416967135e-06, "loss": 0.6582, "step": 10623 }, { "epoch": 0.4403000538770774, "grad_norm": 0.4120887815952301, "learning_rate": 2.7987069501429813e-06, "loss": 0.7324, "step": 10624 }, { "epoch": 0.44034149778275106, "grad_norm": 0.4042099118232727, "learning_rate": 2.7984997306146135e-06, "loss": 0.6731, "step": 10625 }, { "epoch": 0.44038294168842473, "grad_norm": 0.40953895449638367, "learning_rate": 2.7982925110862453e-06, "loss": 0.7229, "step": 10626 }, { "epoch": 0.4404243855940984, "grad_norm": 0.4370701313018799, "learning_rate": 2.7980852915578767e-06, "loss": 0.7239, "step": 10627 }, { "epoch": 0.44046582949977203, "grad_norm": 0.3957687020301819, "learning_rate": 2.7978780720295085e-06, "loss": 0.6882, "step": 10628 }, { "epoch": 0.4405072734054457, "grad_norm": 0.43923306465148926, "learning_rate": 2.79767085250114e-06, "loss": 0.7203, "step": 10629 }, { "epoch": 0.4405487173111194, "grad_norm": 0.4286334216594696, "learning_rate": 2.7974636329727717e-06, "loss": 0.6553, "step": 10630 }, { "epoch": 0.44059016121679306, "grad_norm": 0.4045356512069702, "learning_rate": 2.797256413444403e-06, "loss": 0.6677, "step": 10631 }, { "epoch": 0.44063160512246674, "grad_norm": 0.425481915473938, "learning_rate": 2.797049193916035e-06, "loss": 0.6757, "step": 10632 }, { "epoch": 0.4406730490281404, "grad_norm": 0.4431073069572449, "learning_rate": 2.7968419743876663e-06, "loss": 0.676, "step": 10633 }, { "epoch": 0.4407144929338141, "grad_norm": 0.39554330706596375, "learning_rate": 2.796634754859298e-06, "loss": 0.7019, "step": 10634 }, { "epoch": 0.44075593683948777, "grad_norm": 0.4320926070213318, "learning_rate": 
2.79642753533093e-06, "loss": 0.7622, "step": 10635 }, { "epoch": 0.44079738074516145, "grad_norm": 0.407745897769928, "learning_rate": 2.7962203158025613e-06, "loss": 0.7046, "step": 10636 }, { "epoch": 0.44083882465083507, "grad_norm": 0.43103858828544617, "learning_rate": 2.796013096274193e-06, "loss": 0.7179, "step": 10637 }, { "epoch": 0.44088026855650875, "grad_norm": 0.4133353531360626, "learning_rate": 2.7958058767458245e-06, "loss": 0.7064, "step": 10638 }, { "epoch": 0.4409217124621824, "grad_norm": 0.40666496753692627, "learning_rate": 2.7955986572174563e-06, "loss": 0.6741, "step": 10639 }, { "epoch": 0.4409631563678561, "grad_norm": 0.41530531644821167, "learning_rate": 2.7953914376890877e-06, "loss": 0.7004, "step": 10640 }, { "epoch": 0.4410046002735298, "grad_norm": 0.4393569231033325, "learning_rate": 2.79518421816072e-06, "loss": 0.7197, "step": 10641 }, { "epoch": 0.44104604417920346, "grad_norm": 0.4229731261730194, "learning_rate": 2.7949769986323517e-06, "loss": 0.7461, "step": 10642 }, { "epoch": 0.44108748808487713, "grad_norm": 0.41027265787124634, "learning_rate": 2.794769779103983e-06, "loss": 0.7251, "step": 10643 }, { "epoch": 0.4411289319905508, "grad_norm": 0.5027011036872864, "learning_rate": 2.794562559575615e-06, "loss": 0.6592, "step": 10644 }, { "epoch": 0.4411703758962245, "grad_norm": 0.39715152978897095, "learning_rate": 2.7943553400472463e-06, "loss": 0.6826, "step": 10645 }, { "epoch": 0.4412118198018981, "grad_norm": 0.42920026183128357, "learning_rate": 2.794148120518878e-06, "loss": 0.6713, "step": 10646 }, { "epoch": 0.4412532637075718, "grad_norm": 0.38504379987716675, "learning_rate": 2.7939409009905095e-06, "loss": 0.6515, "step": 10647 }, { "epoch": 0.44129470761324546, "grad_norm": 0.4209655821323395, "learning_rate": 2.7937336814621413e-06, "loss": 0.6528, "step": 10648 }, { "epoch": 0.44133615151891914, "grad_norm": 0.41192448139190674, "learning_rate": 2.7935264619337727e-06, "loss": 0.699, "step": 10649 }, { 
"epoch": 0.4413775954245928, "grad_norm": 0.46252885460853577, "learning_rate": 2.7933192424054045e-06, "loss": 0.7214, "step": 10650 }, { "epoch": 0.4414190393302665, "grad_norm": 0.4012692868709564, "learning_rate": 2.7931120228770363e-06, "loss": 0.7068, "step": 10651 }, { "epoch": 0.44146048323594017, "grad_norm": 0.4071120023727417, "learning_rate": 2.7929048033486677e-06, "loss": 0.6722, "step": 10652 }, { "epoch": 0.44150192714161385, "grad_norm": 0.3719985783100128, "learning_rate": 2.7926975838202995e-06, "loss": 0.6228, "step": 10653 }, { "epoch": 0.44154337104728747, "grad_norm": 0.40987682342529297, "learning_rate": 2.792490364291931e-06, "loss": 0.6926, "step": 10654 }, { "epoch": 0.44158481495296115, "grad_norm": 0.3787139058113098, "learning_rate": 2.7922831447635627e-06, "loss": 0.6658, "step": 10655 }, { "epoch": 0.4416262588586348, "grad_norm": 0.4065896272659302, "learning_rate": 2.792075925235194e-06, "loss": 0.7168, "step": 10656 }, { "epoch": 0.4416677027643085, "grad_norm": 0.4119814336299896, "learning_rate": 2.791868705706826e-06, "loss": 0.6492, "step": 10657 }, { "epoch": 0.4417091466699822, "grad_norm": 0.44896841049194336, "learning_rate": 2.7916614861784573e-06, "loss": 0.752, "step": 10658 }, { "epoch": 0.44175059057565585, "grad_norm": 0.439333438873291, "learning_rate": 2.7914542666500895e-06, "loss": 0.6864, "step": 10659 }, { "epoch": 0.44179203448132953, "grad_norm": 0.4057009518146515, "learning_rate": 2.7912470471217213e-06, "loss": 0.7283, "step": 10660 }, { "epoch": 0.4418334783870032, "grad_norm": 0.4217979609966278, "learning_rate": 2.7910398275933527e-06, "loss": 0.7349, "step": 10661 }, { "epoch": 0.4418749222926769, "grad_norm": 0.40723666548728943, "learning_rate": 2.7908326080649845e-06, "loss": 0.7119, "step": 10662 }, { "epoch": 0.4419163661983505, "grad_norm": 0.41493383049964905, "learning_rate": 2.790625388536616e-06, "loss": 0.7471, "step": 10663 }, { "epoch": 0.4419578101040242, "grad_norm": 0.4638195037841797, 
"learning_rate": 2.7904181690082477e-06, "loss": 0.756, "step": 10664 }, { "epoch": 0.44199925400969786, "grad_norm": 0.39149704575538635, "learning_rate": 2.790210949479879e-06, "loss": 0.6951, "step": 10665 }, { "epoch": 0.44204069791537154, "grad_norm": 0.41711366176605225, "learning_rate": 2.790003729951511e-06, "loss": 0.6803, "step": 10666 }, { "epoch": 0.4420821418210452, "grad_norm": 0.4307255744934082, "learning_rate": 2.7897965104231423e-06, "loss": 0.7043, "step": 10667 }, { "epoch": 0.4421235857267189, "grad_norm": 0.4422287046909332, "learning_rate": 2.789589290894774e-06, "loss": 0.7371, "step": 10668 }, { "epoch": 0.44216502963239257, "grad_norm": 0.4224344491958618, "learning_rate": 2.789382071366406e-06, "loss": 0.7251, "step": 10669 }, { "epoch": 0.44220647353806625, "grad_norm": 0.3960646390914917, "learning_rate": 2.7891748518380373e-06, "loss": 0.7131, "step": 10670 }, { "epoch": 0.4422479174437399, "grad_norm": 0.4138723313808441, "learning_rate": 2.788967632309669e-06, "loss": 0.7058, "step": 10671 }, { "epoch": 0.44228936134941355, "grad_norm": 0.43399345874786377, "learning_rate": 2.7887604127813005e-06, "loss": 0.7166, "step": 10672 }, { "epoch": 0.4423308052550872, "grad_norm": 0.3994515836238861, "learning_rate": 2.7885531932529323e-06, "loss": 0.6901, "step": 10673 }, { "epoch": 0.4423722491607609, "grad_norm": 0.41371437907218933, "learning_rate": 2.7883459737245637e-06, "loss": 0.6648, "step": 10674 }, { "epoch": 0.4424136930664346, "grad_norm": 0.4053540527820587, "learning_rate": 2.788138754196196e-06, "loss": 0.6399, "step": 10675 }, { "epoch": 0.44245513697210825, "grad_norm": 0.44357797503471375, "learning_rate": 2.787931534667827e-06, "loss": 0.749, "step": 10676 }, { "epoch": 0.44249658087778193, "grad_norm": 0.37790152430534363, "learning_rate": 2.787724315139459e-06, "loss": 0.6646, "step": 10677 }, { "epoch": 0.4425380247834556, "grad_norm": 0.43542614579200745, "learning_rate": 2.787517095611091e-06, "loss": 0.7091, "step": 
10678 }, { "epoch": 0.4425794686891293, "grad_norm": 0.3899983763694763, "learning_rate": 2.7873098760827223e-06, "loss": 0.6743, "step": 10679 }, { "epoch": 0.4426209125948029, "grad_norm": 0.41934314370155334, "learning_rate": 2.787102656554354e-06, "loss": 0.679, "step": 10680 }, { "epoch": 0.4426623565004766, "grad_norm": 0.4647693634033203, "learning_rate": 2.7868954370259855e-06, "loss": 0.7148, "step": 10681 }, { "epoch": 0.44270380040615026, "grad_norm": 0.4176410436630249, "learning_rate": 2.7866882174976173e-06, "loss": 0.6902, "step": 10682 }, { "epoch": 0.44274524431182394, "grad_norm": 0.4041560888290405, "learning_rate": 2.7864809979692487e-06, "loss": 0.6658, "step": 10683 }, { "epoch": 0.4427866882174976, "grad_norm": 0.40555325150489807, "learning_rate": 2.7862737784408805e-06, "loss": 0.6956, "step": 10684 }, { "epoch": 0.4428281321231713, "grad_norm": 0.3938453197479248, "learning_rate": 2.786066558912512e-06, "loss": 0.6721, "step": 10685 }, { "epoch": 0.44286957602884497, "grad_norm": 0.4473835229873657, "learning_rate": 2.7858593393841437e-06, "loss": 0.7321, "step": 10686 }, { "epoch": 0.44291101993451865, "grad_norm": 0.3867129385471344, "learning_rate": 2.7856521198557755e-06, "loss": 0.6763, "step": 10687 }, { "epoch": 0.4429524638401923, "grad_norm": 0.3999990224838257, "learning_rate": 2.785444900327407e-06, "loss": 0.7057, "step": 10688 }, { "epoch": 0.44299390774586594, "grad_norm": 0.3968227803707123, "learning_rate": 2.7852376807990387e-06, "loss": 0.6956, "step": 10689 }, { "epoch": 0.4430353516515396, "grad_norm": 0.45817697048187256, "learning_rate": 2.78503046127067e-06, "loss": 0.6852, "step": 10690 }, { "epoch": 0.4430767955572133, "grad_norm": 0.4271210730075836, "learning_rate": 2.7848232417423023e-06, "loss": 0.6251, "step": 10691 }, { "epoch": 0.443118239462887, "grad_norm": 0.39607587456703186, "learning_rate": 2.7846160222139333e-06, "loss": 0.7278, "step": 10692 }, { "epoch": 0.44315968336856065, "grad_norm": 
0.37491244077682495, "learning_rate": 2.7844088026855655e-06, "loss": 0.6195, "step": 10693 }, { "epoch": 0.44320112727423433, "grad_norm": 0.391167014837265, "learning_rate": 2.7842015831571965e-06, "loss": 0.6362, "step": 10694 }, { "epoch": 0.443242571179908, "grad_norm": 0.39960363507270813, "learning_rate": 2.7839943636288287e-06, "loss": 0.6765, "step": 10695 }, { "epoch": 0.4432840150855817, "grad_norm": 0.42391476035118103, "learning_rate": 2.7837871441004605e-06, "loss": 0.6929, "step": 10696 }, { "epoch": 0.44332545899125536, "grad_norm": 0.3722314238548279, "learning_rate": 2.783579924572092e-06, "loss": 0.625, "step": 10697 }, { "epoch": 0.443366902896929, "grad_norm": 0.38690996170043945, "learning_rate": 2.7833727050437237e-06, "loss": 0.6162, "step": 10698 }, { "epoch": 0.44340834680260266, "grad_norm": 0.4186747670173645, "learning_rate": 2.783165485515355e-06, "loss": 0.7336, "step": 10699 }, { "epoch": 0.44344979070827634, "grad_norm": 0.3984531760215759, "learning_rate": 2.782958265986987e-06, "loss": 0.7257, "step": 10700 }, { "epoch": 0.44349123461395, "grad_norm": 0.38653764128685, "learning_rate": 2.7827510464586183e-06, "loss": 0.7065, "step": 10701 }, { "epoch": 0.4435326785196237, "grad_norm": 0.4031617343425751, "learning_rate": 2.78254382693025e-06, "loss": 0.6616, "step": 10702 }, { "epoch": 0.44357412242529737, "grad_norm": 0.3965320885181427, "learning_rate": 2.782336607401882e-06, "loss": 0.6956, "step": 10703 }, { "epoch": 0.44361556633097105, "grad_norm": 0.41684433817863464, "learning_rate": 2.7821293878735133e-06, "loss": 0.6912, "step": 10704 }, { "epoch": 0.4436570102366447, "grad_norm": 0.41227489709854126, "learning_rate": 2.781922168345145e-06, "loss": 0.6964, "step": 10705 }, { "epoch": 0.4436984541423184, "grad_norm": 0.4462659955024719, "learning_rate": 2.7817149488167765e-06, "loss": 0.6986, "step": 10706 }, { "epoch": 0.443739898047992, "grad_norm": 0.40475383400917053, "learning_rate": 2.7815077292884083e-06, "loss": 
0.6833, "step": 10707 }, { "epoch": 0.4437813419536657, "grad_norm": 0.5172575116157532, "learning_rate": 2.7813005097600397e-06, "loss": 0.7681, "step": 10708 }, { "epoch": 0.4438227858593394, "grad_norm": 0.4176976680755615, "learning_rate": 2.781093290231672e-06, "loss": 0.74, "step": 10709 }, { "epoch": 0.44386422976501305, "grad_norm": 0.3735900819301605, "learning_rate": 2.780886070703303e-06, "loss": 0.616, "step": 10710 }, { "epoch": 0.44390567367068673, "grad_norm": 0.41185203194618225, "learning_rate": 2.780678851174935e-06, "loss": 0.7158, "step": 10711 }, { "epoch": 0.4439471175763604, "grad_norm": 0.40176498889923096, "learning_rate": 2.780471631646567e-06, "loss": 0.7191, "step": 10712 }, { "epoch": 0.4439885614820341, "grad_norm": 0.38810068368911743, "learning_rate": 2.7802644121181983e-06, "loss": 0.7056, "step": 10713 }, { "epoch": 0.44403000538770776, "grad_norm": 0.41772010922431946, "learning_rate": 2.78005719258983e-06, "loss": 0.7268, "step": 10714 }, { "epoch": 0.4440714492933814, "grad_norm": 0.397176593542099, "learning_rate": 2.7798499730614615e-06, "loss": 0.6462, "step": 10715 }, { "epoch": 0.44411289319905506, "grad_norm": 0.41932404041290283, "learning_rate": 2.7796427535330933e-06, "loss": 0.6812, "step": 10716 }, { "epoch": 0.44415433710472874, "grad_norm": 0.4088246822357178, "learning_rate": 2.7794355340047247e-06, "loss": 0.7056, "step": 10717 }, { "epoch": 0.4441957810104024, "grad_norm": 0.42907828092575073, "learning_rate": 2.7792283144763565e-06, "loss": 0.7156, "step": 10718 }, { "epoch": 0.4442372249160761, "grad_norm": 0.42180898785591125, "learning_rate": 2.779021094947988e-06, "loss": 0.7346, "step": 10719 }, { "epoch": 0.44427866882174977, "grad_norm": 0.4063490927219391, "learning_rate": 2.7788138754196197e-06, "loss": 0.7537, "step": 10720 }, { "epoch": 0.44432011272742344, "grad_norm": 0.4329044222831726, "learning_rate": 2.7786066558912515e-06, "loss": 0.7051, "step": 10721 }, { "epoch": 0.4443615566330971, 
"grad_norm": 0.3947679102420807, "learning_rate": 2.778399436362883e-06, "loss": 0.7032, "step": 10722 }, { "epoch": 0.4444030005387708, "grad_norm": 0.43656572699546814, "learning_rate": 2.7781922168345147e-06, "loss": 0.6718, "step": 10723 }, { "epoch": 0.4444444444444444, "grad_norm": 0.45558103919029236, "learning_rate": 2.777984997306146e-06, "loss": 0.6835, "step": 10724 }, { "epoch": 0.4444858883501181, "grad_norm": 0.40221020579338074, "learning_rate": 2.7777777777777783e-06, "loss": 0.7114, "step": 10725 }, { "epoch": 0.4445273322557918, "grad_norm": 0.4129464030265808, "learning_rate": 2.7775705582494093e-06, "loss": 0.6801, "step": 10726 }, { "epoch": 0.44456877616146545, "grad_norm": 0.39348557591438293, "learning_rate": 2.7773633387210415e-06, "loss": 0.6881, "step": 10727 }, { "epoch": 0.44461022006713913, "grad_norm": 0.3803649842739105, "learning_rate": 2.7771561191926725e-06, "loss": 0.6702, "step": 10728 }, { "epoch": 0.4446516639728128, "grad_norm": 0.43088364601135254, "learning_rate": 2.7769488996643047e-06, "loss": 0.7524, "step": 10729 }, { "epoch": 0.4446931078784865, "grad_norm": 0.39187121391296387, "learning_rate": 2.7767416801359365e-06, "loss": 0.6829, "step": 10730 }, { "epoch": 0.44473455178416016, "grad_norm": 0.4070812463760376, "learning_rate": 2.776534460607568e-06, "loss": 0.7019, "step": 10731 }, { "epoch": 0.44477599568983384, "grad_norm": 0.3851306438446045, "learning_rate": 2.7763272410791997e-06, "loss": 0.6519, "step": 10732 }, { "epoch": 0.44481743959550746, "grad_norm": 0.4127126932144165, "learning_rate": 2.776120021550831e-06, "loss": 0.7158, "step": 10733 }, { "epoch": 0.44485888350118113, "grad_norm": 0.4145079255104065, "learning_rate": 2.775912802022463e-06, "loss": 0.6888, "step": 10734 }, { "epoch": 0.4449003274068548, "grad_norm": 0.383944034576416, "learning_rate": 2.7757055824940943e-06, "loss": 0.6554, "step": 10735 }, { "epoch": 0.4449417713125285, "grad_norm": 0.42479604482650757, "learning_rate": 
2.775498362965726e-06, "loss": 0.7365, "step": 10736 }, { "epoch": 0.44498321521820217, "grad_norm": 0.4285183548927307, "learning_rate": 2.7752911434373575e-06, "loss": 0.7349, "step": 10737 }, { "epoch": 0.44502465912387584, "grad_norm": 0.4095458686351776, "learning_rate": 2.7750839239089893e-06, "loss": 0.7029, "step": 10738 }, { "epoch": 0.4450661030295495, "grad_norm": 0.38396984338760376, "learning_rate": 2.774876704380621e-06, "loss": 0.6465, "step": 10739 }, { "epoch": 0.4451075469352232, "grad_norm": 0.42741823196411133, "learning_rate": 2.7746694848522525e-06, "loss": 0.7263, "step": 10740 }, { "epoch": 0.4451489908408968, "grad_norm": 0.4390709102153778, "learning_rate": 2.7744622653238843e-06, "loss": 0.7129, "step": 10741 }, { "epoch": 0.4451904347465705, "grad_norm": 0.43229514360427856, "learning_rate": 2.7742550457955157e-06, "loss": 0.6899, "step": 10742 }, { "epoch": 0.4452318786522442, "grad_norm": 0.38738396763801575, "learning_rate": 2.774047826267148e-06, "loss": 0.6582, "step": 10743 }, { "epoch": 0.44527332255791785, "grad_norm": 0.35839617252349854, "learning_rate": 2.773840606738779e-06, "loss": 0.6165, "step": 10744 }, { "epoch": 0.4453147664635915, "grad_norm": 0.4070381820201874, "learning_rate": 2.773633387210411e-06, "loss": 0.6869, "step": 10745 }, { "epoch": 0.4453562103692652, "grad_norm": 0.4540160298347473, "learning_rate": 2.7734261676820425e-06, "loss": 0.6921, "step": 10746 }, { "epoch": 0.4453976542749389, "grad_norm": 0.4162324368953705, "learning_rate": 2.7732189481536743e-06, "loss": 0.7141, "step": 10747 }, { "epoch": 0.44543909818061256, "grad_norm": 0.3760441541671753, "learning_rate": 2.773011728625306e-06, "loss": 0.6422, "step": 10748 }, { "epoch": 0.44548054208628624, "grad_norm": 0.4429740309715271, "learning_rate": 2.7728045090969375e-06, "loss": 0.6882, "step": 10749 }, { "epoch": 0.44552198599195986, "grad_norm": 0.4257534146308899, "learning_rate": 2.7725972895685693e-06, "loss": 0.6846, "step": 10750 }, { 
"epoch": 0.44556342989763353, "grad_norm": 0.44154950976371765, "learning_rate": 2.7723900700402007e-06, "loss": 0.719, "step": 10751 }, { "epoch": 0.4456048738033072, "grad_norm": 0.4083128571510315, "learning_rate": 2.7721828505118325e-06, "loss": 0.6667, "step": 10752 }, { "epoch": 0.4456463177089809, "grad_norm": 0.4325467050075531, "learning_rate": 2.771975630983464e-06, "loss": 0.7561, "step": 10753 }, { "epoch": 0.44568776161465457, "grad_norm": 0.4141205847263336, "learning_rate": 2.7717684114550957e-06, "loss": 0.675, "step": 10754 }, { "epoch": 0.44572920552032824, "grad_norm": 0.515047013759613, "learning_rate": 2.7715611919267275e-06, "loss": 0.7239, "step": 10755 }, { "epoch": 0.4457706494260019, "grad_norm": 0.39870646595954895, "learning_rate": 2.771353972398359e-06, "loss": 0.675, "step": 10756 }, { "epoch": 0.4458120933316756, "grad_norm": 0.44386667013168335, "learning_rate": 2.7711467528699907e-06, "loss": 0.6868, "step": 10757 }, { "epoch": 0.4458535372373493, "grad_norm": 0.3994561731815338, "learning_rate": 2.770939533341622e-06, "loss": 0.6216, "step": 10758 }, { "epoch": 0.4458949811430229, "grad_norm": 0.3937024772167206, "learning_rate": 2.7707323138132543e-06, "loss": 0.637, "step": 10759 }, { "epoch": 0.44593642504869657, "grad_norm": 0.4203580915927887, "learning_rate": 2.7705250942848853e-06, "loss": 0.6421, "step": 10760 }, { "epoch": 0.44597786895437025, "grad_norm": 0.39245256781578064, "learning_rate": 2.7703178747565175e-06, "loss": 0.6584, "step": 10761 }, { "epoch": 0.4460193128600439, "grad_norm": 0.41881120204925537, "learning_rate": 2.7701106552281485e-06, "loss": 0.6958, "step": 10762 }, { "epoch": 0.4460607567657176, "grad_norm": 0.40717312693595886, "learning_rate": 2.7699034356997807e-06, "loss": 0.6978, "step": 10763 }, { "epoch": 0.4461022006713913, "grad_norm": 0.4443020522594452, "learning_rate": 2.7696962161714125e-06, "loss": 0.7031, "step": 10764 }, { "epoch": 0.44614364457706496, "grad_norm": 0.43122488260269165, 
"learning_rate": 2.769488996643044e-06, "loss": 0.7351, "step": 10765 }, { "epoch": 0.44618508848273863, "grad_norm": 0.5521938800811768, "learning_rate": 2.7692817771146757e-06, "loss": 0.7028, "step": 10766 }, { "epoch": 0.4462265323884123, "grad_norm": 0.4305494427680969, "learning_rate": 2.769074557586307e-06, "loss": 0.6821, "step": 10767 }, { "epoch": 0.44626797629408593, "grad_norm": 0.42370104789733887, "learning_rate": 2.768867338057939e-06, "loss": 0.6643, "step": 10768 }, { "epoch": 0.4463094201997596, "grad_norm": 0.4145101308822632, "learning_rate": 2.7686601185295703e-06, "loss": 0.6875, "step": 10769 }, { "epoch": 0.4463508641054333, "grad_norm": 0.4131934642791748, "learning_rate": 2.768452899001202e-06, "loss": 0.7046, "step": 10770 }, { "epoch": 0.44639230801110696, "grad_norm": 0.4026491045951843, "learning_rate": 2.7682456794728335e-06, "loss": 0.6897, "step": 10771 }, { "epoch": 0.44643375191678064, "grad_norm": 0.49821916222572327, "learning_rate": 2.7680384599444653e-06, "loss": 0.7076, "step": 10772 }, { "epoch": 0.4464751958224543, "grad_norm": 0.41298797726631165, "learning_rate": 2.767831240416097e-06, "loss": 0.6926, "step": 10773 }, { "epoch": 0.446516639728128, "grad_norm": 0.4178711175918579, "learning_rate": 2.7676240208877285e-06, "loss": 0.6548, "step": 10774 }, { "epoch": 0.4465580836338017, "grad_norm": 0.4024145305156708, "learning_rate": 2.7674168013593603e-06, "loss": 0.7007, "step": 10775 }, { "epoch": 0.4465995275394753, "grad_norm": 0.45866072177886963, "learning_rate": 2.7672095818309917e-06, "loss": 0.7324, "step": 10776 }, { "epoch": 0.44664097144514897, "grad_norm": 0.41210514307022095, "learning_rate": 2.767002362302624e-06, "loss": 0.6987, "step": 10777 }, { "epoch": 0.44668241535082265, "grad_norm": 0.4201638698577881, "learning_rate": 2.766795142774255e-06, "loss": 0.6897, "step": 10778 }, { "epoch": 0.4467238592564963, "grad_norm": 0.4367576241493225, "learning_rate": 2.766587923245887e-06, "loss": 0.6899, "step": 
10779 }, { "epoch": 0.44676530316217, "grad_norm": 0.3872509300708771, "learning_rate": 2.7663807037175185e-06, "loss": 0.6868, "step": 10780 }, { "epoch": 0.4468067470678437, "grad_norm": 0.41382497549057007, "learning_rate": 2.7661734841891503e-06, "loss": 0.7222, "step": 10781 }, { "epoch": 0.44684819097351736, "grad_norm": 0.4649643301963806, "learning_rate": 2.765966264660782e-06, "loss": 0.7117, "step": 10782 }, { "epoch": 0.44688963487919103, "grad_norm": 0.4160113036632538, "learning_rate": 2.7657590451324135e-06, "loss": 0.7095, "step": 10783 }, { "epoch": 0.4469310787848647, "grad_norm": 0.4087352454662323, "learning_rate": 2.7655518256040453e-06, "loss": 0.689, "step": 10784 }, { "epoch": 0.44697252269053833, "grad_norm": 0.41340169310569763, "learning_rate": 2.7653446060756767e-06, "loss": 0.6531, "step": 10785 }, { "epoch": 0.447013966596212, "grad_norm": 0.412334144115448, "learning_rate": 2.7651373865473085e-06, "loss": 0.6602, "step": 10786 }, { "epoch": 0.4470554105018857, "grad_norm": 0.4670717418193817, "learning_rate": 2.76493016701894e-06, "loss": 0.7279, "step": 10787 }, { "epoch": 0.44709685440755936, "grad_norm": 0.36499109864234924, "learning_rate": 2.7647229474905717e-06, "loss": 0.6853, "step": 10788 }, { "epoch": 0.44713829831323304, "grad_norm": 0.3761703670024872, "learning_rate": 2.764515727962203e-06, "loss": 0.6628, "step": 10789 }, { "epoch": 0.4471797422189067, "grad_norm": 0.42338645458221436, "learning_rate": 2.764308508433835e-06, "loss": 0.7263, "step": 10790 }, { "epoch": 0.4472211861245804, "grad_norm": 0.4451175630092621, "learning_rate": 2.7641012889054667e-06, "loss": 0.6879, "step": 10791 }, { "epoch": 0.44726263003025407, "grad_norm": 0.4271060526371002, "learning_rate": 2.763894069377098e-06, "loss": 0.7205, "step": 10792 }, { "epoch": 0.44730407393592775, "grad_norm": 0.42221876978874207, "learning_rate": 2.7636868498487303e-06, "loss": 0.6808, "step": 10793 }, { "epoch": 0.44734551784160137, "grad_norm": 
0.3987949788570404, "learning_rate": 2.7634796303203613e-06, "loss": 0.6838, "step": 10794 }, { "epoch": 0.44738696174727505, "grad_norm": 0.4065544605255127, "learning_rate": 2.7632724107919935e-06, "loss": 0.6777, "step": 10795 }, { "epoch": 0.4474284056529487, "grad_norm": 0.41755664348602295, "learning_rate": 2.7630651912636245e-06, "loss": 0.6404, "step": 10796 }, { "epoch": 0.4474698495586224, "grad_norm": 0.40265128016471863, "learning_rate": 2.7628579717352567e-06, "loss": 0.6895, "step": 10797 }, { "epoch": 0.4475112934642961, "grad_norm": 0.43838223814964294, "learning_rate": 2.762650752206888e-06, "loss": 0.6682, "step": 10798 }, { "epoch": 0.44755273736996976, "grad_norm": 0.44894281029701233, "learning_rate": 2.76244353267852e-06, "loss": 0.7244, "step": 10799 }, { "epoch": 0.44759418127564343, "grad_norm": 0.5027337670326233, "learning_rate": 2.7622363131501517e-06, "loss": 0.7837, "step": 10800 }, { "epoch": 0.4476356251813171, "grad_norm": 0.4085533320903778, "learning_rate": 2.762029093621783e-06, "loss": 0.6663, "step": 10801 }, { "epoch": 0.44767706908699073, "grad_norm": 0.40160036087036133, "learning_rate": 2.761821874093415e-06, "loss": 0.6848, "step": 10802 }, { "epoch": 0.4477185129926644, "grad_norm": 0.43014684319496155, "learning_rate": 2.7616146545650463e-06, "loss": 0.7092, "step": 10803 }, { "epoch": 0.4477599568983381, "grad_norm": 0.38239920139312744, "learning_rate": 2.761407435036678e-06, "loss": 0.677, "step": 10804 }, { "epoch": 0.44780140080401176, "grad_norm": 0.4521341919898987, "learning_rate": 2.7612002155083095e-06, "loss": 0.7076, "step": 10805 }, { "epoch": 0.44784284470968544, "grad_norm": 0.42911645770072937, "learning_rate": 2.7609929959799413e-06, "loss": 0.6722, "step": 10806 }, { "epoch": 0.4478842886153591, "grad_norm": 0.3787784278392792, "learning_rate": 2.7607857764515727e-06, "loss": 0.661, "step": 10807 }, { "epoch": 0.4479257325210328, "grad_norm": 0.42653194069862366, "learning_rate": 2.7605785569232045e-06, 
"loss": 0.6699, "step": 10808 }, { "epoch": 0.44796717642670647, "grad_norm": 0.42119038105010986, "learning_rate": 2.7603713373948363e-06, "loss": 0.7219, "step": 10809 }, { "epoch": 0.44800862033238015, "grad_norm": 0.39014604687690735, "learning_rate": 2.7601641178664677e-06, "loss": 0.6979, "step": 10810 }, { "epoch": 0.44805006423805377, "grad_norm": 0.40384072065353394, "learning_rate": 2.7599568983381e-06, "loss": 0.677, "step": 10811 }, { "epoch": 0.44809150814372745, "grad_norm": 0.40486863255500793, "learning_rate": 2.759749678809731e-06, "loss": 0.6704, "step": 10812 }, { "epoch": 0.4481329520494011, "grad_norm": 0.4096280038356781, "learning_rate": 2.759542459281363e-06, "loss": 0.7083, "step": 10813 }, { "epoch": 0.4481743959550748, "grad_norm": 0.37814274430274963, "learning_rate": 2.7593352397529945e-06, "loss": 0.6326, "step": 10814 }, { "epoch": 0.4482158398607485, "grad_norm": 0.40980005264282227, "learning_rate": 2.7591280202246263e-06, "loss": 0.7097, "step": 10815 }, { "epoch": 0.44825728376642215, "grad_norm": 0.3945491909980774, "learning_rate": 2.758920800696258e-06, "loss": 0.6631, "step": 10816 }, { "epoch": 0.44829872767209583, "grad_norm": 0.439034640789032, "learning_rate": 2.7587135811678895e-06, "loss": 0.743, "step": 10817 }, { "epoch": 0.4483401715777695, "grad_norm": 0.4239235520362854, "learning_rate": 2.7585063616395213e-06, "loss": 0.6991, "step": 10818 }, { "epoch": 0.4483816154834432, "grad_norm": 0.41628003120422363, "learning_rate": 2.7582991421111527e-06, "loss": 0.7466, "step": 10819 }, { "epoch": 0.4484230593891168, "grad_norm": 0.40465855598449707, "learning_rate": 2.7580919225827845e-06, "loss": 0.6418, "step": 10820 }, { "epoch": 0.4484645032947905, "grad_norm": 0.398535817861557, "learning_rate": 2.757884703054416e-06, "loss": 0.6512, "step": 10821 }, { "epoch": 0.44850594720046416, "grad_norm": 0.4544258415699005, "learning_rate": 2.7576774835260477e-06, "loss": 0.6915, "step": 10822 }, { "epoch": 
0.44854739110613784, "grad_norm": 0.42401474714279175, "learning_rate": 2.757470263997679e-06, "loss": 0.6809, "step": 10823 }, { "epoch": 0.4485888350118115, "grad_norm": 0.4807281792163849, "learning_rate": 2.757263044469311e-06, "loss": 0.7727, "step": 10824 }, { "epoch": 0.4486302789174852, "grad_norm": 0.4088771343231201, "learning_rate": 2.7570558249409427e-06, "loss": 0.7041, "step": 10825 }, { "epoch": 0.44867172282315887, "grad_norm": 0.4210512340068817, "learning_rate": 2.756848605412574e-06, "loss": 0.6868, "step": 10826 }, { "epoch": 0.44871316672883255, "grad_norm": 0.4039902091026306, "learning_rate": 2.7566413858842064e-06, "loss": 0.7212, "step": 10827 }, { "epoch": 0.4487546106345062, "grad_norm": 0.391928493976593, "learning_rate": 2.7564341663558373e-06, "loss": 0.7405, "step": 10828 }, { "epoch": 0.44879605454017985, "grad_norm": 0.4118398129940033, "learning_rate": 2.7562269468274695e-06, "loss": 0.7057, "step": 10829 }, { "epoch": 0.4488374984458535, "grad_norm": 0.39546898007392883, "learning_rate": 2.756019727299101e-06, "loss": 0.6532, "step": 10830 }, { "epoch": 0.4488789423515272, "grad_norm": 0.37725499272346497, "learning_rate": 2.7558125077707327e-06, "loss": 0.7217, "step": 10831 }, { "epoch": 0.4489203862572009, "grad_norm": 0.3909328579902649, "learning_rate": 2.755605288242364e-06, "loss": 0.686, "step": 10832 }, { "epoch": 0.44896183016287455, "grad_norm": 0.4160630404949188, "learning_rate": 2.755398068713996e-06, "loss": 0.6803, "step": 10833 }, { "epoch": 0.44900327406854823, "grad_norm": 0.4155634045600891, "learning_rate": 2.7551908491856277e-06, "loss": 0.7227, "step": 10834 }, { "epoch": 0.4490447179742219, "grad_norm": 0.4120108485221863, "learning_rate": 2.754983629657259e-06, "loss": 0.667, "step": 10835 }, { "epoch": 0.4490861618798956, "grad_norm": 0.40564781427383423, "learning_rate": 2.754776410128891e-06, "loss": 0.7156, "step": 10836 }, { "epoch": 0.4491276057855692, "grad_norm": 0.42689386010169983, 
"learning_rate": 2.7545691906005223e-06, "loss": 0.7219, "step": 10837 }, { "epoch": 0.4491690496912429, "grad_norm": 0.4072379469871521, "learning_rate": 2.754361971072154e-06, "loss": 0.679, "step": 10838 }, { "epoch": 0.44921049359691656, "grad_norm": 0.3911210298538208, "learning_rate": 2.7541547515437855e-06, "loss": 0.6743, "step": 10839 }, { "epoch": 0.44925193750259024, "grad_norm": 0.4427466094493866, "learning_rate": 2.7539475320154173e-06, "loss": 0.7186, "step": 10840 }, { "epoch": 0.4492933814082639, "grad_norm": 0.4513285160064697, "learning_rate": 2.7537403124870487e-06, "loss": 0.7183, "step": 10841 }, { "epoch": 0.4493348253139376, "grad_norm": 0.4227806031703949, "learning_rate": 2.7535330929586805e-06, "loss": 0.6394, "step": 10842 }, { "epoch": 0.44937626921961127, "grad_norm": 0.4032141864299774, "learning_rate": 2.7533258734303123e-06, "loss": 0.6542, "step": 10843 }, { "epoch": 0.44941771312528495, "grad_norm": 0.40027546882629395, "learning_rate": 2.7531186539019437e-06, "loss": 0.7113, "step": 10844 }, { "epoch": 0.4494591570309586, "grad_norm": 0.39920344948768616, "learning_rate": 2.752911434373576e-06, "loss": 0.6345, "step": 10845 }, { "epoch": 0.44950060093663224, "grad_norm": 0.38025155663490295, "learning_rate": 2.752704214845207e-06, "loss": 0.7003, "step": 10846 }, { "epoch": 0.4495420448423059, "grad_norm": 0.4052783250808716, "learning_rate": 2.752496995316839e-06, "loss": 0.6857, "step": 10847 }, { "epoch": 0.4495834887479796, "grad_norm": 0.4205634891986847, "learning_rate": 2.7522897757884705e-06, "loss": 0.7039, "step": 10848 }, { "epoch": 0.4496249326536533, "grad_norm": 0.42052412033081055, "learning_rate": 2.7520825562601023e-06, "loss": 0.7014, "step": 10849 }, { "epoch": 0.44966637655932695, "grad_norm": 0.39881160855293274, "learning_rate": 2.7518753367317337e-06, "loss": 0.6975, "step": 10850 }, { "epoch": 0.44970782046500063, "grad_norm": 0.4214608073234558, "learning_rate": 2.7516681172033655e-06, "loss": 0.6985, 
"step": 10851 }, { "epoch": 0.4497492643706743, "grad_norm": 0.4114598333835602, "learning_rate": 2.7514608976749973e-06, "loss": 0.645, "step": 10852 }, { "epoch": 0.449790708276348, "grad_norm": 0.4306933283805847, "learning_rate": 2.7512536781466287e-06, "loss": 0.6613, "step": 10853 }, { "epoch": 0.44983215218202166, "grad_norm": 0.3998236358165741, "learning_rate": 2.7510464586182605e-06, "loss": 0.6853, "step": 10854 }, { "epoch": 0.4498735960876953, "grad_norm": 0.42441365122795105, "learning_rate": 2.750839239089892e-06, "loss": 0.6489, "step": 10855 }, { "epoch": 0.44991503999336896, "grad_norm": 0.42009133100509644, "learning_rate": 2.7506320195615237e-06, "loss": 0.7478, "step": 10856 }, { "epoch": 0.44995648389904264, "grad_norm": 0.4120667278766632, "learning_rate": 2.750424800033155e-06, "loss": 0.6775, "step": 10857 }, { "epoch": 0.4499979278047163, "grad_norm": 0.37560510635375977, "learning_rate": 2.750217580504787e-06, "loss": 0.6279, "step": 10858 }, { "epoch": 0.45003937171039, "grad_norm": 0.4543132185935974, "learning_rate": 2.7500103609764183e-06, "loss": 0.7448, "step": 10859 }, { "epoch": 0.45008081561606367, "grad_norm": 0.40168115496635437, "learning_rate": 2.74980314144805e-06, "loss": 0.6597, "step": 10860 }, { "epoch": 0.45012225952173734, "grad_norm": 0.3960631191730499, "learning_rate": 2.7495959219196824e-06, "loss": 0.667, "step": 10861 }, { "epoch": 0.450163703427411, "grad_norm": 0.38308054208755493, "learning_rate": 2.7493887023913133e-06, "loss": 0.6339, "step": 10862 }, { "epoch": 0.45020514733308464, "grad_norm": 0.44173336029052734, "learning_rate": 2.7491814828629456e-06, "loss": 0.6772, "step": 10863 }, { "epoch": 0.4502465912387583, "grad_norm": 0.42553815245628357, "learning_rate": 2.748974263334577e-06, "loss": 0.7214, "step": 10864 }, { "epoch": 0.450288035144432, "grad_norm": 0.4157269299030304, "learning_rate": 2.7487670438062087e-06, "loss": 0.7295, "step": 10865 }, { "epoch": 0.4503294790501057, "grad_norm": 
0.40996667742729187, "learning_rate": 2.74855982427784e-06, "loss": 0.7051, "step": 10866 }, { "epoch": 0.45037092295577935, "grad_norm": 0.5329059958457947, "learning_rate": 2.748352604749472e-06, "loss": 0.7686, "step": 10867 }, { "epoch": 0.45041236686145303, "grad_norm": 0.39402318000793457, "learning_rate": 2.7481453852211033e-06, "loss": 0.7112, "step": 10868 }, { "epoch": 0.4504538107671267, "grad_norm": 0.393494188785553, "learning_rate": 2.747938165692735e-06, "loss": 0.6924, "step": 10869 }, { "epoch": 0.4504952546728004, "grad_norm": 0.39389529824256897, "learning_rate": 2.747730946164367e-06, "loss": 0.6735, "step": 10870 }, { "epoch": 0.45053669857847406, "grad_norm": 0.4094820022583008, "learning_rate": 2.7475237266359983e-06, "loss": 0.672, "step": 10871 }, { "epoch": 0.4505781424841477, "grad_norm": 0.4184476435184479, "learning_rate": 2.74731650710763e-06, "loss": 0.7101, "step": 10872 }, { "epoch": 0.45061958638982136, "grad_norm": 0.36338573694229126, "learning_rate": 2.7471092875792615e-06, "loss": 0.6755, "step": 10873 }, { "epoch": 0.45066103029549504, "grad_norm": 0.4082651436328888, "learning_rate": 2.7469020680508933e-06, "loss": 0.686, "step": 10874 }, { "epoch": 0.4507024742011687, "grad_norm": 0.4405486583709717, "learning_rate": 2.7466948485225247e-06, "loss": 0.6973, "step": 10875 }, { "epoch": 0.4507439181068424, "grad_norm": 0.4147479236125946, "learning_rate": 2.7464876289941565e-06, "loss": 0.7164, "step": 10876 }, { "epoch": 0.45078536201251607, "grad_norm": 0.4813583791255951, "learning_rate": 2.7462804094657888e-06, "loss": 0.7621, "step": 10877 }, { "epoch": 0.45082680591818974, "grad_norm": 0.3963519036769867, "learning_rate": 2.7460731899374197e-06, "loss": 0.6882, "step": 10878 }, { "epoch": 0.4508682498238634, "grad_norm": 0.41709020733833313, "learning_rate": 2.745865970409052e-06, "loss": 0.7108, "step": 10879 }, { "epoch": 0.4509096937295371, "grad_norm": 0.41503480076789856, "learning_rate": 2.745658750880683e-06, 
"loss": 0.7058, "step": 10880 }, { "epoch": 0.4509511376352107, "grad_norm": 0.4125930964946747, "learning_rate": 2.745451531352315e-06, "loss": 0.7446, "step": 10881 }, { "epoch": 0.4509925815408844, "grad_norm": 0.4199207127094269, "learning_rate": 2.7452443118239465e-06, "loss": 0.6688, "step": 10882 }, { "epoch": 0.4510340254465581, "grad_norm": 0.39728984236717224, "learning_rate": 2.7450370922955783e-06, "loss": 0.6505, "step": 10883 }, { "epoch": 0.45107546935223175, "grad_norm": 0.3735872209072113, "learning_rate": 2.7448298727672097e-06, "loss": 0.6976, "step": 10884 }, { "epoch": 0.4511169132579054, "grad_norm": 0.41844668984413147, "learning_rate": 2.7446226532388415e-06, "loss": 0.7563, "step": 10885 }, { "epoch": 0.4511583571635791, "grad_norm": 0.4364495277404785, "learning_rate": 2.7444154337104734e-06, "loss": 0.7158, "step": 10886 }, { "epoch": 0.4511998010692528, "grad_norm": 0.40021952986717224, "learning_rate": 2.7442082141821047e-06, "loss": 0.6798, "step": 10887 }, { "epoch": 0.45124124497492646, "grad_norm": 0.4262683689594269, "learning_rate": 2.7440009946537365e-06, "loss": 0.7659, "step": 10888 }, { "epoch": 0.4512826888806001, "grad_norm": 0.38293272256851196, "learning_rate": 2.743793775125368e-06, "loss": 0.6045, "step": 10889 }, { "epoch": 0.45132413278627376, "grad_norm": 0.39113572239875793, "learning_rate": 2.7435865555969997e-06, "loss": 0.6401, "step": 10890 }, { "epoch": 0.45136557669194743, "grad_norm": 0.41732561588287354, "learning_rate": 2.743379336068631e-06, "loss": 0.6855, "step": 10891 }, { "epoch": 0.4514070205976211, "grad_norm": 0.4109300374984741, "learning_rate": 2.743172116540263e-06, "loss": 0.7167, "step": 10892 }, { "epoch": 0.4514484645032948, "grad_norm": 0.47449973225593567, "learning_rate": 2.7429648970118943e-06, "loss": 0.7256, "step": 10893 }, { "epoch": 0.45148990840896847, "grad_norm": 0.41985392570495605, "learning_rate": 2.742757677483526e-06, "loss": 0.7074, "step": 10894 }, { "epoch": 
0.45153135231464214, "grad_norm": 0.40595000982284546, "learning_rate": 2.7425504579551584e-06, "loss": 0.7311, "step": 10895 }, { "epoch": 0.4515727962203158, "grad_norm": 0.40797045826911926, "learning_rate": 2.7423432384267893e-06, "loss": 0.7209, "step": 10896 }, { "epoch": 0.4516142401259895, "grad_norm": 0.40827038884162903, "learning_rate": 2.7421360188984216e-06, "loss": 0.7429, "step": 10897 }, { "epoch": 0.4516556840316631, "grad_norm": 0.42651981115341187, "learning_rate": 2.741928799370053e-06, "loss": 0.6561, "step": 10898 }, { "epoch": 0.4516971279373368, "grad_norm": 0.410020112991333, "learning_rate": 2.7417215798416848e-06, "loss": 0.6678, "step": 10899 }, { "epoch": 0.4517385718430105, "grad_norm": 0.4196649491786957, "learning_rate": 2.741514360313316e-06, "loss": 0.7085, "step": 10900 }, { "epoch": 0.45178001574868415, "grad_norm": 0.38706499338150024, "learning_rate": 2.741307140784948e-06, "loss": 0.646, "step": 10901 }, { "epoch": 0.4518214596543578, "grad_norm": 0.44052794575691223, "learning_rate": 2.7410999212565793e-06, "loss": 0.7144, "step": 10902 }, { "epoch": 0.4518629035600315, "grad_norm": 0.37969788908958435, "learning_rate": 2.740892701728211e-06, "loss": 0.6819, "step": 10903 }, { "epoch": 0.4519043474657052, "grad_norm": 0.47429054975509644, "learning_rate": 2.740685482199843e-06, "loss": 0.7134, "step": 10904 }, { "epoch": 0.45194579137137886, "grad_norm": 0.3993639051914215, "learning_rate": 2.7404782626714743e-06, "loss": 0.6755, "step": 10905 }, { "epoch": 0.45198723527705253, "grad_norm": 0.41313016414642334, "learning_rate": 2.740271043143106e-06, "loss": 0.6721, "step": 10906 }, { "epoch": 0.45202867918272616, "grad_norm": 0.4232945740222931, "learning_rate": 2.7400638236147375e-06, "loss": 0.7156, "step": 10907 }, { "epoch": 0.45207012308839983, "grad_norm": 0.38364794850349426, "learning_rate": 2.7398566040863693e-06, "loss": 0.6497, "step": 10908 }, { "epoch": 0.4521115669940735, "grad_norm": 0.38954055309295654, 
"learning_rate": 2.7396493845580007e-06, "loss": 0.6604, "step": 10909 }, { "epoch": 0.4521530108997472, "grad_norm": 0.43266937136650085, "learning_rate": 2.7394421650296325e-06, "loss": 0.7166, "step": 10910 }, { "epoch": 0.45219445480542086, "grad_norm": 0.39658209681510925, "learning_rate": 2.739234945501264e-06, "loss": 0.665, "step": 10911 }, { "epoch": 0.45223589871109454, "grad_norm": 0.40220561623573303, "learning_rate": 2.7390277259728957e-06, "loss": 0.6971, "step": 10912 }, { "epoch": 0.4522773426167682, "grad_norm": 0.4212718605995178, "learning_rate": 2.738820506444528e-06, "loss": 0.7075, "step": 10913 }, { "epoch": 0.4523187865224419, "grad_norm": 0.42087072134017944, "learning_rate": 2.738613286916159e-06, "loss": 0.6399, "step": 10914 }, { "epoch": 0.4523602304281156, "grad_norm": 0.40563976764678955, "learning_rate": 2.738406067387791e-06, "loss": 0.6985, "step": 10915 }, { "epoch": 0.4524016743337892, "grad_norm": 0.411584734916687, "learning_rate": 2.7381988478594225e-06, "loss": 0.7087, "step": 10916 }, { "epoch": 0.45244311823946287, "grad_norm": 0.44316041469573975, "learning_rate": 2.7379916283310544e-06, "loss": 0.7076, "step": 10917 }, { "epoch": 0.45248456214513655, "grad_norm": 0.41748175024986267, "learning_rate": 2.7377844088026857e-06, "loss": 0.6927, "step": 10918 }, { "epoch": 0.4525260060508102, "grad_norm": 0.4268921911716461, "learning_rate": 2.7375771892743175e-06, "loss": 0.7339, "step": 10919 }, { "epoch": 0.4525674499564839, "grad_norm": 0.3742809295654297, "learning_rate": 2.737369969745949e-06, "loss": 0.6539, "step": 10920 }, { "epoch": 0.4526088938621576, "grad_norm": 0.4144238829612732, "learning_rate": 2.7371627502175807e-06, "loss": 0.7051, "step": 10921 }, { "epoch": 0.45265033776783126, "grad_norm": 0.4325546324253082, "learning_rate": 2.7369555306892126e-06, "loss": 0.7336, "step": 10922 }, { "epoch": 0.45269178167350493, "grad_norm": 0.4144049882888794, "learning_rate": 2.736748311160844e-06, "loss": 0.6678, 
"step": 10923 }, { "epoch": 0.45273322557917856, "grad_norm": 0.4398965537548065, "learning_rate": 2.7365410916324757e-06, "loss": 0.718, "step": 10924 }, { "epoch": 0.45277466948485223, "grad_norm": 0.381059467792511, "learning_rate": 2.736333872104107e-06, "loss": 0.6669, "step": 10925 }, { "epoch": 0.4528161133905259, "grad_norm": 0.4741170108318329, "learning_rate": 2.736126652575739e-06, "loss": 0.7087, "step": 10926 }, { "epoch": 0.4528575572961996, "grad_norm": 0.418230801820755, "learning_rate": 2.7359194330473703e-06, "loss": 0.6981, "step": 10927 }, { "epoch": 0.45289900120187326, "grad_norm": 0.4087010622024536, "learning_rate": 2.735712213519002e-06, "loss": 0.739, "step": 10928 }, { "epoch": 0.45294044510754694, "grad_norm": 0.4169796407222748, "learning_rate": 2.7355049939906335e-06, "loss": 0.6387, "step": 10929 }, { "epoch": 0.4529818890132206, "grad_norm": 0.4080181419849396, "learning_rate": 2.7352977744622653e-06, "loss": 0.6541, "step": 10930 }, { "epoch": 0.4530233329188943, "grad_norm": 0.39472997188568115, "learning_rate": 2.7350905549338976e-06, "loss": 0.658, "step": 10931 }, { "epoch": 0.45306477682456797, "grad_norm": 0.4234772026538849, "learning_rate": 2.734883335405529e-06, "loss": 0.7158, "step": 10932 }, { "epoch": 0.4531062207302416, "grad_norm": 0.47047293186187744, "learning_rate": 2.7346761158771608e-06, "loss": 0.7998, "step": 10933 }, { "epoch": 0.45314766463591527, "grad_norm": 0.39611148834228516, "learning_rate": 2.734468896348792e-06, "loss": 0.6993, "step": 10934 }, { "epoch": 0.45318910854158895, "grad_norm": 0.42588523030281067, "learning_rate": 2.734261676820424e-06, "loss": 0.6927, "step": 10935 }, { "epoch": 0.4532305524472626, "grad_norm": 0.4029209613800049, "learning_rate": 2.7340544572920553e-06, "loss": 0.6797, "step": 10936 }, { "epoch": 0.4532719963529363, "grad_norm": 0.39266279339790344, "learning_rate": 2.733847237763687e-06, "loss": 0.696, "step": 10937 }, { "epoch": 0.45331344025861, "grad_norm": 
0.4313693642616272, "learning_rate": 2.733640018235319e-06, "loss": 0.7412, "step": 10938 }, { "epoch": 0.45335488416428366, "grad_norm": 0.42305421829223633, "learning_rate": 2.7334327987069503e-06, "loss": 0.6987, "step": 10939 }, { "epoch": 0.45339632806995733, "grad_norm": 0.39201682806015015, "learning_rate": 2.733225579178582e-06, "loss": 0.6311, "step": 10940 }, { "epoch": 0.453437771975631, "grad_norm": 0.3959994912147522, "learning_rate": 2.7330183596502135e-06, "loss": 0.7034, "step": 10941 }, { "epoch": 0.45347921588130463, "grad_norm": 0.4310702383518219, "learning_rate": 2.7328111401218453e-06, "loss": 0.6826, "step": 10942 }, { "epoch": 0.4535206597869783, "grad_norm": 0.39889174699783325, "learning_rate": 2.7326039205934767e-06, "loss": 0.6494, "step": 10943 }, { "epoch": 0.453562103692652, "grad_norm": 0.44837307929992676, "learning_rate": 2.7323967010651085e-06, "loss": 0.7244, "step": 10944 }, { "epoch": 0.45360354759832566, "grad_norm": 0.41153982281684875, "learning_rate": 2.73218948153674e-06, "loss": 0.6564, "step": 10945 }, { "epoch": 0.45364499150399934, "grad_norm": 0.4167439043521881, "learning_rate": 2.7319822620083717e-06, "loss": 0.6675, "step": 10946 }, { "epoch": 0.453686435409673, "grad_norm": 0.4112125337123871, "learning_rate": 2.731775042480004e-06, "loss": 0.7319, "step": 10947 }, { "epoch": 0.4537278793153467, "grad_norm": 0.3924325704574585, "learning_rate": 2.731567822951635e-06, "loss": 0.7112, "step": 10948 }, { "epoch": 0.45376932322102037, "grad_norm": 0.41091832518577576, "learning_rate": 2.731360603423267e-06, "loss": 0.6785, "step": 10949 }, { "epoch": 0.453810767126694, "grad_norm": 0.4320807456970215, "learning_rate": 2.7311533838948986e-06, "loss": 0.6936, "step": 10950 }, { "epoch": 0.45385221103236767, "grad_norm": 0.3961375653743744, "learning_rate": 2.7309461643665304e-06, "loss": 0.7095, "step": 10951 }, { "epoch": 0.45389365493804135, "grad_norm": 0.41803792119026184, "learning_rate": 2.7307389448381617e-06, 
"loss": 0.6689, "step": 10952 }, { "epoch": 0.453935098843715, "grad_norm": 0.41552016139030457, "learning_rate": 2.7305317253097936e-06, "loss": 0.7207, "step": 10953 }, { "epoch": 0.4539765427493887, "grad_norm": 0.3932361602783203, "learning_rate": 2.730324505781425e-06, "loss": 0.6637, "step": 10954 }, { "epoch": 0.4540179866550624, "grad_norm": 0.40272071957588196, "learning_rate": 2.7301172862530568e-06, "loss": 0.6996, "step": 10955 }, { "epoch": 0.45405943056073605, "grad_norm": 0.47219178080558777, "learning_rate": 2.7299100667246886e-06, "loss": 0.7329, "step": 10956 }, { "epoch": 0.45410087446640973, "grad_norm": 0.4067838490009308, "learning_rate": 2.72970284719632e-06, "loss": 0.7036, "step": 10957 }, { "epoch": 0.4541423183720834, "grad_norm": 0.417812317609787, "learning_rate": 2.7294956276679518e-06, "loss": 0.6721, "step": 10958 }, { "epoch": 0.45418376227775703, "grad_norm": 0.4504159986972809, "learning_rate": 2.729288408139583e-06, "loss": 0.6643, "step": 10959 }, { "epoch": 0.4542252061834307, "grad_norm": 0.4014267325401306, "learning_rate": 2.729081188611215e-06, "loss": 0.7046, "step": 10960 }, { "epoch": 0.4542666500891044, "grad_norm": 0.42609161138534546, "learning_rate": 2.7288739690828463e-06, "loss": 0.6552, "step": 10961 }, { "epoch": 0.45430809399477806, "grad_norm": 0.45836910605430603, "learning_rate": 2.728666749554478e-06, "loss": 0.7549, "step": 10962 }, { "epoch": 0.45434953790045174, "grad_norm": 0.4530719816684723, "learning_rate": 2.7284595300261095e-06, "loss": 0.7288, "step": 10963 }, { "epoch": 0.4543909818061254, "grad_norm": 0.3804531693458557, "learning_rate": 2.7282523104977413e-06, "loss": 0.6489, "step": 10964 }, { "epoch": 0.4544324257117991, "grad_norm": 0.39621302485466003, "learning_rate": 2.7280450909693736e-06, "loss": 0.6587, "step": 10965 }, { "epoch": 0.45447386961747277, "grad_norm": 0.40300020575523376, "learning_rate": 2.727837871441005e-06, "loss": 0.6772, "step": 10966 }, { "epoch": 
0.45451531352314645, "grad_norm": 0.3925478458404541, "learning_rate": 2.7276306519126368e-06, "loss": 0.7024, "step": 10967 }, { "epoch": 0.45455675742882007, "grad_norm": 0.4346516728401184, "learning_rate": 2.727423432384268e-06, "loss": 0.7327, "step": 10968 }, { "epoch": 0.45459820133449375, "grad_norm": 0.4222603738307953, "learning_rate": 2.7272162128559e-06, "loss": 0.7429, "step": 10969 }, { "epoch": 0.4546396452401674, "grad_norm": 0.422123521566391, "learning_rate": 2.7270089933275313e-06, "loss": 0.7382, "step": 10970 }, { "epoch": 0.4546810891458411, "grad_norm": 0.4419358968734741, "learning_rate": 2.726801773799163e-06, "loss": 0.6896, "step": 10971 }, { "epoch": 0.4547225330515148, "grad_norm": 0.42431479692459106, "learning_rate": 2.7265945542707945e-06, "loss": 0.739, "step": 10972 }, { "epoch": 0.45476397695718845, "grad_norm": 0.415476530790329, "learning_rate": 2.7263873347424264e-06, "loss": 0.7039, "step": 10973 }, { "epoch": 0.45480542086286213, "grad_norm": 0.413877934217453, "learning_rate": 2.726180115214058e-06, "loss": 0.7408, "step": 10974 }, { "epoch": 0.4548468647685358, "grad_norm": 0.42428210377693176, "learning_rate": 2.7259728956856895e-06, "loss": 0.6986, "step": 10975 }, { "epoch": 0.4548883086742095, "grad_norm": 0.41334787011146545, "learning_rate": 2.7257656761573214e-06, "loss": 0.6752, "step": 10976 }, { "epoch": 0.4549297525798831, "grad_norm": 0.43593302369117737, "learning_rate": 2.7255584566289527e-06, "loss": 0.7058, "step": 10977 }, { "epoch": 0.4549711964855568, "grad_norm": 0.4018903374671936, "learning_rate": 2.7253512371005845e-06, "loss": 0.7281, "step": 10978 }, { "epoch": 0.45501264039123046, "grad_norm": 0.4166567325592041, "learning_rate": 2.725144017572216e-06, "loss": 0.6733, "step": 10979 }, { "epoch": 0.45505408429690414, "grad_norm": 0.4349125027656555, "learning_rate": 2.7249367980438477e-06, "loss": 0.7083, "step": 10980 }, { "epoch": 0.4550955282025778, "grad_norm": 0.41710150241851807, 
"learning_rate": 2.724729578515479e-06, "loss": 0.6893, "step": 10981 }, { "epoch": 0.4551369721082515, "grad_norm": 0.38541314005851746, "learning_rate": 2.724522358987111e-06, "loss": 0.6462, "step": 10982 }, { "epoch": 0.45517841601392517, "grad_norm": 0.42610839009284973, "learning_rate": 2.724315139458743e-06, "loss": 0.7142, "step": 10983 }, { "epoch": 0.45521985991959885, "grad_norm": 0.3879481852054596, "learning_rate": 2.7241079199303746e-06, "loss": 0.663, "step": 10984 }, { "epoch": 0.45526130382527247, "grad_norm": 0.416305810213089, "learning_rate": 2.7239007004020064e-06, "loss": 0.6918, "step": 10985 }, { "epoch": 0.45530274773094614, "grad_norm": 0.40580788254737854, "learning_rate": 2.7236934808736378e-06, "loss": 0.6692, "step": 10986 }, { "epoch": 0.4553441916366198, "grad_norm": 0.40694156289100647, "learning_rate": 2.7234862613452696e-06, "loss": 0.6472, "step": 10987 }, { "epoch": 0.4553856355422935, "grad_norm": 0.4609130620956421, "learning_rate": 2.723279041816901e-06, "loss": 0.6803, "step": 10988 }, { "epoch": 0.4554270794479672, "grad_norm": 0.3797556161880493, "learning_rate": 2.7230718222885328e-06, "loss": 0.6997, "step": 10989 }, { "epoch": 0.45546852335364085, "grad_norm": 0.3887242376804352, "learning_rate": 2.722864602760164e-06, "loss": 0.7407, "step": 10990 }, { "epoch": 0.45550996725931453, "grad_norm": 0.4107593894004822, "learning_rate": 2.722657383231796e-06, "loss": 0.6715, "step": 10991 }, { "epoch": 0.4555514111649882, "grad_norm": 0.42508140206336975, "learning_rate": 2.7224501637034278e-06, "loss": 0.7725, "step": 10992 }, { "epoch": 0.4555928550706619, "grad_norm": 0.4022448658943176, "learning_rate": 2.722242944175059e-06, "loss": 0.6921, "step": 10993 }, { "epoch": 0.4556342989763355, "grad_norm": 0.41692981123924255, "learning_rate": 2.722035724646691e-06, "loss": 0.7209, "step": 10994 }, { "epoch": 0.4556757428820092, "grad_norm": 0.4232257008552551, "learning_rate": 2.7218285051183223e-06, "loss": 0.6902, "step": 
10995 }, { "epoch": 0.45571718678768286, "grad_norm": 0.3977949023246765, "learning_rate": 2.721621285589954e-06, "loss": 0.6636, "step": 10996 }, { "epoch": 0.45575863069335654, "grad_norm": 0.40580636262893677, "learning_rate": 2.7214140660615855e-06, "loss": 0.6636, "step": 10997 }, { "epoch": 0.4558000745990302, "grad_norm": 0.4000755846500397, "learning_rate": 2.7212068465332173e-06, "loss": 0.6726, "step": 10998 }, { "epoch": 0.4558415185047039, "grad_norm": 0.39793479442596436, "learning_rate": 2.7209996270048496e-06, "loss": 0.6692, "step": 10999 }, { "epoch": 0.45588296241037757, "grad_norm": 0.4175099730491638, "learning_rate": 2.720792407476481e-06, "loss": 0.7214, "step": 11000 }, { "epoch": 0.45592440631605125, "grad_norm": 0.3890663683414459, "learning_rate": 2.7205851879481128e-06, "loss": 0.6875, "step": 11001 }, { "epoch": 0.4559658502217249, "grad_norm": 0.420482873916626, "learning_rate": 2.720377968419744e-06, "loss": 0.7307, "step": 11002 }, { "epoch": 0.45600729412739854, "grad_norm": 0.4144669771194458, "learning_rate": 2.720170748891376e-06, "loss": 0.7368, "step": 11003 }, { "epoch": 0.4560487380330722, "grad_norm": 0.4443295896053314, "learning_rate": 2.7199635293630074e-06, "loss": 0.7135, "step": 11004 }, { "epoch": 0.4560901819387459, "grad_norm": 0.40316635370254517, "learning_rate": 2.719756309834639e-06, "loss": 0.6985, "step": 11005 }, { "epoch": 0.4561316258444196, "grad_norm": 0.39365121722221375, "learning_rate": 2.7195490903062705e-06, "loss": 0.6649, "step": 11006 }, { "epoch": 0.45617306975009325, "grad_norm": 0.43081071972846985, "learning_rate": 2.7193418707779024e-06, "loss": 0.7306, "step": 11007 }, { "epoch": 0.45621451365576693, "grad_norm": 0.4056808054447174, "learning_rate": 2.719134651249534e-06, "loss": 0.6746, "step": 11008 }, { "epoch": 0.4562559575614406, "grad_norm": 0.39404046535491943, "learning_rate": 2.7189274317211656e-06, "loss": 0.6693, "step": 11009 }, { "epoch": 0.4562974014671143, "grad_norm": 
0.3803918659687042, "learning_rate": 2.7187202121927974e-06, "loss": 0.687, "step": 11010 }, { "epoch": 0.4563388453727879, "grad_norm": 0.39889997243881226, "learning_rate": 2.7185129926644287e-06, "loss": 0.6818, "step": 11011 }, { "epoch": 0.4563802892784616, "grad_norm": 0.3972780108451843, "learning_rate": 2.7183057731360606e-06, "loss": 0.6542, "step": 11012 }, { "epoch": 0.45642173318413526, "grad_norm": 0.4153781533241272, "learning_rate": 2.718098553607692e-06, "loss": 0.7039, "step": 11013 }, { "epoch": 0.45646317708980894, "grad_norm": 0.4415527284145355, "learning_rate": 2.7178913340793238e-06, "loss": 0.6836, "step": 11014 }, { "epoch": 0.4565046209954826, "grad_norm": 0.41895467042922974, "learning_rate": 2.717684114550955e-06, "loss": 0.7496, "step": 11015 }, { "epoch": 0.4565460649011563, "grad_norm": 0.37936803698539734, "learning_rate": 2.7174768950225874e-06, "loss": 0.6295, "step": 11016 }, { "epoch": 0.45658750880682997, "grad_norm": 0.4163251519203186, "learning_rate": 2.717269675494219e-06, "loss": 0.6478, "step": 11017 }, { "epoch": 0.45662895271250364, "grad_norm": 0.3695342242717743, "learning_rate": 2.7170624559658506e-06, "loss": 0.6758, "step": 11018 }, { "epoch": 0.4566703966181773, "grad_norm": 0.4327620565891266, "learning_rate": 2.7168552364374824e-06, "loss": 0.7075, "step": 11019 }, { "epoch": 0.45671184052385094, "grad_norm": 0.4081602990627289, "learning_rate": 2.7166480169091138e-06, "loss": 0.6647, "step": 11020 }, { "epoch": 0.4567532844295246, "grad_norm": 0.39616838097572327, "learning_rate": 2.7164407973807456e-06, "loss": 0.6938, "step": 11021 }, { "epoch": 0.4567947283351983, "grad_norm": 0.4005618691444397, "learning_rate": 2.716233577852377e-06, "loss": 0.6682, "step": 11022 }, { "epoch": 0.456836172240872, "grad_norm": 0.3990097939968109, "learning_rate": 2.7160263583240088e-06, "loss": 0.708, "step": 11023 }, { "epoch": 0.45687761614654565, "grad_norm": 0.4063732326030731, "learning_rate": 2.71581913879564e-06, 
"loss": 0.6995, "step": 11024 }, { "epoch": 0.45691906005221933, "grad_norm": 0.4292859137058258, "learning_rate": 2.715611919267272e-06, "loss": 0.7429, "step": 11025 }, { "epoch": 0.456960503957893, "grad_norm": 0.4322301745414734, "learning_rate": 2.7154046997389038e-06, "loss": 0.6583, "step": 11026 }, { "epoch": 0.4570019478635667, "grad_norm": 0.39700940251350403, "learning_rate": 2.715197480210535e-06, "loss": 0.6831, "step": 11027 }, { "epoch": 0.45704339176924036, "grad_norm": 0.41327154636383057, "learning_rate": 2.714990260682167e-06, "loss": 0.7007, "step": 11028 }, { "epoch": 0.457084835674914, "grad_norm": 0.3812848627567291, "learning_rate": 2.7147830411537983e-06, "loss": 0.6865, "step": 11029 }, { "epoch": 0.45712627958058766, "grad_norm": 0.4133034646511078, "learning_rate": 2.71457582162543e-06, "loss": 0.6852, "step": 11030 }, { "epoch": 0.45716772348626133, "grad_norm": 0.42187151312828064, "learning_rate": 2.7143686020970615e-06, "loss": 0.698, "step": 11031 }, { "epoch": 0.457209167391935, "grad_norm": 0.3703000247478485, "learning_rate": 2.7141613825686934e-06, "loss": 0.6816, "step": 11032 }, { "epoch": 0.4572506112976087, "grad_norm": 0.45361965894699097, "learning_rate": 2.7139541630403247e-06, "loss": 0.707, "step": 11033 }, { "epoch": 0.45729205520328237, "grad_norm": 0.3807827830314636, "learning_rate": 2.713746943511957e-06, "loss": 0.7258, "step": 11034 }, { "epoch": 0.45733349910895604, "grad_norm": 0.3980445861816406, "learning_rate": 2.7135397239835888e-06, "loss": 0.6884, "step": 11035 }, { "epoch": 0.4573749430146297, "grad_norm": 0.37374451756477356, "learning_rate": 2.71333250445522e-06, "loss": 0.6396, "step": 11036 }, { "epoch": 0.45741638692030334, "grad_norm": 0.3747153878211975, "learning_rate": 2.713125284926852e-06, "loss": 0.6721, "step": 11037 }, { "epoch": 0.457457830825977, "grad_norm": 0.43894830346107483, "learning_rate": 2.7129180653984834e-06, "loss": 0.7479, "step": 11038 }, { "epoch": 0.4574992747316507, 
"grad_norm": 0.39191851019859314, "learning_rate": 2.712710845870115e-06, "loss": 0.6581, "step": 11039 }, { "epoch": 0.4575407186373244, "grad_norm": 0.4144415259361267, "learning_rate": 2.7125036263417466e-06, "loss": 0.728, "step": 11040 }, { "epoch": 0.45758216254299805, "grad_norm": 0.4504624307155609, "learning_rate": 2.7122964068133784e-06, "loss": 0.6775, "step": 11041 }, { "epoch": 0.4576236064486717, "grad_norm": 0.4267991781234741, "learning_rate": 2.7120891872850097e-06, "loss": 0.72, "step": 11042 }, { "epoch": 0.4576650503543454, "grad_norm": 0.43962401151657104, "learning_rate": 2.7118819677566416e-06, "loss": 0.7076, "step": 11043 }, { "epoch": 0.4577064942600191, "grad_norm": 0.39161714911460876, "learning_rate": 2.7116747482282734e-06, "loss": 0.6533, "step": 11044 }, { "epoch": 0.45774793816569276, "grad_norm": 0.38124167919158936, "learning_rate": 2.7114675286999048e-06, "loss": 0.6962, "step": 11045 }, { "epoch": 0.4577893820713664, "grad_norm": 0.4161529242992401, "learning_rate": 2.7112603091715366e-06, "loss": 0.7544, "step": 11046 }, { "epoch": 0.45783082597704006, "grad_norm": 0.4410530924797058, "learning_rate": 2.711053089643168e-06, "loss": 0.6686, "step": 11047 }, { "epoch": 0.45787226988271373, "grad_norm": 0.5071407556533813, "learning_rate": 2.7108458701147998e-06, "loss": 0.6958, "step": 11048 }, { "epoch": 0.4579137137883874, "grad_norm": 0.4109571874141693, "learning_rate": 2.710638650586431e-06, "loss": 0.6957, "step": 11049 }, { "epoch": 0.4579551576940611, "grad_norm": 0.47870010137557983, "learning_rate": 2.7104314310580634e-06, "loss": 0.7141, "step": 11050 }, { "epoch": 0.45799660159973477, "grad_norm": 0.37814196944236755, "learning_rate": 2.7102242115296943e-06, "loss": 0.6667, "step": 11051 }, { "epoch": 0.45803804550540844, "grad_norm": 0.4037674367427826, "learning_rate": 2.7100169920013266e-06, "loss": 0.7023, "step": 11052 }, { "epoch": 0.4580794894110821, "grad_norm": 0.40082207322120667, "learning_rate": 
2.7098097724729584e-06, "loss": 0.7109, "step": 11053 }, { "epoch": 0.4581209333167558, "grad_norm": 0.4080982804298401, "learning_rate": 2.7096025529445898e-06, "loss": 0.6539, "step": 11054 }, { "epoch": 0.4581623772224294, "grad_norm": 0.4657817482948303, "learning_rate": 2.7093953334162216e-06, "loss": 0.694, "step": 11055 }, { "epoch": 0.4582038211281031, "grad_norm": 0.4042403995990753, "learning_rate": 2.709188113887853e-06, "loss": 0.6467, "step": 11056 }, { "epoch": 0.45824526503377677, "grad_norm": 0.39588502049446106, "learning_rate": 2.7089808943594848e-06, "loss": 0.6543, "step": 11057 }, { "epoch": 0.45828670893945045, "grad_norm": 0.4253116548061371, "learning_rate": 2.708773674831116e-06, "loss": 0.7163, "step": 11058 }, { "epoch": 0.4583281528451241, "grad_norm": 0.3875236511230469, "learning_rate": 2.708566455302748e-06, "loss": 0.6321, "step": 11059 }, { "epoch": 0.4583695967507978, "grad_norm": 0.41057148575782776, "learning_rate": 2.7083592357743798e-06, "loss": 0.6836, "step": 11060 }, { "epoch": 0.4584110406564715, "grad_norm": 0.4294869601726532, "learning_rate": 2.708152016246011e-06, "loss": 0.6649, "step": 11061 }, { "epoch": 0.45845248456214516, "grad_norm": 0.43284764885902405, "learning_rate": 2.707944796717643e-06, "loss": 0.6907, "step": 11062 }, { "epoch": 0.45849392846781883, "grad_norm": 0.40644901990890503, "learning_rate": 2.7077375771892744e-06, "loss": 0.6876, "step": 11063 }, { "epoch": 0.45853537237349246, "grad_norm": 0.4286835193634033, "learning_rate": 2.707530357660906e-06, "loss": 0.7295, "step": 11064 }, { "epoch": 0.45857681627916613, "grad_norm": 0.38506999611854553, "learning_rate": 2.7073231381325375e-06, "loss": 0.6964, "step": 11065 }, { "epoch": 0.4586182601848398, "grad_norm": 0.40911439061164856, "learning_rate": 2.7071159186041694e-06, "loss": 0.6885, "step": 11066 }, { "epoch": 0.4586597040905135, "grad_norm": 0.40578508377075195, "learning_rate": 2.7069086990758007e-06, "loss": 0.6792, "step": 11067 }, { 
"epoch": 0.45870114799618716, "grad_norm": 0.415713906288147, "learning_rate": 2.706701479547433e-06, "loss": 0.7029, "step": 11068 }, { "epoch": 0.45874259190186084, "grad_norm": 0.40949469804763794, "learning_rate": 2.7064942600190648e-06, "loss": 0.751, "step": 11069 }, { "epoch": 0.4587840358075345, "grad_norm": 0.4660669267177582, "learning_rate": 2.706287040490696e-06, "loss": 0.6812, "step": 11070 }, { "epoch": 0.4588254797132082, "grad_norm": 0.42611274123191833, "learning_rate": 2.706079820962328e-06, "loss": 0.6489, "step": 11071 }, { "epoch": 0.4588669236188818, "grad_norm": 0.4142078757286072, "learning_rate": 2.7058726014339594e-06, "loss": 0.6943, "step": 11072 }, { "epoch": 0.4589083675245555, "grad_norm": 0.4200544059276581, "learning_rate": 2.705665381905591e-06, "loss": 0.6875, "step": 11073 }, { "epoch": 0.45894981143022917, "grad_norm": 0.4499918520450592, "learning_rate": 2.7054581623772226e-06, "loss": 0.7078, "step": 11074 }, { "epoch": 0.45899125533590285, "grad_norm": 0.40911298990249634, "learning_rate": 2.7052509428488544e-06, "loss": 0.658, "step": 11075 }, { "epoch": 0.4590326992415765, "grad_norm": 0.4008288085460663, "learning_rate": 2.7050437233204858e-06, "loss": 0.6582, "step": 11076 }, { "epoch": 0.4590741431472502, "grad_norm": 0.4159731864929199, "learning_rate": 2.7048365037921176e-06, "loss": 0.6538, "step": 11077 }, { "epoch": 0.4591155870529239, "grad_norm": 0.39277616143226624, "learning_rate": 2.7046292842637494e-06, "loss": 0.6495, "step": 11078 }, { "epoch": 0.45915703095859756, "grad_norm": 0.4392695426940918, "learning_rate": 2.7044220647353808e-06, "loss": 0.696, "step": 11079 }, { "epoch": 0.45919847486427123, "grad_norm": 0.4126371741294861, "learning_rate": 2.7042148452070126e-06, "loss": 0.6919, "step": 11080 }, { "epoch": 0.45923991876994485, "grad_norm": 0.3887927830219269, "learning_rate": 2.704007625678644e-06, "loss": 0.6835, "step": 11081 }, { "epoch": 0.45928136267561853, "grad_norm": 0.3839223086833954, 
"learning_rate": 2.7038004061502758e-06, "loss": 0.693, "step": 11082 }, { "epoch": 0.4593228065812922, "grad_norm": 0.38559722900390625, "learning_rate": 2.703593186621907e-06, "loss": 0.702, "step": 11083 }, { "epoch": 0.4593642504869659, "grad_norm": 0.4121198058128357, "learning_rate": 2.7033859670935394e-06, "loss": 0.7231, "step": 11084 }, { "epoch": 0.45940569439263956, "grad_norm": 0.39820775389671326, "learning_rate": 2.7031787475651703e-06, "loss": 0.7014, "step": 11085 }, { "epoch": 0.45944713829831324, "grad_norm": 0.38914281129837036, "learning_rate": 2.7029715280368026e-06, "loss": 0.6814, "step": 11086 }, { "epoch": 0.4594885822039869, "grad_norm": 0.3773519694805145, "learning_rate": 2.7027643085084344e-06, "loss": 0.6855, "step": 11087 }, { "epoch": 0.4595300261096606, "grad_norm": 0.43106913566589355, "learning_rate": 2.7025570889800658e-06, "loss": 0.6604, "step": 11088 }, { "epoch": 0.45957147001533427, "grad_norm": 0.4008869528770447, "learning_rate": 2.7023498694516976e-06, "loss": 0.6794, "step": 11089 }, { "epoch": 0.4596129139210079, "grad_norm": 0.41924574971199036, "learning_rate": 2.702142649923329e-06, "loss": 0.6448, "step": 11090 }, { "epoch": 0.45965435782668157, "grad_norm": 0.40154513716697693, "learning_rate": 2.7019354303949608e-06, "loss": 0.729, "step": 11091 }, { "epoch": 0.45969580173235525, "grad_norm": 0.40835919976234436, "learning_rate": 2.701728210866592e-06, "loss": 0.6349, "step": 11092 }, { "epoch": 0.4597372456380289, "grad_norm": 0.41681328415870667, "learning_rate": 2.701520991338224e-06, "loss": 0.7322, "step": 11093 }, { "epoch": 0.4597786895437026, "grad_norm": 0.4502260088920593, "learning_rate": 2.7013137718098554e-06, "loss": 0.6733, "step": 11094 }, { "epoch": 0.4598201334493763, "grad_norm": 0.37679728865623474, "learning_rate": 2.701106552281487e-06, "loss": 0.647, "step": 11095 }, { "epoch": 0.45986157735504996, "grad_norm": 0.40633487701416016, "learning_rate": 2.700899332753119e-06, "loss": 0.6885, 
"step": 11096 }, { "epoch": 0.45990302126072363, "grad_norm": 0.4069505035877228, "learning_rate": 2.7006921132247504e-06, "loss": 0.6829, "step": 11097 }, { "epoch": 0.45994446516639725, "grad_norm": 0.44154998660087585, "learning_rate": 2.700484893696382e-06, "loss": 0.6951, "step": 11098 }, { "epoch": 0.45998590907207093, "grad_norm": 0.41747498512268066, "learning_rate": 2.7002776741680136e-06, "loss": 0.688, "step": 11099 }, { "epoch": 0.4600273529777446, "grad_norm": 0.43520429730415344, "learning_rate": 2.7000704546396454e-06, "loss": 0.7227, "step": 11100 }, { "epoch": 0.4600687968834183, "grad_norm": 0.4050680696964264, "learning_rate": 2.6998632351112767e-06, "loss": 0.689, "step": 11101 }, { "epoch": 0.46011024078909196, "grad_norm": 0.3939696252346039, "learning_rate": 2.699656015582909e-06, "loss": 0.6672, "step": 11102 }, { "epoch": 0.46015168469476564, "grad_norm": 0.4252401888370514, "learning_rate": 2.69944879605454e-06, "loss": 0.7075, "step": 11103 }, { "epoch": 0.4601931286004393, "grad_norm": 0.4103040099143982, "learning_rate": 2.699241576526172e-06, "loss": 0.6995, "step": 11104 }, { "epoch": 0.460234572506113, "grad_norm": 0.4442475140094757, "learning_rate": 2.699034356997804e-06, "loss": 0.7183, "step": 11105 }, { "epoch": 0.46027601641178667, "grad_norm": 0.44601768255233765, "learning_rate": 2.6988271374694354e-06, "loss": 0.7185, "step": 11106 }, { "epoch": 0.4603174603174603, "grad_norm": 0.3692470192909241, "learning_rate": 2.698619917941067e-06, "loss": 0.684, "step": 11107 }, { "epoch": 0.46035890422313397, "grad_norm": 0.429762601852417, "learning_rate": 2.6984126984126986e-06, "loss": 0.7566, "step": 11108 }, { "epoch": 0.46040034812880765, "grad_norm": 0.3877778947353363, "learning_rate": 2.6982054788843304e-06, "loss": 0.6655, "step": 11109 }, { "epoch": 0.4604417920344813, "grad_norm": 0.40138059854507446, "learning_rate": 2.6979982593559618e-06, "loss": 0.7009, "step": 11110 }, { "epoch": 0.460483235940155, "grad_norm": 
0.41679006814956665, "learning_rate": 2.6977910398275936e-06, "loss": 0.6556, "step": 11111 }, { "epoch": 0.4605246798458287, "grad_norm": 0.44560715556144714, "learning_rate": 2.697583820299225e-06, "loss": 0.7043, "step": 11112 }, { "epoch": 0.46056612375150235, "grad_norm": 0.40604913234710693, "learning_rate": 2.6973766007708568e-06, "loss": 0.6547, "step": 11113 }, { "epoch": 0.46060756765717603, "grad_norm": 0.4061640202999115, "learning_rate": 2.6971693812424886e-06, "loss": 0.6774, "step": 11114 }, { "epoch": 0.4606490115628497, "grad_norm": 0.41733548045158386, "learning_rate": 2.69696216171412e-06, "loss": 0.6938, "step": 11115 }, { "epoch": 0.46069045546852333, "grad_norm": 0.3916509747505188, "learning_rate": 2.6967549421857518e-06, "loss": 0.7092, "step": 11116 }, { "epoch": 0.460731899374197, "grad_norm": 0.4092314839363098, "learning_rate": 2.696547722657383e-06, "loss": 0.6329, "step": 11117 }, { "epoch": 0.4607733432798707, "grad_norm": 0.44937360286712646, "learning_rate": 2.6963405031290154e-06, "loss": 0.6863, "step": 11118 }, { "epoch": 0.46081478718554436, "grad_norm": 0.40027719736099243, "learning_rate": 2.6961332836006463e-06, "loss": 0.6897, "step": 11119 }, { "epoch": 0.46085623109121804, "grad_norm": 0.39452603459358215, "learning_rate": 2.6959260640722786e-06, "loss": 0.6991, "step": 11120 }, { "epoch": 0.4608976749968917, "grad_norm": 0.38913100957870483, "learning_rate": 2.6957188445439104e-06, "loss": 0.6405, "step": 11121 }, { "epoch": 0.4609391189025654, "grad_norm": 0.4196983277797699, "learning_rate": 2.6955116250155418e-06, "loss": 0.7178, "step": 11122 }, { "epoch": 0.46098056280823907, "grad_norm": 0.40368756651878357, "learning_rate": 2.6953044054871736e-06, "loss": 0.7119, "step": 11123 }, { "epoch": 0.46102200671391275, "grad_norm": 0.4047142565250397, "learning_rate": 2.695097185958805e-06, "loss": 0.6758, "step": 11124 }, { "epoch": 0.46106345061958637, "grad_norm": 0.39317867159843445, "learning_rate": 
2.6948899664304368e-06, "loss": 0.703, "step": 11125 }, { "epoch": 0.46110489452526005, "grad_norm": 0.42687445878982544, "learning_rate": 2.694682746902068e-06, "loss": 0.7212, "step": 11126 }, { "epoch": 0.4611463384309337, "grad_norm": 0.3832915723323822, "learning_rate": 2.6944755273737e-06, "loss": 0.6981, "step": 11127 }, { "epoch": 0.4611877823366074, "grad_norm": 0.38915538787841797, "learning_rate": 2.6942683078453314e-06, "loss": 0.6904, "step": 11128 }, { "epoch": 0.4612292262422811, "grad_norm": 0.41386979818344116, "learning_rate": 2.694061088316963e-06, "loss": 0.7229, "step": 11129 }, { "epoch": 0.46127067014795475, "grad_norm": 0.4211133122444153, "learning_rate": 2.693853868788595e-06, "loss": 0.7134, "step": 11130 }, { "epoch": 0.46131211405362843, "grad_norm": 0.41509777307510376, "learning_rate": 2.6936466492602264e-06, "loss": 0.7051, "step": 11131 }, { "epoch": 0.4613535579593021, "grad_norm": 0.4328169524669647, "learning_rate": 2.693439429731858e-06, "loss": 0.647, "step": 11132 }, { "epoch": 0.46139500186497573, "grad_norm": 0.41960057616233826, "learning_rate": 2.6932322102034896e-06, "loss": 0.708, "step": 11133 }, { "epoch": 0.4614364457706494, "grad_norm": 0.41000857949256897, "learning_rate": 2.6930249906751214e-06, "loss": 0.7445, "step": 11134 }, { "epoch": 0.4614778896763231, "grad_norm": 0.4023102819919586, "learning_rate": 2.6928177711467528e-06, "loss": 0.7253, "step": 11135 }, { "epoch": 0.46151933358199676, "grad_norm": 0.3988569974899292, "learning_rate": 2.692610551618385e-06, "loss": 0.6494, "step": 11136 }, { "epoch": 0.46156077748767044, "grad_norm": 0.4133259952068329, "learning_rate": 2.692403332090016e-06, "loss": 0.6929, "step": 11137 }, { "epoch": 0.4616022213933441, "grad_norm": 0.4169234037399292, "learning_rate": 2.692196112561648e-06, "loss": 0.6973, "step": 11138 }, { "epoch": 0.4616436652990178, "grad_norm": 0.40685009956359863, "learning_rate": 2.69198889303328e-06, "loss": 0.6678, "step": 11139 }, { "epoch": 
0.46168510920469147, "grad_norm": 0.4177339971065521, "learning_rate": 2.6917816735049114e-06, "loss": 0.6475, "step": 11140 }, { "epoch": 0.46172655311036515, "grad_norm": 0.4211559295654297, "learning_rate": 2.691574453976543e-06, "loss": 0.7119, "step": 11141 }, { "epoch": 0.46176799701603877, "grad_norm": 0.3892132341861725, "learning_rate": 2.6913672344481746e-06, "loss": 0.6901, "step": 11142 }, { "epoch": 0.46180944092171244, "grad_norm": 0.39078250527381897, "learning_rate": 2.6911600149198064e-06, "loss": 0.6045, "step": 11143 }, { "epoch": 0.4618508848273861, "grad_norm": 0.42629191279411316, "learning_rate": 2.6909527953914378e-06, "loss": 0.7203, "step": 11144 }, { "epoch": 0.4618923287330598, "grad_norm": 0.3952932357788086, "learning_rate": 2.6907455758630696e-06, "loss": 0.6184, "step": 11145 }, { "epoch": 0.4619337726387335, "grad_norm": 0.4084804654121399, "learning_rate": 2.690538356334701e-06, "loss": 0.6791, "step": 11146 }, { "epoch": 0.46197521654440715, "grad_norm": 0.40639325976371765, "learning_rate": 2.6903311368063328e-06, "loss": 0.709, "step": 11147 }, { "epoch": 0.46201666045008083, "grad_norm": 0.3865593671798706, "learning_rate": 2.6901239172779646e-06, "loss": 0.7188, "step": 11148 }, { "epoch": 0.4620581043557545, "grad_norm": 0.40918588638305664, "learning_rate": 2.689916697749596e-06, "loss": 0.6631, "step": 11149 }, { "epoch": 0.4620995482614282, "grad_norm": 0.4436027705669403, "learning_rate": 2.6897094782212278e-06, "loss": 0.739, "step": 11150 }, { "epoch": 0.4621409921671018, "grad_norm": 0.4142022430896759, "learning_rate": 2.689502258692859e-06, "loss": 0.7046, "step": 11151 }, { "epoch": 0.4621824360727755, "grad_norm": 0.4158035218715668, "learning_rate": 2.6892950391644914e-06, "loss": 0.6892, "step": 11152 }, { "epoch": 0.46222387997844916, "grad_norm": 0.4455680847167969, "learning_rate": 2.6890878196361224e-06, "loss": 0.7122, "step": 11153 }, { "epoch": 0.46226532388412284, "grad_norm": 0.40724867582321167, 
"learning_rate": 2.6888806001077546e-06, "loss": 0.7146, "step": 11154 }, { "epoch": 0.4623067677897965, "grad_norm": 0.4016520380973816, "learning_rate": 2.6886733805793856e-06, "loss": 0.686, "step": 11155 }, { "epoch": 0.4623482116954702, "grad_norm": 0.41035157442092896, "learning_rate": 2.6884661610510178e-06, "loss": 0.6741, "step": 11156 }, { "epoch": 0.46238965560114387, "grad_norm": 0.403272420167923, "learning_rate": 2.6882589415226496e-06, "loss": 0.6548, "step": 11157 }, { "epoch": 0.46243109950681754, "grad_norm": 0.42731815576553345, "learning_rate": 2.688051721994281e-06, "loss": 0.7454, "step": 11158 }, { "epoch": 0.46247254341249117, "grad_norm": 0.4065122902393341, "learning_rate": 2.6878445024659128e-06, "loss": 0.677, "step": 11159 }, { "epoch": 0.46251398731816484, "grad_norm": 0.38682854175567627, "learning_rate": 2.687637282937544e-06, "loss": 0.626, "step": 11160 }, { "epoch": 0.4625554312238385, "grad_norm": 0.43228790163993835, "learning_rate": 2.687430063409176e-06, "loss": 0.6841, "step": 11161 }, { "epoch": 0.4625968751295122, "grad_norm": 0.3832246661186218, "learning_rate": 2.6872228438808074e-06, "loss": 0.649, "step": 11162 }, { "epoch": 0.4626383190351859, "grad_norm": 0.44277992844581604, "learning_rate": 2.687015624352439e-06, "loss": 0.6976, "step": 11163 }, { "epoch": 0.46267976294085955, "grad_norm": 0.41466712951660156, "learning_rate": 2.6868084048240706e-06, "loss": 0.6913, "step": 11164 }, { "epoch": 0.46272120684653323, "grad_norm": 0.37682676315307617, "learning_rate": 2.6866011852957024e-06, "loss": 0.6935, "step": 11165 }, { "epoch": 0.4627626507522069, "grad_norm": 0.38312166929244995, "learning_rate": 2.686393965767334e-06, "loss": 0.686, "step": 11166 }, { "epoch": 0.4628040946578806, "grad_norm": 0.40527021884918213, "learning_rate": 2.6861867462389656e-06, "loss": 0.6643, "step": 11167 }, { "epoch": 0.4628455385635542, "grad_norm": 0.3953838646411896, "learning_rate": 2.6859795267105974e-06, "loss": 0.62, "step": 
11168 }, { "epoch": 0.4628869824692279, "grad_norm": 0.4136052429676056, "learning_rate": 2.6857723071822288e-06, "loss": 0.6934, "step": 11169 }, { "epoch": 0.46292842637490156, "grad_norm": 0.4169231653213501, "learning_rate": 2.685565087653861e-06, "loss": 0.7235, "step": 11170 }, { "epoch": 0.46296987028057524, "grad_norm": 0.4529958665370941, "learning_rate": 2.685357868125492e-06, "loss": 0.7178, "step": 11171 }, { "epoch": 0.4630113141862489, "grad_norm": 0.42302411794662476, "learning_rate": 2.685150648597124e-06, "loss": 0.6804, "step": 11172 }, { "epoch": 0.4630527580919226, "grad_norm": 0.4239014983177185, "learning_rate": 2.684943429068756e-06, "loss": 0.7234, "step": 11173 }, { "epoch": 0.46309420199759627, "grad_norm": 0.4115825593471527, "learning_rate": 2.6847362095403874e-06, "loss": 0.667, "step": 11174 }, { "epoch": 0.46313564590326994, "grad_norm": 0.41107621788978577, "learning_rate": 2.684528990012019e-06, "loss": 0.7029, "step": 11175 }, { "epoch": 0.4631770898089436, "grad_norm": 0.37145453691482544, "learning_rate": 2.6843217704836506e-06, "loss": 0.7002, "step": 11176 }, { "epoch": 0.46321853371461724, "grad_norm": 0.5006809234619141, "learning_rate": 2.6841145509552824e-06, "loss": 0.7411, "step": 11177 }, { "epoch": 0.4632599776202909, "grad_norm": 0.4266074597835541, "learning_rate": 2.6839073314269138e-06, "loss": 0.6973, "step": 11178 }, { "epoch": 0.4633014215259646, "grad_norm": 0.3802022635936737, "learning_rate": 2.6837001118985456e-06, "loss": 0.6582, "step": 11179 }, { "epoch": 0.4633428654316383, "grad_norm": 0.38080036640167236, "learning_rate": 2.683492892370177e-06, "loss": 0.6433, "step": 11180 }, { "epoch": 0.46338430933731195, "grad_norm": 0.4272611439228058, "learning_rate": 2.6832856728418088e-06, "loss": 0.7402, "step": 11181 }, { "epoch": 0.4634257532429856, "grad_norm": 0.4095204770565033, "learning_rate": 2.6830784533134406e-06, "loss": 0.6753, "step": 11182 }, { "epoch": 0.4634671971486593, "grad_norm": 
0.39807790517807007, "learning_rate": 2.682871233785072e-06, "loss": 0.6157, "step": 11183 }, { "epoch": 0.463508641054333, "grad_norm": 0.3881725072860718, "learning_rate": 2.6826640142567038e-06, "loss": 0.668, "step": 11184 }, { "epoch": 0.46355008496000666, "grad_norm": 0.4209165871143341, "learning_rate": 2.682456794728335e-06, "loss": 0.6758, "step": 11185 }, { "epoch": 0.4635915288656803, "grad_norm": 0.4349238872528076, "learning_rate": 2.6822495751999674e-06, "loss": 0.6682, "step": 11186 }, { "epoch": 0.46363297277135396, "grad_norm": 0.3940873146057129, "learning_rate": 2.6820423556715984e-06, "loss": 0.6687, "step": 11187 }, { "epoch": 0.46367441667702763, "grad_norm": 0.4084358215332031, "learning_rate": 2.6818351361432306e-06, "loss": 0.6936, "step": 11188 }, { "epoch": 0.4637158605827013, "grad_norm": 0.39920103549957275, "learning_rate": 2.681627916614862e-06, "loss": 0.6771, "step": 11189 }, { "epoch": 0.463757304488375, "grad_norm": 0.39023837447166443, "learning_rate": 2.681420697086494e-06, "loss": 0.7234, "step": 11190 }, { "epoch": 0.46379874839404867, "grad_norm": 0.4536566734313965, "learning_rate": 2.6812134775581256e-06, "loss": 0.7559, "step": 11191 }, { "epoch": 0.46384019229972234, "grad_norm": 0.40461117029190063, "learning_rate": 2.681006258029757e-06, "loss": 0.7167, "step": 11192 }, { "epoch": 0.463881636205396, "grad_norm": 0.41064390540122986, "learning_rate": 2.680799038501389e-06, "loss": 0.7327, "step": 11193 }, { "epoch": 0.46392308011106964, "grad_norm": 0.4117027521133423, "learning_rate": 2.68059181897302e-06, "loss": 0.676, "step": 11194 }, { "epoch": 0.4639645240167433, "grad_norm": 0.42216360569000244, "learning_rate": 2.680384599444652e-06, "loss": 0.6895, "step": 11195 }, { "epoch": 0.464005967922417, "grad_norm": 0.41259244084358215, "learning_rate": 2.6801773799162834e-06, "loss": 0.7015, "step": 11196 }, { "epoch": 0.4640474118280907, "grad_norm": 0.4024483859539032, "learning_rate": 2.679970160387915e-06, "loss": 
0.6859, "step": 11197 }, { "epoch": 0.46408885573376435, "grad_norm": 0.4277755916118622, "learning_rate": 2.6797629408595466e-06, "loss": 0.6758, "step": 11198 }, { "epoch": 0.464130299639438, "grad_norm": 0.45284128189086914, "learning_rate": 2.6795557213311784e-06, "loss": 0.7598, "step": 11199 }, { "epoch": 0.4641717435451117, "grad_norm": 0.419006884098053, "learning_rate": 2.67934850180281e-06, "loss": 0.6592, "step": 11200 }, { "epoch": 0.4642131874507854, "grad_norm": 0.4490922689437866, "learning_rate": 2.6791412822744416e-06, "loss": 0.76, "step": 11201 }, { "epoch": 0.46425463135645906, "grad_norm": 0.4028441309928894, "learning_rate": 2.678934062746074e-06, "loss": 0.7141, "step": 11202 }, { "epoch": 0.4642960752621327, "grad_norm": 0.4196886420249939, "learning_rate": 2.6787268432177048e-06, "loss": 0.7167, "step": 11203 }, { "epoch": 0.46433751916780636, "grad_norm": 0.3859315514564514, "learning_rate": 2.678519623689337e-06, "loss": 0.6827, "step": 11204 }, { "epoch": 0.46437896307348003, "grad_norm": 0.40718165040016174, "learning_rate": 2.678312404160968e-06, "loss": 0.6705, "step": 11205 }, { "epoch": 0.4644204069791537, "grad_norm": 0.4229024350643158, "learning_rate": 2.6781051846326e-06, "loss": 0.6963, "step": 11206 }, { "epoch": 0.4644618508848274, "grad_norm": 0.4159987270832062, "learning_rate": 2.6778979651042316e-06, "loss": 0.6324, "step": 11207 }, { "epoch": 0.46450329479050106, "grad_norm": 0.3814437985420227, "learning_rate": 2.6776907455758634e-06, "loss": 0.629, "step": 11208 }, { "epoch": 0.46454473869617474, "grad_norm": 0.3941105008125305, "learning_rate": 2.677483526047495e-06, "loss": 0.7175, "step": 11209 }, { "epoch": 0.4645861826018484, "grad_norm": 0.4210870563983917, "learning_rate": 2.6772763065191266e-06, "loss": 0.6755, "step": 11210 }, { "epoch": 0.4646276265075221, "grad_norm": 0.40596073865890503, "learning_rate": 2.6770690869907584e-06, "loss": 0.7295, "step": 11211 }, { "epoch": 0.4646690704131957, "grad_norm": 
0.42053118348121643, "learning_rate": 2.6768618674623898e-06, "loss": 0.7454, "step": 11212 }, { "epoch": 0.4647105143188694, "grad_norm": 0.4199621379375458, "learning_rate": 2.6766546479340216e-06, "loss": 0.6776, "step": 11213 }, { "epoch": 0.46475195822454307, "grad_norm": 0.4141105115413666, "learning_rate": 2.676447428405653e-06, "loss": 0.6987, "step": 11214 }, { "epoch": 0.46479340213021675, "grad_norm": 0.37793076038360596, "learning_rate": 2.6762402088772848e-06, "loss": 0.6753, "step": 11215 }, { "epoch": 0.4648348460358904, "grad_norm": 0.448324590921402, "learning_rate": 2.676032989348916e-06, "loss": 0.748, "step": 11216 }, { "epoch": 0.4648762899415641, "grad_norm": 0.40591832995414734, "learning_rate": 2.675825769820548e-06, "loss": 0.6754, "step": 11217 }, { "epoch": 0.4649177338472378, "grad_norm": 0.3897480368614197, "learning_rate": 2.6756185502921798e-06, "loss": 0.629, "step": 11218 }, { "epoch": 0.46495917775291146, "grad_norm": 0.42676663398742676, "learning_rate": 2.675411330763811e-06, "loss": 0.6923, "step": 11219 }, { "epoch": 0.4650006216585851, "grad_norm": 0.4161882698535919, "learning_rate": 2.6752041112354434e-06, "loss": 0.6992, "step": 11220 }, { "epoch": 0.46504206556425876, "grad_norm": 0.4115017354488373, "learning_rate": 2.6749968917070744e-06, "loss": 0.7063, "step": 11221 }, { "epoch": 0.46508350946993243, "grad_norm": 0.4191448390483856, "learning_rate": 2.6747896721787066e-06, "loss": 0.6975, "step": 11222 }, { "epoch": 0.4651249533756061, "grad_norm": 0.46519437432289124, "learning_rate": 2.674582452650338e-06, "loss": 0.6843, "step": 11223 }, { "epoch": 0.4651663972812798, "grad_norm": 0.4161173403263092, "learning_rate": 2.67437523312197e-06, "loss": 0.675, "step": 11224 }, { "epoch": 0.46520784118695346, "grad_norm": 0.3683862090110779, "learning_rate": 2.674168013593601e-06, "loss": 0.6545, "step": 11225 }, { "epoch": 0.46524928509262714, "grad_norm": 0.41165271401405334, "learning_rate": 2.673960794065233e-06, 
"loss": 0.6991, "step": 11226 }, { "epoch": 0.4652907289983008, "grad_norm": 0.42277175188064575, "learning_rate": 2.673753574536865e-06, "loss": 0.6743, "step": 11227 }, { "epoch": 0.4653321729039745, "grad_norm": 0.43579211831092834, "learning_rate": 2.673546355008496e-06, "loss": 0.7025, "step": 11228 }, { "epoch": 0.4653736168096481, "grad_norm": 0.3758833110332489, "learning_rate": 2.673339135480128e-06, "loss": 0.7156, "step": 11229 }, { "epoch": 0.4654150607153218, "grad_norm": 0.4107937216758728, "learning_rate": 2.6731319159517594e-06, "loss": 0.6841, "step": 11230 }, { "epoch": 0.46545650462099547, "grad_norm": 0.4381679594516754, "learning_rate": 2.672924696423391e-06, "loss": 0.7385, "step": 11231 }, { "epoch": 0.46549794852666915, "grad_norm": 0.4296608567237854, "learning_rate": 2.6727174768950226e-06, "loss": 0.7539, "step": 11232 }, { "epoch": 0.4655393924323428, "grad_norm": 0.4551069736480713, "learning_rate": 2.6725102573666544e-06, "loss": 0.7246, "step": 11233 }, { "epoch": 0.4655808363380165, "grad_norm": 0.3940431475639343, "learning_rate": 2.672303037838286e-06, "loss": 0.6309, "step": 11234 }, { "epoch": 0.4656222802436902, "grad_norm": 0.4092904031276703, "learning_rate": 2.6720958183099176e-06, "loss": 0.6848, "step": 11235 }, { "epoch": 0.46566372414936386, "grad_norm": 0.40257173776626587, "learning_rate": 2.67188859878155e-06, "loss": 0.7053, "step": 11236 }, { "epoch": 0.46570516805503753, "grad_norm": 0.4007249176502228, "learning_rate": 2.6716813792531808e-06, "loss": 0.752, "step": 11237 }, { "epoch": 0.46574661196071115, "grad_norm": 0.39026397466659546, "learning_rate": 2.671474159724813e-06, "loss": 0.6464, "step": 11238 }, { "epoch": 0.46578805586638483, "grad_norm": 0.40933090448379517, "learning_rate": 2.671266940196444e-06, "loss": 0.6733, "step": 11239 }, { "epoch": 0.4658294997720585, "grad_norm": 0.4237338602542877, "learning_rate": 2.671059720668076e-06, "loss": 0.6774, "step": 11240 }, { "epoch": 0.4658709436777322, 
"grad_norm": 0.40366876125335693, "learning_rate": 2.6708525011397076e-06, "loss": 0.6589, "step": 11241 }, { "epoch": 0.46591238758340586, "grad_norm": 0.40464428067207336, "learning_rate": 2.6706452816113394e-06, "loss": 0.6843, "step": 11242 }, { "epoch": 0.46595383148907954, "grad_norm": 0.43191391229629517, "learning_rate": 2.670438062082971e-06, "loss": 0.6853, "step": 11243 }, { "epoch": 0.4659952753947532, "grad_norm": 0.42226117849349976, "learning_rate": 2.6702308425546026e-06, "loss": 0.6442, "step": 11244 }, { "epoch": 0.4660367193004269, "grad_norm": 0.3899506628513336, "learning_rate": 2.6700236230262344e-06, "loss": 0.703, "step": 11245 }, { "epoch": 0.4660781632061005, "grad_norm": 0.4078708291053772, "learning_rate": 2.6698164034978658e-06, "loss": 0.7422, "step": 11246 }, { "epoch": 0.4661196071117742, "grad_norm": 0.4090125262737274, "learning_rate": 2.6696091839694976e-06, "loss": 0.683, "step": 11247 }, { "epoch": 0.46616105101744787, "grad_norm": 0.40038812160491943, "learning_rate": 2.669401964441129e-06, "loss": 0.6703, "step": 11248 }, { "epoch": 0.46620249492312155, "grad_norm": 0.39683952927589417, "learning_rate": 2.669194744912761e-06, "loss": 0.7209, "step": 11249 }, { "epoch": 0.4662439388287952, "grad_norm": 0.3966313898563385, "learning_rate": 2.668987525384392e-06, "loss": 0.7024, "step": 11250 }, { "epoch": 0.4662853827344689, "grad_norm": 0.43369200825691223, "learning_rate": 2.668780305856024e-06, "loss": 0.7231, "step": 11251 }, { "epoch": 0.4663268266401426, "grad_norm": 0.3945905566215515, "learning_rate": 2.668573086327656e-06, "loss": 0.7018, "step": 11252 }, { "epoch": 0.46636827054581625, "grad_norm": 0.42186418175697327, "learning_rate": 2.668365866799287e-06, "loss": 0.7322, "step": 11253 }, { "epoch": 0.46640971445148993, "grad_norm": 0.37021923065185547, "learning_rate": 2.6681586472709194e-06, "loss": 0.6963, "step": 11254 }, { "epoch": 0.46645115835716355, "grad_norm": 0.44502708315849304, "learning_rate": 
2.6679514277425504e-06, "loss": 0.6873, "step": 11255 }, { "epoch": 0.46649260226283723, "grad_norm": 0.4111633598804474, "learning_rate": 2.6677442082141826e-06, "loss": 0.7158, "step": 11256 }, { "epoch": 0.4665340461685109, "grad_norm": 0.3981391191482544, "learning_rate": 2.667536988685814e-06, "loss": 0.6333, "step": 11257 }, { "epoch": 0.4665754900741846, "grad_norm": 0.4147587716579437, "learning_rate": 2.667329769157446e-06, "loss": 0.7153, "step": 11258 }, { "epoch": 0.46661693397985826, "grad_norm": 0.36941131949424744, "learning_rate": 2.667122549629077e-06, "loss": 0.6287, "step": 11259 }, { "epoch": 0.46665837788553194, "grad_norm": 0.4135850965976715, "learning_rate": 2.666915330100709e-06, "loss": 0.7053, "step": 11260 }, { "epoch": 0.4666998217912056, "grad_norm": 0.4090293347835541, "learning_rate": 2.666708110572341e-06, "loss": 0.694, "step": 11261 }, { "epoch": 0.4667412656968793, "grad_norm": 0.39830052852630615, "learning_rate": 2.666500891043972e-06, "loss": 0.7053, "step": 11262 }, { "epoch": 0.46678270960255297, "grad_norm": 0.40726739168167114, "learning_rate": 2.666293671515604e-06, "loss": 0.6465, "step": 11263 }, { "epoch": 0.4668241535082266, "grad_norm": 0.39864087104797363, "learning_rate": 2.6660864519872354e-06, "loss": 0.6907, "step": 11264 }, { "epoch": 0.46686559741390027, "grad_norm": 0.4562920331954956, "learning_rate": 2.665879232458867e-06, "loss": 0.749, "step": 11265 }, { "epoch": 0.46690704131957395, "grad_norm": 0.39818036556243896, "learning_rate": 2.6656720129304986e-06, "loss": 0.6475, "step": 11266 }, { "epoch": 0.4669484852252476, "grad_norm": 0.4208519458770752, "learning_rate": 2.6654647934021304e-06, "loss": 0.6595, "step": 11267 }, { "epoch": 0.4669899291309213, "grad_norm": 0.41602328419685364, "learning_rate": 2.6652575738737618e-06, "loss": 0.6725, "step": 11268 }, { "epoch": 0.467031373036595, "grad_norm": 0.4702516794204712, "learning_rate": 2.6650503543453936e-06, "loss": 0.7239, "step": 11269 }, { 
"epoch": 0.46707281694226865, "grad_norm": 0.41449037194252014, "learning_rate": 2.664843134817026e-06, "loss": 0.719, "step": 11270 }, { "epoch": 0.46711426084794233, "grad_norm": 0.4287029504776001, "learning_rate": 2.6646359152886568e-06, "loss": 0.6885, "step": 11271 }, { "epoch": 0.467155704753616, "grad_norm": 0.41523489356040955, "learning_rate": 2.664428695760289e-06, "loss": 0.7067, "step": 11272 }, { "epoch": 0.46719714865928963, "grad_norm": 0.44421476125717163, "learning_rate": 2.66422147623192e-06, "loss": 0.6849, "step": 11273 }, { "epoch": 0.4672385925649633, "grad_norm": 0.41227471828460693, "learning_rate": 2.664014256703552e-06, "loss": 0.6965, "step": 11274 }, { "epoch": 0.467280036470637, "grad_norm": 0.43132659792900085, "learning_rate": 2.6638070371751836e-06, "loss": 0.7288, "step": 11275 }, { "epoch": 0.46732148037631066, "grad_norm": 0.4015710651874542, "learning_rate": 2.6635998176468154e-06, "loss": 0.6609, "step": 11276 }, { "epoch": 0.46736292428198434, "grad_norm": 0.41580522060394287, "learning_rate": 2.6633925981184468e-06, "loss": 0.682, "step": 11277 }, { "epoch": 0.467404368187658, "grad_norm": 0.4613843262195587, "learning_rate": 2.6631853785900786e-06, "loss": 0.6816, "step": 11278 }, { "epoch": 0.4674458120933317, "grad_norm": 0.41593068838119507, "learning_rate": 2.6629781590617104e-06, "loss": 0.6643, "step": 11279 }, { "epoch": 0.46748725599900537, "grad_norm": 0.41972148418426514, "learning_rate": 2.662770939533342e-06, "loss": 0.6765, "step": 11280 }, { "epoch": 0.467528699904679, "grad_norm": 0.4007648527622223, "learning_rate": 2.6625637200049736e-06, "loss": 0.6956, "step": 11281 }, { "epoch": 0.46757014381035267, "grad_norm": 0.3916735053062439, "learning_rate": 2.662356500476605e-06, "loss": 0.6693, "step": 11282 }, { "epoch": 0.46761158771602634, "grad_norm": 0.42664656043052673, "learning_rate": 2.662149280948237e-06, "loss": 0.6869, "step": 11283 }, { "epoch": 0.4676530316217, "grad_norm": 0.4421672523021698, 
"learning_rate": 2.661942061419868e-06, "loss": 0.755, "step": 11284 }, { "epoch": 0.4676944755273737, "grad_norm": 0.3841400742530823, "learning_rate": 2.6617348418915e-06, "loss": 0.686, "step": 11285 }, { "epoch": 0.4677359194330474, "grad_norm": 0.39451977610588074, "learning_rate": 2.6615276223631314e-06, "loss": 0.6675, "step": 11286 }, { "epoch": 0.46777736333872105, "grad_norm": 0.4100334048271179, "learning_rate": 2.661320402834763e-06, "loss": 0.6713, "step": 11287 }, { "epoch": 0.46781880724439473, "grad_norm": 0.3835384249687195, "learning_rate": 2.6611131833063954e-06, "loss": 0.655, "step": 11288 }, { "epoch": 0.4678602511500684, "grad_norm": 0.38414764404296875, "learning_rate": 2.6609059637780264e-06, "loss": 0.6655, "step": 11289 }, { "epoch": 0.46790169505574203, "grad_norm": 0.43537354469299316, "learning_rate": 2.6606987442496586e-06, "loss": 0.686, "step": 11290 }, { "epoch": 0.4679431389614157, "grad_norm": 0.41374656558036804, "learning_rate": 2.66049152472129e-06, "loss": 0.6711, "step": 11291 }, { "epoch": 0.4679845828670894, "grad_norm": 0.43200749158859253, "learning_rate": 2.660284305192922e-06, "loss": 0.6501, "step": 11292 }, { "epoch": 0.46802602677276306, "grad_norm": 0.39816364645957947, "learning_rate": 2.660077085664553e-06, "loss": 0.6589, "step": 11293 }, { "epoch": 0.46806747067843674, "grad_norm": 0.42823320627212524, "learning_rate": 2.659869866136185e-06, "loss": 0.7546, "step": 11294 }, { "epoch": 0.4681089145841104, "grad_norm": 0.40132007002830505, "learning_rate": 2.659662646607817e-06, "loss": 0.6503, "step": 11295 }, { "epoch": 0.4681503584897841, "grad_norm": 0.4054487943649292, "learning_rate": 2.659455427079448e-06, "loss": 0.6741, "step": 11296 }, { "epoch": 0.46819180239545777, "grad_norm": 0.404396116733551, "learning_rate": 2.65924820755108e-06, "loss": 0.6852, "step": 11297 }, { "epoch": 0.46823324630113144, "grad_norm": 0.40296295285224915, "learning_rate": 2.6590409880227114e-06, "loss": 0.6823, "step": 11298 
}, { "epoch": 0.46827469020680507, "grad_norm": 0.3938201069831848, "learning_rate": 2.658833768494343e-06, "loss": 0.6423, "step": 11299 }, { "epoch": 0.46831613411247874, "grad_norm": 0.3931317925453186, "learning_rate": 2.6586265489659746e-06, "loss": 0.6826, "step": 11300 }, { "epoch": 0.4683575780181524, "grad_norm": 0.4503451883792877, "learning_rate": 2.6584193294376064e-06, "loss": 0.7451, "step": 11301 }, { "epoch": 0.4683990219238261, "grad_norm": 0.4446885585784912, "learning_rate": 2.6582121099092378e-06, "loss": 0.6798, "step": 11302 }, { "epoch": 0.4684404658294998, "grad_norm": 0.4059770703315735, "learning_rate": 2.6580048903808696e-06, "loss": 0.6915, "step": 11303 }, { "epoch": 0.46848190973517345, "grad_norm": 0.4197401702404022, "learning_rate": 2.657797670852502e-06, "loss": 0.6545, "step": 11304 }, { "epoch": 0.46852335364084713, "grad_norm": 0.4146048426628113, "learning_rate": 2.6575904513241328e-06, "loss": 0.7275, "step": 11305 }, { "epoch": 0.4685647975465208, "grad_norm": 0.40017473697662354, "learning_rate": 2.657383231795765e-06, "loss": 0.6351, "step": 11306 }, { "epoch": 0.4686062414521944, "grad_norm": 0.4120080769062042, "learning_rate": 2.657176012267396e-06, "loss": 0.6461, "step": 11307 }, { "epoch": 0.4686476853578681, "grad_norm": 0.4195287525653839, "learning_rate": 2.6569687927390282e-06, "loss": 0.6262, "step": 11308 }, { "epoch": 0.4686891292635418, "grad_norm": 0.3913799226284027, "learning_rate": 2.6567615732106596e-06, "loss": 0.689, "step": 11309 }, { "epoch": 0.46873057316921546, "grad_norm": 0.4036743640899658, "learning_rate": 2.6565543536822914e-06, "loss": 0.6924, "step": 11310 }, { "epoch": 0.46877201707488914, "grad_norm": 0.4109649956226349, "learning_rate": 2.656347134153923e-06, "loss": 0.6567, "step": 11311 }, { "epoch": 0.4688134609805628, "grad_norm": 0.42733505368232727, "learning_rate": 2.6561399146255546e-06, "loss": 0.6675, "step": 11312 }, { "epoch": 0.4688549048862365, "grad_norm": 
0.41861337423324585, "learning_rate": 2.6559326950971864e-06, "loss": 0.722, "step": 11313 }, { "epoch": 0.46889634879191017, "grad_norm": 0.3882550895214081, "learning_rate": 2.655725475568818e-06, "loss": 0.644, "step": 11314 }, { "epoch": 0.46893779269758384, "grad_norm": 0.39804166555404663, "learning_rate": 2.6555182560404496e-06, "loss": 0.6884, "step": 11315 }, { "epoch": 0.46897923660325747, "grad_norm": 0.40709370374679565, "learning_rate": 2.655311036512081e-06, "loss": 0.6963, "step": 11316 }, { "epoch": 0.46902068050893114, "grad_norm": 0.4176960587501526, "learning_rate": 2.655103816983713e-06, "loss": 0.6904, "step": 11317 }, { "epoch": 0.4690621244146048, "grad_norm": 0.4087452292442322, "learning_rate": 2.654896597455344e-06, "loss": 0.7083, "step": 11318 }, { "epoch": 0.4691035683202785, "grad_norm": 0.4135391116142273, "learning_rate": 2.654689377926976e-06, "loss": 0.6893, "step": 11319 }, { "epoch": 0.4691450122259522, "grad_norm": 0.4051947295665741, "learning_rate": 2.6544821583986074e-06, "loss": 0.7078, "step": 11320 }, { "epoch": 0.46918645613162585, "grad_norm": 0.458431601524353, "learning_rate": 2.654274938870239e-06, "loss": 0.7285, "step": 11321 }, { "epoch": 0.46922790003729953, "grad_norm": 0.3892737030982971, "learning_rate": 2.6540677193418714e-06, "loss": 0.6636, "step": 11322 }, { "epoch": 0.4692693439429732, "grad_norm": 0.42468512058258057, "learning_rate": 2.6538604998135024e-06, "loss": 0.6952, "step": 11323 }, { "epoch": 0.4693107878486469, "grad_norm": 0.3670247793197632, "learning_rate": 2.6536532802851346e-06, "loss": 0.6285, "step": 11324 }, { "epoch": 0.4693522317543205, "grad_norm": 0.428126722574234, "learning_rate": 2.653446060756766e-06, "loss": 0.7373, "step": 11325 }, { "epoch": 0.4693936756599942, "grad_norm": 0.4004904329776764, "learning_rate": 2.653238841228398e-06, "loss": 0.6351, "step": 11326 }, { "epoch": 0.46943511956566786, "grad_norm": 0.4415145814418793, "learning_rate": 2.653031621700029e-06, "loss": 
0.7209, "step": 11327 }, { "epoch": 0.46947656347134153, "grad_norm": 0.4037712812423706, "learning_rate": 2.652824402171661e-06, "loss": 0.7222, "step": 11328 }, { "epoch": 0.4695180073770152, "grad_norm": 0.40898892283439636, "learning_rate": 2.6526171826432924e-06, "loss": 0.6545, "step": 11329 }, { "epoch": 0.4695594512826889, "grad_norm": 0.4330713748931885, "learning_rate": 2.652409963114924e-06, "loss": 0.6597, "step": 11330 }, { "epoch": 0.46960089518836257, "grad_norm": 0.3888359069824219, "learning_rate": 2.652202743586556e-06, "loss": 0.6787, "step": 11331 }, { "epoch": 0.46964233909403624, "grad_norm": 0.43450841307640076, "learning_rate": 2.6519955240581874e-06, "loss": 0.683, "step": 11332 }, { "epoch": 0.4696837829997099, "grad_norm": 0.3883165717124939, "learning_rate": 2.651788304529819e-06, "loss": 0.6405, "step": 11333 }, { "epoch": 0.46972522690538354, "grad_norm": 0.4168032705783844, "learning_rate": 2.6515810850014506e-06, "loss": 0.6758, "step": 11334 }, { "epoch": 0.4697666708110572, "grad_norm": 0.3981277644634247, "learning_rate": 2.6513738654730824e-06, "loss": 0.5984, "step": 11335 }, { "epoch": 0.4698081147167309, "grad_norm": 0.463856041431427, "learning_rate": 2.6511666459447138e-06, "loss": 0.6638, "step": 11336 }, { "epoch": 0.4698495586224046, "grad_norm": 0.403384804725647, "learning_rate": 2.6509594264163456e-06, "loss": 0.6726, "step": 11337 }, { "epoch": 0.46989100252807825, "grad_norm": 0.4381128251552582, "learning_rate": 2.650752206887977e-06, "loss": 0.6826, "step": 11338 }, { "epoch": 0.4699324464337519, "grad_norm": 0.40064650774002075, "learning_rate": 2.650544987359609e-06, "loss": 0.6924, "step": 11339 }, { "epoch": 0.4699738903394256, "grad_norm": 0.4129202663898468, "learning_rate": 2.650337767831241e-06, "loss": 0.6779, "step": 11340 }, { "epoch": 0.4700153342450993, "grad_norm": 0.4021745026111603, "learning_rate": 2.650130548302872e-06, "loss": 0.6616, "step": 11341 }, { "epoch": 0.4700567781507729, "grad_norm": 
0.423465758562088, "learning_rate": 2.6499233287745042e-06, "loss": 0.6913, "step": 11342 }, { "epoch": 0.4700982220564466, "grad_norm": 0.38998717069625854, "learning_rate": 2.6497161092461356e-06, "loss": 0.675, "step": 11343 }, { "epoch": 0.47013966596212026, "grad_norm": 0.3829076290130615, "learning_rate": 2.6495088897177674e-06, "loss": 0.7096, "step": 11344 }, { "epoch": 0.47018110986779393, "grad_norm": 0.39907750487327576, "learning_rate": 2.649301670189399e-06, "loss": 0.704, "step": 11345 }, { "epoch": 0.4702225537734676, "grad_norm": 0.4129486680030823, "learning_rate": 2.6490944506610306e-06, "loss": 0.7075, "step": 11346 }, { "epoch": 0.4702639976791413, "grad_norm": 0.40914076566696167, "learning_rate": 2.648887231132662e-06, "loss": 0.6627, "step": 11347 }, { "epoch": 0.47030544158481496, "grad_norm": 0.4120344817638397, "learning_rate": 2.648680011604294e-06, "loss": 0.7181, "step": 11348 }, { "epoch": 0.47034688549048864, "grad_norm": 0.4412282109260559, "learning_rate": 2.6484727920759256e-06, "loss": 0.7185, "step": 11349 }, { "epoch": 0.4703883293961623, "grad_norm": 0.4474334418773651, "learning_rate": 2.648265572547557e-06, "loss": 0.6941, "step": 11350 }, { "epoch": 0.47042977330183594, "grad_norm": 0.45297300815582275, "learning_rate": 2.648058353019189e-06, "loss": 0.7136, "step": 11351 }, { "epoch": 0.4704712172075096, "grad_norm": 0.38765719532966614, "learning_rate": 2.64785113349082e-06, "loss": 0.6868, "step": 11352 }, { "epoch": 0.4705126611131833, "grad_norm": 0.45698076486587524, "learning_rate": 2.647643913962452e-06, "loss": 0.6785, "step": 11353 }, { "epoch": 0.47055410501885697, "grad_norm": 0.4141662120819092, "learning_rate": 2.6474366944340834e-06, "loss": 0.652, "step": 11354 }, { "epoch": 0.47059554892453065, "grad_norm": 0.4493732154369354, "learning_rate": 2.647229474905715e-06, "loss": 0.6783, "step": 11355 }, { "epoch": 0.4706369928302043, "grad_norm": 0.43075716495513916, "learning_rate": 2.6470222553773474e-06, 
"loss": 0.6956, "step": 11356 }, { "epoch": 0.470678436735878, "grad_norm": 0.39770838618278503, "learning_rate": 2.6468150358489784e-06, "loss": 0.688, "step": 11357 }, { "epoch": 0.4707198806415517, "grad_norm": 0.4198416471481323, "learning_rate": 2.6466078163206106e-06, "loss": 0.7207, "step": 11358 }, { "epoch": 0.47076132454722536, "grad_norm": 0.4094206988811493, "learning_rate": 2.646400596792242e-06, "loss": 0.7051, "step": 11359 }, { "epoch": 0.470802768452899, "grad_norm": 0.4325757622718811, "learning_rate": 2.646193377263874e-06, "loss": 0.7098, "step": 11360 }, { "epoch": 0.47084421235857266, "grad_norm": 0.4404744505882263, "learning_rate": 2.645986157735505e-06, "loss": 0.6726, "step": 11361 }, { "epoch": 0.47088565626424633, "grad_norm": 0.44359090924263, "learning_rate": 2.645778938207137e-06, "loss": 0.7461, "step": 11362 }, { "epoch": 0.47092710016992, "grad_norm": 0.39852702617645264, "learning_rate": 2.6455717186787684e-06, "loss": 0.6752, "step": 11363 }, { "epoch": 0.4709685440755937, "grad_norm": 0.44236046075820923, "learning_rate": 2.6453644991504e-06, "loss": 0.6946, "step": 11364 }, { "epoch": 0.47100998798126736, "grad_norm": 0.43115130066871643, "learning_rate": 2.645157279622032e-06, "loss": 0.6604, "step": 11365 }, { "epoch": 0.47105143188694104, "grad_norm": 0.41119763255119324, "learning_rate": 2.6449500600936634e-06, "loss": 0.7151, "step": 11366 }, { "epoch": 0.4710928757926147, "grad_norm": 0.39191895723342896, "learning_rate": 2.6447428405652952e-06, "loss": 0.6604, "step": 11367 }, { "epoch": 0.47113431969828834, "grad_norm": 0.4050554633140564, "learning_rate": 2.6445356210369266e-06, "loss": 0.7427, "step": 11368 }, { "epoch": 0.471175763603962, "grad_norm": 0.3746495544910431, "learning_rate": 2.6443284015085584e-06, "loss": 0.6302, "step": 11369 }, { "epoch": 0.4712172075096357, "grad_norm": 0.4161635935306549, "learning_rate": 2.64412118198019e-06, "loss": 0.7184, "step": 11370 }, { "epoch": 0.47125865141530937, 
"grad_norm": 0.38502535223960876, "learning_rate": 2.6439139624518216e-06, "loss": 0.6564, "step": 11371 }, { "epoch": 0.47130009532098305, "grad_norm": 0.39421147108078003, "learning_rate": 2.643706742923453e-06, "loss": 0.6769, "step": 11372 }, { "epoch": 0.4713415392266567, "grad_norm": 0.42189276218414307, "learning_rate": 2.643499523395085e-06, "loss": 0.7478, "step": 11373 }, { "epoch": 0.4713829831323304, "grad_norm": 0.40506711602211, "learning_rate": 2.643292303866717e-06, "loss": 0.7043, "step": 11374 }, { "epoch": 0.4714244270380041, "grad_norm": 0.45878490805625916, "learning_rate": 2.6430850843383484e-06, "loss": 0.7019, "step": 11375 }, { "epoch": 0.47146587094367776, "grad_norm": 0.3727363049983978, "learning_rate": 2.6428778648099802e-06, "loss": 0.6613, "step": 11376 }, { "epoch": 0.4715073148493514, "grad_norm": 0.41939523816108704, "learning_rate": 2.6426706452816116e-06, "loss": 0.6809, "step": 11377 }, { "epoch": 0.47154875875502505, "grad_norm": 0.42505860328674316, "learning_rate": 2.6424634257532434e-06, "loss": 0.6594, "step": 11378 }, { "epoch": 0.47159020266069873, "grad_norm": 0.4066566228866577, "learning_rate": 2.642256206224875e-06, "loss": 0.6833, "step": 11379 }, { "epoch": 0.4716316465663724, "grad_norm": 0.4299336075782776, "learning_rate": 2.6420489866965066e-06, "loss": 0.7344, "step": 11380 }, { "epoch": 0.4716730904720461, "grad_norm": 0.39767220616340637, "learning_rate": 2.641841767168138e-06, "loss": 0.6564, "step": 11381 }, { "epoch": 0.47171453437771976, "grad_norm": 0.41492271423339844, "learning_rate": 2.64163454763977e-06, "loss": 0.6855, "step": 11382 }, { "epoch": 0.47175597828339344, "grad_norm": 0.4342914819717407, "learning_rate": 2.6414273281114016e-06, "loss": 0.6885, "step": 11383 }, { "epoch": 0.4717974221890671, "grad_norm": 0.41960659623146057, "learning_rate": 2.641220108583033e-06, "loss": 0.6648, "step": 11384 }, { "epoch": 0.4718388660947408, "grad_norm": 0.40715715289115906, "learning_rate": 
2.641012889054665e-06, "loss": 0.6924, "step": 11385 }, { "epoch": 0.4718803100004144, "grad_norm": 0.4295501708984375, "learning_rate": 2.640805669526296e-06, "loss": 0.705, "step": 11386 }, { "epoch": 0.4719217539060881, "grad_norm": 0.4021407663822174, "learning_rate": 2.640598449997928e-06, "loss": 0.6931, "step": 11387 }, { "epoch": 0.47196319781176177, "grad_norm": 0.4279367923736572, "learning_rate": 2.6403912304695594e-06, "loss": 0.7128, "step": 11388 }, { "epoch": 0.47200464171743545, "grad_norm": 0.4057573080062866, "learning_rate": 2.640184010941191e-06, "loss": 0.6875, "step": 11389 }, { "epoch": 0.4720460856231091, "grad_norm": 0.3977506458759308, "learning_rate": 2.6399767914128226e-06, "loss": 0.7109, "step": 11390 }, { "epoch": 0.4720875295287828, "grad_norm": 0.4762437641620636, "learning_rate": 2.6397695718844544e-06, "loss": 0.7216, "step": 11391 }, { "epoch": 0.4721289734344565, "grad_norm": 0.3883860409259796, "learning_rate": 2.6395623523560866e-06, "loss": 0.6344, "step": 11392 }, { "epoch": 0.47217041734013016, "grad_norm": 0.3890821933746338, "learning_rate": 2.639355132827718e-06, "loss": 0.7249, "step": 11393 }, { "epoch": 0.47221186124580383, "grad_norm": 0.4166756570339203, "learning_rate": 2.63914791329935e-06, "loss": 0.7306, "step": 11394 }, { "epoch": 0.47225330515147745, "grad_norm": 0.40263593196868896, "learning_rate": 2.638940693770981e-06, "loss": 0.7439, "step": 11395 }, { "epoch": 0.47229474905715113, "grad_norm": 0.4387344717979431, "learning_rate": 2.638733474242613e-06, "loss": 0.679, "step": 11396 }, { "epoch": 0.4723361929628248, "grad_norm": 0.43287423253059387, "learning_rate": 2.6385262547142444e-06, "loss": 0.6836, "step": 11397 }, { "epoch": 0.4723776368684985, "grad_norm": 0.4239111542701721, "learning_rate": 2.6383190351858762e-06, "loss": 0.6729, "step": 11398 }, { "epoch": 0.47241908077417216, "grad_norm": 0.43029096722602844, "learning_rate": 2.6381118156575076e-06, "loss": 0.6935, "step": 11399 }, { "epoch": 
0.47246052467984584, "grad_norm": 0.4543099105358124, "learning_rate": 2.6379045961291394e-06, "loss": 0.6943, "step": 11400 }, { "epoch": 0.4725019685855195, "grad_norm": 0.38043293356895447, "learning_rate": 2.6376973766007712e-06, "loss": 0.6678, "step": 11401 }, { "epoch": 0.4725434124911932, "grad_norm": 0.43285271525382996, "learning_rate": 2.6374901570724026e-06, "loss": 0.7224, "step": 11402 }, { "epoch": 0.4725848563968668, "grad_norm": 0.3954186737537384, "learning_rate": 2.6372829375440344e-06, "loss": 0.6588, "step": 11403 }, { "epoch": 0.4726263003025405, "grad_norm": 0.42266973853111267, "learning_rate": 2.637075718015666e-06, "loss": 0.7273, "step": 11404 }, { "epoch": 0.47266774420821417, "grad_norm": 0.41446173191070557, "learning_rate": 2.6368684984872976e-06, "loss": 0.7228, "step": 11405 }, { "epoch": 0.47270918811388785, "grad_norm": 0.39462971687316895, "learning_rate": 2.636661278958929e-06, "loss": 0.7029, "step": 11406 }, { "epoch": 0.4727506320195615, "grad_norm": 0.5781558752059937, "learning_rate": 2.636454059430561e-06, "loss": 0.761, "step": 11407 }, { "epoch": 0.4727920759252352, "grad_norm": 0.4346928894519806, "learning_rate": 2.636246839902192e-06, "loss": 0.6896, "step": 11408 }, { "epoch": 0.4728335198309089, "grad_norm": 0.3628773093223572, "learning_rate": 2.6360396203738244e-06, "loss": 0.6316, "step": 11409 }, { "epoch": 0.47287496373658255, "grad_norm": 0.42471399903297424, "learning_rate": 2.6358324008454562e-06, "loss": 0.6713, "step": 11410 }, { "epoch": 0.47291640764225623, "grad_norm": 0.4325403869152069, "learning_rate": 2.6356251813170876e-06, "loss": 0.697, "step": 11411 }, { "epoch": 0.47295785154792985, "grad_norm": 0.4441758394241333, "learning_rate": 2.6354179617887194e-06, "loss": 0.7239, "step": 11412 }, { "epoch": 0.47299929545360353, "grad_norm": 0.41379514336586, "learning_rate": 2.635210742260351e-06, "loss": 0.7007, "step": 11413 }, { "epoch": 0.4730407393592772, "grad_norm": 0.41520535945892334, 
"learning_rate": 2.6350035227319826e-06, "loss": 0.6503, "step": 11414 }, { "epoch": 0.4730821832649509, "grad_norm": 0.3927014470100403, "learning_rate": 2.634796303203614e-06, "loss": 0.6407, "step": 11415 }, { "epoch": 0.47312362717062456, "grad_norm": 0.4386177062988281, "learning_rate": 2.634589083675246e-06, "loss": 0.7126, "step": 11416 }, { "epoch": 0.47316507107629824, "grad_norm": 0.45869630575180054, "learning_rate": 2.6343818641468776e-06, "loss": 0.7122, "step": 11417 }, { "epoch": 0.4732065149819719, "grad_norm": 0.44832679629325867, "learning_rate": 2.634174644618509e-06, "loss": 0.7068, "step": 11418 }, { "epoch": 0.4732479588876456, "grad_norm": 0.42184901237487793, "learning_rate": 2.633967425090141e-06, "loss": 0.71, "step": 11419 }, { "epoch": 0.47328940279331927, "grad_norm": 0.3978688418865204, "learning_rate": 2.633760205561772e-06, "loss": 0.7041, "step": 11420 }, { "epoch": 0.4733308466989929, "grad_norm": 0.4360009729862213, "learning_rate": 2.633552986033404e-06, "loss": 0.6682, "step": 11421 }, { "epoch": 0.47337229060466657, "grad_norm": 0.3902774155139923, "learning_rate": 2.6333457665050354e-06, "loss": 0.7019, "step": 11422 }, { "epoch": 0.47341373451034024, "grad_norm": 0.44248345494270325, "learning_rate": 2.633138546976667e-06, "loss": 0.7135, "step": 11423 }, { "epoch": 0.4734551784160139, "grad_norm": 0.4178068935871124, "learning_rate": 2.6329313274482986e-06, "loss": 0.6536, "step": 11424 }, { "epoch": 0.4734966223216876, "grad_norm": 0.43097278475761414, "learning_rate": 2.6327241079199304e-06, "loss": 0.6677, "step": 11425 }, { "epoch": 0.4735380662273613, "grad_norm": 0.3958195447921753, "learning_rate": 2.6325168883915626e-06, "loss": 0.6882, "step": 11426 }, { "epoch": 0.47357951013303495, "grad_norm": 0.40992850065231323, "learning_rate": 2.632309668863194e-06, "loss": 0.6515, "step": 11427 }, { "epoch": 0.47362095403870863, "grad_norm": 0.44090375304222107, "learning_rate": 2.632102449334826e-06, "loss": 0.7048, "step": 
11428 }, { "epoch": 0.47366239794438225, "grad_norm": 0.43152981996536255, "learning_rate": 2.6318952298064572e-06, "loss": 0.7144, "step": 11429 }, { "epoch": 0.47370384185005593, "grad_norm": 0.40817901492118835, "learning_rate": 2.631688010278089e-06, "loss": 0.6611, "step": 11430 }, { "epoch": 0.4737452857557296, "grad_norm": 0.4613114595413208, "learning_rate": 2.6314807907497204e-06, "loss": 0.7307, "step": 11431 }, { "epoch": 0.4737867296614033, "grad_norm": 0.41390907764434814, "learning_rate": 2.6312735712213522e-06, "loss": 0.703, "step": 11432 }, { "epoch": 0.47382817356707696, "grad_norm": 0.4283941090106964, "learning_rate": 2.6310663516929836e-06, "loss": 0.698, "step": 11433 }, { "epoch": 0.47386961747275064, "grad_norm": 0.4182806611061096, "learning_rate": 2.6308591321646154e-06, "loss": 0.6571, "step": 11434 }, { "epoch": 0.4739110613784243, "grad_norm": 0.43244579434394836, "learning_rate": 2.6306519126362472e-06, "loss": 0.7693, "step": 11435 }, { "epoch": 0.473952505284098, "grad_norm": 0.4175407886505127, "learning_rate": 2.6304446931078786e-06, "loss": 0.723, "step": 11436 }, { "epoch": 0.47399394918977167, "grad_norm": 0.36724182963371277, "learning_rate": 2.6302374735795104e-06, "loss": 0.667, "step": 11437 }, { "epoch": 0.4740353930954453, "grad_norm": 0.4450782239437103, "learning_rate": 2.630030254051142e-06, "loss": 0.6901, "step": 11438 }, { "epoch": 0.47407683700111897, "grad_norm": 0.42237043380737305, "learning_rate": 2.6298230345227736e-06, "loss": 0.7263, "step": 11439 }, { "epoch": 0.47411828090679264, "grad_norm": 0.42181065678596497, "learning_rate": 2.629615814994405e-06, "loss": 0.6958, "step": 11440 }, { "epoch": 0.4741597248124663, "grad_norm": 0.3753145933151245, "learning_rate": 2.629408595466037e-06, "loss": 0.6858, "step": 11441 }, { "epoch": 0.47420116871814, "grad_norm": 0.4241846203804016, "learning_rate": 2.629201375937668e-06, "loss": 0.719, "step": 11442 }, { "epoch": 0.4742426126238137, "grad_norm": 
0.3956572711467743, "learning_rate": 2.6289941564093004e-06, "loss": 0.689, "step": 11443 }, { "epoch": 0.47428405652948735, "grad_norm": 0.43075209856033325, "learning_rate": 2.6287869368809322e-06, "loss": 0.6636, "step": 11444 }, { "epoch": 0.47432550043516103, "grad_norm": 0.42895960807800293, "learning_rate": 2.6285797173525636e-06, "loss": 0.7026, "step": 11445 }, { "epoch": 0.4743669443408347, "grad_norm": 0.4139692187309265, "learning_rate": 2.6283724978241954e-06, "loss": 0.6531, "step": 11446 }, { "epoch": 0.47440838824650833, "grad_norm": 0.40550312399864197, "learning_rate": 2.628165278295827e-06, "loss": 0.7053, "step": 11447 }, { "epoch": 0.474449832152182, "grad_norm": 0.4048577845096588, "learning_rate": 2.6279580587674586e-06, "loss": 0.7164, "step": 11448 }, { "epoch": 0.4744912760578557, "grad_norm": 0.40656283497810364, "learning_rate": 2.62775083923909e-06, "loss": 0.7125, "step": 11449 }, { "epoch": 0.47453271996352936, "grad_norm": 0.4224267899990082, "learning_rate": 2.627543619710722e-06, "loss": 0.6992, "step": 11450 }, { "epoch": 0.47457416386920304, "grad_norm": 0.4325571060180664, "learning_rate": 2.627336400182353e-06, "loss": 0.6758, "step": 11451 }, { "epoch": 0.4746156077748767, "grad_norm": 0.45373156666755676, "learning_rate": 2.627129180653985e-06, "loss": 0.7144, "step": 11452 }, { "epoch": 0.4746570516805504, "grad_norm": 0.4058187007904053, "learning_rate": 2.626921961125617e-06, "loss": 0.6714, "step": 11453 }, { "epoch": 0.47469849558622407, "grad_norm": 0.4216562807559967, "learning_rate": 2.626714741597248e-06, "loss": 0.6478, "step": 11454 }, { "epoch": 0.4747399394918977, "grad_norm": 0.3842514455318451, "learning_rate": 2.62650752206888e-06, "loss": 0.6758, "step": 11455 }, { "epoch": 0.47478138339757137, "grad_norm": 0.41620180010795593, "learning_rate": 2.6263003025405114e-06, "loss": 0.7096, "step": 11456 }, { "epoch": 0.47482282730324504, "grad_norm": 0.40963149070739746, "learning_rate": 2.6260930830121432e-06, 
"loss": 0.6937, "step": 11457 }, { "epoch": 0.4748642712089187, "grad_norm": 0.4420025944709778, "learning_rate": 2.6258858634837746e-06, "loss": 0.699, "step": 11458 }, { "epoch": 0.4749057151145924, "grad_norm": 0.42657753825187683, "learning_rate": 2.6256786439554064e-06, "loss": 0.7046, "step": 11459 }, { "epoch": 0.4749471590202661, "grad_norm": 0.4237903952598572, "learning_rate": 2.625471424427038e-06, "loss": 0.6451, "step": 11460 }, { "epoch": 0.47498860292593975, "grad_norm": 0.41626524925231934, "learning_rate": 2.62526420489867e-06, "loss": 0.6755, "step": 11461 }, { "epoch": 0.47503004683161343, "grad_norm": 0.4227791130542755, "learning_rate": 2.625056985370302e-06, "loss": 0.6653, "step": 11462 }, { "epoch": 0.4750714907372871, "grad_norm": 0.4050874710083008, "learning_rate": 2.6248497658419332e-06, "loss": 0.6952, "step": 11463 }, { "epoch": 0.4751129346429607, "grad_norm": 0.3859892189502716, "learning_rate": 2.624642546313565e-06, "loss": 0.7024, "step": 11464 }, { "epoch": 0.4751543785486344, "grad_norm": 0.4008544385433197, "learning_rate": 2.6244353267851964e-06, "loss": 0.7363, "step": 11465 }, { "epoch": 0.4751958224543081, "grad_norm": 0.4063122272491455, "learning_rate": 2.6242281072568282e-06, "loss": 0.6624, "step": 11466 }, { "epoch": 0.47523726635998176, "grad_norm": 0.39847856760025024, "learning_rate": 2.6240208877284596e-06, "loss": 0.6963, "step": 11467 }, { "epoch": 0.47527871026565544, "grad_norm": 0.41683730483055115, "learning_rate": 2.6238136682000914e-06, "loss": 0.6912, "step": 11468 }, { "epoch": 0.4753201541713291, "grad_norm": 0.41590383648872375, "learning_rate": 2.623606448671723e-06, "loss": 0.7046, "step": 11469 }, { "epoch": 0.4753615980770028, "grad_norm": 0.40955421328544617, "learning_rate": 2.6233992291433546e-06, "loss": 0.7017, "step": 11470 }, { "epoch": 0.47540304198267647, "grad_norm": 0.4093576669692993, "learning_rate": 2.6231920096149864e-06, "loss": 0.6967, "step": 11471 }, { "epoch": 
0.47544448588835014, "grad_norm": 0.4248949885368347, "learning_rate": 2.622984790086618e-06, "loss": 0.6453, "step": 11472 }, { "epoch": 0.47548592979402377, "grad_norm": 0.39768165349960327, "learning_rate": 2.6227775705582496e-06, "loss": 0.703, "step": 11473 }, { "epoch": 0.47552737369969744, "grad_norm": 0.4450720548629761, "learning_rate": 2.622570351029881e-06, "loss": 0.7585, "step": 11474 }, { "epoch": 0.4755688176053711, "grad_norm": 0.3806042969226837, "learning_rate": 2.622363131501513e-06, "loss": 0.6703, "step": 11475 }, { "epoch": 0.4756102615110448, "grad_norm": 0.39418166875839233, "learning_rate": 2.622155911973144e-06, "loss": 0.687, "step": 11476 }, { "epoch": 0.4756517054167185, "grad_norm": 0.39723825454711914, "learning_rate": 2.6219486924447764e-06, "loss": 0.6663, "step": 11477 }, { "epoch": 0.47569314932239215, "grad_norm": 0.4337148368358612, "learning_rate": 2.6217414729164082e-06, "loss": 0.7588, "step": 11478 }, { "epoch": 0.4757345932280658, "grad_norm": 0.4224752187728882, "learning_rate": 2.6215342533880396e-06, "loss": 0.6997, "step": 11479 }, { "epoch": 0.4757760371337395, "grad_norm": 0.4324043095111847, "learning_rate": 2.6213270338596714e-06, "loss": 0.7441, "step": 11480 }, { "epoch": 0.4758174810394132, "grad_norm": 0.4136542081832886, "learning_rate": 2.621119814331303e-06, "loss": 0.6804, "step": 11481 }, { "epoch": 0.4758589249450868, "grad_norm": 0.3994368612766266, "learning_rate": 2.6209125948029346e-06, "loss": 0.6791, "step": 11482 }, { "epoch": 0.4759003688507605, "grad_norm": 0.384559690952301, "learning_rate": 2.620705375274566e-06, "loss": 0.6587, "step": 11483 }, { "epoch": 0.47594181275643416, "grad_norm": 0.42351388931274414, "learning_rate": 2.620498155746198e-06, "loss": 0.7024, "step": 11484 }, { "epoch": 0.47598325666210783, "grad_norm": 0.468805730342865, "learning_rate": 2.6202909362178292e-06, "loss": 0.7351, "step": 11485 }, { "epoch": 0.4760247005677815, "grad_norm": 0.41827571392059326, 
"learning_rate": 2.620083716689461e-06, "loss": 0.658, "step": 11486 }, { "epoch": 0.4760661444734552, "grad_norm": 0.41353392601013184, "learning_rate": 2.619876497161093e-06, "loss": 0.6924, "step": 11487 }, { "epoch": 0.47610758837912887, "grad_norm": 0.4172934889793396, "learning_rate": 2.6196692776327242e-06, "loss": 0.6514, "step": 11488 }, { "epoch": 0.47614903228480254, "grad_norm": 0.40974968671798706, "learning_rate": 2.619462058104356e-06, "loss": 0.6882, "step": 11489 }, { "epoch": 0.47619047619047616, "grad_norm": 0.4307625889778137, "learning_rate": 2.6192548385759874e-06, "loss": 0.6771, "step": 11490 }, { "epoch": 0.47623192009614984, "grad_norm": 0.4092712700366974, "learning_rate": 2.6190476190476192e-06, "loss": 0.7092, "step": 11491 }, { "epoch": 0.4762733640018235, "grad_norm": 0.41111546754837036, "learning_rate": 2.6188403995192506e-06, "loss": 0.7383, "step": 11492 }, { "epoch": 0.4763148079074972, "grad_norm": 0.4058970510959625, "learning_rate": 2.6186331799908824e-06, "loss": 0.6741, "step": 11493 }, { "epoch": 0.4763562518131709, "grad_norm": 0.394513875246048, "learning_rate": 2.618425960462514e-06, "loss": 0.7212, "step": 11494 }, { "epoch": 0.47639769571884455, "grad_norm": 0.3905352056026459, "learning_rate": 2.618218740934146e-06, "loss": 0.677, "step": 11495 }, { "epoch": 0.4764391396245182, "grad_norm": 0.4193161427974701, "learning_rate": 2.618011521405778e-06, "loss": 0.6306, "step": 11496 }, { "epoch": 0.4764805835301919, "grad_norm": 0.39033299684524536, "learning_rate": 2.6178043018774092e-06, "loss": 0.6677, "step": 11497 }, { "epoch": 0.4765220274358656, "grad_norm": 0.3951054811477661, "learning_rate": 2.617597082349041e-06, "loss": 0.7228, "step": 11498 }, { "epoch": 0.4765634713415392, "grad_norm": 0.4232650101184845, "learning_rate": 2.6173898628206724e-06, "loss": 0.6946, "step": 11499 }, { "epoch": 0.4766049152472129, "grad_norm": 0.44773146510124207, "learning_rate": 2.6171826432923042e-06, "loss": 0.702, "step": 
11500 }, { "epoch": 0.47664635915288656, "grad_norm": 0.42612895369529724, "learning_rate": 2.6169754237639356e-06, "loss": 0.6279, "step": 11501 }, { "epoch": 0.47668780305856023, "grad_norm": 0.42460209131240845, "learning_rate": 2.6167682042355674e-06, "loss": 0.7095, "step": 11502 }, { "epoch": 0.4767292469642339, "grad_norm": 0.4598274528980255, "learning_rate": 2.616560984707199e-06, "loss": 0.7108, "step": 11503 }, { "epoch": 0.4767706908699076, "grad_norm": 0.422524631023407, "learning_rate": 2.6163537651788306e-06, "loss": 0.7527, "step": 11504 }, { "epoch": 0.47681213477558126, "grad_norm": 0.388109028339386, "learning_rate": 2.6161465456504624e-06, "loss": 0.6042, "step": 11505 }, { "epoch": 0.47685357868125494, "grad_norm": 0.4475538432598114, "learning_rate": 2.615939326122094e-06, "loss": 0.6968, "step": 11506 }, { "epoch": 0.4768950225869286, "grad_norm": 0.3924456834793091, "learning_rate": 2.6157321065937256e-06, "loss": 0.7114, "step": 11507 }, { "epoch": 0.47693646649260224, "grad_norm": 0.42523661255836487, "learning_rate": 2.615524887065357e-06, "loss": 0.7, "step": 11508 }, { "epoch": 0.4769779103982759, "grad_norm": 0.4303314983844757, "learning_rate": 2.615317667536989e-06, "loss": 0.7148, "step": 11509 }, { "epoch": 0.4770193543039496, "grad_norm": 0.3966044783592224, "learning_rate": 2.61511044800862e-06, "loss": 0.6282, "step": 11510 }, { "epoch": 0.47706079820962327, "grad_norm": 0.39889660477638245, "learning_rate": 2.6149032284802524e-06, "loss": 0.6777, "step": 11511 }, { "epoch": 0.47710224211529695, "grad_norm": 0.44521674513816833, "learning_rate": 2.6146960089518834e-06, "loss": 0.7261, "step": 11512 }, { "epoch": 0.4771436860209706, "grad_norm": 0.41493889689445496, "learning_rate": 2.6144887894235156e-06, "loss": 0.6982, "step": 11513 }, { "epoch": 0.4771851299266443, "grad_norm": 0.42919066548347473, "learning_rate": 2.6142815698951474e-06, "loss": 0.6696, "step": 11514 }, { "epoch": 0.477226573832318, "grad_norm": 
0.4288454055786133, "learning_rate": 2.614074350366779e-06, "loss": 0.678, "step": 11515 }, { "epoch": 0.4772680177379916, "grad_norm": 0.47752150893211365, "learning_rate": 2.6138671308384106e-06, "loss": 0.7003, "step": 11516 }, { "epoch": 0.4773094616436653, "grad_norm": 0.46381378173828125, "learning_rate": 2.613659911310042e-06, "loss": 0.7012, "step": 11517 }, { "epoch": 0.47735090554933896, "grad_norm": 0.38335275650024414, "learning_rate": 2.613452691781674e-06, "loss": 0.6526, "step": 11518 }, { "epoch": 0.47739234945501263, "grad_norm": 0.40139323472976685, "learning_rate": 2.6132454722533052e-06, "loss": 0.7058, "step": 11519 }, { "epoch": 0.4774337933606863, "grad_norm": 0.3763595521450043, "learning_rate": 2.613038252724937e-06, "loss": 0.6389, "step": 11520 }, { "epoch": 0.47747523726636, "grad_norm": 0.424081951379776, "learning_rate": 2.6128310331965684e-06, "loss": 0.752, "step": 11521 }, { "epoch": 0.47751668117203366, "grad_norm": 0.39727458357810974, "learning_rate": 2.6126238136682002e-06, "loss": 0.7297, "step": 11522 }, { "epoch": 0.47755812507770734, "grad_norm": 0.42372527718544006, "learning_rate": 2.612416594139832e-06, "loss": 0.6857, "step": 11523 }, { "epoch": 0.477599568983381, "grad_norm": 0.4009189307689667, "learning_rate": 2.6122093746114634e-06, "loss": 0.6145, "step": 11524 }, { "epoch": 0.47764101288905464, "grad_norm": 0.40530744194984436, "learning_rate": 2.6120021550830952e-06, "loss": 0.7256, "step": 11525 }, { "epoch": 0.4776824567947283, "grad_norm": 0.40740352869033813, "learning_rate": 2.6117949355547266e-06, "loss": 0.6549, "step": 11526 }, { "epoch": 0.477723900700402, "grad_norm": 0.3994278907775879, "learning_rate": 2.6115877160263584e-06, "loss": 0.6532, "step": 11527 }, { "epoch": 0.47776534460607567, "grad_norm": 0.4334213137626648, "learning_rate": 2.61138049649799e-06, "loss": 0.7283, "step": 11528 }, { "epoch": 0.47780678851174935, "grad_norm": 0.4287005066871643, "learning_rate": 2.611173276969622e-06, 
"loss": 0.6858, "step": 11529 }, { "epoch": 0.477848232417423, "grad_norm": 0.43473875522613525, "learning_rate": 2.610966057441254e-06, "loss": 0.698, "step": 11530 }, { "epoch": 0.4778896763230967, "grad_norm": 0.41809961199760437, "learning_rate": 2.6107588379128852e-06, "loss": 0.7327, "step": 11531 }, { "epoch": 0.4779311202287704, "grad_norm": 0.4139997363090515, "learning_rate": 2.610551618384517e-06, "loss": 0.7046, "step": 11532 }, { "epoch": 0.47797256413444406, "grad_norm": 0.40067538619041443, "learning_rate": 2.6103443988561484e-06, "loss": 0.6567, "step": 11533 }, { "epoch": 0.4780140080401177, "grad_norm": 0.4183250665664673, "learning_rate": 2.6101371793277802e-06, "loss": 0.6875, "step": 11534 }, { "epoch": 0.47805545194579135, "grad_norm": 0.4032343626022339, "learning_rate": 2.6099299597994116e-06, "loss": 0.6714, "step": 11535 }, { "epoch": 0.47809689585146503, "grad_norm": 0.3915208876132965, "learning_rate": 2.6097227402710434e-06, "loss": 0.7112, "step": 11536 }, { "epoch": 0.4781383397571387, "grad_norm": 0.40662887692451477, "learning_rate": 2.609515520742675e-06, "loss": 0.7385, "step": 11537 }, { "epoch": 0.4781797836628124, "grad_norm": 0.41005921363830566, "learning_rate": 2.6093083012143066e-06, "loss": 0.6675, "step": 11538 }, { "epoch": 0.47822122756848606, "grad_norm": 0.4286688268184662, "learning_rate": 2.6091010816859384e-06, "loss": 0.7927, "step": 11539 }, { "epoch": 0.47826267147415974, "grad_norm": 0.3635653257369995, "learning_rate": 2.60889386215757e-06, "loss": 0.6177, "step": 11540 }, { "epoch": 0.4783041153798334, "grad_norm": 0.41436442732810974, "learning_rate": 2.6086866426292016e-06, "loss": 0.6978, "step": 11541 }, { "epoch": 0.4783455592855071, "grad_norm": 0.40024450421333313, "learning_rate": 2.608479423100833e-06, "loss": 0.6677, "step": 11542 }, { "epoch": 0.4783870031911807, "grad_norm": 0.3978344798088074, "learning_rate": 2.608272203572465e-06, "loss": 0.7074, "step": 11543 }, { "epoch": 0.4784284470968544, 
"grad_norm": 0.3991069197654724, "learning_rate": 2.6080649840440962e-06, "loss": 0.7031, "step": 11544 }, { "epoch": 0.47846989100252807, "grad_norm": 0.4107877016067505, "learning_rate": 2.6078577645157284e-06, "loss": 0.6694, "step": 11545 }, { "epoch": 0.47851133490820175, "grad_norm": 0.4001396596431732, "learning_rate": 2.6076505449873594e-06, "loss": 0.6569, "step": 11546 }, { "epoch": 0.4785527788138754, "grad_norm": 0.429826557636261, "learning_rate": 2.6074433254589916e-06, "loss": 0.6931, "step": 11547 }, { "epoch": 0.4785942227195491, "grad_norm": 0.4147481322288513, "learning_rate": 2.6072361059306235e-06, "loss": 0.7114, "step": 11548 }, { "epoch": 0.4786356666252228, "grad_norm": 0.4371446371078491, "learning_rate": 2.607028886402255e-06, "loss": 0.6787, "step": 11549 }, { "epoch": 0.47867711053089645, "grad_norm": 0.45762690901756287, "learning_rate": 2.6068216668738866e-06, "loss": 0.7488, "step": 11550 }, { "epoch": 0.4787185544365701, "grad_norm": 0.4165171980857849, "learning_rate": 2.606614447345518e-06, "loss": 0.7351, "step": 11551 }, { "epoch": 0.47875999834224375, "grad_norm": 0.4067695140838623, "learning_rate": 2.60640722781715e-06, "loss": 0.6504, "step": 11552 }, { "epoch": 0.47880144224791743, "grad_norm": 0.4237878620624542, "learning_rate": 2.6062000082887812e-06, "loss": 0.6578, "step": 11553 }, { "epoch": 0.4788428861535911, "grad_norm": 0.3869917690753937, "learning_rate": 2.605992788760413e-06, "loss": 0.7045, "step": 11554 }, { "epoch": 0.4788843300592648, "grad_norm": 0.4245646893978119, "learning_rate": 2.6057855692320444e-06, "loss": 0.7065, "step": 11555 }, { "epoch": 0.47892577396493846, "grad_norm": 0.4330706298351288, "learning_rate": 2.6055783497036762e-06, "loss": 0.6816, "step": 11556 }, { "epoch": 0.47896721787061214, "grad_norm": 0.39767351746559143, "learning_rate": 2.605371130175308e-06, "loss": 0.7034, "step": 11557 }, { "epoch": 0.4790086617762858, "grad_norm": 0.40080204606056213, "learning_rate": 
2.6051639106469394e-06, "loss": 0.6189, "step": 11558 }, { "epoch": 0.4790501056819595, "grad_norm": 0.4052805006504059, "learning_rate": 2.6049566911185712e-06, "loss": 0.6616, "step": 11559 }, { "epoch": 0.4790915495876331, "grad_norm": 0.38503092527389526, "learning_rate": 2.6047494715902026e-06, "loss": 0.6852, "step": 11560 }, { "epoch": 0.4791329934933068, "grad_norm": 0.4146961271762848, "learning_rate": 2.604542252061835e-06, "loss": 0.6802, "step": 11561 }, { "epoch": 0.47917443739898047, "grad_norm": 0.38257667422294617, "learning_rate": 2.604335032533466e-06, "loss": 0.655, "step": 11562 }, { "epoch": 0.47921588130465415, "grad_norm": 0.39498648047447205, "learning_rate": 2.604127813005098e-06, "loss": 0.6709, "step": 11563 }, { "epoch": 0.4792573252103278, "grad_norm": 0.4321293532848358, "learning_rate": 2.603920593476729e-06, "loss": 0.7101, "step": 11564 }, { "epoch": 0.4792987691160015, "grad_norm": 0.44235503673553467, "learning_rate": 2.6037133739483612e-06, "loss": 0.7268, "step": 11565 }, { "epoch": 0.4793402130216752, "grad_norm": 0.4245663583278656, "learning_rate": 2.603506154419993e-06, "loss": 0.7089, "step": 11566 }, { "epoch": 0.47938165692734885, "grad_norm": 0.43888047337532043, "learning_rate": 2.6032989348916244e-06, "loss": 0.7197, "step": 11567 }, { "epoch": 0.47942310083302253, "grad_norm": 0.4368878901004791, "learning_rate": 2.6030917153632562e-06, "loss": 0.7214, "step": 11568 }, { "epoch": 0.47946454473869615, "grad_norm": 0.3895343244075775, "learning_rate": 2.6028844958348876e-06, "loss": 0.668, "step": 11569 }, { "epoch": 0.47950598864436983, "grad_norm": 0.41012686491012573, "learning_rate": 2.6026772763065194e-06, "loss": 0.6682, "step": 11570 }, { "epoch": 0.4795474325500435, "grad_norm": 0.41477519273757935, "learning_rate": 2.602470056778151e-06, "loss": 0.7085, "step": 11571 }, { "epoch": 0.4795888764557172, "grad_norm": 0.4126252233982086, "learning_rate": 2.6022628372497826e-06, "loss": 0.6868, "step": 11572 }, { 
"epoch": 0.47963032036139086, "grad_norm": 0.409091979265213, "learning_rate": 2.602055617721414e-06, "loss": 0.6798, "step": 11573 }, { "epoch": 0.47967176426706454, "grad_norm": 0.4318733215332031, "learning_rate": 2.601848398193046e-06, "loss": 0.6891, "step": 11574 }, { "epoch": 0.4797132081727382, "grad_norm": 0.4004359245300293, "learning_rate": 2.6016411786646776e-06, "loss": 0.6709, "step": 11575 }, { "epoch": 0.4797546520784119, "grad_norm": 0.3953864276409149, "learning_rate": 2.601433959136309e-06, "loss": 0.707, "step": 11576 }, { "epoch": 0.4797960959840855, "grad_norm": 0.39123499393463135, "learning_rate": 2.601226739607941e-06, "loss": 0.644, "step": 11577 }, { "epoch": 0.4798375398897592, "grad_norm": 0.4328339993953705, "learning_rate": 2.6010195200795722e-06, "loss": 0.692, "step": 11578 }, { "epoch": 0.47987898379543287, "grad_norm": 0.39897745847702026, "learning_rate": 2.6008123005512045e-06, "loss": 0.6733, "step": 11579 }, { "epoch": 0.47992042770110654, "grad_norm": 0.39828118681907654, "learning_rate": 2.6006050810228354e-06, "loss": 0.6562, "step": 11580 }, { "epoch": 0.4799618716067802, "grad_norm": 0.39140596985816956, "learning_rate": 2.6003978614944676e-06, "loss": 0.6201, "step": 11581 }, { "epoch": 0.4800033155124539, "grad_norm": 0.38618969917297363, "learning_rate": 2.600190641966099e-06, "loss": 0.6478, "step": 11582 }, { "epoch": 0.4800447594181276, "grad_norm": 0.4119326174259186, "learning_rate": 2.599983422437731e-06, "loss": 0.7139, "step": 11583 }, { "epoch": 0.48008620332380125, "grad_norm": 0.3926195502281189, "learning_rate": 2.5997762029093627e-06, "loss": 0.696, "step": 11584 }, { "epoch": 0.48012764722947493, "grad_norm": 0.4104214608669281, "learning_rate": 2.599568983380994e-06, "loss": 0.6909, "step": 11585 }, { "epoch": 0.48016909113514855, "grad_norm": 0.4070115089416504, "learning_rate": 2.599361763852626e-06, "loss": 0.7871, "step": 11586 }, { "epoch": 0.48021053504082223, "grad_norm": 0.4437679946422577, 
"learning_rate": 2.5991545443242572e-06, "loss": 0.6465, "step": 11587 }, { "epoch": 0.4802519789464959, "grad_norm": 0.4363630712032318, "learning_rate": 2.598947324795889e-06, "loss": 0.6528, "step": 11588 }, { "epoch": 0.4802934228521696, "grad_norm": 0.4051705598831177, "learning_rate": 2.5987401052675204e-06, "loss": 0.6622, "step": 11589 }, { "epoch": 0.48033486675784326, "grad_norm": 0.41794949769973755, "learning_rate": 2.5985328857391522e-06, "loss": 0.658, "step": 11590 }, { "epoch": 0.48037631066351694, "grad_norm": 0.3889048099517822, "learning_rate": 2.598325666210784e-06, "loss": 0.6843, "step": 11591 }, { "epoch": 0.4804177545691906, "grad_norm": 0.4050370752811432, "learning_rate": 2.5981184466824154e-06, "loss": 0.6624, "step": 11592 }, { "epoch": 0.4804591984748643, "grad_norm": 0.3932451903820038, "learning_rate": 2.5979112271540472e-06, "loss": 0.686, "step": 11593 }, { "epoch": 0.48050064238053797, "grad_norm": 0.3967250883579254, "learning_rate": 2.5977040076256786e-06, "loss": 0.6841, "step": 11594 }, { "epoch": 0.4805420862862116, "grad_norm": 0.4180542230606079, "learning_rate": 2.597496788097311e-06, "loss": 0.6305, "step": 11595 }, { "epoch": 0.48058353019188527, "grad_norm": 0.3803776502609253, "learning_rate": 2.597289568568942e-06, "loss": 0.692, "step": 11596 }, { "epoch": 0.48062497409755894, "grad_norm": 0.3814626634120941, "learning_rate": 2.597082349040574e-06, "loss": 0.7144, "step": 11597 }, { "epoch": 0.4806664180032326, "grad_norm": 0.39796778559684753, "learning_rate": 2.596875129512205e-06, "loss": 0.6339, "step": 11598 }, { "epoch": 0.4807078619089063, "grad_norm": 0.4293161630630493, "learning_rate": 2.5966679099838372e-06, "loss": 0.7506, "step": 11599 }, { "epoch": 0.48074930581458, "grad_norm": 0.3981829583644867, "learning_rate": 2.596460690455469e-06, "loss": 0.6451, "step": 11600 }, { "epoch": 0.48079074972025365, "grad_norm": 0.4225258529186249, "learning_rate": 2.5962534709271004e-06, "loss": 0.7122, "step": 11601 
}, { "epoch": 0.48083219362592733, "grad_norm": 0.40936288237571716, "learning_rate": 2.5960462513987323e-06, "loss": 0.678, "step": 11602 }, { "epoch": 0.48087363753160095, "grad_norm": 0.38916948437690735, "learning_rate": 2.5958390318703636e-06, "loss": 0.626, "step": 11603 }, { "epoch": 0.4809150814372746, "grad_norm": 0.42521753907203674, "learning_rate": 2.5956318123419954e-06, "loss": 0.7061, "step": 11604 }, { "epoch": 0.4809565253429483, "grad_norm": 0.4177034795284271, "learning_rate": 2.595424592813627e-06, "loss": 0.7356, "step": 11605 }, { "epoch": 0.480997969248622, "grad_norm": 0.45508503913879395, "learning_rate": 2.5952173732852586e-06, "loss": 0.7056, "step": 11606 }, { "epoch": 0.48103941315429566, "grad_norm": 0.41210442781448364, "learning_rate": 2.59501015375689e-06, "loss": 0.6846, "step": 11607 }, { "epoch": 0.48108085705996934, "grad_norm": 0.47408780455589294, "learning_rate": 2.594802934228522e-06, "loss": 0.6836, "step": 11608 }, { "epoch": 0.481122300965643, "grad_norm": 0.386417955160141, "learning_rate": 2.5945957147001536e-06, "loss": 0.6782, "step": 11609 }, { "epoch": 0.4811637448713167, "grad_norm": 0.41070273518562317, "learning_rate": 2.594388495171785e-06, "loss": 0.7341, "step": 11610 }, { "epoch": 0.48120518877699037, "grad_norm": 0.47239434719085693, "learning_rate": 2.594181275643417e-06, "loss": 0.6901, "step": 11611 }, { "epoch": 0.481246632682664, "grad_norm": 0.39068931341171265, "learning_rate": 2.5939740561150482e-06, "loss": 0.6508, "step": 11612 }, { "epoch": 0.48128807658833767, "grad_norm": 0.4528723955154419, "learning_rate": 2.5937668365866805e-06, "loss": 0.7166, "step": 11613 }, { "epoch": 0.48132952049401134, "grad_norm": 0.3947731554508209, "learning_rate": 2.5935596170583114e-06, "loss": 0.6752, "step": 11614 }, { "epoch": 0.481370964399685, "grad_norm": 0.3788282871246338, "learning_rate": 2.5933523975299437e-06, "loss": 0.6826, "step": 11615 }, { "epoch": 0.4814124083053587, "grad_norm": 
0.42978227138519287, "learning_rate": 2.593145178001575e-06, "loss": 0.7317, "step": 11616 }, { "epoch": 0.4814538522110324, "grad_norm": 0.40099453926086426, "learning_rate": 2.592937958473207e-06, "loss": 0.6721, "step": 11617 }, { "epoch": 0.48149529611670605, "grad_norm": 0.42128872871398926, "learning_rate": 2.5927307389448387e-06, "loss": 0.6758, "step": 11618 }, { "epoch": 0.48153674002237973, "grad_norm": 0.46361130475997925, "learning_rate": 2.59252351941647e-06, "loss": 0.7297, "step": 11619 }, { "epoch": 0.4815781839280534, "grad_norm": 0.40562406182289124, "learning_rate": 2.592316299888102e-06, "loss": 0.6813, "step": 11620 }, { "epoch": 0.481619627833727, "grad_norm": 0.4406891167163849, "learning_rate": 2.5921090803597332e-06, "loss": 0.7249, "step": 11621 }, { "epoch": 0.4816610717394007, "grad_norm": 0.45224037766456604, "learning_rate": 2.591901860831365e-06, "loss": 0.6941, "step": 11622 }, { "epoch": 0.4817025156450744, "grad_norm": 0.4184279441833496, "learning_rate": 2.5916946413029964e-06, "loss": 0.7012, "step": 11623 }, { "epoch": 0.48174395955074806, "grad_norm": 0.4488790035247803, "learning_rate": 2.5914874217746282e-06, "loss": 0.7092, "step": 11624 }, { "epoch": 0.48178540345642173, "grad_norm": 0.42170652747154236, "learning_rate": 2.5912802022462596e-06, "loss": 0.6256, "step": 11625 }, { "epoch": 0.4818268473620954, "grad_norm": 0.39791765809059143, "learning_rate": 2.5910729827178914e-06, "loss": 0.6582, "step": 11626 }, { "epoch": 0.4818682912677691, "grad_norm": 0.4144909679889679, "learning_rate": 2.5908657631895232e-06, "loss": 0.7446, "step": 11627 }, { "epoch": 0.48190973517344277, "grad_norm": 0.4263695180416107, "learning_rate": 2.5906585436611546e-06, "loss": 0.6865, "step": 11628 }, { "epoch": 0.48195117907911644, "grad_norm": 0.4539168179035187, "learning_rate": 2.590451324132787e-06, "loss": 0.726, "step": 11629 }, { "epoch": 0.48199262298479006, "grad_norm": 0.40417903661727905, "learning_rate": 2.590244104604418e-06, 
"loss": 0.6663, "step": 11630 }, { "epoch": 0.48203406689046374, "grad_norm": 0.4610176682472229, "learning_rate": 2.59003688507605e-06, "loss": 0.7583, "step": 11631 }, { "epoch": 0.4820755107961374, "grad_norm": 0.40591952204704285, "learning_rate": 2.589829665547681e-06, "loss": 0.6887, "step": 11632 }, { "epoch": 0.4821169547018111, "grad_norm": 0.39120566844940186, "learning_rate": 2.5896224460193133e-06, "loss": 0.6831, "step": 11633 }, { "epoch": 0.4821583986074848, "grad_norm": 0.5742559432983398, "learning_rate": 2.5894152264909446e-06, "loss": 0.6721, "step": 11634 }, { "epoch": 0.48219984251315845, "grad_norm": 0.42650851607322693, "learning_rate": 2.5892080069625764e-06, "loss": 0.7072, "step": 11635 }, { "epoch": 0.4822412864188321, "grad_norm": 0.41156336665153503, "learning_rate": 2.5890007874342083e-06, "loss": 0.7036, "step": 11636 }, { "epoch": 0.4822827303245058, "grad_norm": 0.4662439525127411, "learning_rate": 2.5887935679058396e-06, "loss": 0.7468, "step": 11637 }, { "epoch": 0.4823241742301794, "grad_norm": 0.4466845393180847, "learning_rate": 2.5885863483774715e-06, "loss": 0.6948, "step": 11638 }, { "epoch": 0.4823656181358531, "grad_norm": 0.4165245592594147, "learning_rate": 2.588379128849103e-06, "loss": 0.6847, "step": 11639 }, { "epoch": 0.4824070620415268, "grad_norm": 0.46678534150123596, "learning_rate": 2.5881719093207346e-06, "loss": 0.6913, "step": 11640 }, { "epoch": 0.48244850594720046, "grad_norm": 0.39842164516448975, "learning_rate": 2.587964689792366e-06, "loss": 0.7168, "step": 11641 }, { "epoch": 0.48248994985287413, "grad_norm": 0.403902143239975, "learning_rate": 2.587757470263998e-06, "loss": 0.6333, "step": 11642 }, { "epoch": 0.4825313937585478, "grad_norm": 0.39395859837532043, "learning_rate": 2.5875502507356292e-06, "loss": 0.6449, "step": 11643 }, { "epoch": 0.4825728376642215, "grad_norm": 0.3754208981990814, "learning_rate": 2.587343031207261e-06, "loss": 0.6331, "step": 11644 }, { "epoch": 0.48261428156989516, 
"grad_norm": 0.41033515334129333, "learning_rate": 2.587135811678893e-06, "loss": 0.672, "step": 11645 }, { "epoch": 0.48265572547556884, "grad_norm": 0.4282844662666321, "learning_rate": 2.5869285921505242e-06, "loss": 0.6808, "step": 11646 }, { "epoch": 0.48269716938124246, "grad_norm": 0.4066835343837738, "learning_rate": 2.5867213726221565e-06, "loss": 0.6833, "step": 11647 }, { "epoch": 0.48273861328691614, "grad_norm": 0.4274231195449829, "learning_rate": 2.5865141530937874e-06, "loss": 0.7034, "step": 11648 }, { "epoch": 0.4827800571925898, "grad_norm": 0.4119492471218109, "learning_rate": 2.5863069335654197e-06, "loss": 0.6686, "step": 11649 }, { "epoch": 0.4828215010982635, "grad_norm": 0.4257236123085022, "learning_rate": 2.586099714037051e-06, "loss": 0.6958, "step": 11650 }, { "epoch": 0.48286294500393717, "grad_norm": 0.42284804582595825, "learning_rate": 2.585892494508683e-06, "loss": 0.6877, "step": 11651 }, { "epoch": 0.48290438890961085, "grad_norm": 0.4185241162776947, "learning_rate": 2.5856852749803147e-06, "loss": 0.6724, "step": 11652 }, { "epoch": 0.4829458328152845, "grad_norm": 0.41104406118392944, "learning_rate": 2.585478055451946e-06, "loss": 0.6943, "step": 11653 }, { "epoch": 0.4829872767209582, "grad_norm": 0.4149235188961029, "learning_rate": 2.585270835923578e-06, "loss": 0.73, "step": 11654 }, { "epoch": 0.4830287206266319, "grad_norm": 0.39431366324424744, "learning_rate": 2.5850636163952092e-06, "loss": 0.6572, "step": 11655 }, { "epoch": 0.4830701645323055, "grad_norm": 0.4001201391220093, "learning_rate": 2.584856396866841e-06, "loss": 0.6852, "step": 11656 }, { "epoch": 0.4831116084379792, "grad_norm": 0.3522079586982727, "learning_rate": 2.5846491773384724e-06, "loss": 0.6086, "step": 11657 }, { "epoch": 0.48315305234365286, "grad_norm": 0.4063052833080292, "learning_rate": 2.5844419578101042e-06, "loss": 0.667, "step": 11658 }, { "epoch": 0.48319449624932653, "grad_norm": 0.3912616968154907, "learning_rate": 
2.5842347382817356e-06, "loss": 0.6987, "step": 11659 }, { "epoch": 0.4832359401550002, "grad_norm": 0.3868068754673004, "learning_rate": 2.5840275187533674e-06, "loss": 0.6947, "step": 11660 }, { "epoch": 0.4832773840606739, "grad_norm": 0.4248347878456116, "learning_rate": 2.5838202992249993e-06, "loss": 0.6887, "step": 11661 }, { "epoch": 0.48331882796634756, "grad_norm": 0.4194730520248413, "learning_rate": 2.5836130796966306e-06, "loss": 0.6689, "step": 11662 }, { "epoch": 0.48336027187202124, "grad_norm": 0.4166310727596283, "learning_rate": 2.583405860168263e-06, "loss": 0.6963, "step": 11663 }, { "epoch": 0.48340171577769486, "grad_norm": 0.39779525995254517, "learning_rate": 2.583198640639894e-06, "loss": 0.6797, "step": 11664 }, { "epoch": 0.48344315968336854, "grad_norm": 0.39168864488601685, "learning_rate": 2.582991421111526e-06, "loss": 0.6604, "step": 11665 }, { "epoch": 0.4834846035890422, "grad_norm": 0.4259786009788513, "learning_rate": 2.582784201583157e-06, "loss": 0.6765, "step": 11666 }, { "epoch": 0.4835260474947159, "grad_norm": 0.4194049537181854, "learning_rate": 2.5825769820547893e-06, "loss": 0.7499, "step": 11667 }, { "epoch": 0.48356749140038957, "grad_norm": 0.38810208439826965, "learning_rate": 2.5823697625264206e-06, "loss": 0.6552, "step": 11668 }, { "epoch": 0.48360893530606325, "grad_norm": 0.42150184512138367, "learning_rate": 2.5821625429980525e-06, "loss": 0.7168, "step": 11669 }, { "epoch": 0.4836503792117369, "grad_norm": 0.4383477568626404, "learning_rate": 2.5819553234696843e-06, "loss": 0.7258, "step": 11670 }, { "epoch": 0.4836918231174106, "grad_norm": 0.39602476358413696, "learning_rate": 2.5817481039413156e-06, "loss": 0.7439, "step": 11671 }, { "epoch": 0.4837332670230843, "grad_norm": 0.4621061384677887, "learning_rate": 2.5815408844129475e-06, "loss": 0.6511, "step": 11672 }, { "epoch": 0.4837747109287579, "grad_norm": 0.41699016094207764, "learning_rate": 2.581333664884579e-06, "loss": 0.6528, "step": 11673 }, { 
"epoch": 0.4838161548344316, "grad_norm": 0.3604426383972168, "learning_rate": 2.5811264453562107e-06, "loss": 0.6536, "step": 11674 }, { "epoch": 0.48385759874010525, "grad_norm": 0.384452760219574, "learning_rate": 2.580919225827842e-06, "loss": 0.666, "step": 11675 }, { "epoch": 0.48389904264577893, "grad_norm": 0.4200873076915741, "learning_rate": 2.580712006299474e-06, "loss": 0.6874, "step": 11676 }, { "epoch": 0.4839404865514526, "grad_norm": 0.39917704463005066, "learning_rate": 2.5805047867711052e-06, "loss": 0.6606, "step": 11677 }, { "epoch": 0.4839819304571263, "grad_norm": 0.4574318528175354, "learning_rate": 2.580297567242737e-06, "loss": 0.6968, "step": 11678 }, { "epoch": 0.48402337436279996, "grad_norm": 0.3878830373287201, "learning_rate": 2.580090347714369e-06, "loss": 0.6544, "step": 11679 }, { "epoch": 0.48406481826847364, "grad_norm": 0.40189918875694275, "learning_rate": 2.5798831281860002e-06, "loss": 0.6499, "step": 11680 }, { "epoch": 0.4841062621741473, "grad_norm": 0.4005547761917114, "learning_rate": 2.5796759086576325e-06, "loss": 0.6677, "step": 11681 }, { "epoch": 0.48414770607982094, "grad_norm": 0.42312827706336975, "learning_rate": 2.5794686891292634e-06, "loss": 0.6282, "step": 11682 }, { "epoch": 0.4841891499854946, "grad_norm": 0.415216326713562, "learning_rate": 2.5792614696008957e-06, "loss": 0.6173, "step": 11683 }, { "epoch": 0.4842305938911683, "grad_norm": 0.42040297389030457, "learning_rate": 2.579054250072527e-06, "loss": 0.6399, "step": 11684 }, { "epoch": 0.48427203779684197, "grad_norm": 0.44499635696411133, "learning_rate": 2.578847030544159e-06, "loss": 0.7555, "step": 11685 }, { "epoch": 0.48431348170251565, "grad_norm": 0.4037047326564789, "learning_rate": 2.5786398110157902e-06, "loss": 0.667, "step": 11686 }, { "epoch": 0.4843549256081893, "grad_norm": 0.39928266406059265, "learning_rate": 2.578432591487422e-06, "loss": 0.6932, "step": 11687 }, { "epoch": 0.484396369513863, "grad_norm": 0.41333672404289246, 
"learning_rate": 2.578225371959054e-06, "loss": 0.6865, "step": 11688 }, { "epoch": 0.4844378134195367, "grad_norm": 0.4288559854030609, "learning_rate": 2.5780181524306853e-06, "loss": 0.6841, "step": 11689 }, { "epoch": 0.48447925732521036, "grad_norm": 0.3841112554073334, "learning_rate": 2.577810932902317e-06, "loss": 0.6763, "step": 11690 }, { "epoch": 0.484520701230884, "grad_norm": 0.41102245450019836, "learning_rate": 2.5776037133739484e-06, "loss": 0.6953, "step": 11691 }, { "epoch": 0.48456214513655765, "grad_norm": 0.44632160663604736, "learning_rate": 2.5773964938455803e-06, "loss": 0.6627, "step": 11692 }, { "epoch": 0.48460358904223133, "grad_norm": 0.4115983843803406, "learning_rate": 2.5771892743172116e-06, "loss": 0.6542, "step": 11693 }, { "epoch": 0.484645032947905, "grad_norm": 0.40611201524734497, "learning_rate": 2.5769820547888434e-06, "loss": 0.7737, "step": 11694 }, { "epoch": 0.4846864768535787, "grad_norm": 0.4429548680782318, "learning_rate": 2.576774835260475e-06, "loss": 0.7345, "step": 11695 }, { "epoch": 0.48472792075925236, "grad_norm": 0.39729732275009155, "learning_rate": 2.5765676157321066e-06, "loss": 0.6865, "step": 11696 }, { "epoch": 0.48476936466492604, "grad_norm": 0.42085039615631104, "learning_rate": 2.576360396203739e-06, "loss": 0.676, "step": 11697 }, { "epoch": 0.4848108085705997, "grad_norm": 0.40861254930496216, "learning_rate": 2.57615317667537e-06, "loss": 0.6149, "step": 11698 }, { "epoch": 0.48485225247627334, "grad_norm": 0.41195034980773926, "learning_rate": 2.575945957147002e-06, "loss": 0.647, "step": 11699 }, { "epoch": 0.484893696381947, "grad_norm": 0.46981099247932434, "learning_rate": 2.575738737618633e-06, "loss": 0.7092, "step": 11700 }, { "epoch": 0.4849351402876207, "grad_norm": 0.40118664503097534, "learning_rate": 2.5755315180902653e-06, "loss": 0.6641, "step": 11701 }, { "epoch": 0.48497658419329437, "grad_norm": 0.3987587094306946, "learning_rate": 2.5753242985618967e-06, "loss": 0.6421, "step": 
11702 }, { "epoch": 0.48501802809896805, "grad_norm": 0.42760929465293884, "learning_rate": 2.5751170790335285e-06, "loss": 0.6533, "step": 11703 }, { "epoch": 0.4850594720046417, "grad_norm": 0.43486782908439636, "learning_rate": 2.57490985950516e-06, "loss": 0.6799, "step": 11704 }, { "epoch": 0.4851009159103154, "grad_norm": 0.4044019281864166, "learning_rate": 2.5747026399767917e-06, "loss": 0.6807, "step": 11705 }, { "epoch": 0.4851423598159891, "grad_norm": 0.4227464199066162, "learning_rate": 2.5744954204484235e-06, "loss": 0.6863, "step": 11706 }, { "epoch": 0.48518380372166275, "grad_norm": 0.4053824841976166, "learning_rate": 2.574288200920055e-06, "loss": 0.6708, "step": 11707 }, { "epoch": 0.4852252476273364, "grad_norm": 0.4071059823036194, "learning_rate": 2.5740809813916867e-06, "loss": 0.6897, "step": 11708 }, { "epoch": 0.48526669153301005, "grad_norm": 0.419656366109848, "learning_rate": 2.573873761863318e-06, "loss": 0.6445, "step": 11709 }, { "epoch": 0.48530813543868373, "grad_norm": 0.4550735056400299, "learning_rate": 2.57366654233495e-06, "loss": 0.7273, "step": 11710 }, { "epoch": 0.4853495793443574, "grad_norm": 0.3978002369403839, "learning_rate": 2.5734593228065812e-06, "loss": 0.7452, "step": 11711 }, { "epoch": 0.4853910232500311, "grad_norm": 0.43315574526786804, "learning_rate": 2.573252103278213e-06, "loss": 0.7375, "step": 11712 }, { "epoch": 0.48543246715570476, "grad_norm": 0.42050817608833313, "learning_rate": 2.573044883749845e-06, "loss": 0.6714, "step": 11713 }, { "epoch": 0.48547391106137844, "grad_norm": 0.38777080178260803, "learning_rate": 2.5728376642214762e-06, "loss": 0.6661, "step": 11714 }, { "epoch": 0.4855153549670521, "grad_norm": 0.42340484261512756, "learning_rate": 2.5726304446931085e-06, "loss": 0.7441, "step": 11715 }, { "epoch": 0.4855567988727258, "grad_norm": 0.40363290905952454, "learning_rate": 2.5724232251647394e-06, "loss": 0.6958, "step": 11716 }, { "epoch": 0.4855982427783994, "grad_norm": 
0.396987646818161, "learning_rate": 2.5722160056363717e-06, "loss": 0.7152, "step": 11717 }, { "epoch": 0.4856396866840731, "grad_norm": 0.40842631459236145, "learning_rate": 2.572008786108003e-06, "loss": 0.6514, "step": 11718 }, { "epoch": 0.48568113058974677, "grad_norm": 0.4362861216068268, "learning_rate": 2.571801566579635e-06, "loss": 0.7524, "step": 11719 }, { "epoch": 0.48572257449542044, "grad_norm": 0.4214134216308594, "learning_rate": 2.5715943470512663e-06, "loss": 0.735, "step": 11720 }, { "epoch": 0.4857640184010941, "grad_norm": 0.3909485340118408, "learning_rate": 2.571387127522898e-06, "loss": 0.6918, "step": 11721 }, { "epoch": 0.4858054623067678, "grad_norm": 0.4207461178302765, "learning_rate": 2.57117990799453e-06, "loss": 0.7517, "step": 11722 }, { "epoch": 0.4858469062124415, "grad_norm": 0.46485665440559387, "learning_rate": 2.5709726884661613e-06, "loss": 0.7922, "step": 11723 }, { "epoch": 0.48588835011811515, "grad_norm": 0.423723965883255, "learning_rate": 2.570765468937793e-06, "loss": 0.637, "step": 11724 }, { "epoch": 0.4859297940237888, "grad_norm": 0.4169079065322876, "learning_rate": 2.5705582494094245e-06, "loss": 0.653, "step": 11725 }, { "epoch": 0.48597123792946245, "grad_norm": 0.42474526166915894, "learning_rate": 2.5703510298810563e-06, "loss": 0.657, "step": 11726 }, { "epoch": 0.48601268183513613, "grad_norm": 0.42716607451438904, "learning_rate": 2.5701438103526876e-06, "loss": 0.6808, "step": 11727 }, { "epoch": 0.4860541257408098, "grad_norm": 0.382269948720932, "learning_rate": 2.5699365908243195e-06, "loss": 0.6683, "step": 11728 }, { "epoch": 0.4860955696464835, "grad_norm": 0.4053051769733429, "learning_rate": 2.569729371295951e-06, "loss": 0.6904, "step": 11729 }, { "epoch": 0.48613701355215716, "grad_norm": 0.42488184571266174, "learning_rate": 2.5695221517675826e-06, "loss": 0.6719, "step": 11730 }, { "epoch": 0.48617845745783084, "grad_norm": 0.45039093494415283, "learning_rate": 2.569314932239215e-06, "loss": 
0.7241, "step": 11731 }, { "epoch": 0.4862199013635045, "grad_norm": 0.44674497842788696, "learning_rate": 2.569107712710846e-06, "loss": 0.7108, "step": 11732 }, { "epoch": 0.4862613452691782, "grad_norm": 0.4420705735683441, "learning_rate": 2.568900493182478e-06, "loss": 0.7159, "step": 11733 }, { "epoch": 0.4863027891748518, "grad_norm": 0.4540080726146698, "learning_rate": 2.5686932736541095e-06, "loss": 0.7336, "step": 11734 }, { "epoch": 0.4863442330805255, "grad_norm": 0.4138307273387909, "learning_rate": 2.5684860541257413e-06, "loss": 0.6202, "step": 11735 }, { "epoch": 0.48638567698619917, "grad_norm": 0.4144209027290344, "learning_rate": 2.5682788345973727e-06, "loss": 0.6497, "step": 11736 }, { "epoch": 0.48642712089187284, "grad_norm": 0.40233033895492554, "learning_rate": 2.5680716150690045e-06, "loss": 0.6519, "step": 11737 }, { "epoch": 0.4864685647975465, "grad_norm": 0.38257160782814026, "learning_rate": 2.567864395540636e-06, "loss": 0.6746, "step": 11738 }, { "epoch": 0.4865100087032202, "grad_norm": 0.434166818857193, "learning_rate": 2.5676571760122677e-06, "loss": 0.7242, "step": 11739 }, { "epoch": 0.4865514526088939, "grad_norm": 0.4222361743450165, "learning_rate": 2.5674499564838995e-06, "loss": 0.7065, "step": 11740 }, { "epoch": 0.48659289651456755, "grad_norm": 0.46821728348731995, "learning_rate": 2.567242736955531e-06, "loss": 0.7258, "step": 11741 }, { "epoch": 0.48663434042024123, "grad_norm": 0.39915820956230164, "learning_rate": 2.5670355174271627e-06, "loss": 0.7117, "step": 11742 }, { "epoch": 0.48667578432591485, "grad_norm": 0.4079072177410126, "learning_rate": 2.566828297898794e-06, "loss": 0.6936, "step": 11743 }, { "epoch": 0.48671722823158853, "grad_norm": 0.42718204855918884, "learning_rate": 2.566621078370426e-06, "loss": 0.6787, "step": 11744 }, { "epoch": 0.4867586721372622, "grad_norm": 0.46704432368278503, "learning_rate": 2.5664138588420572e-06, "loss": 0.6738, "step": 11745 }, { "epoch": 0.4868001160429359, 
"grad_norm": 0.4072502851486206, "learning_rate": 2.566206639313689e-06, "loss": 0.6561, "step": 11746 }, { "epoch": 0.48684155994860956, "grad_norm": 0.5002769827842712, "learning_rate": 2.5659994197853204e-06, "loss": 0.7214, "step": 11747 }, { "epoch": 0.48688300385428324, "grad_norm": 0.4059838652610779, "learning_rate": 2.5657922002569523e-06, "loss": 0.6935, "step": 11748 }, { "epoch": 0.4869244477599569, "grad_norm": 0.4504377543926239, "learning_rate": 2.5655849807285845e-06, "loss": 0.7427, "step": 11749 }, { "epoch": 0.4869658916656306, "grad_norm": 0.4694659113883972, "learning_rate": 2.5653777612002154e-06, "loss": 0.7417, "step": 11750 }, { "epoch": 0.48700733557130427, "grad_norm": 0.41655439138412476, "learning_rate": 2.5651705416718477e-06, "loss": 0.72, "step": 11751 }, { "epoch": 0.4870487794769779, "grad_norm": 0.429232656955719, "learning_rate": 2.564963322143479e-06, "loss": 0.6376, "step": 11752 }, { "epoch": 0.48709022338265157, "grad_norm": 0.36466559767723083, "learning_rate": 2.564756102615111e-06, "loss": 0.636, "step": 11753 }, { "epoch": 0.48713166728832524, "grad_norm": 0.37916186451911926, "learning_rate": 2.5645488830867423e-06, "loss": 0.6637, "step": 11754 }, { "epoch": 0.4871731111939989, "grad_norm": 0.4096997082233429, "learning_rate": 2.564341663558374e-06, "loss": 0.7573, "step": 11755 }, { "epoch": 0.4872145550996726, "grad_norm": 0.4438663125038147, "learning_rate": 2.5641344440300055e-06, "loss": 0.7476, "step": 11756 }, { "epoch": 0.4872559990053463, "grad_norm": 0.39736124873161316, "learning_rate": 2.5639272245016373e-06, "loss": 0.6628, "step": 11757 }, { "epoch": 0.48729744291101995, "grad_norm": 0.3926561176776886, "learning_rate": 2.563720004973269e-06, "loss": 0.6973, "step": 11758 }, { "epoch": 0.48733888681669363, "grad_norm": 0.40299558639526367, "learning_rate": 2.5635127854449005e-06, "loss": 0.6882, "step": 11759 }, { "epoch": 0.48738033072236725, "grad_norm": 0.41009944677352905, "learning_rate": 
2.5633055659165323e-06, "loss": 0.7012, "step": 11760 }, { "epoch": 0.4874217746280409, "grad_norm": 0.4384874403476715, "learning_rate": 2.5630983463881637e-06, "loss": 0.6853, "step": 11761 }, { "epoch": 0.4874632185337146, "grad_norm": 0.43121787905693054, "learning_rate": 2.5628911268597955e-06, "loss": 0.7137, "step": 11762 }, { "epoch": 0.4875046624393883, "grad_norm": 0.4062303304672241, "learning_rate": 2.562683907331427e-06, "loss": 0.7051, "step": 11763 }, { "epoch": 0.48754610634506196, "grad_norm": 0.4356639087200165, "learning_rate": 2.5624766878030587e-06, "loss": 0.6785, "step": 11764 }, { "epoch": 0.48758755025073564, "grad_norm": 0.39018702507019043, "learning_rate": 2.56226946827469e-06, "loss": 0.7072, "step": 11765 }, { "epoch": 0.4876289941564093, "grad_norm": 0.38774725794792175, "learning_rate": 2.562062248746322e-06, "loss": 0.6543, "step": 11766 }, { "epoch": 0.487670438062083, "grad_norm": 0.41465339064598083, "learning_rate": 2.561855029217954e-06, "loss": 0.6765, "step": 11767 }, { "epoch": 0.48771188196775667, "grad_norm": 0.41915664076805115, "learning_rate": 2.5616478096895855e-06, "loss": 0.731, "step": 11768 }, { "epoch": 0.4877533258734303, "grad_norm": 0.41065385937690735, "learning_rate": 2.5614405901612173e-06, "loss": 0.6531, "step": 11769 }, { "epoch": 0.48779476977910396, "grad_norm": 0.4317989945411682, "learning_rate": 2.5612333706328487e-06, "loss": 0.6862, "step": 11770 }, { "epoch": 0.48783621368477764, "grad_norm": 0.43733957409858704, "learning_rate": 2.5610261511044805e-06, "loss": 0.7183, "step": 11771 }, { "epoch": 0.4878776575904513, "grad_norm": 0.42367860674858093, "learning_rate": 2.560818931576112e-06, "loss": 0.688, "step": 11772 }, { "epoch": 0.487919101496125, "grad_norm": 0.4038856625556946, "learning_rate": 2.5606117120477437e-06, "loss": 0.6648, "step": 11773 }, { "epoch": 0.4879605454017987, "grad_norm": 0.4097851812839508, "learning_rate": 2.5604044925193755e-06, "loss": 0.7021, "step": 11774 }, { 
"epoch": 0.48800198930747235, "grad_norm": 0.39602458477020264, "learning_rate": 2.560197272991007e-06, "loss": 0.6697, "step": 11775 }, { "epoch": 0.488043433213146, "grad_norm": 0.43369197845458984, "learning_rate": 2.5599900534626387e-06, "loss": 0.662, "step": 11776 }, { "epoch": 0.4880848771188197, "grad_norm": 0.42835086584091187, "learning_rate": 2.55978283393427e-06, "loss": 0.6663, "step": 11777 }, { "epoch": 0.4881263210244933, "grad_norm": 0.3784225881099701, "learning_rate": 2.559575614405902e-06, "loss": 0.6608, "step": 11778 }, { "epoch": 0.488167764930167, "grad_norm": 0.49114200472831726, "learning_rate": 2.5593683948775333e-06, "loss": 0.7217, "step": 11779 }, { "epoch": 0.4882092088358407, "grad_norm": 0.423460990190506, "learning_rate": 2.559161175349165e-06, "loss": 0.675, "step": 11780 }, { "epoch": 0.48825065274151436, "grad_norm": 0.4231064021587372, "learning_rate": 2.5589539558207964e-06, "loss": 0.6616, "step": 11781 }, { "epoch": 0.48829209664718803, "grad_norm": 0.3901020884513855, "learning_rate": 2.5587467362924283e-06, "loss": 0.6527, "step": 11782 }, { "epoch": 0.4883335405528617, "grad_norm": 0.42021113634109497, "learning_rate": 2.5585395167640605e-06, "loss": 0.7344, "step": 11783 }, { "epoch": 0.4883749844585354, "grad_norm": 0.4223724603652954, "learning_rate": 2.5583322972356915e-06, "loss": 0.6892, "step": 11784 }, { "epoch": 0.48841642836420907, "grad_norm": 0.41570302844047546, "learning_rate": 2.5581250777073237e-06, "loss": 0.6814, "step": 11785 }, { "epoch": 0.4884578722698827, "grad_norm": 0.3912476599216461, "learning_rate": 2.557917858178955e-06, "loss": 0.7214, "step": 11786 }, { "epoch": 0.48849931617555636, "grad_norm": 0.41298478841781616, "learning_rate": 2.557710638650587e-06, "loss": 0.645, "step": 11787 }, { "epoch": 0.48854076008123004, "grad_norm": 0.4091927111148834, "learning_rate": 2.5575034191222183e-06, "loss": 0.7034, "step": 11788 }, { "epoch": 0.4885822039869037, "grad_norm": 0.3971502184867859, 
"learning_rate": 2.55729619959385e-06, "loss": 0.6464, "step": 11789 }, { "epoch": 0.4886236478925774, "grad_norm": 0.4347737729549408, "learning_rate": 2.5570889800654815e-06, "loss": 0.7012, "step": 11790 }, { "epoch": 0.48866509179825107, "grad_norm": 0.4490567743778229, "learning_rate": 2.5568817605371133e-06, "loss": 0.6892, "step": 11791 }, { "epoch": 0.48870653570392475, "grad_norm": 0.4031572639942169, "learning_rate": 2.556674541008745e-06, "loss": 0.7236, "step": 11792 }, { "epoch": 0.4887479796095984, "grad_norm": 0.36614537239074707, "learning_rate": 2.5564673214803765e-06, "loss": 0.6156, "step": 11793 }, { "epoch": 0.4887894235152721, "grad_norm": 0.42122191190719604, "learning_rate": 2.5562601019520083e-06, "loss": 0.7244, "step": 11794 }, { "epoch": 0.4888308674209457, "grad_norm": 0.40989986062049866, "learning_rate": 2.5560528824236397e-06, "loss": 0.6771, "step": 11795 }, { "epoch": 0.4888723113266194, "grad_norm": 0.3913651406764984, "learning_rate": 2.5558456628952715e-06, "loss": 0.6169, "step": 11796 }, { "epoch": 0.4889137552322931, "grad_norm": 0.38704216480255127, "learning_rate": 2.555638443366903e-06, "loss": 0.7042, "step": 11797 }, { "epoch": 0.48895519913796676, "grad_norm": 0.410615473985672, "learning_rate": 2.5554312238385347e-06, "loss": 0.6433, "step": 11798 }, { "epoch": 0.48899664304364043, "grad_norm": 0.45726287364959717, "learning_rate": 2.555224004310166e-06, "loss": 0.6908, "step": 11799 }, { "epoch": 0.4890380869493141, "grad_norm": 0.429104208946228, "learning_rate": 2.555016784781798e-06, "loss": 0.6578, "step": 11800 }, { "epoch": 0.4890795308549878, "grad_norm": 0.4122019410133362, "learning_rate": 2.55480956525343e-06, "loss": 0.6213, "step": 11801 }, { "epoch": 0.48912097476066146, "grad_norm": 0.4092191755771637, "learning_rate": 2.5546023457250615e-06, "loss": 0.6329, "step": 11802 }, { "epoch": 0.48916241866633514, "grad_norm": 0.41506391763687134, "learning_rate": 2.5543951261966933e-06, "loss": 0.6989, "step": 
11803 }, { "epoch": 0.48920386257200876, "grad_norm": 0.4621204435825348, "learning_rate": 2.5541879066683247e-06, "loss": 0.7395, "step": 11804 }, { "epoch": 0.48924530647768244, "grad_norm": 0.3812274932861328, "learning_rate": 2.5539806871399565e-06, "loss": 0.6593, "step": 11805 }, { "epoch": 0.4892867503833561, "grad_norm": 0.432704895734787, "learning_rate": 2.553773467611588e-06, "loss": 0.7329, "step": 11806 }, { "epoch": 0.4893281942890298, "grad_norm": 0.39898422360420227, "learning_rate": 2.5535662480832197e-06, "loss": 0.6775, "step": 11807 }, { "epoch": 0.48936963819470347, "grad_norm": 0.38203686475753784, "learning_rate": 2.553359028554851e-06, "loss": 0.7493, "step": 11808 }, { "epoch": 0.48941108210037715, "grad_norm": 0.44070830941200256, "learning_rate": 2.553151809026483e-06, "loss": 0.6707, "step": 11809 }, { "epoch": 0.4894525260060508, "grad_norm": 0.4193633794784546, "learning_rate": 2.5529445894981147e-06, "loss": 0.6736, "step": 11810 }, { "epoch": 0.4894939699117245, "grad_norm": 0.47975507378578186, "learning_rate": 2.552737369969746e-06, "loss": 0.7681, "step": 11811 }, { "epoch": 0.4895354138173981, "grad_norm": 0.41110509634017944, "learning_rate": 2.552530150441378e-06, "loss": 0.6852, "step": 11812 }, { "epoch": 0.4895768577230718, "grad_norm": 0.4707271456718445, "learning_rate": 2.5523229309130093e-06, "loss": 0.7383, "step": 11813 }, { "epoch": 0.4896183016287455, "grad_norm": 0.4056098163127899, "learning_rate": 2.552115711384641e-06, "loss": 0.6951, "step": 11814 }, { "epoch": 0.48965974553441916, "grad_norm": 0.4092637896537781, "learning_rate": 2.5519084918562725e-06, "loss": 0.6829, "step": 11815 }, { "epoch": 0.48970118944009283, "grad_norm": 0.4322759807109833, "learning_rate": 2.5517012723279043e-06, "loss": 0.6638, "step": 11816 }, { "epoch": 0.4897426333457665, "grad_norm": 0.42654603719711304, "learning_rate": 2.5514940527995356e-06, "loss": 0.6951, "step": 11817 }, { "epoch": 0.4897840772514402, "grad_norm": 
0.4356617331504822, "learning_rate": 2.5512868332711675e-06, "loss": 0.7192, "step": 11818 }, { "epoch": 0.48982552115711386, "grad_norm": 0.418536901473999, "learning_rate": 2.5510796137427997e-06, "loss": 0.6965, "step": 11819 }, { "epoch": 0.48986696506278754, "grad_norm": 0.4005710482597351, "learning_rate": 2.550872394214431e-06, "loss": 0.6799, "step": 11820 }, { "epoch": 0.48990840896846116, "grad_norm": 0.3959464430809021, "learning_rate": 2.550665174686063e-06, "loss": 0.6576, "step": 11821 }, { "epoch": 0.48994985287413484, "grad_norm": 0.4176786541938782, "learning_rate": 2.5504579551576943e-06, "loss": 0.7063, "step": 11822 }, { "epoch": 0.4899912967798085, "grad_norm": 0.3999761641025543, "learning_rate": 2.550250735629326e-06, "loss": 0.6488, "step": 11823 }, { "epoch": 0.4900327406854822, "grad_norm": 0.4032444655895233, "learning_rate": 2.5500435161009575e-06, "loss": 0.6943, "step": 11824 }, { "epoch": 0.49007418459115587, "grad_norm": 0.42853134870529175, "learning_rate": 2.5498362965725893e-06, "loss": 0.6951, "step": 11825 }, { "epoch": 0.49011562849682955, "grad_norm": 0.36227530241012573, "learning_rate": 2.5496290770442207e-06, "loss": 0.6724, "step": 11826 }, { "epoch": 0.4901570724025032, "grad_norm": 0.4039766788482666, "learning_rate": 2.5494218575158525e-06, "loss": 0.7124, "step": 11827 }, { "epoch": 0.4901985163081769, "grad_norm": 0.39769476652145386, "learning_rate": 2.5492146379874843e-06, "loss": 0.6241, "step": 11828 }, { "epoch": 0.4902399602138506, "grad_norm": 0.3912244439125061, "learning_rate": 2.5490074184591157e-06, "loss": 0.6667, "step": 11829 }, { "epoch": 0.4902814041195242, "grad_norm": 0.41921713948249817, "learning_rate": 2.5488001989307475e-06, "loss": 0.7174, "step": 11830 }, { "epoch": 0.4903228480251979, "grad_norm": 0.40436655282974243, "learning_rate": 2.548592979402379e-06, "loss": 0.6875, "step": 11831 }, { "epoch": 0.49036429193087155, "grad_norm": 0.4021992087364197, "learning_rate": 2.5483857598740107e-06, 
"loss": 0.7246, "step": 11832 }, { "epoch": 0.49040573583654523, "grad_norm": 0.3978753685951233, "learning_rate": 2.548178540345642e-06, "loss": 0.6903, "step": 11833 }, { "epoch": 0.4904471797422189, "grad_norm": 0.3932604491710663, "learning_rate": 2.547971320817274e-06, "loss": 0.6526, "step": 11834 }, { "epoch": 0.4904886236478926, "grad_norm": 0.3858346939086914, "learning_rate": 2.547764101288906e-06, "loss": 0.6802, "step": 11835 }, { "epoch": 0.49053006755356626, "grad_norm": 0.4108521640300751, "learning_rate": 2.5475568817605375e-06, "loss": 0.7217, "step": 11836 }, { "epoch": 0.49057151145923994, "grad_norm": 0.4297684133052826, "learning_rate": 2.5473496622321693e-06, "loss": 0.739, "step": 11837 }, { "epoch": 0.4906129553649136, "grad_norm": 0.37183231115341187, "learning_rate": 2.5471424427038007e-06, "loss": 0.6621, "step": 11838 }, { "epoch": 0.49065439927058724, "grad_norm": 0.41974401473999023, "learning_rate": 2.5469352231754325e-06, "loss": 0.6919, "step": 11839 }, { "epoch": 0.4906958431762609, "grad_norm": 0.39542335271835327, "learning_rate": 2.546728003647064e-06, "loss": 0.7153, "step": 11840 }, { "epoch": 0.4907372870819346, "grad_norm": 0.43556898832321167, "learning_rate": 2.5465207841186957e-06, "loss": 0.6771, "step": 11841 }, { "epoch": 0.49077873098760827, "grad_norm": 0.4343653619289398, "learning_rate": 2.546313564590327e-06, "loss": 0.6765, "step": 11842 }, { "epoch": 0.49082017489328195, "grad_norm": 0.4049076437950134, "learning_rate": 2.546106345061959e-06, "loss": 0.666, "step": 11843 }, { "epoch": 0.4908616187989556, "grad_norm": 0.40400511026382446, "learning_rate": 2.5458991255335907e-06, "loss": 0.6682, "step": 11844 }, { "epoch": 0.4909030627046293, "grad_norm": 0.4484359920024872, "learning_rate": 2.545691906005222e-06, "loss": 0.6814, "step": 11845 }, { "epoch": 0.490944506610303, "grad_norm": 0.4321199655532837, "learning_rate": 2.545484686476854e-06, "loss": 0.7141, "step": 11846 }, { "epoch": 0.4909859505159766, 
"grad_norm": 0.3911685049533844, "learning_rate": 2.5452774669484853e-06, "loss": 0.6875, "step": 11847 }, { "epoch": 0.4910273944216503, "grad_norm": 0.41462430357933044, "learning_rate": 2.545070247420117e-06, "loss": 0.7178, "step": 11848 }, { "epoch": 0.49106883832732395, "grad_norm": 0.3773510158061981, "learning_rate": 2.5448630278917485e-06, "loss": 0.6526, "step": 11849 }, { "epoch": 0.49111028223299763, "grad_norm": 0.3944261074066162, "learning_rate": 2.5446558083633803e-06, "loss": 0.7092, "step": 11850 }, { "epoch": 0.4911517261386713, "grad_norm": 0.42144206166267395, "learning_rate": 2.5444485888350117e-06, "loss": 0.707, "step": 11851 }, { "epoch": 0.491193170044345, "grad_norm": 0.42240363359451294, "learning_rate": 2.5442413693066435e-06, "loss": 0.6796, "step": 11852 }, { "epoch": 0.49123461395001866, "grad_norm": 0.4180384874343872, "learning_rate": 2.5440341497782757e-06, "loss": 0.696, "step": 11853 }, { "epoch": 0.49127605785569234, "grad_norm": 0.4444087743759155, "learning_rate": 2.543826930249907e-06, "loss": 0.7529, "step": 11854 }, { "epoch": 0.491317501761366, "grad_norm": 0.4109300971031189, "learning_rate": 2.543619710721539e-06, "loss": 0.6666, "step": 11855 }, { "epoch": 0.49135894566703964, "grad_norm": 0.42331811785697937, "learning_rate": 2.5434124911931703e-06, "loss": 0.6768, "step": 11856 }, { "epoch": 0.4914003895727133, "grad_norm": 0.4209948182106018, "learning_rate": 2.543205271664802e-06, "loss": 0.6848, "step": 11857 }, { "epoch": 0.491441833478387, "grad_norm": 0.4185831844806671, "learning_rate": 2.5429980521364335e-06, "loss": 0.6792, "step": 11858 }, { "epoch": 0.49148327738406067, "grad_norm": 0.44093117117881775, "learning_rate": 2.5427908326080653e-06, "loss": 0.7213, "step": 11859 }, { "epoch": 0.49152472128973435, "grad_norm": 0.4204893112182617, "learning_rate": 2.5425836130796967e-06, "loss": 0.6877, "step": 11860 }, { "epoch": 0.491566165195408, "grad_norm": 0.5034067630767822, "learning_rate": 
2.5423763935513285e-06, "loss": 0.7206, "step": 11861 }, { "epoch": 0.4916076091010817, "grad_norm": 0.39760157465934753, "learning_rate": 2.5421691740229603e-06, "loss": 0.6691, "step": 11862 }, { "epoch": 0.4916490530067554, "grad_norm": 0.3997891843318939, "learning_rate": 2.5419619544945917e-06, "loss": 0.6218, "step": 11863 }, { "epoch": 0.49169049691242905, "grad_norm": 0.4092789888381958, "learning_rate": 2.5417547349662235e-06, "loss": 0.6833, "step": 11864 }, { "epoch": 0.4917319408181027, "grad_norm": 0.39059022068977356, "learning_rate": 2.541547515437855e-06, "loss": 0.6664, "step": 11865 }, { "epoch": 0.49177338472377635, "grad_norm": 0.3996644914150238, "learning_rate": 2.5413402959094867e-06, "loss": 0.6194, "step": 11866 }, { "epoch": 0.49181482862945003, "grad_norm": 0.39793941378593445, "learning_rate": 2.541133076381118e-06, "loss": 0.686, "step": 11867 }, { "epoch": 0.4918562725351237, "grad_norm": 0.4015215039253235, "learning_rate": 2.54092585685275e-06, "loss": 0.7085, "step": 11868 }, { "epoch": 0.4918977164407974, "grad_norm": 0.3885466754436493, "learning_rate": 2.5407186373243813e-06, "loss": 0.6401, "step": 11869 }, { "epoch": 0.49193916034647106, "grad_norm": 0.43044522404670715, "learning_rate": 2.5405114177960135e-06, "loss": 0.7222, "step": 11870 }, { "epoch": 0.49198060425214474, "grad_norm": 0.4216560125350952, "learning_rate": 2.5403041982676453e-06, "loss": 0.7023, "step": 11871 }, { "epoch": 0.4920220481578184, "grad_norm": 0.4033070504665375, "learning_rate": 2.5400969787392767e-06, "loss": 0.6626, "step": 11872 }, { "epoch": 0.49206349206349204, "grad_norm": 0.4591406285762787, "learning_rate": 2.5398897592109085e-06, "loss": 0.7756, "step": 11873 }, { "epoch": 0.4921049359691657, "grad_norm": 0.46172305941581726, "learning_rate": 2.53968253968254e-06, "loss": 0.7234, "step": 11874 }, { "epoch": 0.4921463798748394, "grad_norm": 0.3872503936290741, "learning_rate": 2.5394753201541717e-06, "loss": 0.6917, "step": 11875 }, { 
"epoch": 0.49218782378051307, "grad_norm": 0.4404793977737427, "learning_rate": 2.539268100625803e-06, "loss": 0.7593, "step": 11876 }, { "epoch": 0.49222926768618674, "grad_norm": 0.4223960340023041, "learning_rate": 2.539060881097435e-06, "loss": 0.6919, "step": 11877 }, { "epoch": 0.4922707115918604, "grad_norm": 0.3922606110572815, "learning_rate": 2.5388536615690663e-06, "loss": 0.6484, "step": 11878 }, { "epoch": 0.4923121554975341, "grad_norm": 0.4334293305873871, "learning_rate": 2.538646442040698e-06, "loss": 0.7112, "step": 11879 }, { "epoch": 0.4923535994032078, "grad_norm": 0.41647428274154663, "learning_rate": 2.53843922251233e-06, "loss": 0.6829, "step": 11880 }, { "epoch": 0.49239504330888145, "grad_norm": 0.4099753201007843, "learning_rate": 2.5382320029839613e-06, "loss": 0.6545, "step": 11881 }, { "epoch": 0.4924364872145551, "grad_norm": 0.3943389654159546, "learning_rate": 2.538024783455593e-06, "loss": 0.6642, "step": 11882 }, { "epoch": 0.49247793112022875, "grad_norm": 0.4057413935661316, "learning_rate": 2.5378175639272245e-06, "loss": 0.6758, "step": 11883 }, { "epoch": 0.49251937502590243, "grad_norm": 0.3960435688495636, "learning_rate": 2.5376103443988563e-06, "loss": 0.655, "step": 11884 }, { "epoch": 0.4925608189315761, "grad_norm": 0.4294629693031311, "learning_rate": 2.5374031248704877e-06, "loss": 0.6821, "step": 11885 }, { "epoch": 0.4926022628372498, "grad_norm": 0.4214281737804413, "learning_rate": 2.5371959053421195e-06, "loss": 0.6667, "step": 11886 }, { "epoch": 0.49264370674292346, "grad_norm": 0.42540642619132996, "learning_rate": 2.5369886858137517e-06, "loss": 0.6602, "step": 11887 }, { "epoch": 0.49268515064859714, "grad_norm": 0.4040733575820923, "learning_rate": 2.536781466285383e-06, "loss": 0.7051, "step": 11888 }, { "epoch": 0.4927265945542708, "grad_norm": 0.40961313247680664, "learning_rate": 2.536574246757015e-06, "loss": 0.6539, "step": 11889 }, { "epoch": 0.4927680384599445, "grad_norm": 0.45013532042503357, 
"learning_rate": 2.5363670272286463e-06, "loss": 0.7177, "step": 11890 }, { "epoch": 0.4928094823656181, "grad_norm": 0.40048396587371826, "learning_rate": 2.536159807700278e-06, "loss": 0.6259, "step": 11891 }, { "epoch": 0.4928509262712918, "grad_norm": 0.4677923321723938, "learning_rate": 2.5359525881719095e-06, "loss": 0.7701, "step": 11892 }, { "epoch": 0.49289237017696547, "grad_norm": 0.375237375497818, "learning_rate": 2.5357453686435413e-06, "loss": 0.6663, "step": 11893 }, { "epoch": 0.49293381408263914, "grad_norm": 0.4697551131248474, "learning_rate": 2.5355381491151727e-06, "loss": 0.7939, "step": 11894 }, { "epoch": 0.4929752579883128, "grad_norm": 0.4318310022354126, "learning_rate": 2.5353309295868045e-06, "loss": 0.6587, "step": 11895 }, { "epoch": 0.4930167018939865, "grad_norm": 0.3958035111427307, "learning_rate": 2.5351237100584363e-06, "loss": 0.6575, "step": 11896 }, { "epoch": 0.4930581457996602, "grad_norm": 0.40231743454933167, "learning_rate": 2.5349164905300677e-06, "loss": 0.6603, "step": 11897 }, { "epoch": 0.49309958970533385, "grad_norm": 0.3843839168548584, "learning_rate": 2.5347092710016995e-06, "loss": 0.6792, "step": 11898 }, { "epoch": 0.49314103361100753, "grad_norm": 0.4204179644584656, "learning_rate": 2.534502051473331e-06, "loss": 0.682, "step": 11899 }, { "epoch": 0.49318247751668115, "grad_norm": 0.3666963279247284, "learning_rate": 2.5342948319449627e-06, "loss": 0.6653, "step": 11900 }, { "epoch": 0.4932239214223548, "grad_norm": 0.3982766270637512, "learning_rate": 2.534087612416594e-06, "loss": 0.6748, "step": 11901 }, { "epoch": 0.4932653653280285, "grad_norm": 0.40631696581840515, "learning_rate": 2.533880392888226e-06, "loss": 0.7026, "step": 11902 }, { "epoch": 0.4933068092337022, "grad_norm": 0.405801922082901, "learning_rate": 2.5336731733598573e-06, "loss": 0.7266, "step": 11903 }, { "epoch": 0.49334825313937586, "grad_norm": 0.3891429305076599, "learning_rate": 2.5334659538314895e-06, "loss": 0.6575, "step": 
11904 }, { "epoch": 0.49338969704504954, "grad_norm": 0.4176947772502899, "learning_rate": 2.5332587343031213e-06, "loss": 0.6965, "step": 11905 }, { "epoch": 0.4934311409507232, "grad_norm": 0.3754618763923645, "learning_rate": 2.5330515147747527e-06, "loss": 0.6799, "step": 11906 }, { "epoch": 0.4934725848563969, "grad_norm": 0.41250160336494446, "learning_rate": 2.5328442952463845e-06, "loss": 0.6953, "step": 11907 }, { "epoch": 0.4935140287620705, "grad_norm": 0.45370081067085266, "learning_rate": 2.532637075718016e-06, "loss": 0.749, "step": 11908 }, { "epoch": 0.4935554726677442, "grad_norm": 0.41599783301353455, "learning_rate": 2.5324298561896477e-06, "loss": 0.7106, "step": 11909 }, { "epoch": 0.49359691657341787, "grad_norm": 0.3919639587402344, "learning_rate": 2.532222636661279e-06, "loss": 0.647, "step": 11910 }, { "epoch": 0.49363836047909154, "grad_norm": 0.4156576991081238, "learning_rate": 2.532015417132911e-06, "loss": 0.6996, "step": 11911 }, { "epoch": 0.4936798043847652, "grad_norm": 0.40890729427337646, "learning_rate": 2.5318081976045423e-06, "loss": 0.6455, "step": 11912 }, { "epoch": 0.4937212482904389, "grad_norm": 0.4306601583957672, "learning_rate": 2.531600978076174e-06, "loss": 0.6378, "step": 11913 }, { "epoch": 0.4937626921961126, "grad_norm": 0.402927964925766, "learning_rate": 2.531393758547806e-06, "loss": 0.6974, "step": 11914 }, { "epoch": 0.49380413610178625, "grad_norm": 0.435237318277359, "learning_rate": 2.5311865390194373e-06, "loss": 0.7483, "step": 11915 }, { "epoch": 0.49384558000745993, "grad_norm": 0.4264313578605652, "learning_rate": 2.530979319491069e-06, "loss": 0.6821, "step": 11916 }, { "epoch": 0.49388702391313355, "grad_norm": 0.42379096150398254, "learning_rate": 2.5307720999627005e-06, "loss": 0.7137, "step": 11917 }, { "epoch": 0.4939284678188072, "grad_norm": 0.4034231901168823, "learning_rate": 2.5305648804343323e-06, "loss": 0.6807, "step": 11918 }, { "epoch": 0.4939699117244809, "grad_norm": 
0.43287500739097595, "learning_rate": 2.5303576609059637e-06, "loss": 0.7908, "step": 11919 }, { "epoch": 0.4940113556301546, "grad_norm": 0.4099683463573456, "learning_rate": 2.530150441377596e-06, "loss": 0.728, "step": 11920 }, { "epoch": 0.49405279953582826, "grad_norm": 0.3925293982028961, "learning_rate": 2.529943221849227e-06, "loss": 0.6821, "step": 11921 }, { "epoch": 0.49409424344150193, "grad_norm": 0.4048459231853485, "learning_rate": 2.529736002320859e-06, "loss": 0.6989, "step": 11922 }, { "epoch": 0.4941356873471756, "grad_norm": 0.4153275787830353, "learning_rate": 2.529528782792491e-06, "loss": 0.6876, "step": 11923 }, { "epoch": 0.4941771312528493, "grad_norm": 0.4109082818031311, "learning_rate": 2.5293215632641223e-06, "loss": 0.7103, "step": 11924 }, { "epoch": 0.49421857515852297, "grad_norm": 0.3953899145126343, "learning_rate": 2.529114343735754e-06, "loss": 0.637, "step": 11925 }, { "epoch": 0.4942600190641966, "grad_norm": 0.3811253607273102, "learning_rate": 2.5289071242073855e-06, "loss": 0.6733, "step": 11926 }, { "epoch": 0.49430146296987026, "grad_norm": 0.38809067010879517, "learning_rate": 2.5286999046790173e-06, "loss": 0.7112, "step": 11927 }, { "epoch": 0.49434290687554394, "grad_norm": 0.4251621961593628, "learning_rate": 2.5284926851506487e-06, "loss": 0.7329, "step": 11928 }, { "epoch": 0.4943843507812176, "grad_norm": 0.3860369622707367, "learning_rate": 2.5282854656222805e-06, "loss": 0.6737, "step": 11929 }, { "epoch": 0.4944257946868913, "grad_norm": 0.3976783752441406, "learning_rate": 2.528078246093912e-06, "loss": 0.6541, "step": 11930 }, { "epoch": 0.494467238592565, "grad_norm": 0.38773834705352783, "learning_rate": 2.5278710265655437e-06, "loss": 0.6941, "step": 11931 }, { "epoch": 0.49450868249823865, "grad_norm": 0.4206503927707672, "learning_rate": 2.5276638070371755e-06, "loss": 0.7126, "step": 11932 }, { "epoch": 0.4945501264039123, "grad_norm": 0.41528499126434326, "learning_rate": 2.527456587508807e-06, 
"loss": 0.624, "step": 11933 }, { "epoch": 0.49459157030958595, "grad_norm": 0.3745574653148651, "learning_rate": 2.5272493679804387e-06, "loss": 0.6881, "step": 11934 }, { "epoch": 0.4946330142152596, "grad_norm": 0.4035796523094177, "learning_rate": 2.52704214845207e-06, "loss": 0.6477, "step": 11935 }, { "epoch": 0.4946744581209333, "grad_norm": 0.41015711426734924, "learning_rate": 2.526834928923702e-06, "loss": 0.7039, "step": 11936 }, { "epoch": 0.494715902026607, "grad_norm": 0.40707623958587646, "learning_rate": 2.5266277093953333e-06, "loss": 0.684, "step": 11937 }, { "epoch": 0.49475734593228066, "grad_norm": 0.4093051254749298, "learning_rate": 2.5264204898669655e-06, "loss": 0.7123, "step": 11938 }, { "epoch": 0.49479878983795433, "grad_norm": 0.4050680100917816, "learning_rate": 2.5262132703385965e-06, "loss": 0.684, "step": 11939 }, { "epoch": 0.494840233743628, "grad_norm": 0.4116615653038025, "learning_rate": 2.5260060508102287e-06, "loss": 0.6968, "step": 11940 }, { "epoch": 0.4948816776493017, "grad_norm": 0.42350536584854126, "learning_rate": 2.5257988312818605e-06, "loss": 0.7393, "step": 11941 }, { "epoch": 0.49492312155497536, "grad_norm": 0.4255143702030182, "learning_rate": 2.525591611753492e-06, "loss": 0.6296, "step": 11942 }, { "epoch": 0.494964565460649, "grad_norm": 0.40906867384910583, "learning_rate": 2.5253843922251237e-06, "loss": 0.6611, "step": 11943 }, { "epoch": 0.49500600936632266, "grad_norm": 0.4224621653556824, "learning_rate": 2.525177172696755e-06, "loss": 0.6786, "step": 11944 }, { "epoch": 0.49504745327199634, "grad_norm": 0.4067190885543823, "learning_rate": 2.524969953168387e-06, "loss": 0.6687, "step": 11945 }, { "epoch": 0.49508889717767, "grad_norm": 0.4177899956703186, "learning_rate": 2.5247627336400183e-06, "loss": 0.7036, "step": 11946 }, { "epoch": 0.4951303410833437, "grad_norm": 0.422874391078949, "learning_rate": 2.52455551411165e-06, "loss": 0.7231, "step": 11947 }, { "epoch": 0.49517178498901737, 
"grad_norm": 0.4232557415962219, "learning_rate": 2.524348294583282e-06, "loss": 0.6777, "step": 11948 }, { "epoch": 0.49521322889469105, "grad_norm": 0.41736260056495667, "learning_rate": 2.5241410750549133e-06, "loss": 0.7006, "step": 11949 }, { "epoch": 0.4952546728003647, "grad_norm": 0.4092543423175812, "learning_rate": 2.523933855526545e-06, "loss": 0.6897, "step": 11950 }, { "epoch": 0.4952961167060384, "grad_norm": 0.3978733718395233, "learning_rate": 2.5237266359981765e-06, "loss": 0.6851, "step": 11951 }, { "epoch": 0.495337560611712, "grad_norm": 0.4310804307460785, "learning_rate": 2.5235194164698083e-06, "loss": 0.6772, "step": 11952 }, { "epoch": 0.4953790045173857, "grad_norm": 0.38709092140197754, "learning_rate": 2.5233121969414397e-06, "loss": 0.6436, "step": 11953 }, { "epoch": 0.4954204484230594, "grad_norm": 0.38359251618385315, "learning_rate": 2.523104977413072e-06, "loss": 0.6632, "step": 11954 }, { "epoch": 0.49546189232873306, "grad_norm": 0.396247923374176, "learning_rate": 2.522897757884703e-06, "loss": 0.6625, "step": 11955 }, { "epoch": 0.49550333623440673, "grad_norm": 0.39747077226638794, "learning_rate": 2.522690538356335e-06, "loss": 0.6692, "step": 11956 }, { "epoch": 0.4955447801400804, "grad_norm": 0.3933205306529999, "learning_rate": 2.522483318827967e-06, "loss": 0.6923, "step": 11957 }, { "epoch": 0.4955862240457541, "grad_norm": 0.4358789324760437, "learning_rate": 2.5222760992995983e-06, "loss": 0.6799, "step": 11958 }, { "epoch": 0.49562766795142776, "grad_norm": 0.42939484119415283, "learning_rate": 2.52206887977123e-06, "loss": 0.6967, "step": 11959 }, { "epoch": 0.49566911185710144, "grad_norm": 0.43787631392478943, "learning_rate": 2.5218616602428615e-06, "loss": 0.6904, "step": 11960 }, { "epoch": 0.49571055576277506, "grad_norm": 0.3841419816017151, "learning_rate": 2.5216544407144933e-06, "loss": 0.6865, "step": 11961 }, { "epoch": 0.49575199966844874, "grad_norm": 0.4146205484867096, "learning_rate": 
2.5214472211861247e-06, "loss": 0.7205, "step": 11962 }, { "epoch": 0.4957934435741224, "grad_norm": 0.40564408898353577, "learning_rate": 2.5212400016577565e-06, "loss": 0.6401, "step": 11963 }, { "epoch": 0.4958348874797961, "grad_norm": 0.4023638665676117, "learning_rate": 2.521032782129388e-06, "loss": 0.6882, "step": 11964 }, { "epoch": 0.49587633138546977, "grad_norm": 0.45414650440216064, "learning_rate": 2.5208255626010197e-06, "loss": 0.6908, "step": 11965 }, { "epoch": 0.49591777529114345, "grad_norm": 0.3852802515029907, "learning_rate": 2.5206183430726515e-06, "loss": 0.6484, "step": 11966 }, { "epoch": 0.4959592191968171, "grad_norm": 0.4603707194328308, "learning_rate": 2.520411123544283e-06, "loss": 0.7444, "step": 11967 }, { "epoch": 0.4960006631024908, "grad_norm": 0.4256884753704071, "learning_rate": 2.5202039040159147e-06, "loss": 0.7069, "step": 11968 }, { "epoch": 0.4960421070081644, "grad_norm": 0.42455315589904785, "learning_rate": 2.519996684487546e-06, "loss": 0.7349, "step": 11969 }, { "epoch": 0.4960835509138381, "grad_norm": 0.3844889998435974, "learning_rate": 2.519789464959178e-06, "loss": 0.7068, "step": 11970 }, { "epoch": 0.4961249948195118, "grad_norm": 0.3990664780139923, "learning_rate": 2.5195822454308093e-06, "loss": 0.6486, "step": 11971 }, { "epoch": 0.49616643872518545, "grad_norm": 0.41621580719947815, "learning_rate": 2.5193750259024415e-06, "loss": 0.671, "step": 11972 }, { "epoch": 0.49620788263085913, "grad_norm": 0.42466944456100464, "learning_rate": 2.5191678063740725e-06, "loss": 0.7122, "step": 11973 }, { "epoch": 0.4962493265365328, "grad_norm": 0.4108840227127075, "learning_rate": 2.5189605868457047e-06, "loss": 0.6799, "step": 11974 }, { "epoch": 0.4962907704422065, "grad_norm": 0.38877516984939575, "learning_rate": 2.5187533673173365e-06, "loss": 0.7058, "step": 11975 }, { "epoch": 0.49633221434788016, "grad_norm": 0.5147877931594849, "learning_rate": 2.518546147788968e-06, "loss": 0.6914, "step": 11976 }, { 
"epoch": 0.49637365825355384, "grad_norm": 0.4093729257583618, "learning_rate": 2.5183389282605997e-06, "loss": 0.7366, "step": 11977 }, { "epoch": 0.49641510215922746, "grad_norm": 0.41924306750297546, "learning_rate": 2.518131708732231e-06, "loss": 0.699, "step": 11978 }, { "epoch": 0.49645654606490114, "grad_norm": 0.42035865783691406, "learning_rate": 2.517924489203863e-06, "loss": 0.6772, "step": 11979 }, { "epoch": 0.4964979899705748, "grad_norm": 0.4675120711326599, "learning_rate": 2.5177172696754943e-06, "loss": 0.6583, "step": 11980 }, { "epoch": 0.4965394338762485, "grad_norm": 0.3918024003505707, "learning_rate": 2.517510050147126e-06, "loss": 0.6754, "step": 11981 }, { "epoch": 0.49658087778192217, "grad_norm": 0.41508054733276367, "learning_rate": 2.5173028306187575e-06, "loss": 0.6735, "step": 11982 }, { "epoch": 0.49662232168759585, "grad_norm": 0.35878026485443115, "learning_rate": 2.5170956110903893e-06, "loss": 0.6425, "step": 11983 }, { "epoch": 0.4966637655932695, "grad_norm": 0.3775622248649597, "learning_rate": 2.516888391562021e-06, "loss": 0.6436, "step": 11984 }, { "epoch": 0.4967052094989432, "grad_norm": 0.46326592564582825, "learning_rate": 2.5166811720336525e-06, "loss": 0.7006, "step": 11985 }, { "epoch": 0.4967466534046169, "grad_norm": 0.42945584654808044, "learning_rate": 2.5164739525052843e-06, "loss": 0.6573, "step": 11986 }, { "epoch": 0.4967880973102905, "grad_norm": 0.3993362784385681, "learning_rate": 2.5162667329769157e-06, "loss": 0.7063, "step": 11987 }, { "epoch": 0.4968295412159642, "grad_norm": 0.45028671622276306, "learning_rate": 2.516059513448548e-06, "loss": 0.7297, "step": 11988 }, { "epoch": 0.49687098512163785, "grad_norm": 0.3696768283843994, "learning_rate": 2.515852293920179e-06, "loss": 0.6998, "step": 11989 }, { "epoch": 0.49691242902731153, "grad_norm": 0.4076455533504486, "learning_rate": 2.515645074391811e-06, "loss": 0.6786, "step": 11990 }, { "epoch": 0.4969538729329852, "grad_norm": 
0.43897855281829834, "learning_rate": 2.515437854863442e-06, "loss": 0.7318, "step": 11991 }, { "epoch": 0.4969953168386589, "grad_norm": 0.4200321435928345, "learning_rate": 2.5152306353350743e-06, "loss": 0.7026, "step": 11992 }, { "epoch": 0.49703676074433256, "grad_norm": 0.3863934874534607, "learning_rate": 2.515023415806706e-06, "loss": 0.6931, "step": 11993 }, { "epoch": 0.49707820465000624, "grad_norm": 0.4182608425617218, "learning_rate": 2.5148161962783375e-06, "loss": 0.7134, "step": 11994 }, { "epoch": 0.49711964855567986, "grad_norm": 0.39725589752197266, "learning_rate": 2.5146089767499693e-06, "loss": 0.7151, "step": 11995 }, { "epoch": 0.49716109246135354, "grad_norm": 0.40578627586364746, "learning_rate": 2.5144017572216007e-06, "loss": 0.679, "step": 11996 }, { "epoch": 0.4972025363670272, "grad_norm": 0.3958173990249634, "learning_rate": 2.5141945376932325e-06, "loss": 0.6537, "step": 11997 }, { "epoch": 0.4972439802727009, "grad_norm": 0.3821972608566284, "learning_rate": 2.513987318164864e-06, "loss": 0.6395, "step": 11998 }, { "epoch": 0.49728542417837457, "grad_norm": 0.42043787240982056, "learning_rate": 2.5137800986364957e-06, "loss": 0.7158, "step": 11999 }, { "epoch": 0.49732686808404825, "grad_norm": 0.396319717168808, "learning_rate": 2.513572879108127e-06, "loss": 0.7014, "step": 12000 }, { "epoch": 0.4973683119897219, "grad_norm": 0.40823930501937866, "learning_rate": 2.513365659579759e-06, "loss": 0.6682, "step": 12001 }, { "epoch": 0.4974097558953956, "grad_norm": 0.40652742981910706, "learning_rate": 2.5131584400513907e-06, "loss": 0.6765, "step": 12002 }, { "epoch": 0.4974511998010693, "grad_norm": 0.43065810203552246, "learning_rate": 2.512951220523022e-06, "loss": 0.7185, "step": 12003 }, { "epoch": 0.4974926437067429, "grad_norm": 0.3975633978843689, "learning_rate": 2.512744000994654e-06, "loss": 0.6215, "step": 12004 }, { "epoch": 0.4975340876124166, "grad_norm": 0.4000629186630249, "learning_rate": 2.5125367814662853e-06, 
"loss": 0.6833, "step": 12005 }, { "epoch": 0.49757553151809025, "grad_norm": 0.42067578434944153, "learning_rate": 2.5123295619379175e-06, "loss": 0.6821, "step": 12006 }, { "epoch": 0.49761697542376393, "grad_norm": 0.411510705947876, "learning_rate": 2.5121223424095485e-06, "loss": 0.7305, "step": 12007 }, { "epoch": 0.4976584193294376, "grad_norm": 0.44052401185035706, "learning_rate": 2.5119151228811807e-06, "loss": 0.709, "step": 12008 }, { "epoch": 0.4976998632351113, "grad_norm": 0.41767334938049316, "learning_rate": 2.5117079033528125e-06, "loss": 0.6865, "step": 12009 }, { "epoch": 0.49774130714078496, "grad_norm": 0.44363266229629517, "learning_rate": 2.511500683824444e-06, "loss": 0.7024, "step": 12010 }, { "epoch": 0.49778275104645864, "grad_norm": 0.432852178812027, "learning_rate": 2.5112934642960757e-06, "loss": 0.7156, "step": 12011 }, { "epoch": 0.4978241949521323, "grad_norm": 0.4528099298477173, "learning_rate": 2.511086244767707e-06, "loss": 0.6547, "step": 12012 }, { "epoch": 0.49786563885780594, "grad_norm": 0.42078375816345215, "learning_rate": 2.510879025239339e-06, "loss": 0.668, "step": 12013 }, { "epoch": 0.4979070827634796, "grad_norm": 0.4412442445755005, "learning_rate": 2.5106718057109703e-06, "loss": 0.7344, "step": 12014 }, { "epoch": 0.4979485266691533, "grad_norm": 0.39522692561149597, "learning_rate": 2.510464586182602e-06, "loss": 0.6765, "step": 12015 }, { "epoch": 0.49798997057482697, "grad_norm": 0.44233158230781555, "learning_rate": 2.5102573666542335e-06, "loss": 0.6833, "step": 12016 }, { "epoch": 0.49803141448050064, "grad_norm": 0.39103421568870544, "learning_rate": 2.5100501471258653e-06, "loss": 0.7344, "step": 12017 }, { "epoch": 0.4980728583861743, "grad_norm": 0.40546971559524536, "learning_rate": 2.509842927597497e-06, "loss": 0.6697, "step": 12018 }, { "epoch": 0.498114302291848, "grad_norm": 0.4304859936237335, "learning_rate": 2.5096357080691285e-06, "loss": 0.71, "step": 12019 }, { "epoch": 0.4981557461975217, 
"grad_norm": 0.412144273519516, "learning_rate": 2.5094284885407603e-06, "loss": 0.6788, "step": 12020 }, { "epoch": 0.4981971901031953, "grad_norm": 0.4435880482196808, "learning_rate": 2.5092212690123917e-06, "loss": 0.7385, "step": 12021 }, { "epoch": 0.498238634008869, "grad_norm": 0.3919011354446411, "learning_rate": 2.509014049484024e-06, "loss": 0.6604, "step": 12022 }, { "epoch": 0.49828007791454265, "grad_norm": 0.41757670044898987, "learning_rate": 2.508806829955655e-06, "loss": 0.6935, "step": 12023 }, { "epoch": 0.49832152182021633, "grad_norm": 0.39835837483406067, "learning_rate": 2.508599610427287e-06, "loss": 0.7051, "step": 12024 }, { "epoch": 0.49836296572589, "grad_norm": 0.4181268513202667, "learning_rate": 2.508392390898918e-06, "loss": 0.6749, "step": 12025 }, { "epoch": 0.4984044096315637, "grad_norm": 0.434345543384552, "learning_rate": 2.5081851713705503e-06, "loss": 0.7385, "step": 12026 }, { "epoch": 0.49844585353723736, "grad_norm": 0.410093754529953, "learning_rate": 2.507977951842182e-06, "loss": 0.6626, "step": 12027 }, { "epoch": 0.49848729744291104, "grad_norm": 0.4303359091281891, "learning_rate": 2.5077707323138135e-06, "loss": 0.702, "step": 12028 }, { "epoch": 0.4985287413485847, "grad_norm": 0.4118058681488037, "learning_rate": 2.5075635127854453e-06, "loss": 0.6572, "step": 12029 }, { "epoch": 0.49857018525425834, "grad_norm": 0.4294072985649109, "learning_rate": 2.5073562932570767e-06, "loss": 0.7041, "step": 12030 }, { "epoch": 0.498611629159932, "grad_norm": 0.443994402885437, "learning_rate": 2.5071490737287085e-06, "loss": 0.7014, "step": 12031 }, { "epoch": 0.4986530730656057, "grad_norm": 0.40063682198524475, "learning_rate": 2.50694185420034e-06, "loss": 0.6846, "step": 12032 }, { "epoch": 0.49869451697127937, "grad_norm": 0.41289472579956055, "learning_rate": 2.5067346346719717e-06, "loss": 0.6338, "step": 12033 }, { "epoch": 0.49873596087695304, "grad_norm": 0.4603697955608368, "learning_rate": 2.506527415143603e-06, 
"loss": 0.7336, "step": 12034 }, { "epoch": 0.4987774047826267, "grad_norm": 0.4044231176376343, "learning_rate": 2.506320195615235e-06, "loss": 0.6689, "step": 12035 }, { "epoch": 0.4988188486883004, "grad_norm": 0.4238329231739044, "learning_rate": 2.5061129760868667e-06, "loss": 0.6941, "step": 12036 }, { "epoch": 0.4988602925939741, "grad_norm": 0.4304075241088867, "learning_rate": 2.505905756558498e-06, "loss": 0.7002, "step": 12037 }, { "epoch": 0.49890173649964775, "grad_norm": 0.4487854838371277, "learning_rate": 2.50569853703013e-06, "loss": 0.7612, "step": 12038 }, { "epoch": 0.4989431804053214, "grad_norm": 0.40556544065475464, "learning_rate": 2.5054913175017613e-06, "loss": 0.6733, "step": 12039 }, { "epoch": 0.49898462431099505, "grad_norm": 0.4324802756309509, "learning_rate": 2.5052840979733935e-06, "loss": 0.7173, "step": 12040 }, { "epoch": 0.49902606821666873, "grad_norm": 0.4120703935623169, "learning_rate": 2.5050768784450245e-06, "loss": 0.639, "step": 12041 }, { "epoch": 0.4990675121223424, "grad_norm": 0.39611753821372986, "learning_rate": 2.5048696589166567e-06, "loss": 0.677, "step": 12042 }, { "epoch": 0.4991089560280161, "grad_norm": 0.39439111948013306, "learning_rate": 2.504662439388288e-06, "loss": 0.6763, "step": 12043 }, { "epoch": 0.49915039993368976, "grad_norm": 0.41333523392677307, "learning_rate": 2.50445521985992e-06, "loss": 0.6274, "step": 12044 }, { "epoch": 0.49919184383936344, "grad_norm": 0.41353872418403625, "learning_rate": 2.5042480003315517e-06, "loss": 0.729, "step": 12045 }, { "epoch": 0.4992332877450371, "grad_norm": 0.4190951883792877, "learning_rate": 2.504040780803183e-06, "loss": 0.6963, "step": 12046 }, { "epoch": 0.4992747316507108, "grad_norm": 0.4346992075443268, "learning_rate": 2.503833561274815e-06, "loss": 0.7247, "step": 12047 }, { "epoch": 0.4993161755563844, "grad_norm": 0.40948501229286194, "learning_rate": 2.5036263417464463e-06, "loss": 0.6721, "step": 12048 }, { "epoch": 0.4993576194620581, 
"grad_norm": 0.41514986753463745, "learning_rate": 2.503419122218078e-06, "loss": 0.6984, "step": 12049 }, { "epoch": 0.49939906336773177, "grad_norm": 0.39792993664741516, "learning_rate": 2.5032119026897095e-06, "loss": 0.6943, "step": 12050 }, { "epoch": 0.49944050727340544, "grad_norm": 0.43684446811676025, "learning_rate": 2.5030046831613413e-06, "loss": 0.6915, "step": 12051 }, { "epoch": 0.4994819511790791, "grad_norm": 0.40352702140808105, "learning_rate": 2.5027974636329727e-06, "loss": 0.6451, "step": 12052 }, { "epoch": 0.4995233950847528, "grad_norm": 0.4345749318599701, "learning_rate": 2.5025902441046045e-06, "loss": 0.6938, "step": 12053 }, { "epoch": 0.4995648389904265, "grad_norm": 0.41432181000709534, "learning_rate": 2.5023830245762363e-06, "loss": 0.6434, "step": 12054 }, { "epoch": 0.49960628289610015, "grad_norm": 0.3918708264827728, "learning_rate": 2.5021758050478677e-06, "loss": 0.6842, "step": 12055 }, { "epoch": 0.4996477268017738, "grad_norm": 0.3903842270374298, "learning_rate": 2.5019685855195e-06, "loss": 0.6638, "step": 12056 }, { "epoch": 0.49968917070744745, "grad_norm": 0.4764392077922821, "learning_rate": 2.501761365991131e-06, "loss": 0.6969, "step": 12057 }, { "epoch": 0.4997306146131211, "grad_norm": 0.417617529630661, "learning_rate": 2.501554146462763e-06, "loss": 0.6781, "step": 12058 }, { "epoch": 0.4997720585187948, "grad_norm": 0.40553587675094604, "learning_rate": 2.5013469269343945e-06, "loss": 0.6853, "step": 12059 }, { "epoch": 0.4998135024244685, "grad_norm": 0.42576566338539124, "learning_rate": 2.5011397074060263e-06, "loss": 0.689, "step": 12060 }, { "epoch": 0.49985494633014216, "grad_norm": 0.39877843856811523, "learning_rate": 2.5009324878776577e-06, "loss": 0.7032, "step": 12061 }, { "epoch": 0.49989639023581584, "grad_norm": 0.4165988862514496, "learning_rate": 2.5007252683492895e-06, "loss": 0.7024, "step": 12062 }, { "epoch": 0.4999378341414895, "grad_norm": 0.3979125916957855, "learning_rate": 
2.5005180488209213e-06, "loss": 0.6545, "step": 12063 }, { "epoch": 0.4999792780471632, "grad_norm": 0.39304810762405396, "learning_rate": 2.5003108292925527e-06, "loss": 0.6475, "step": 12064 }, { "epoch": 0.5000207219528369, "grad_norm": 0.3913843035697937, "learning_rate": 2.5001036097641845e-06, "loss": 0.6804, "step": 12065 }, { "epoch": 0.5000621658585105, "grad_norm": 0.392557829618454, "learning_rate": 2.4998963902358163e-06, "loss": 0.6113, "step": 12066 }, { "epoch": 0.5001036097641842, "grad_norm": 0.4007066488265991, "learning_rate": 2.4996891707074477e-06, "loss": 0.7057, "step": 12067 }, { "epoch": 0.5001450536698578, "grad_norm": 0.3967163562774658, "learning_rate": 2.4994819511790795e-06, "loss": 0.689, "step": 12068 }, { "epoch": 0.5001864975755315, "grad_norm": 0.4334842264652252, "learning_rate": 2.499274731650711e-06, "loss": 0.6893, "step": 12069 }, { "epoch": 0.5002279414812052, "grad_norm": 0.44219881296157837, "learning_rate": 2.4990675121223427e-06, "loss": 0.6875, "step": 12070 }, { "epoch": 0.5002693853868788, "grad_norm": 0.39369404315948486, "learning_rate": 2.498860292593974e-06, "loss": 0.6688, "step": 12071 }, { "epoch": 0.5003108292925526, "grad_norm": 0.40580645203590393, "learning_rate": 2.498653073065606e-06, "loss": 0.6802, "step": 12072 }, { "epoch": 0.5003522731982262, "grad_norm": 0.39442992210388184, "learning_rate": 2.4984458535372373e-06, "loss": 0.652, "step": 12073 }, { "epoch": 0.5003937171038999, "grad_norm": 0.38728371262550354, "learning_rate": 2.498238634008869e-06, "loss": 0.6328, "step": 12074 }, { "epoch": 0.5004351610095735, "grad_norm": 0.4222020208835602, "learning_rate": 2.498031414480501e-06, "loss": 0.7429, "step": 12075 }, { "epoch": 0.5004766049152473, "grad_norm": 0.3780023753643036, "learning_rate": 2.4978241949521327e-06, "loss": 0.6898, "step": 12076 }, { "epoch": 0.5005180488209209, "grad_norm": 0.38240793347358704, "learning_rate": 2.497616975423764e-06, "loss": 0.6938, "step": 12077 }, { "epoch": 
0.5005594927265945, "grad_norm": 0.48400115966796875, "learning_rate": 2.497409755895396e-06, "loss": 0.7175, "step": 12078 }, { "epoch": 0.5006009366322682, "grad_norm": 0.422498881816864, "learning_rate": 2.4972025363670273e-06, "loss": 0.7158, "step": 12079 }, { "epoch": 0.5006423805379419, "grad_norm": 0.40404435992240906, "learning_rate": 2.496995316838659e-06, "loss": 0.6372, "step": 12080 }, { "epoch": 0.5006838244436156, "grad_norm": 0.4048217535018921, "learning_rate": 2.4967880973102905e-06, "loss": 0.6633, "step": 12081 }, { "epoch": 0.5007252683492892, "grad_norm": 0.465869665145874, "learning_rate": 2.4965808777819223e-06, "loss": 0.7322, "step": 12082 }, { "epoch": 0.5007667122549629, "grad_norm": 0.3907471001148224, "learning_rate": 2.496373658253554e-06, "loss": 0.6674, "step": 12083 }, { "epoch": 0.5008081561606366, "grad_norm": 0.4338460862636566, "learning_rate": 2.496166438725186e-06, "loss": 0.7615, "step": 12084 }, { "epoch": 0.5008496000663103, "grad_norm": 0.41220352053642273, "learning_rate": 2.4959592191968173e-06, "loss": 0.625, "step": 12085 }, { "epoch": 0.5008910439719839, "grad_norm": 0.3711015284061432, "learning_rate": 2.495751999668449e-06, "loss": 0.632, "step": 12086 }, { "epoch": 0.5009324878776575, "grad_norm": 0.3978126049041748, "learning_rate": 2.4955447801400805e-06, "loss": 0.6635, "step": 12087 }, { "epoch": 0.5009739317833313, "grad_norm": 0.3690616488456726, "learning_rate": 2.4953375606117123e-06, "loss": 0.6504, "step": 12088 }, { "epoch": 0.5010153756890049, "grad_norm": 0.43138614296913147, "learning_rate": 2.4951303410833437e-06, "loss": 0.6978, "step": 12089 }, { "epoch": 0.5010568195946786, "grad_norm": 0.38003459572792053, "learning_rate": 2.4949231215549755e-06, "loss": 0.7004, "step": 12090 }, { "epoch": 0.5010982635003522, "grad_norm": 0.41197532415390015, "learning_rate": 2.494715902026607e-06, "loss": 0.6915, "step": 12091 }, { "epoch": 0.501139707406026, "grad_norm": 0.399027019739151, "learning_rate": 
2.494508682498239e-06, "loss": 0.7029, "step": 12092 }, { "epoch": 0.5011811513116996, "grad_norm": 0.3783413767814636, "learning_rate": 2.4943014629698705e-06, "loss": 0.6754, "step": 12093 }, { "epoch": 0.5012225952173733, "grad_norm": 0.3827938437461853, "learning_rate": 2.4940942434415023e-06, "loss": 0.6885, "step": 12094 }, { "epoch": 0.501264039123047, "grad_norm": 0.42406222224235535, "learning_rate": 2.4938870239131337e-06, "loss": 0.7034, "step": 12095 }, { "epoch": 0.5013054830287206, "grad_norm": 0.44488710165023804, "learning_rate": 2.4936798043847655e-06, "loss": 0.6843, "step": 12096 }, { "epoch": 0.5013469269343943, "grad_norm": 0.4248327612876892, "learning_rate": 2.493472584856397e-06, "loss": 0.7112, "step": 12097 }, { "epoch": 0.5013883708400679, "grad_norm": 0.41129153966903687, "learning_rate": 2.4932653653280287e-06, "loss": 0.6947, "step": 12098 }, { "epoch": 0.5014298147457417, "grad_norm": 0.3921922445297241, "learning_rate": 2.49305814579966e-06, "loss": 0.6155, "step": 12099 }, { "epoch": 0.5014712586514153, "grad_norm": 0.42110082507133484, "learning_rate": 2.492850926271292e-06, "loss": 0.6636, "step": 12100 }, { "epoch": 0.501512702557089, "grad_norm": 0.3675326406955719, "learning_rate": 2.4926437067429237e-06, "loss": 0.6815, "step": 12101 }, { "epoch": 0.5015541464627626, "grad_norm": 0.41612574458122253, "learning_rate": 2.4924364872145555e-06, "loss": 0.6869, "step": 12102 }, { "epoch": 0.5015955903684364, "grad_norm": 0.3758845329284668, "learning_rate": 2.492229267686187e-06, "loss": 0.6715, "step": 12103 }, { "epoch": 0.50163703427411, "grad_norm": 0.4302389323711395, "learning_rate": 2.4920220481578187e-06, "loss": 0.6724, "step": 12104 }, { "epoch": 0.5016784781797836, "grad_norm": 0.40757232904434204, "learning_rate": 2.49181482862945e-06, "loss": 0.6792, "step": 12105 }, { "epoch": 0.5017199220854573, "grad_norm": 0.4076420068740845, "learning_rate": 2.491607609101082e-06, "loss": 0.7021, "step": 12106 }, { "epoch": 
0.501761365991131, "grad_norm": 0.5484263896942139, "learning_rate": 2.4914003895727133e-06, "loss": 0.7571, "step": 12107 }, { "epoch": 0.5018028098968047, "grad_norm": 0.40297257900238037, "learning_rate": 2.491193170044345e-06, "loss": 0.71, "step": 12108 }, { "epoch": 0.5018442538024783, "grad_norm": 0.4634743332862854, "learning_rate": 2.4909859505159765e-06, "loss": 0.8298, "step": 12109 }, { "epoch": 0.5018856977081521, "grad_norm": 0.42236819863319397, "learning_rate": 2.4907787309876087e-06, "loss": 0.7231, "step": 12110 }, { "epoch": 0.5019271416138257, "grad_norm": 0.4364050030708313, "learning_rate": 2.49057151145924e-06, "loss": 0.6849, "step": 12111 }, { "epoch": 0.5019685855194994, "grad_norm": 0.4014566242694855, "learning_rate": 2.490364291930872e-06, "loss": 0.6598, "step": 12112 }, { "epoch": 0.502010029425173, "grad_norm": 0.4068412780761719, "learning_rate": 2.4901570724025033e-06, "loss": 0.6827, "step": 12113 }, { "epoch": 0.5020514733308467, "grad_norm": 0.42305973172187805, "learning_rate": 2.489949852874135e-06, "loss": 0.6848, "step": 12114 }, { "epoch": 0.5020929172365204, "grad_norm": 0.4540117383003235, "learning_rate": 2.4897426333457665e-06, "loss": 0.6947, "step": 12115 }, { "epoch": 0.502134361142194, "grad_norm": 0.43764641880989075, "learning_rate": 2.4895354138173983e-06, "loss": 0.6855, "step": 12116 }, { "epoch": 0.5021758050478677, "grad_norm": 0.3906167149543762, "learning_rate": 2.4893281942890297e-06, "loss": 0.7195, "step": 12117 }, { "epoch": 0.5022172489535414, "grad_norm": 0.39837321639060974, "learning_rate": 2.489120974760662e-06, "loss": 0.6879, "step": 12118 }, { "epoch": 0.5022586928592151, "grad_norm": 0.3770964741706848, "learning_rate": 2.4889137552322933e-06, "loss": 0.6602, "step": 12119 }, { "epoch": 0.5023001367648887, "grad_norm": 0.41646072268486023, "learning_rate": 2.488706535703925e-06, "loss": 0.6665, "step": 12120 }, { "epoch": 0.5023415806705624, "grad_norm": 0.42203015089035034, "learning_rate": 
2.4884993161755565e-06, "loss": 0.668, "step": 12121 }, { "epoch": 0.5023830245762361, "grad_norm": 0.396767795085907, "learning_rate": 2.4882920966471883e-06, "loss": 0.6805, "step": 12122 }, { "epoch": 0.5024244684819097, "grad_norm": 0.4128914475440979, "learning_rate": 2.4880848771188197e-06, "loss": 0.6931, "step": 12123 }, { "epoch": 0.5024659123875834, "grad_norm": 0.4730176031589508, "learning_rate": 2.4878776575904515e-06, "loss": 0.7441, "step": 12124 }, { "epoch": 0.502507356293257, "grad_norm": 0.4274924099445343, "learning_rate": 2.487670438062083e-06, "loss": 0.7002, "step": 12125 }, { "epoch": 0.5025488001989308, "grad_norm": 0.42004191875457764, "learning_rate": 2.4874632185337147e-06, "loss": 0.7004, "step": 12126 }, { "epoch": 0.5025902441046044, "grad_norm": 0.4135735332965851, "learning_rate": 2.4872559990053465e-06, "loss": 0.6881, "step": 12127 }, { "epoch": 0.5026316880102781, "grad_norm": 0.3836991786956787, "learning_rate": 2.4870487794769783e-06, "loss": 0.6882, "step": 12128 }, { "epoch": 0.5026731319159518, "grad_norm": 0.3888266682624817, "learning_rate": 2.4868415599486097e-06, "loss": 0.7007, "step": 12129 }, { "epoch": 0.5027145758216254, "grad_norm": 0.4015958309173584, "learning_rate": 2.4866343404202415e-06, "loss": 0.6921, "step": 12130 }, { "epoch": 0.5027560197272991, "grad_norm": 0.4097941219806671, "learning_rate": 2.486427120891873e-06, "loss": 0.696, "step": 12131 }, { "epoch": 0.5027974636329727, "grad_norm": 0.40690216422080994, "learning_rate": 2.4862199013635047e-06, "loss": 0.71, "step": 12132 }, { "epoch": 0.5028389075386465, "grad_norm": 0.41904616355895996, "learning_rate": 2.486012681835136e-06, "loss": 0.6675, "step": 12133 }, { "epoch": 0.5028803514443201, "grad_norm": 0.39571309089660645, "learning_rate": 2.485805462306768e-06, "loss": 0.6617, "step": 12134 }, { "epoch": 0.5029217953499938, "grad_norm": 0.4086185097694397, "learning_rate": 2.4855982427783993e-06, "loss": 0.6926, "step": 12135 }, { "epoch": 
0.5029632392556674, "grad_norm": 0.42128175497055054, "learning_rate": 2.4853910232500315e-06, "loss": 0.6322, "step": 12136 }, { "epoch": 0.5030046831613412, "grad_norm": 0.40756407380104065, "learning_rate": 2.485183803721663e-06, "loss": 0.666, "step": 12137 }, { "epoch": 0.5030461270670148, "grad_norm": 0.42321527004241943, "learning_rate": 2.4849765841932947e-06, "loss": 0.6799, "step": 12138 }, { "epoch": 0.5030875709726884, "grad_norm": 0.4023328125476837, "learning_rate": 2.484769364664926e-06, "loss": 0.6775, "step": 12139 }, { "epoch": 0.5031290148783621, "grad_norm": 0.3966747522354126, "learning_rate": 2.484562145136558e-06, "loss": 0.7302, "step": 12140 }, { "epoch": 0.5031704587840358, "grad_norm": 0.40463635325431824, "learning_rate": 2.4843549256081893e-06, "loss": 0.7241, "step": 12141 }, { "epoch": 0.5032119026897095, "grad_norm": 0.4534701704978943, "learning_rate": 2.484147706079821e-06, "loss": 0.7119, "step": 12142 }, { "epoch": 0.5032533465953831, "grad_norm": 0.41324228048324585, "learning_rate": 2.4839404865514525e-06, "loss": 0.7239, "step": 12143 }, { "epoch": 0.5032947905010569, "grad_norm": 0.40443310141563416, "learning_rate": 2.4837332670230847e-06, "loss": 0.7285, "step": 12144 }, { "epoch": 0.5033362344067305, "grad_norm": 0.44884440302848816, "learning_rate": 2.483526047494716e-06, "loss": 0.7341, "step": 12145 }, { "epoch": 0.5033776783124042, "grad_norm": 0.4210149049758911, "learning_rate": 2.483318827966348e-06, "loss": 0.7056, "step": 12146 }, { "epoch": 0.5034191222180778, "grad_norm": 0.40544062852859497, "learning_rate": 2.4831116084379793e-06, "loss": 0.652, "step": 12147 }, { "epoch": 0.5034605661237515, "grad_norm": 0.405346155166626, "learning_rate": 2.482904388909611e-06, "loss": 0.6924, "step": 12148 }, { "epoch": 0.5035020100294252, "grad_norm": 0.38160941004753113, "learning_rate": 2.4826971693812425e-06, "loss": 0.7017, "step": 12149 }, { "epoch": 0.5035434539350988, "grad_norm": 0.4462243616580963, 
"learning_rate": 2.4824899498528743e-06, "loss": 0.6898, "step": 12150 }, { "epoch": 0.5035848978407725, "grad_norm": 0.3990682363510132, "learning_rate": 2.4822827303245057e-06, "loss": 0.7202, "step": 12151 }, { "epoch": 0.5036263417464462, "grad_norm": 0.3961663842201233, "learning_rate": 2.4820755107961375e-06, "loss": 0.6577, "step": 12152 }, { "epoch": 0.5036677856521199, "grad_norm": 0.43611231446266174, "learning_rate": 2.4818682912677693e-06, "loss": 0.6826, "step": 12153 }, { "epoch": 0.5037092295577935, "grad_norm": 0.402664452791214, "learning_rate": 2.481661071739401e-06, "loss": 0.6847, "step": 12154 }, { "epoch": 0.5037506734634672, "grad_norm": 0.41429275274276733, "learning_rate": 2.4814538522110325e-06, "loss": 0.7056, "step": 12155 }, { "epoch": 0.5037921173691409, "grad_norm": 0.44511136412620544, "learning_rate": 2.4812466326826643e-06, "loss": 0.6981, "step": 12156 }, { "epoch": 0.5038335612748145, "grad_norm": 0.4469084143638611, "learning_rate": 2.4810394131542957e-06, "loss": 0.7123, "step": 12157 }, { "epoch": 0.5038750051804882, "grad_norm": 0.43722930550575256, "learning_rate": 2.4808321936259275e-06, "loss": 0.6898, "step": 12158 }, { "epoch": 0.5039164490861618, "grad_norm": 0.41285890340805054, "learning_rate": 2.480624974097559e-06, "loss": 0.6572, "step": 12159 }, { "epoch": 0.5039578929918356, "grad_norm": 0.40923431515693665, "learning_rate": 2.4804177545691907e-06, "loss": 0.7046, "step": 12160 }, { "epoch": 0.5039993368975092, "grad_norm": 0.6008912324905396, "learning_rate": 2.4802105350408225e-06, "loss": 0.7225, "step": 12161 }, { "epoch": 0.5040407808031829, "grad_norm": 0.4196130335330963, "learning_rate": 2.4800033155124543e-06, "loss": 0.7048, "step": 12162 }, { "epoch": 0.5040822247088566, "grad_norm": 0.4156739115715027, "learning_rate": 2.4797960959840857e-06, "loss": 0.7347, "step": 12163 }, { "epoch": 0.5041236686145303, "grad_norm": 0.42003384232521057, "learning_rate": 2.4795888764557175e-06, "loss": 0.6791, 
"step": 12164 }, { "epoch": 0.5041651125202039, "grad_norm": 0.420210599899292, "learning_rate": 2.479381656927349e-06, "loss": 0.6566, "step": 12165 }, { "epoch": 0.5042065564258775, "grad_norm": 0.4262768626213074, "learning_rate": 2.4791744373989807e-06, "loss": 0.7219, "step": 12166 }, { "epoch": 0.5042480003315513, "grad_norm": 0.43248069286346436, "learning_rate": 2.478967217870612e-06, "loss": 0.7002, "step": 12167 }, { "epoch": 0.5042894442372249, "grad_norm": 0.4152391254901886, "learning_rate": 2.478759998342244e-06, "loss": 0.7025, "step": 12168 }, { "epoch": 0.5043308881428986, "grad_norm": 0.42801567912101746, "learning_rate": 2.4785527788138757e-06, "loss": 0.6771, "step": 12169 }, { "epoch": 0.5043723320485722, "grad_norm": 0.39395156502723694, "learning_rate": 2.4783455592855075e-06, "loss": 0.688, "step": 12170 }, { "epoch": 0.504413775954246, "grad_norm": 0.489948034286499, "learning_rate": 2.478138339757139e-06, "loss": 0.7026, "step": 12171 }, { "epoch": 0.5044552198599196, "grad_norm": 0.4414099156856537, "learning_rate": 2.4779311202287707e-06, "loss": 0.7036, "step": 12172 }, { "epoch": 0.5044966637655933, "grad_norm": 0.38853156566619873, "learning_rate": 2.477723900700402e-06, "loss": 0.6208, "step": 12173 }, { "epoch": 0.5045381076712669, "grad_norm": 0.3727603256702423, "learning_rate": 2.477516681172034e-06, "loss": 0.6548, "step": 12174 }, { "epoch": 0.5045795515769406, "grad_norm": 0.4071371853351593, "learning_rate": 2.4773094616436653e-06, "loss": 0.6714, "step": 12175 }, { "epoch": 0.5046209954826143, "grad_norm": 0.40217652916908264, "learning_rate": 2.477102242115297e-06, "loss": 0.6838, "step": 12176 }, { "epoch": 0.5046624393882879, "grad_norm": 0.41850778460502625, "learning_rate": 2.4768950225869285e-06, "loss": 0.7446, "step": 12177 }, { "epoch": 0.5047038832939617, "grad_norm": 0.48072972893714905, "learning_rate": 2.4766878030585603e-06, "loss": 0.7186, "step": 12178 }, { "epoch": 0.5047453271996353, "grad_norm": 
0.4234667122364044, "learning_rate": 2.476480583530192e-06, "loss": 0.7129, "step": 12179 }, { "epoch": 0.504786771105309, "grad_norm": 0.4175702631473541, "learning_rate": 2.476273364001824e-06, "loss": 0.7106, "step": 12180 }, { "epoch": 0.5048282150109826, "grad_norm": 0.4012846052646637, "learning_rate": 2.4760661444734553e-06, "loss": 0.696, "step": 12181 }, { "epoch": 0.5048696589166564, "grad_norm": 0.46382564306259155, "learning_rate": 2.475858924945087e-06, "loss": 0.6608, "step": 12182 }, { "epoch": 0.50491110282233, "grad_norm": 0.4569469392299652, "learning_rate": 2.4756517054167185e-06, "loss": 0.7263, "step": 12183 }, { "epoch": 0.5049525467280036, "grad_norm": 0.4094659686088562, "learning_rate": 2.4754444858883503e-06, "loss": 0.6797, "step": 12184 }, { "epoch": 0.5049939906336773, "grad_norm": 0.41655343770980835, "learning_rate": 2.4752372663599817e-06, "loss": 0.6924, "step": 12185 }, { "epoch": 0.505035434539351, "grad_norm": 0.4058151841163635, "learning_rate": 2.4750300468316135e-06, "loss": 0.7302, "step": 12186 }, { "epoch": 0.5050768784450247, "grad_norm": 0.40714722871780396, "learning_rate": 2.4748228273032453e-06, "loss": 0.73, "step": 12187 }, { "epoch": 0.5051183223506983, "grad_norm": 0.3988172709941864, "learning_rate": 2.474615607774877e-06, "loss": 0.6741, "step": 12188 }, { "epoch": 0.505159766256372, "grad_norm": 0.4347361624240875, "learning_rate": 2.4744083882465085e-06, "loss": 0.6609, "step": 12189 }, { "epoch": 0.5052012101620457, "grad_norm": 0.4218306243419647, "learning_rate": 2.4742011687181403e-06, "loss": 0.686, "step": 12190 }, { "epoch": 0.5052426540677193, "grad_norm": 0.41292136907577515, "learning_rate": 2.4739939491897717e-06, "loss": 0.6637, "step": 12191 }, { "epoch": 0.505284097973393, "grad_norm": 0.4476361572742462, "learning_rate": 2.4737867296614035e-06, "loss": 0.66, "step": 12192 }, { "epoch": 0.5053255418790666, "grad_norm": 0.4280683696269989, "learning_rate": 2.473579510133035e-06, "loss": 0.6466, 
"step": 12193 }, { "epoch": 0.5053669857847404, "grad_norm": 0.389683336019516, "learning_rate": 2.4733722906046667e-06, "loss": 0.6467, "step": 12194 }, { "epoch": 0.505408429690414, "grad_norm": 0.4336313307285309, "learning_rate": 2.4731650710762985e-06, "loss": 0.7227, "step": 12195 }, { "epoch": 0.5054498735960877, "grad_norm": 0.41722410917282104, "learning_rate": 2.47295785154793e-06, "loss": 0.6914, "step": 12196 }, { "epoch": 0.5054913175017614, "grad_norm": 0.41265353560447693, "learning_rate": 2.4727506320195617e-06, "loss": 0.7083, "step": 12197 }, { "epoch": 0.5055327614074351, "grad_norm": 0.42749983072280884, "learning_rate": 2.4725434124911935e-06, "loss": 0.6467, "step": 12198 }, { "epoch": 0.5055742053131087, "grad_norm": 0.4008595645427704, "learning_rate": 2.472336192962825e-06, "loss": 0.6747, "step": 12199 }, { "epoch": 0.5056156492187823, "grad_norm": 0.40904802083969116, "learning_rate": 2.4721289734344567e-06, "loss": 0.6927, "step": 12200 }, { "epoch": 0.5056570931244561, "grad_norm": 0.43915098905563354, "learning_rate": 2.471921753906088e-06, "loss": 0.7239, "step": 12201 }, { "epoch": 0.5056985370301297, "grad_norm": 0.4118737280368805, "learning_rate": 2.47171453437772e-06, "loss": 0.6975, "step": 12202 }, { "epoch": 0.5057399809358034, "grad_norm": 0.40852171182632446, "learning_rate": 2.4715073148493517e-06, "loss": 0.6674, "step": 12203 }, { "epoch": 0.505781424841477, "grad_norm": 0.42807379364967346, "learning_rate": 2.471300095320983e-06, "loss": 0.7115, "step": 12204 }, { "epoch": 0.5058228687471508, "grad_norm": 0.4124264717102051, "learning_rate": 2.471092875792615e-06, "loss": 0.6548, "step": 12205 }, { "epoch": 0.5058643126528244, "grad_norm": 0.44087740778923035, "learning_rate": 2.4708856562642467e-06, "loss": 0.6951, "step": 12206 }, { "epoch": 0.5059057565584981, "grad_norm": 0.46886372566223145, "learning_rate": 2.470678436735878e-06, "loss": 0.7434, "step": 12207 }, { "epoch": 0.5059472004641717, "grad_norm": 
0.44389280676841736, "learning_rate": 2.47047121720751e-06, "loss": 0.6847, "step": 12208 }, { "epoch": 0.5059886443698454, "grad_norm": 0.46807995438575745, "learning_rate": 2.4702639976791413e-06, "loss": 0.7605, "step": 12209 }, { "epoch": 0.5060300882755191, "grad_norm": 0.4185270369052887, "learning_rate": 2.470056778150773e-06, "loss": 0.6715, "step": 12210 }, { "epoch": 0.5060715321811927, "grad_norm": 0.4210631251335144, "learning_rate": 2.4698495586224045e-06, "loss": 0.6771, "step": 12211 }, { "epoch": 0.5061129760868665, "grad_norm": 0.3961467444896698, "learning_rate": 2.4696423390940363e-06, "loss": 0.7131, "step": 12212 }, { "epoch": 0.5061544199925401, "grad_norm": 0.4135449528694153, "learning_rate": 2.469435119565668e-06, "loss": 0.7065, "step": 12213 }, { "epoch": 0.5061958638982138, "grad_norm": 0.43862423300743103, "learning_rate": 2.4692279000373e-06, "loss": 0.705, "step": 12214 }, { "epoch": 0.5062373078038874, "grad_norm": 0.39867350459098816, "learning_rate": 2.4690206805089313e-06, "loss": 0.6504, "step": 12215 }, { "epoch": 0.5062787517095612, "grad_norm": 0.41621169447898865, "learning_rate": 2.468813460980563e-06, "loss": 0.7286, "step": 12216 }, { "epoch": 0.5063201956152348, "grad_norm": 0.4095323085784912, "learning_rate": 2.4686062414521945e-06, "loss": 0.6757, "step": 12217 }, { "epoch": 0.5063616395209084, "grad_norm": 0.42133840918540955, "learning_rate": 2.4683990219238263e-06, "loss": 0.6973, "step": 12218 }, { "epoch": 0.5064030834265821, "grad_norm": 0.404493123292923, "learning_rate": 2.4681918023954577e-06, "loss": 0.656, "step": 12219 }, { "epoch": 0.5064445273322558, "grad_norm": 0.4059443473815918, "learning_rate": 2.4679845828670895e-06, "loss": 0.6831, "step": 12220 }, { "epoch": 0.5064859712379295, "grad_norm": 0.4434875249862671, "learning_rate": 2.4677773633387213e-06, "loss": 0.7377, "step": 12221 }, { "epoch": 0.5065274151436031, "grad_norm": 0.3694967031478882, "learning_rate": 2.4675701438103527e-06, "loss": 
0.675, "step": 12222 }, { "epoch": 0.5065688590492768, "grad_norm": 0.4084167182445526, "learning_rate": 2.4673629242819845e-06, "loss": 0.7253, "step": 12223 }, { "epoch": 0.5066103029549505, "grad_norm": 0.4680980145931244, "learning_rate": 2.4671557047536163e-06, "loss": 0.674, "step": 12224 }, { "epoch": 0.5066517468606242, "grad_norm": 0.4182599186897278, "learning_rate": 2.4669484852252477e-06, "loss": 0.6877, "step": 12225 }, { "epoch": 0.5066931907662978, "grad_norm": 0.4306640923023224, "learning_rate": 2.4667412656968795e-06, "loss": 0.6904, "step": 12226 }, { "epoch": 0.5067346346719714, "grad_norm": 0.3828088045120239, "learning_rate": 2.466534046168511e-06, "loss": 0.7129, "step": 12227 }, { "epoch": 0.5067760785776452, "grad_norm": 0.4040340781211853, "learning_rate": 2.4663268266401427e-06, "loss": 0.6958, "step": 12228 }, { "epoch": 0.5068175224833188, "grad_norm": 0.4159044623374939, "learning_rate": 2.4661196071117745e-06, "loss": 0.6895, "step": 12229 }, { "epoch": 0.5068589663889925, "grad_norm": 0.42758458852767944, "learning_rate": 2.465912387583406e-06, "loss": 0.6572, "step": 12230 }, { "epoch": 0.5069004102946661, "grad_norm": 0.3837548792362213, "learning_rate": 2.4657051680550377e-06, "loss": 0.6752, "step": 12231 }, { "epoch": 0.5069418542003399, "grad_norm": 0.3906620144844055, "learning_rate": 2.4654979485266695e-06, "loss": 0.7191, "step": 12232 }, { "epoch": 0.5069832981060135, "grad_norm": 0.38963863253593445, "learning_rate": 2.465290728998301e-06, "loss": 0.657, "step": 12233 }, { "epoch": 0.5070247420116872, "grad_norm": 0.39764219522476196, "learning_rate": 2.4650835094699327e-06, "loss": 0.7117, "step": 12234 }, { "epoch": 0.5070661859173609, "grad_norm": 0.3864278495311737, "learning_rate": 2.464876289941564e-06, "loss": 0.6969, "step": 12235 }, { "epoch": 0.5071076298230345, "grad_norm": 0.4151107668876648, "learning_rate": 2.464669070413196e-06, "loss": 0.7247, "step": 12236 }, { "epoch": 0.5071490737287082, "grad_norm": 
0.3893643915653229, "learning_rate": 2.4644618508848277e-06, "loss": 0.6558, "step": 12237 }, { "epoch": 0.5071905176343818, "grad_norm": 0.4144800305366516, "learning_rate": 2.464254631356459e-06, "loss": 0.6611, "step": 12238 }, { "epoch": 0.5072319615400556, "grad_norm": 0.4151061177253723, "learning_rate": 2.464047411828091e-06, "loss": 0.688, "step": 12239 }, { "epoch": 0.5072734054457292, "grad_norm": 0.41983118653297424, "learning_rate": 2.4638401922997227e-06, "loss": 0.6702, "step": 12240 }, { "epoch": 0.5073148493514029, "grad_norm": 0.4059462249279022, "learning_rate": 2.463632972771354e-06, "loss": 0.6826, "step": 12241 }, { "epoch": 0.5073562932570765, "grad_norm": 0.43690910935401917, "learning_rate": 2.463425753242986e-06, "loss": 0.7006, "step": 12242 }, { "epoch": 0.5073977371627503, "grad_norm": 0.40925687551498413, "learning_rate": 2.4632185337146173e-06, "loss": 0.7004, "step": 12243 }, { "epoch": 0.5074391810684239, "grad_norm": 0.40462225675582886, "learning_rate": 2.463011314186249e-06, "loss": 0.676, "step": 12244 }, { "epoch": 0.5074806249740975, "grad_norm": 0.438272625207901, "learning_rate": 2.462804094657881e-06, "loss": 0.7189, "step": 12245 }, { "epoch": 0.5075220688797712, "grad_norm": 0.40205591917037964, "learning_rate": 2.4625968751295123e-06, "loss": 0.7114, "step": 12246 }, { "epoch": 0.5075635127854449, "grad_norm": 0.42673230171203613, "learning_rate": 2.462389655601144e-06, "loss": 0.6738, "step": 12247 }, { "epoch": 0.5076049566911186, "grad_norm": 0.4513818621635437, "learning_rate": 2.4621824360727755e-06, "loss": 0.67, "step": 12248 }, { "epoch": 0.5076464005967922, "grad_norm": 0.40852463245391846, "learning_rate": 2.4619752165444073e-06, "loss": 0.6833, "step": 12249 }, { "epoch": 0.507687844502466, "grad_norm": 0.41575610637664795, "learning_rate": 2.461767997016039e-06, "loss": 0.6873, "step": 12250 }, { "epoch": 0.5077292884081396, "grad_norm": 0.43960052728652954, "learning_rate": 2.4615607774876705e-06, "loss": 
0.7, "step": 12251 }, { "epoch": 0.5077707323138132, "grad_norm": 0.4234411418437958, "learning_rate": 2.4613535579593023e-06, "loss": 0.6927, "step": 12252 }, { "epoch": 0.5078121762194869, "grad_norm": 0.40765973925590515, "learning_rate": 2.4611463384309337e-06, "loss": 0.6688, "step": 12253 }, { "epoch": 0.5078536201251606, "grad_norm": 0.4090679883956909, "learning_rate": 2.4609391189025655e-06, "loss": 0.7419, "step": 12254 }, { "epoch": 0.5078950640308343, "grad_norm": 0.4289027452468872, "learning_rate": 2.4607318993741973e-06, "loss": 0.7227, "step": 12255 }, { "epoch": 0.5079365079365079, "grad_norm": 0.422525018453598, "learning_rate": 2.4605246798458287e-06, "loss": 0.6652, "step": 12256 }, { "epoch": 0.5079779518421816, "grad_norm": 0.4026212990283966, "learning_rate": 2.4603174603174605e-06, "loss": 0.7076, "step": 12257 }, { "epoch": 0.5080193957478553, "grad_norm": 0.43463334441185, "learning_rate": 2.4601102407890923e-06, "loss": 0.7288, "step": 12258 }, { "epoch": 0.508060839653529, "grad_norm": 0.4179787039756775, "learning_rate": 2.4599030212607237e-06, "loss": 0.6624, "step": 12259 }, { "epoch": 0.5081022835592026, "grad_norm": 0.413021445274353, "learning_rate": 2.4596958017323555e-06, "loss": 0.6636, "step": 12260 }, { "epoch": 0.5081437274648762, "grad_norm": 0.3945936858654022, "learning_rate": 2.459488582203987e-06, "loss": 0.6554, "step": 12261 }, { "epoch": 0.50818517137055, "grad_norm": 0.4498036205768585, "learning_rate": 2.4592813626756187e-06, "loss": 0.7036, "step": 12262 }, { "epoch": 0.5082266152762236, "grad_norm": 0.4390327036380768, "learning_rate": 2.4590741431472505e-06, "loss": 0.7441, "step": 12263 }, { "epoch": 0.5082680591818973, "grad_norm": 0.4191127121448517, "learning_rate": 2.458866923618882e-06, "loss": 0.7249, "step": 12264 }, { "epoch": 0.508309503087571, "grad_norm": 0.3868004381656647, "learning_rate": 2.4586597040905137e-06, "loss": 0.6829, "step": 12265 }, { "epoch": 0.5083509469932447, "grad_norm": 
0.4275377690792084, "learning_rate": 2.4584524845621455e-06, "loss": 0.6907, "step": 12266 }, { "epoch": 0.5083923908989183, "grad_norm": 0.4002785384654999, "learning_rate": 2.458245265033777e-06, "loss": 0.6843, "step": 12267 }, { "epoch": 0.508433834804592, "grad_norm": 0.3869546353816986, "learning_rate": 2.4580380455054087e-06, "loss": 0.6877, "step": 12268 }, { "epoch": 0.5084752787102657, "grad_norm": 0.39479702711105347, "learning_rate": 2.45783082597704e-06, "loss": 0.7141, "step": 12269 }, { "epoch": 0.5085167226159393, "grad_norm": 0.4116244912147522, "learning_rate": 2.457623606448672e-06, "loss": 0.7008, "step": 12270 }, { "epoch": 0.508558166521613, "grad_norm": 0.37338438630104065, "learning_rate": 2.4574163869203037e-06, "loss": 0.6489, "step": 12271 }, { "epoch": 0.5085996104272866, "grad_norm": 0.4580710828304291, "learning_rate": 2.457209167391935e-06, "loss": 0.6877, "step": 12272 }, { "epoch": 0.5086410543329604, "grad_norm": 0.41053277254104614, "learning_rate": 2.457001947863567e-06, "loss": 0.674, "step": 12273 }, { "epoch": 0.508682498238634, "grad_norm": 0.4166455566883087, "learning_rate": 2.4567947283351983e-06, "loss": 0.6978, "step": 12274 }, { "epoch": 0.5087239421443077, "grad_norm": 0.3813936710357666, "learning_rate": 2.45658750880683e-06, "loss": 0.6067, "step": 12275 }, { "epoch": 0.5087653860499813, "grad_norm": 0.4155506193637848, "learning_rate": 2.456380289278462e-06, "loss": 0.6857, "step": 12276 }, { "epoch": 0.5088068299556551, "grad_norm": 0.39879536628723145, "learning_rate": 2.4561730697500933e-06, "loss": 0.6553, "step": 12277 }, { "epoch": 0.5088482738613287, "grad_norm": 0.3778477907180786, "learning_rate": 2.455965850221725e-06, "loss": 0.6458, "step": 12278 }, { "epoch": 0.5088897177670023, "grad_norm": 0.4185250401496887, "learning_rate": 2.455758630693357e-06, "loss": 0.7308, "step": 12279 }, { "epoch": 0.508931161672676, "grad_norm": 0.43229997158050537, "learning_rate": 2.4555514111649883e-06, "loss": 0.6755, 
"step": 12280 }, { "epoch": 0.5089726055783497, "grad_norm": 0.4224403202533722, "learning_rate": 2.45534419163662e-06, "loss": 0.7854, "step": 12281 }, { "epoch": 0.5090140494840234, "grad_norm": 0.3853486180305481, "learning_rate": 2.4551369721082515e-06, "loss": 0.6765, "step": 12282 }, { "epoch": 0.509055493389697, "grad_norm": 0.38584086298942566, "learning_rate": 2.4549297525798833e-06, "loss": 0.6238, "step": 12283 }, { "epoch": 0.5090969372953708, "grad_norm": 0.3991621732711792, "learning_rate": 2.454722533051515e-06, "loss": 0.6505, "step": 12284 }, { "epoch": 0.5091383812010444, "grad_norm": 0.41635560989379883, "learning_rate": 2.4545153135231465e-06, "loss": 0.6821, "step": 12285 }, { "epoch": 0.5091798251067181, "grad_norm": 0.41656365990638733, "learning_rate": 2.4543080939947783e-06, "loss": 0.6396, "step": 12286 }, { "epoch": 0.5092212690123917, "grad_norm": 0.408433735370636, "learning_rate": 2.4541008744664097e-06, "loss": 0.6729, "step": 12287 }, { "epoch": 0.5092627129180654, "grad_norm": 0.45683589577674866, "learning_rate": 2.4538936549380415e-06, "loss": 0.7201, "step": 12288 }, { "epoch": 0.5093041568237391, "grad_norm": 0.39597707986831665, "learning_rate": 2.4536864354096733e-06, "loss": 0.6816, "step": 12289 }, { "epoch": 0.5093456007294127, "grad_norm": 0.4139714241027832, "learning_rate": 2.4534792158813047e-06, "loss": 0.7043, "step": 12290 }, { "epoch": 0.5093870446350864, "grad_norm": 0.4132087528705597, "learning_rate": 2.4532719963529365e-06, "loss": 0.696, "step": 12291 }, { "epoch": 0.5094284885407601, "grad_norm": 0.40727344155311584, "learning_rate": 2.4530647768245683e-06, "loss": 0.6954, "step": 12292 }, { "epoch": 0.5094699324464338, "grad_norm": 0.36722633242607117, "learning_rate": 2.4528575572961997e-06, "loss": 0.6692, "step": 12293 }, { "epoch": 0.5095113763521074, "grad_norm": 0.37520572543144226, "learning_rate": 2.4526503377678315e-06, "loss": 0.6541, "step": 12294 }, { "epoch": 0.5095528202577811, "grad_norm": 
0.4189264476299286, "learning_rate": 2.452443118239463e-06, "loss": 0.6835, "step": 12295 }, { "epoch": 0.5095942641634548, "grad_norm": 0.40546074509620667, "learning_rate": 2.4522358987110947e-06, "loss": 0.6658, "step": 12296 }, { "epoch": 0.5096357080691284, "grad_norm": 0.40954652428627014, "learning_rate": 2.4520286791827265e-06, "loss": 0.6721, "step": 12297 }, { "epoch": 0.5096771519748021, "grad_norm": 0.41790667176246643, "learning_rate": 2.451821459654358e-06, "loss": 0.6489, "step": 12298 }, { "epoch": 0.5097185958804757, "grad_norm": 0.4155614674091339, "learning_rate": 2.4516142401259897e-06, "loss": 0.6882, "step": 12299 }, { "epoch": 0.5097600397861495, "grad_norm": 0.41406548023223877, "learning_rate": 2.451407020597621e-06, "loss": 0.6646, "step": 12300 }, { "epoch": 0.5098014836918231, "grad_norm": 0.399674654006958, "learning_rate": 2.451199801069253e-06, "loss": 0.6666, "step": 12301 }, { "epoch": 0.5098429275974968, "grad_norm": 0.42055946588516235, "learning_rate": 2.4509925815408847e-06, "loss": 0.6614, "step": 12302 }, { "epoch": 0.5098843715031705, "grad_norm": 0.40756893157958984, "learning_rate": 2.450785362012516e-06, "loss": 0.6582, "step": 12303 }, { "epoch": 0.5099258154088441, "grad_norm": 0.42666637897491455, "learning_rate": 2.450578142484148e-06, "loss": 0.6499, "step": 12304 }, { "epoch": 0.5099672593145178, "grad_norm": 0.4141885042190552, "learning_rate": 2.4503709229557797e-06, "loss": 0.6683, "step": 12305 }, { "epoch": 0.5100087032201914, "grad_norm": 0.4307863414287567, "learning_rate": 2.450163703427411e-06, "loss": 0.7729, "step": 12306 }, { "epoch": 0.5100501471258652, "grad_norm": 0.3775271773338318, "learning_rate": 2.449956483899043e-06, "loss": 0.6486, "step": 12307 }, { "epoch": 0.5100915910315388, "grad_norm": 0.3676570653915405, "learning_rate": 2.4497492643706743e-06, "loss": 0.6211, "step": 12308 }, { "epoch": 0.5101330349372125, "grad_norm": 0.4327489137649536, "learning_rate": 2.449542044842306e-06, "loss": 
0.7007, "step": 12309 }, { "epoch": 0.5101744788428861, "grad_norm": 0.4338235557079315, "learning_rate": 2.449334825313938e-06, "loss": 0.7275, "step": 12310 }, { "epoch": 0.5102159227485599, "grad_norm": 0.4016983211040497, "learning_rate": 2.4491276057855693e-06, "loss": 0.7014, "step": 12311 }, { "epoch": 0.5102573666542335, "grad_norm": 0.3973780870437622, "learning_rate": 2.448920386257201e-06, "loss": 0.687, "step": 12312 }, { "epoch": 0.5102988105599071, "grad_norm": 0.39833512902259827, "learning_rate": 2.448713166728833e-06, "loss": 0.7417, "step": 12313 }, { "epoch": 0.5103402544655808, "grad_norm": 0.4238496720790863, "learning_rate": 2.4485059472004643e-06, "loss": 0.6938, "step": 12314 }, { "epoch": 0.5103816983712545, "grad_norm": 0.38524329662323, "learning_rate": 2.448298727672096e-06, "loss": 0.6777, "step": 12315 }, { "epoch": 0.5104231422769282, "grad_norm": 0.45752766728401184, "learning_rate": 2.4480915081437275e-06, "loss": 0.717, "step": 12316 }, { "epoch": 0.5104645861826018, "grad_norm": 0.4319753348827362, "learning_rate": 2.4478842886153593e-06, "loss": 0.6914, "step": 12317 }, { "epoch": 0.5105060300882756, "grad_norm": 0.38236337900161743, "learning_rate": 2.4476770690869907e-06, "loss": 0.6315, "step": 12318 }, { "epoch": 0.5105474739939492, "grad_norm": 0.39100182056427, "learning_rate": 2.4474698495586225e-06, "loss": 0.6584, "step": 12319 }, { "epoch": 0.5105889178996229, "grad_norm": 0.3938053846359253, "learning_rate": 2.4472626300302543e-06, "loss": 0.7285, "step": 12320 }, { "epoch": 0.5106303618052965, "grad_norm": 0.40324538946151733, "learning_rate": 2.4470554105018857e-06, "loss": 0.6021, "step": 12321 }, { "epoch": 0.5106718057109702, "grad_norm": 0.3964911699295044, "learning_rate": 2.4468481909735175e-06, "loss": 0.6666, "step": 12322 }, { "epoch": 0.5107132496166439, "grad_norm": 0.4506520926952362, "learning_rate": 2.4466409714451493e-06, "loss": 0.7021, "step": 12323 }, { "epoch": 0.5107546935223175, "grad_norm": 
0.4161345660686493, "learning_rate": 2.4464337519167807e-06, "loss": 0.6439, "step": 12324 }, { "epoch": 0.5107961374279912, "grad_norm": 0.4516972601413727, "learning_rate": 2.4462265323884125e-06, "loss": 0.7361, "step": 12325 }, { "epoch": 0.5108375813336649, "grad_norm": 0.39624130725860596, "learning_rate": 2.446019312860044e-06, "loss": 0.6708, "step": 12326 }, { "epoch": 0.5108790252393386, "grad_norm": 0.3733086585998535, "learning_rate": 2.4458120933316757e-06, "loss": 0.6357, "step": 12327 }, { "epoch": 0.5109204691450122, "grad_norm": 0.4160917401313782, "learning_rate": 2.4456048738033075e-06, "loss": 0.6958, "step": 12328 }, { "epoch": 0.510961913050686, "grad_norm": 0.4151105284690857, "learning_rate": 2.445397654274939e-06, "loss": 0.6924, "step": 12329 }, { "epoch": 0.5110033569563596, "grad_norm": 0.4358883798122406, "learning_rate": 2.4451904347465707e-06, "loss": 0.6896, "step": 12330 }, { "epoch": 0.5110448008620332, "grad_norm": 0.4502808749675751, "learning_rate": 2.4449832152182026e-06, "loss": 0.7, "step": 12331 }, { "epoch": 0.5110862447677069, "grad_norm": 0.4560929536819458, "learning_rate": 2.444775995689834e-06, "loss": 0.6884, "step": 12332 }, { "epoch": 0.5111276886733805, "grad_norm": 0.42196333408355713, "learning_rate": 2.4445687761614657e-06, "loss": 0.6858, "step": 12333 }, { "epoch": 0.5111691325790543, "grad_norm": 0.4276515543460846, "learning_rate": 2.444361556633097e-06, "loss": 0.6895, "step": 12334 }, { "epoch": 0.5112105764847279, "grad_norm": 0.40929850935935974, "learning_rate": 2.444154337104729e-06, "loss": 0.698, "step": 12335 }, { "epoch": 0.5112520203904016, "grad_norm": 0.41483771800994873, "learning_rate": 2.4439471175763608e-06, "loss": 0.7358, "step": 12336 }, { "epoch": 0.5112934642960753, "grad_norm": 0.42353618144989014, "learning_rate": 2.443739898047992e-06, "loss": 0.6816, "step": 12337 }, { "epoch": 0.511334908201749, "grad_norm": 0.42023879289627075, "learning_rate": 2.443532678519624e-06, "loss": 
0.7073, "step": 12338 }, { "epoch": 0.5113763521074226, "grad_norm": 0.4273877441883087, "learning_rate": 2.4433254589912558e-06, "loss": 0.6796, "step": 12339 }, { "epoch": 0.5114177960130962, "grad_norm": 0.4168732464313507, "learning_rate": 2.443118239462887e-06, "loss": 0.6931, "step": 12340 }, { "epoch": 0.51145923991877, "grad_norm": 0.3826819062232971, "learning_rate": 2.442911019934519e-06, "loss": 0.689, "step": 12341 }, { "epoch": 0.5115006838244436, "grad_norm": 0.3854614198207855, "learning_rate": 2.4427038004061503e-06, "loss": 0.6512, "step": 12342 }, { "epoch": 0.5115421277301173, "grad_norm": 0.4086402356624603, "learning_rate": 2.442496580877782e-06, "loss": 0.6786, "step": 12343 }, { "epoch": 0.5115835716357909, "grad_norm": 0.39128780364990234, "learning_rate": 2.4422893613494135e-06, "loss": 0.6592, "step": 12344 }, { "epoch": 0.5116250155414647, "grad_norm": 0.4435243308544159, "learning_rate": 2.4420821418210453e-06, "loss": 0.7549, "step": 12345 }, { "epoch": 0.5116664594471383, "grad_norm": 0.43374961614608765, "learning_rate": 2.441874922292677e-06, "loss": 0.7158, "step": 12346 }, { "epoch": 0.511707903352812, "grad_norm": 0.4074364900588989, "learning_rate": 2.441667702764309e-06, "loss": 0.7075, "step": 12347 }, { "epoch": 0.5117493472584856, "grad_norm": 0.4237951934337616, "learning_rate": 2.4414604832359403e-06, "loss": 0.7222, "step": 12348 }, { "epoch": 0.5117907911641593, "grad_norm": 0.427463173866272, "learning_rate": 2.441253263707572e-06, "loss": 0.7053, "step": 12349 }, { "epoch": 0.511832235069833, "grad_norm": 0.41484227776527405, "learning_rate": 2.4410460441792035e-06, "loss": 0.6968, "step": 12350 }, { "epoch": 0.5118736789755066, "grad_norm": 0.4204866290092468, "learning_rate": 2.4408388246508353e-06, "loss": 0.7314, "step": 12351 }, { "epoch": 0.5119151228811804, "grad_norm": 0.38757947087287903, "learning_rate": 2.4406316051224667e-06, "loss": 0.7034, "step": 12352 }, { "epoch": 0.511956566786854, "grad_norm": 
0.4288354218006134, "learning_rate": 2.4404243855940985e-06, "loss": 0.7188, "step": 12353 }, { "epoch": 0.5119980106925277, "grad_norm": 0.3931960463523865, "learning_rate": 2.4402171660657304e-06, "loss": 0.6987, "step": 12354 }, { "epoch": 0.5120394545982013, "grad_norm": 0.40989845991134644, "learning_rate": 2.440009946537362e-06, "loss": 0.6633, "step": 12355 }, { "epoch": 0.5120808985038751, "grad_norm": 0.38906097412109375, "learning_rate": 2.4398027270089935e-06, "loss": 0.6985, "step": 12356 }, { "epoch": 0.5121223424095487, "grad_norm": 0.42476212978363037, "learning_rate": 2.4395955074806254e-06, "loss": 0.6906, "step": 12357 }, { "epoch": 0.5121637863152223, "grad_norm": 0.41792193055152893, "learning_rate": 2.4393882879522567e-06, "loss": 0.752, "step": 12358 }, { "epoch": 0.512205230220896, "grad_norm": 0.4364200532436371, "learning_rate": 2.4391810684238886e-06, "loss": 0.7156, "step": 12359 }, { "epoch": 0.5122466741265697, "grad_norm": 0.38577136397361755, "learning_rate": 2.43897384889552e-06, "loss": 0.6508, "step": 12360 }, { "epoch": 0.5122881180322434, "grad_norm": 0.4027463495731354, "learning_rate": 2.4387666293671517e-06, "loss": 0.7175, "step": 12361 }, { "epoch": 0.512329561937917, "grad_norm": 0.39810699224472046, "learning_rate": 2.4385594098387836e-06, "loss": 0.6482, "step": 12362 }, { "epoch": 0.5123710058435907, "grad_norm": 0.43842020630836487, "learning_rate": 2.438352190310415e-06, "loss": 0.6777, "step": 12363 }, { "epoch": 0.5124124497492644, "grad_norm": 0.4122227728366852, "learning_rate": 2.4381449707820467e-06, "loss": 0.6602, "step": 12364 }, { "epoch": 0.512453893654938, "grad_norm": 0.387963205575943, "learning_rate": 2.4379377512536786e-06, "loss": 0.6875, "step": 12365 }, { "epoch": 0.5124953375606117, "grad_norm": 0.43270790576934814, "learning_rate": 2.43773053172531e-06, "loss": 0.6964, "step": 12366 }, { "epoch": 0.5125367814662853, "grad_norm": 0.40784206986427307, "learning_rate": 2.4375233121969418e-06, "loss": 
0.6649, "step": 12367 }, { "epoch": 0.5125782253719591, "grad_norm": 0.4311182200908661, "learning_rate": 2.437316092668573e-06, "loss": 0.7292, "step": 12368 }, { "epoch": 0.5126196692776327, "grad_norm": 0.4617283344268799, "learning_rate": 2.437108873140205e-06, "loss": 0.8054, "step": 12369 }, { "epoch": 0.5126611131833064, "grad_norm": 0.4167940020561218, "learning_rate": 2.4369016536118363e-06, "loss": 0.6571, "step": 12370 }, { "epoch": 0.51270255708898, "grad_norm": 0.460943341255188, "learning_rate": 2.436694434083468e-06, "loss": 0.6902, "step": 12371 }, { "epoch": 0.5127440009946538, "grad_norm": 0.4228794574737549, "learning_rate": 2.4364872145551e-06, "loss": 0.7069, "step": 12372 }, { "epoch": 0.5127854449003274, "grad_norm": 0.39585429430007935, "learning_rate": 2.4362799950267318e-06, "loss": 0.6624, "step": 12373 }, { "epoch": 0.512826888806001, "grad_norm": 0.3891880214214325, "learning_rate": 2.436072775498363e-06, "loss": 0.6737, "step": 12374 }, { "epoch": 0.5128683327116748, "grad_norm": 0.41229698061943054, "learning_rate": 2.435865555969995e-06, "loss": 0.6978, "step": 12375 }, { "epoch": 0.5129097766173484, "grad_norm": 0.5377939939498901, "learning_rate": 2.4356583364416263e-06, "loss": 0.7338, "step": 12376 }, { "epoch": 0.5129512205230221, "grad_norm": 0.3782595694065094, "learning_rate": 2.435451116913258e-06, "loss": 0.6515, "step": 12377 }, { "epoch": 0.5129926644286957, "grad_norm": 0.42360132932662964, "learning_rate": 2.4352438973848895e-06, "loss": 0.7092, "step": 12378 }, { "epoch": 0.5130341083343695, "grad_norm": 0.44392064213752747, "learning_rate": 2.4350366778565213e-06, "loss": 0.7302, "step": 12379 }, { "epoch": 0.5130755522400431, "grad_norm": 0.4285444915294647, "learning_rate": 2.434829458328153e-06, "loss": 0.6786, "step": 12380 }, { "epoch": 0.5131169961457168, "grad_norm": 0.42923787236213684, "learning_rate": 2.434622238799785e-06, "loss": 0.683, "step": 12381 }, { "epoch": 0.5131584400513904, "grad_norm": 
0.3952816426753998, "learning_rate": 2.4344150192714163e-06, "loss": 0.7085, "step": 12382 }, { "epoch": 0.5131998839570641, "grad_norm": 0.3907700181007385, "learning_rate": 2.434207799743048e-06, "loss": 0.677, "step": 12383 }, { "epoch": 0.5132413278627378, "grad_norm": 0.4267285466194153, "learning_rate": 2.4340005802146795e-06, "loss": 0.7092, "step": 12384 }, { "epoch": 0.5132827717684114, "grad_norm": 0.37499916553497314, "learning_rate": 2.4337933606863114e-06, "loss": 0.6089, "step": 12385 }, { "epoch": 0.5133242156740851, "grad_norm": 0.4341261386871338, "learning_rate": 2.4335861411579427e-06, "loss": 0.6753, "step": 12386 }, { "epoch": 0.5133656595797588, "grad_norm": 0.38155174255371094, "learning_rate": 2.4333789216295745e-06, "loss": 0.6492, "step": 12387 }, { "epoch": 0.5134071034854325, "grad_norm": 0.41241884231567383, "learning_rate": 2.4331717021012064e-06, "loss": 0.6774, "step": 12388 }, { "epoch": 0.5134485473911061, "grad_norm": 0.4269993305206299, "learning_rate": 2.432964482572838e-06, "loss": 0.6416, "step": 12389 }, { "epoch": 0.5134899912967799, "grad_norm": 0.4279613196849823, "learning_rate": 2.4327572630444696e-06, "loss": 0.6787, "step": 12390 }, { "epoch": 0.5135314352024535, "grad_norm": 0.42685380578041077, "learning_rate": 2.4325500435161014e-06, "loss": 0.6768, "step": 12391 }, { "epoch": 0.5135728791081271, "grad_norm": 0.3986414670944214, "learning_rate": 2.4323428239877327e-06, "loss": 0.7012, "step": 12392 }, { "epoch": 0.5136143230138008, "grad_norm": 0.4117792546749115, "learning_rate": 2.4321356044593646e-06, "loss": 0.7258, "step": 12393 }, { "epoch": 0.5136557669194745, "grad_norm": 0.42331960797309875, "learning_rate": 2.431928384930996e-06, "loss": 0.7141, "step": 12394 }, { "epoch": 0.5136972108251482, "grad_norm": 0.40305784344673157, "learning_rate": 2.4317211654026278e-06, "loss": 0.6221, "step": 12395 }, { "epoch": 0.5137386547308218, "grad_norm": 0.40559908747673035, "learning_rate": 2.431513945874259e-06, 
"loss": 0.6831, "step": 12396 }, { "epoch": 0.5137800986364955, "grad_norm": 0.423515260219574, "learning_rate": 2.431306726345891e-06, "loss": 0.7366, "step": 12397 }, { "epoch": 0.5138215425421692, "grad_norm": 0.43370333313941956, "learning_rate": 2.4310995068175228e-06, "loss": 0.7384, "step": 12398 }, { "epoch": 0.5138629864478429, "grad_norm": 0.3805336058139801, "learning_rate": 2.4308922872891546e-06, "loss": 0.7029, "step": 12399 }, { "epoch": 0.5139044303535165, "grad_norm": 0.4202497899532318, "learning_rate": 2.430685067760786e-06, "loss": 0.6996, "step": 12400 }, { "epoch": 0.5139458742591901, "grad_norm": 0.37977880239486694, "learning_rate": 2.4304778482324178e-06, "loss": 0.6873, "step": 12401 }, { "epoch": 0.5139873181648639, "grad_norm": 0.3970392644405365, "learning_rate": 2.430270628704049e-06, "loss": 0.6785, "step": 12402 }, { "epoch": 0.5140287620705375, "grad_norm": 0.3944105803966522, "learning_rate": 2.430063409175681e-06, "loss": 0.6763, "step": 12403 }, { "epoch": 0.5140702059762112, "grad_norm": 0.40997523069381714, "learning_rate": 2.4298561896473123e-06, "loss": 0.6132, "step": 12404 }, { "epoch": 0.5141116498818848, "grad_norm": 0.3899269998073578, "learning_rate": 2.429648970118944e-06, "loss": 0.7026, "step": 12405 }, { "epoch": 0.5141530937875586, "grad_norm": 0.3951246440410614, "learning_rate": 2.429441750590576e-06, "loss": 0.6248, "step": 12406 }, { "epoch": 0.5141945376932322, "grad_norm": 0.41713371872901917, "learning_rate": 2.4292345310622078e-06, "loss": 0.6768, "step": 12407 }, { "epoch": 0.5142359815989059, "grad_norm": 0.38698330521583557, "learning_rate": 2.429027311533839e-06, "loss": 0.6526, "step": 12408 }, { "epoch": 0.5142774255045796, "grad_norm": 0.4173656702041626, "learning_rate": 2.428820092005471e-06, "loss": 0.6713, "step": 12409 }, { "epoch": 0.5143188694102532, "grad_norm": 0.40929093956947327, "learning_rate": 2.4286128724771023e-06, "loss": 0.7102, "step": 12410 }, { "epoch": 0.5143603133159269, 
"grad_norm": 0.4007910192012787, "learning_rate": 2.428405652948734e-06, "loss": 0.6577, "step": 12411 }, { "epoch": 0.5144017572216005, "grad_norm": 0.4513412117958069, "learning_rate": 2.4281984334203655e-06, "loss": 0.74, "step": 12412 }, { "epoch": 0.5144432011272743, "grad_norm": 0.431240051984787, "learning_rate": 2.4279912138919974e-06, "loss": 0.6517, "step": 12413 }, { "epoch": 0.5144846450329479, "grad_norm": 0.390553742647171, "learning_rate": 2.427783994363629e-06, "loss": 0.7197, "step": 12414 }, { "epoch": 0.5145260889386216, "grad_norm": 0.44447460770606995, "learning_rate": 2.427576774835261e-06, "loss": 0.7241, "step": 12415 }, { "epoch": 0.5145675328442952, "grad_norm": 0.41056710481643677, "learning_rate": 2.4273695553068924e-06, "loss": 0.6636, "step": 12416 }, { "epoch": 0.514608976749969, "grad_norm": 0.3975561261177063, "learning_rate": 2.427162335778524e-06, "loss": 0.6892, "step": 12417 }, { "epoch": 0.5146504206556426, "grad_norm": 0.4347476065158844, "learning_rate": 2.4269551162501556e-06, "loss": 0.7458, "step": 12418 }, { "epoch": 0.5146918645613162, "grad_norm": 0.39927423000335693, "learning_rate": 2.4267478967217874e-06, "loss": 0.663, "step": 12419 }, { "epoch": 0.51473330846699, "grad_norm": 0.4162602424621582, "learning_rate": 2.4265406771934187e-06, "loss": 0.6921, "step": 12420 }, { "epoch": 0.5147747523726636, "grad_norm": 0.4358236491680145, "learning_rate": 2.4263334576650506e-06, "loss": 0.7126, "step": 12421 }, { "epoch": 0.5148161962783373, "grad_norm": 0.45137056708335876, "learning_rate": 2.426126238136682e-06, "loss": 0.679, "step": 12422 }, { "epoch": 0.5148576401840109, "grad_norm": 0.43010976910591125, "learning_rate": 2.425919018608314e-06, "loss": 0.7275, "step": 12423 }, { "epoch": 0.5148990840896847, "grad_norm": 0.4156637191772461, "learning_rate": 2.4257117990799456e-06, "loss": 0.6356, "step": 12424 }, { "epoch": 0.5149405279953583, "grad_norm": 0.3929874897003174, "learning_rate": 2.4255045795515774e-06, 
"loss": 0.6714, "step": 12425 }, { "epoch": 0.5149819719010319, "grad_norm": 0.4154079854488373, "learning_rate": 2.4252973600232088e-06, "loss": 0.713, "step": 12426 }, { "epoch": 0.5150234158067056, "grad_norm": 0.41203850507736206, "learning_rate": 2.4250901404948406e-06, "loss": 0.7222, "step": 12427 }, { "epoch": 0.5150648597123793, "grad_norm": 0.42554187774658203, "learning_rate": 2.424882920966472e-06, "loss": 0.6814, "step": 12428 }, { "epoch": 0.515106303618053, "grad_norm": 0.4256628155708313, "learning_rate": 2.4246757014381038e-06, "loss": 0.729, "step": 12429 }, { "epoch": 0.5151477475237266, "grad_norm": 0.3902890980243683, "learning_rate": 2.424468481909735e-06, "loss": 0.6405, "step": 12430 }, { "epoch": 0.5151891914294003, "grad_norm": 0.40703219175338745, "learning_rate": 2.424261262381367e-06, "loss": 0.7228, "step": 12431 }, { "epoch": 0.515230635335074, "grad_norm": 0.3920605778694153, "learning_rate": 2.4240540428529988e-06, "loss": 0.666, "step": 12432 }, { "epoch": 0.5152720792407477, "grad_norm": 0.3911963403224945, "learning_rate": 2.4238468233246306e-06, "loss": 0.6814, "step": 12433 }, { "epoch": 0.5153135231464213, "grad_norm": 0.4035681188106537, "learning_rate": 2.423639603796262e-06, "loss": 0.6431, "step": 12434 }, { "epoch": 0.5153549670520949, "grad_norm": 0.41798722743988037, "learning_rate": 2.4234323842678938e-06, "loss": 0.6931, "step": 12435 }, { "epoch": 0.5153964109577687, "grad_norm": 0.41263723373413086, "learning_rate": 2.423225164739525e-06, "loss": 0.632, "step": 12436 }, { "epoch": 0.5154378548634423, "grad_norm": 0.4507071077823639, "learning_rate": 2.423017945211157e-06, "loss": 0.6801, "step": 12437 }, { "epoch": 0.515479298769116, "grad_norm": 0.41934531927108765, "learning_rate": 2.4228107256827883e-06, "loss": 0.7023, "step": 12438 }, { "epoch": 0.5155207426747896, "grad_norm": 0.40594613552093506, "learning_rate": 2.42260350615442e-06, "loss": 0.7009, "step": 12439 }, { "epoch": 0.5155621865804634, 
"grad_norm": 0.4075371026992798, "learning_rate": 2.422396286626052e-06, "loss": 0.6614, "step": 12440 }, { "epoch": 0.515603630486137, "grad_norm": 0.40703001618385315, "learning_rate": 2.4221890670976838e-06, "loss": 0.6458, "step": 12441 }, { "epoch": 0.5156450743918107, "grad_norm": 0.41371577978134155, "learning_rate": 2.421981847569315e-06, "loss": 0.6851, "step": 12442 }, { "epoch": 0.5156865182974844, "grad_norm": 0.43757346272468567, "learning_rate": 2.421774628040947e-06, "loss": 0.6637, "step": 12443 }, { "epoch": 0.515727962203158, "grad_norm": 0.4561006724834442, "learning_rate": 2.4215674085125784e-06, "loss": 0.7083, "step": 12444 }, { "epoch": 0.5157694061088317, "grad_norm": 0.3714505434036255, "learning_rate": 2.42136018898421e-06, "loss": 0.6426, "step": 12445 }, { "epoch": 0.5158108500145053, "grad_norm": 0.4047311246395111, "learning_rate": 2.4211529694558415e-06, "loss": 0.6888, "step": 12446 }, { "epoch": 0.5158522939201791, "grad_norm": 0.4280291795730591, "learning_rate": 2.4209457499274734e-06, "loss": 0.7139, "step": 12447 }, { "epoch": 0.5158937378258527, "grad_norm": 0.4457102119922638, "learning_rate": 2.4207385303991047e-06, "loss": 0.6888, "step": 12448 }, { "epoch": 0.5159351817315264, "grad_norm": 0.4048601984977722, "learning_rate": 2.420531310870737e-06, "loss": 0.672, "step": 12449 }, { "epoch": 0.5159766256372, "grad_norm": 0.438904345035553, "learning_rate": 2.4203240913423684e-06, "loss": 0.6816, "step": 12450 }, { "epoch": 0.5160180695428738, "grad_norm": 0.427717387676239, "learning_rate": 2.420116871814e-06, "loss": 0.6766, "step": 12451 }, { "epoch": 0.5160595134485474, "grad_norm": 0.4113977551460266, "learning_rate": 2.4199096522856316e-06, "loss": 0.6586, "step": 12452 }, { "epoch": 0.516100957354221, "grad_norm": 0.4249646067619324, "learning_rate": 2.4197024327572634e-06, "loss": 0.656, "step": 12453 }, { "epoch": 0.5161424012598947, "grad_norm": 0.37517818808555603, "learning_rate": 2.4194952132288948e-06, "loss": 
0.6938, "step": 12454 }, { "epoch": 0.5161838451655684, "grad_norm": 0.42449942231178284, "learning_rate": 2.4192879937005266e-06, "loss": 0.6747, "step": 12455 }, { "epoch": 0.5162252890712421, "grad_norm": 0.39077767729759216, "learning_rate": 2.419080774172158e-06, "loss": 0.6672, "step": 12456 }, { "epoch": 0.5162667329769157, "grad_norm": 0.3730625808238983, "learning_rate": 2.4188735546437898e-06, "loss": 0.657, "step": 12457 }, { "epoch": 0.5163081768825895, "grad_norm": 0.39072945713996887, "learning_rate": 2.4186663351154216e-06, "loss": 0.6265, "step": 12458 }, { "epoch": 0.5163496207882631, "grad_norm": 0.4303812086582184, "learning_rate": 2.4184591155870534e-06, "loss": 0.708, "step": 12459 }, { "epoch": 0.5163910646939368, "grad_norm": 0.42370739579200745, "learning_rate": 2.4182518960586848e-06, "loss": 0.712, "step": 12460 }, { "epoch": 0.5164325085996104, "grad_norm": 0.38857710361480713, "learning_rate": 2.4180446765303166e-06, "loss": 0.7075, "step": 12461 }, { "epoch": 0.516473952505284, "grad_norm": 0.4039013981819153, "learning_rate": 2.417837457001948e-06, "loss": 0.7056, "step": 12462 }, { "epoch": 0.5165153964109578, "grad_norm": 0.43030688166618347, "learning_rate": 2.4176302374735798e-06, "loss": 0.699, "step": 12463 }, { "epoch": 0.5165568403166314, "grad_norm": 0.41584914922714233, "learning_rate": 2.417423017945211e-06, "loss": 0.6921, "step": 12464 }, { "epoch": 0.5165982842223051, "grad_norm": 0.4191140830516815, "learning_rate": 2.417215798416843e-06, "loss": 0.6541, "step": 12465 }, { "epoch": 0.5166397281279788, "grad_norm": 0.4151654541492462, "learning_rate": 2.4170085788884743e-06, "loss": 0.6835, "step": 12466 }, { "epoch": 0.5166811720336525, "grad_norm": 0.4528132975101471, "learning_rate": 2.4168013593601066e-06, "loss": 0.7583, "step": 12467 }, { "epoch": 0.5167226159393261, "grad_norm": 0.39025992155075073, "learning_rate": 2.416594139831738e-06, "loss": 0.6472, "step": 12468 }, { "epoch": 0.5167640598449998, "grad_norm": 
0.4446750283241272, "learning_rate": 2.4163869203033698e-06, "loss": 0.6902, "step": 12469 }, { "epoch": 0.5168055037506735, "grad_norm": 0.4111630618572235, "learning_rate": 2.416179700775001e-06, "loss": 0.6537, "step": 12470 }, { "epoch": 0.5168469476563471, "grad_norm": 0.4135884642601013, "learning_rate": 2.415972481246633e-06, "loss": 0.6786, "step": 12471 }, { "epoch": 0.5168883915620208, "grad_norm": 0.4348341226577759, "learning_rate": 2.4157652617182644e-06, "loss": 0.6729, "step": 12472 }, { "epoch": 0.5169298354676944, "grad_norm": 0.40917840600013733, "learning_rate": 2.415558042189896e-06, "loss": 0.61, "step": 12473 }, { "epoch": 0.5169712793733682, "grad_norm": 0.4171946346759796, "learning_rate": 2.4153508226615275e-06, "loss": 0.6383, "step": 12474 }, { "epoch": 0.5170127232790418, "grad_norm": 0.41365399956703186, "learning_rate": 2.4151436031331598e-06, "loss": 0.6841, "step": 12475 }, { "epoch": 0.5170541671847155, "grad_norm": 0.42516109347343445, "learning_rate": 2.414936383604791e-06, "loss": 0.7195, "step": 12476 }, { "epoch": 0.5170956110903892, "grad_norm": 0.43032506108283997, "learning_rate": 2.414729164076423e-06, "loss": 0.6857, "step": 12477 }, { "epoch": 0.5171370549960629, "grad_norm": 0.45707836747169495, "learning_rate": 2.4145219445480544e-06, "loss": 0.7461, "step": 12478 }, { "epoch": 0.5171784989017365, "grad_norm": 0.41527754068374634, "learning_rate": 2.414314725019686e-06, "loss": 0.6799, "step": 12479 }, { "epoch": 0.5172199428074101, "grad_norm": 0.38724255561828613, "learning_rate": 2.4141075054913176e-06, "loss": 0.6934, "step": 12480 }, { "epoch": 0.5172613867130839, "grad_norm": 0.39848360419273376, "learning_rate": 2.4139002859629494e-06, "loss": 0.7003, "step": 12481 }, { "epoch": 0.5173028306187575, "grad_norm": 0.40624484419822693, "learning_rate": 2.4136930664345807e-06, "loss": 0.6903, "step": 12482 }, { "epoch": 0.5173442745244312, "grad_norm": 0.3953806757926941, "learning_rate": 2.4134858469062126e-06, 
"loss": 0.718, "step": 12483 }, { "epoch": 0.5173857184301048, "grad_norm": 0.4093821942806244, "learning_rate": 2.4132786273778444e-06, "loss": 0.6654, "step": 12484 }, { "epoch": 0.5174271623357786, "grad_norm": 0.44131216406822205, "learning_rate": 2.413071407849476e-06, "loss": 0.7002, "step": 12485 }, { "epoch": 0.5174686062414522, "grad_norm": 0.40850284695625305, "learning_rate": 2.4128641883211076e-06, "loss": 0.639, "step": 12486 }, { "epoch": 0.5175100501471258, "grad_norm": 0.43210911750793457, "learning_rate": 2.4126569687927394e-06, "loss": 0.6929, "step": 12487 }, { "epoch": 0.5175514940527995, "grad_norm": 0.4152526557445526, "learning_rate": 2.4124497492643708e-06, "loss": 0.6838, "step": 12488 }, { "epoch": 0.5175929379584732, "grad_norm": 0.41131117939949036, "learning_rate": 2.4122425297360026e-06, "loss": 0.7144, "step": 12489 }, { "epoch": 0.5176343818641469, "grad_norm": 0.4109557271003723, "learning_rate": 2.412035310207634e-06, "loss": 0.687, "step": 12490 }, { "epoch": 0.5176758257698205, "grad_norm": 0.4735833406448364, "learning_rate": 2.4118280906792658e-06, "loss": 0.6871, "step": 12491 }, { "epoch": 0.5177172696754943, "grad_norm": 0.4004615247249603, "learning_rate": 2.411620871150897e-06, "loss": 0.7136, "step": 12492 }, { "epoch": 0.5177587135811679, "grad_norm": 0.42039746046066284, "learning_rate": 2.4114136516225294e-06, "loss": 0.6754, "step": 12493 }, { "epoch": 0.5178001574868416, "grad_norm": 0.4239159822463989, "learning_rate": 2.4112064320941608e-06, "loss": 0.6909, "step": 12494 }, { "epoch": 0.5178416013925152, "grad_norm": 0.40937042236328125, "learning_rate": 2.4109992125657926e-06, "loss": 0.696, "step": 12495 }, { "epoch": 0.5178830452981888, "grad_norm": 0.43401363492012024, "learning_rate": 2.410791993037424e-06, "loss": 0.6262, "step": 12496 }, { "epoch": 0.5179244892038626, "grad_norm": 0.3982897698879242, "learning_rate": 2.4105847735090558e-06, "loss": 0.6704, "step": 12497 }, { "epoch": 0.5179659331095362, 
"grad_norm": 0.4086677134037018, "learning_rate": 2.410377553980687e-06, "loss": 0.6906, "step": 12498 }, { "epoch": 0.5180073770152099, "grad_norm": 0.42691051959991455, "learning_rate": 2.410170334452319e-06, "loss": 0.7454, "step": 12499 }, { "epoch": 0.5180488209208836, "grad_norm": 0.40323272347450256, "learning_rate": 2.4099631149239504e-06, "loss": 0.6511, "step": 12500 }, { "epoch": 0.5180902648265573, "grad_norm": 0.36057355999946594, "learning_rate": 2.4097558953955826e-06, "loss": 0.6216, "step": 12501 }, { "epoch": 0.5181317087322309, "grad_norm": 0.41945764422416687, "learning_rate": 2.409548675867214e-06, "loss": 0.6553, "step": 12502 }, { "epoch": 0.5181731526379046, "grad_norm": 0.41368958353996277, "learning_rate": 2.4093414563388458e-06, "loss": 0.6809, "step": 12503 }, { "epoch": 0.5182145965435783, "grad_norm": 0.4240632653236389, "learning_rate": 2.409134236810477e-06, "loss": 0.6995, "step": 12504 }, { "epoch": 0.5182560404492519, "grad_norm": 0.412204772233963, "learning_rate": 2.408927017282109e-06, "loss": 0.6866, "step": 12505 }, { "epoch": 0.5182974843549256, "grad_norm": 0.40530160069465637, "learning_rate": 2.4087197977537404e-06, "loss": 0.6797, "step": 12506 }, { "epoch": 0.5183389282605992, "grad_norm": 0.40192726254463196, "learning_rate": 2.408512578225372e-06, "loss": 0.655, "step": 12507 }, { "epoch": 0.518380372166273, "grad_norm": 0.408189058303833, "learning_rate": 2.4083053586970036e-06, "loss": 0.7059, "step": 12508 }, { "epoch": 0.5184218160719466, "grad_norm": 0.4096115827560425, "learning_rate": 2.4080981391686354e-06, "loss": 0.6708, "step": 12509 }, { "epoch": 0.5184632599776203, "grad_norm": 0.421370267868042, "learning_rate": 2.407890919640267e-06, "loss": 0.6831, "step": 12510 }, { "epoch": 0.518504703883294, "grad_norm": 0.3952125310897827, "learning_rate": 2.407683700111899e-06, "loss": 0.6335, "step": 12511 }, { "epoch": 0.5185461477889677, "grad_norm": 0.4815824329853058, "learning_rate": 2.4074764805835304e-06, 
"loss": 0.689, "step": 12512 }, { "epoch": 0.5185875916946413, "grad_norm": 0.43765345215797424, "learning_rate": 2.407269261055162e-06, "loss": 0.6829, "step": 12513 }, { "epoch": 0.5186290356003149, "grad_norm": 0.40729421377182007, "learning_rate": 2.4070620415267936e-06, "loss": 0.6852, "step": 12514 }, { "epoch": 0.5186704795059887, "grad_norm": 0.39820703864097595, "learning_rate": 2.4068548219984254e-06, "loss": 0.688, "step": 12515 }, { "epoch": 0.5187119234116623, "grad_norm": 0.41707485914230347, "learning_rate": 2.4066476024700568e-06, "loss": 0.6785, "step": 12516 }, { "epoch": 0.518753367317336, "grad_norm": 0.4507840871810913, "learning_rate": 2.4064403829416886e-06, "loss": 0.7305, "step": 12517 }, { "epoch": 0.5187948112230096, "grad_norm": 0.4300510585308075, "learning_rate": 2.40623316341332e-06, "loss": 0.7275, "step": 12518 }, { "epoch": 0.5188362551286834, "grad_norm": 0.43752312660217285, "learning_rate": 2.406025943884952e-06, "loss": 0.6829, "step": 12519 }, { "epoch": 0.518877699034357, "grad_norm": 0.436367928981781, "learning_rate": 2.4058187243565836e-06, "loss": 0.671, "step": 12520 }, { "epoch": 0.5189191429400307, "grad_norm": 0.39381086826324463, "learning_rate": 2.4056115048282154e-06, "loss": 0.6594, "step": 12521 }, { "epoch": 0.5189605868457043, "grad_norm": 0.3898296356201172, "learning_rate": 2.4054042852998468e-06, "loss": 0.6641, "step": 12522 }, { "epoch": 0.519002030751378, "grad_norm": 0.3970431685447693, "learning_rate": 2.4051970657714786e-06, "loss": 0.6682, "step": 12523 }, { "epoch": 0.5190434746570517, "grad_norm": 0.3919110596179962, "learning_rate": 2.40498984624311e-06, "loss": 0.6379, "step": 12524 }, { "epoch": 0.5190849185627253, "grad_norm": 0.36473989486694336, "learning_rate": 2.4047826267147418e-06, "loss": 0.6638, "step": 12525 }, { "epoch": 0.519126362468399, "grad_norm": 0.4072173237800598, "learning_rate": 2.404575407186373e-06, "loss": 0.7156, "step": 12526 }, { "epoch": 0.5191678063740727, 
"grad_norm": 0.39302361011505127, "learning_rate": 2.404368187658005e-06, "loss": 0.6898, "step": 12527 }, { "epoch": 0.5192092502797464, "grad_norm": 0.3886263370513916, "learning_rate": 2.4041609681296368e-06, "loss": 0.6519, "step": 12528 }, { "epoch": 0.51925069418542, "grad_norm": 0.432587206363678, "learning_rate": 2.4039537486012686e-06, "loss": 0.7781, "step": 12529 }, { "epoch": 0.5192921380910938, "grad_norm": 0.4729192852973938, "learning_rate": 2.4037465290729e-06, "loss": 0.6837, "step": 12530 }, { "epoch": 0.5193335819967674, "grad_norm": 0.4396415948867798, "learning_rate": 2.4035393095445318e-06, "loss": 0.7422, "step": 12531 }, { "epoch": 0.519375025902441, "grad_norm": 0.45288777351379395, "learning_rate": 2.403332090016163e-06, "loss": 0.6451, "step": 12532 }, { "epoch": 0.5194164698081147, "grad_norm": 0.43435826897621155, "learning_rate": 2.403124870487795e-06, "loss": 0.6577, "step": 12533 }, { "epoch": 0.5194579137137884, "grad_norm": 0.40854042768478394, "learning_rate": 2.4029176509594264e-06, "loss": 0.6938, "step": 12534 }, { "epoch": 0.5194993576194621, "grad_norm": 0.37687379121780396, "learning_rate": 2.402710431431058e-06, "loss": 0.6818, "step": 12535 }, { "epoch": 0.5195408015251357, "grad_norm": 0.4056571424007416, "learning_rate": 2.40250321190269e-06, "loss": 0.637, "step": 12536 }, { "epoch": 0.5195822454308094, "grad_norm": 0.43971315026283264, "learning_rate": 2.4022959923743218e-06, "loss": 0.6849, "step": 12537 }, { "epoch": 0.5196236893364831, "grad_norm": 0.4217950105667114, "learning_rate": 2.402088772845953e-06, "loss": 0.6694, "step": 12538 }, { "epoch": 0.5196651332421568, "grad_norm": 0.4526236951351166, "learning_rate": 2.401881553317585e-06, "loss": 0.709, "step": 12539 }, { "epoch": 0.5197065771478304, "grad_norm": 0.3883284628391266, "learning_rate": 2.4016743337892164e-06, "loss": 0.6967, "step": 12540 }, { "epoch": 0.519748021053504, "grad_norm": 0.38360580801963806, "learning_rate": 2.401467114260848e-06, 
"loss": 0.5983, "step": 12541 }, { "epoch": 0.5197894649591778, "grad_norm": 0.4257265329360962, "learning_rate": 2.4012598947324796e-06, "loss": 0.6892, "step": 12542 }, { "epoch": 0.5198309088648514, "grad_norm": 0.43864619731903076, "learning_rate": 2.4010526752041114e-06, "loss": 0.7433, "step": 12543 }, { "epoch": 0.5198723527705251, "grad_norm": 0.3953695595264435, "learning_rate": 2.4008454556757428e-06, "loss": 0.718, "step": 12544 }, { "epoch": 0.5199137966761987, "grad_norm": 0.4083937704563141, "learning_rate": 2.400638236147375e-06, "loss": 0.6782, "step": 12545 }, { "epoch": 0.5199552405818725, "grad_norm": 0.383492648601532, "learning_rate": 2.4004310166190064e-06, "loss": 0.6672, "step": 12546 }, { "epoch": 0.5199966844875461, "grad_norm": 0.4017408788204193, "learning_rate": 2.400223797090638e-06, "loss": 0.6809, "step": 12547 }, { "epoch": 0.5200381283932197, "grad_norm": 0.4100874662399292, "learning_rate": 2.4000165775622696e-06, "loss": 0.6572, "step": 12548 }, { "epoch": 0.5200795722988935, "grad_norm": 0.3927365839481354, "learning_rate": 2.3998093580339014e-06, "loss": 0.6967, "step": 12549 }, { "epoch": 0.5201210162045671, "grad_norm": 0.38179871439933777, "learning_rate": 2.3996021385055328e-06, "loss": 0.7109, "step": 12550 }, { "epoch": 0.5201624601102408, "grad_norm": 0.4520033895969391, "learning_rate": 2.3993949189771646e-06, "loss": 0.6787, "step": 12551 }, { "epoch": 0.5202039040159144, "grad_norm": 0.3848505914211273, "learning_rate": 2.399187699448796e-06, "loss": 0.6555, "step": 12552 }, { "epoch": 0.5202453479215882, "grad_norm": 0.43456435203552246, "learning_rate": 2.3989804799204278e-06, "loss": 0.7583, "step": 12553 }, { "epoch": 0.5202867918272618, "grad_norm": 0.37724238634109497, "learning_rate": 2.3987732603920596e-06, "loss": 0.6694, "step": 12554 }, { "epoch": 0.5203282357329355, "grad_norm": 0.4380587935447693, "learning_rate": 2.3985660408636914e-06, "loss": 0.6631, "step": 12555 }, { "epoch": 0.5203696796386091, 
"grad_norm": 0.38457006216049194, "learning_rate": 2.3983588213353228e-06, "loss": 0.6522, "step": 12556 }, { "epoch": 0.5204111235442828, "grad_norm": 0.4093158543109894, "learning_rate": 2.3981516018069546e-06, "loss": 0.6672, "step": 12557 }, { "epoch": 0.5204525674499565, "grad_norm": 0.40698012709617615, "learning_rate": 2.397944382278586e-06, "loss": 0.7083, "step": 12558 }, { "epoch": 0.5204940113556301, "grad_norm": 0.38175714015960693, "learning_rate": 2.3977371627502178e-06, "loss": 0.6394, "step": 12559 }, { "epoch": 0.5205354552613038, "grad_norm": 0.3884532153606415, "learning_rate": 2.397529943221849e-06, "loss": 0.6873, "step": 12560 }, { "epoch": 0.5205768991669775, "grad_norm": 0.4071963131427765, "learning_rate": 2.397322723693481e-06, "loss": 0.7072, "step": 12561 }, { "epoch": 0.5206183430726512, "grad_norm": 0.41138336062431335, "learning_rate": 2.3971155041651128e-06, "loss": 0.7156, "step": 12562 }, { "epoch": 0.5206597869783248, "grad_norm": 0.43066808581352234, "learning_rate": 2.3969082846367446e-06, "loss": 0.6946, "step": 12563 }, { "epoch": 0.5207012308839986, "grad_norm": 0.42132607102394104, "learning_rate": 2.396701065108376e-06, "loss": 0.657, "step": 12564 }, { "epoch": 0.5207426747896722, "grad_norm": 0.391268253326416, "learning_rate": 2.3964938455800078e-06, "loss": 0.6702, "step": 12565 }, { "epoch": 0.5207841186953458, "grad_norm": 0.43152138590812683, "learning_rate": 2.396286626051639e-06, "loss": 0.6707, "step": 12566 }, { "epoch": 0.5208255626010195, "grad_norm": 0.4148610830307007, "learning_rate": 2.396079406523271e-06, "loss": 0.6992, "step": 12567 }, { "epoch": 0.5208670065066932, "grad_norm": 0.41824865341186523, "learning_rate": 2.3958721869949024e-06, "loss": 0.6852, "step": 12568 }, { "epoch": 0.5209084504123669, "grad_norm": 0.43457284569740295, "learning_rate": 2.395664967466534e-06, "loss": 0.7435, "step": 12569 }, { "epoch": 0.5209498943180405, "grad_norm": 0.4613175094127655, "learning_rate": 
2.3954577479381656e-06, "loss": 0.7152, "step": 12570 }, { "epoch": 0.5209913382237142, "grad_norm": 0.4210778474807739, "learning_rate": 2.395250528409798e-06, "loss": 0.7275, "step": 12571 }, { "epoch": 0.5210327821293879, "grad_norm": 0.42776134610176086, "learning_rate": 2.395043308881429e-06, "loss": 0.6748, "step": 12572 }, { "epoch": 0.5210742260350616, "grad_norm": 0.3835519850254059, "learning_rate": 2.394836089353061e-06, "loss": 0.6061, "step": 12573 }, { "epoch": 0.5211156699407352, "grad_norm": 0.4283444583415985, "learning_rate": 2.3946288698246924e-06, "loss": 0.6697, "step": 12574 }, { "epoch": 0.5211571138464088, "grad_norm": 0.3993901312351227, "learning_rate": 2.394421650296324e-06, "loss": 0.6556, "step": 12575 }, { "epoch": 0.5211985577520826, "grad_norm": 0.41993531584739685, "learning_rate": 2.3942144307679556e-06, "loss": 0.6781, "step": 12576 }, { "epoch": 0.5212400016577562, "grad_norm": 0.4229331314563751, "learning_rate": 2.3940072112395874e-06, "loss": 0.608, "step": 12577 }, { "epoch": 0.5212814455634299, "grad_norm": 0.3984411954879761, "learning_rate": 2.3937999917112188e-06, "loss": 0.6804, "step": 12578 }, { "epoch": 0.5213228894691035, "grad_norm": 0.40265849232673645, "learning_rate": 2.3935927721828506e-06, "loss": 0.7041, "step": 12579 }, { "epoch": 0.5213643333747773, "grad_norm": 0.3807474970817566, "learning_rate": 2.3933855526544824e-06, "loss": 0.681, "step": 12580 }, { "epoch": 0.5214057772804509, "grad_norm": 0.44563746452331543, "learning_rate": 2.393178333126114e-06, "loss": 0.7412, "step": 12581 }, { "epoch": 0.5214472211861246, "grad_norm": 0.43464934825897217, "learning_rate": 2.3929711135977456e-06, "loss": 0.6731, "step": 12582 }, { "epoch": 0.5214886650917983, "grad_norm": 0.46325355768203735, "learning_rate": 2.3927638940693774e-06, "loss": 0.7142, "step": 12583 }, { "epoch": 0.5215301089974719, "grad_norm": 0.4135199189186096, "learning_rate": 2.3925566745410088e-06, "loss": 0.6268, "step": 12584 }, { "epoch": 
0.5215715529031456, "grad_norm": 0.43282660841941833, "learning_rate": 2.3923494550126406e-06, "loss": 0.6584, "step": 12585 }, { "epoch": 0.5216129968088192, "grad_norm": 0.4272555112838745, "learning_rate": 2.392142235484272e-06, "loss": 0.6584, "step": 12586 }, { "epoch": 0.521654440714493, "grad_norm": 0.4186498522758484, "learning_rate": 2.3919350159559038e-06, "loss": 0.708, "step": 12587 }, { "epoch": 0.5216958846201666, "grad_norm": 0.38211435079574585, "learning_rate": 2.3917277964275356e-06, "loss": 0.6737, "step": 12588 }, { "epoch": 0.5217373285258403, "grad_norm": 0.3978312611579895, "learning_rate": 2.3915205768991674e-06, "loss": 0.6486, "step": 12589 }, { "epoch": 0.5217787724315139, "grad_norm": 0.4126531779766083, "learning_rate": 2.3913133573707988e-06, "loss": 0.6926, "step": 12590 }, { "epoch": 0.5218202163371877, "grad_norm": 0.40570902824401855, "learning_rate": 2.3911061378424306e-06, "loss": 0.6479, "step": 12591 }, { "epoch": 0.5218616602428613, "grad_norm": 0.41340118646621704, "learning_rate": 2.390898918314062e-06, "loss": 0.6962, "step": 12592 }, { "epoch": 0.5219031041485349, "grad_norm": 0.4114457666873932, "learning_rate": 2.3906916987856938e-06, "loss": 0.6985, "step": 12593 }, { "epoch": 0.5219445480542086, "grad_norm": 0.40483883023262024, "learning_rate": 2.390484479257325e-06, "loss": 0.6935, "step": 12594 }, { "epoch": 0.5219859919598823, "grad_norm": 0.4975307583808899, "learning_rate": 2.390277259728957e-06, "loss": 0.7463, "step": 12595 }, { "epoch": 0.522027435865556, "grad_norm": 0.40892666578292847, "learning_rate": 2.3900700402005888e-06, "loss": 0.6826, "step": 12596 }, { "epoch": 0.5220688797712296, "grad_norm": 0.40424248576164246, "learning_rate": 2.3898628206722206e-06, "loss": 0.6951, "step": 12597 }, { "epoch": 0.5221103236769034, "grad_norm": 0.42291751503944397, "learning_rate": 2.389655601143852e-06, "loss": 0.6948, "step": 12598 }, { "epoch": 0.522151767582577, "grad_norm": 0.4212879240512848, 
"learning_rate": 2.3894483816154838e-06, "loss": 0.7129, "step": 12599 }, { "epoch": 0.5221932114882507, "grad_norm": 0.4208809435367584, "learning_rate": 2.389241162087115e-06, "loss": 0.6791, "step": 12600 }, { "epoch": 0.5222346553939243, "grad_norm": 0.4288797080516815, "learning_rate": 2.389033942558747e-06, "loss": 0.6738, "step": 12601 }, { "epoch": 0.522276099299598, "grad_norm": 0.3962031900882721, "learning_rate": 2.3888267230303784e-06, "loss": 0.6794, "step": 12602 }, { "epoch": 0.5223175432052717, "grad_norm": 0.44279807806015015, "learning_rate": 2.38861950350201e-06, "loss": 0.7302, "step": 12603 }, { "epoch": 0.5223589871109453, "grad_norm": 0.41808339953422546, "learning_rate": 2.388412283973642e-06, "loss": 0.6888, "step": 12604 }, { "epoch": 0.522400431016619, "grad_norm": 0.4430859386920929, "learning_rate": 2.3882050644452734e-06, "loss": 0.6876, "step": 12605 }, { "epoch": 0.5224418749222927, "grad_norm": 0.39555037021636963, "learning_rate": 2.387997844916905e-06, "loss": 0.6825, "step": 12606 }, { "epoch": 0.5224833188279664, "grad_norm": 0.4349897801876068, "learning_rate": 2.387790625388537e-06, "loss": 0.6875, "step": 12607 }, { "epoch": 0.52252476273364, "grad_norm": 0.4097515046596527, "learning_rate": 2.3875834058601684e-06, "loss": 0.6334, "step": 12608 }, { "epoch": 0.5225662066393136, "grad_norm": 0.3824368715286255, "learning_rate": 2.3873761863318e-06, "loss": 0.6366, "step": 12609 }, { "epoch": 0.5226076505449874, "grad_norm": 0.4280630052089691, "learning_rate": 2.3871689668034316e-06, "loss": 0.6962, "step": 12610 }, { "epoch": 0.522649094450661, "grad_norm": 0.4225119650363922, "learning_rate": 2.3869617472750634e-06, "loss": 0.6407, "step": 12611 }, { "epoch": 0.5226905383563347, "grad_norm": 0.44540655612945557, "learning_rate": 2.3867545277466948e-06, "loss": 0.7183, "step": 12612 }, { "epoch": 0.5227319822620083, "grad_norm": 0.373508483171463, "learning_rate": 2.3865473082183266e-06, "loss": 0.7096, "step": 12613 }, { 
"epoch": 0.5227734261676821, "grad_norm": 0.37904226779937744, "learning_rate": 2.3863400886899584e-06, "loss": 0.6919, "step": 12614 }, { "epoch": 0.5228148700733557, "grad_norm": 0.4333645701408386, "learning_rate": 2.38613286916159e-06, "loss": 0.74, "step": 12615 }, { "epoch": 0.5228563139790294, "grad_norm": 0.425849974155426, "learning_rate": 2.3859256496332216e-06, "loss": 0.6704, "step": 12616 }, { "epoch": 0.522897757884703, "grad_norm": 0.42296767234802246, "learning_rate": 2.3857184301048534e-06, "loss": 0.7142, "step": 12617 }, { "epoch": 0.5229392017903767, "grad_norm": 0.4082709550857544, "learning_rate": 2.3855112105764848e-06, "loss": 0.6183, "step": 12618 }, { "epoch": 0.5229806456960504, "grad_norm": 0.3941473960876465, "learning_rate": 2.3853039910481166e-06, "loss": 0.6584, "step": 12619 }, { "epoch": 0.523022089601724, "grad_norm": 0.4375854730606079, "learning_rate": 2.385096771519748e-06, "loss": 0.7272, "step": 12620 }, { "epoch": 0.5230635335073978, "grad_norm": 0.40812429785728455, "learning_rate": 2.3848895519913798e-06, "loss": 0.688, "step": 12621 }, { "epoch": 0.5231049774130714, "grad_norm": 0.4617955982685089, "learning_rate": 2.3846823324630116e-06, "loss": 0.7085, "step": 12622 }, { "epoch": 0.5231464213187451, "grad_norm": 0.45311516523361206, "learning_rate": 2.3844751129346434e-06, "loss": 0.7429, "step": 12623 }, { "epoch": 0.5231878652244187, "grad_norm": 0.4297454357147217, "learning_rate": 2.3842678934062748e-06, "loss": 0.6945, "step": 12624 }, { "epoch": 0.5232293091300925, "grad_norm": 0.41116654872894287, "learning_rate": 2.3840606738779066e-06, "loss": 0.6337, "step": 12625 }, { "epoch": 0.5232707530357661, "grad_norm": 0.38406509160995483, "learning_rate": 2.383853454349538e-06, "loss": 0.6455, "step": 12626 }, { "epoch": 0.5233121969414397, "grad_norm": 0.3692176342010498, "learning_rate": 2.3836462348211698e-06, "loss": 0.6707, "step": 12627 }, { "epoch": 0.5233536408471134, "grad_norm": 0.44967779517173767, 
"learning_rate": 2.383439015292801e-06, "loss": 0.7078, "step": 12628 }, { "epoch": 0.5233950847527871, "grad_norm": 0.4245588481426239, "learning_rate": 2.383231795764433e-06, "loss": 0.6593, "step": 12629 }, { "epoch": 0.5234365286584608, "grad_norm": 0.41723015904426575, "learning_rate": 2.383024576236065e-06, "loss": 0.6547, "step": 12630 }, { "epoch": 0.5234779725641344, "grad_norm": 0.42626410722732544, "learning_rate": 2.382817356707696e-06, "loss": 0.661, "step": 12631 }, { "epoch": 0.5235194164698082, "grad_norm": 0.3979733884334564, "learning_rate": 2.382610137179328e-06, "loss": 0.6725, "step": 12632 }, { "epoch": 0.5235608603754818, "grad_norm": 0.37187525629997253, "learning_rate": 2.38240291765096e-06, "loss": 0.6332, "step": 12633 }, { "epoch": 0.5236023042811555, "grad_norm": 0.4311836063861847, "learning_rate": 2.382195698122591e-06, "loss": 0.6809, "step": 12634 }, { "epoch": 0.5236437481868291, "grad_norm": 0.4104052782058716, "learning_rate": 2.381988478594223e-06, "loss": 0.6768, "step": 12635 }, { "epoch": 0.5236851920925027, "grad_norm": 0.41102686524391174, "learning_rate": 2.3817812590658544e-06, "loss": 0.646, "step": 12636 }, { "epoch": 0.5237266359981765, "grad_norm": 0.44484105706214905, "learning_rate": 2.381574039537486e-06, "loss": 0.6914, "step": 12637 }, { "epoch": 0.5237680799038501, "grad_norm": 0.4190810024738312, "learning_rate": 2.381366820009118e-06, "loss": 0.7161, "step": 12638 }, { "epoch": 0.5238095238095238, "grad_norm": 0.43017104268074036, "learning_rate": 2.3811596004807494e-06, "loss": 0.7102, "step": 12639 }, { "epoch": 0.5238509677151975, "grad_norm": 0.4285981059074402, "learning_rate": 2.380952380952381e-06, "loss": 0.7449, "step": 12640 }, { "epoch": 0.5238924116208712, "grad_norm": 0.412391722202301, "learning_rate": 2.380745161424013e-06, "loss": 0.7083, "step": 12641 }, { "epoch": 0.5239338555265448, "grad_norm": 0.45212551951408386, "learning_rate": 2.3805379418956444e-06, "loss": 0.7373, "step": 12642 }, { 
"epoch": 0.5239752994322185, "grad_norm": 0.4177693724632263, "learning_rate": 2.380330722367276e-06, "loss": 0.7188, "step": 12643 }, { "epoch": 0.5240167433378922, "grad_norm": 0.39702433347702026, "learning_rate": 2.3801235028389076e-06, "loss": 0.6466, "step": 12644 }, { "epoch": 0.5240581872435658, "grad_norm": 0.40061143040657043, "learning_rate": 2.3799162833105394e-06, "loss": 0.6869, "step": 12645 }, { "epoch": 0.5240996311492395, "grad_norm": 0.4022364914417267, "learning_rate": 2.3797090637821708e-06, "loss": 0.6891, "step": 12646 }, { "epoch": 0.5241410750549131, "grad_norm": 0.42555657029151917, "learning_rate": 2.3795018442538026e-06, "loss": 0.6802, "step": 12647 }, { "epoch": 0.5241825189605869, "grad_norm": 0.4297129213809967, "learning_rate": 2.3792946247254344e-06, "loss": 0.7517, "step": 12648 }, { "epoch": 0.5242239628662605, "grad_norm": 0.3912673592567444, "learning_rate": 2.379087405197066e-06, "loss": 0.7084, "step": 12649 }, { "epoch": 0.5242654067719342, "grad_norm": 0.3703274428844452, "learning_rate": 2.3788801856686976e-06, "loss": 0.6738, "step": 12650 }, { "epoch": 0.5243068506776078, "grad_norm": 0.43826237320899963, "learning_rate": 2.3786729661403294e-06, "loss": 0.6863, "step": 12651 }, { "epoch": 0.5243482945832816, "grad_norm": 0.42081063985824585, "learning_rate": 2.3784657466119608e-06, "loss": 0.6898, "step": 12652 }, { "epoch": 0.5243897384889552, "grad_norm": 0.3877864480018616, "learning_rate": 2.3782585270835926e-06, "loss": 0.6293, "step": 12653 }, { "epoch": 0.5244311823946288, "grad_norm": 0.3883492648601532, "learning_rate": 2.378051307555224e-06, "loss": 0.624, "step": 12654 }, { "epoch": 0.5244726263003026, "grad_norm": 0.45694699883461, "learning_rate": 2.3778440880268558e-06, "loss": 0.6716, "step": 12655 }, { "epoch": 0.5245140702059762, "grad_norm": 0.4060195982456207, "learning_rate": 2.3776368684984876e-06, "loss": 0.7324, "step": 12656 }, { "epoch": 0.5245555141116499, "grad_norm": 0.4088493883609772, 
"learning_rate": 2.377429648970119e-06, "loss": 0.656, "step": 12657 }, { "epoch": 0.5245969580173235, "grad_norm": 0.40790894627571106, "learning_rate": 2.3772224294417508e-06, "loss": 0.7104, "step": 12658 }, { "epoch": 0.5246384019229973, "grad_norm": 0.40414130687713623, "learning_rate": 2.3770152099133826e-06, "loss": 0.686, "step": 12659 }, { "epoch": 0.5246798458286709, "grad_norm": 0.37157467007637024, "learning_rate": 2.376807990385014e-06, "loss": 0.6274, "step": 12660 }, { "epoch": 0.5247212897343445, "grad_norm": 0.40897637605667114, "learning_rate": 2.376600770856646e-06, "loss": 0.6926, "step": 12661 }, { "epoch": 0.5247627336400182, "grad_norm": 0.43411290645599365, "learning_rate": 2.376393551328277e-06, "loss": 0.7273, "step": 12662 }, { "epoch": 0.5248041775456919, "grad_norm": 0.43205928802490234, "learning_rate": 2.376186331799909e-06, "loss": 0.7333, "step": 12663 }, { "epoch": 0.5248456214513656, "grad_norm": 0.4218291938304901, "learning_rate": 2.375979112271541e-06, "loss": 0.6642, "step": 12664 }, { "epoch": 0.5248870653570392, "grad_norm": 0.4380809962749481, "learning_rate": 2.375771892743172e-06, "loss": 0.6683, "step": 12665 }, { "epoch": 0.524928509262713, "grad_norm": 0.37009352445602417, "learning_rate": 2.375564673214804e-06, "loss": 0.6428, "step": 12666 }, { "epoch": 0.5249699531683866, "grad_norm": 0.4073854386806488, "learning_rate": 2.375357453686436e-06, "loss": 0.7047, "step": 12667 }, { "epoch": 0.5250113970740603, "grad_norm": 0.3952029049396515, "learning_rate": 2.375150234158067e-06, "loss": 0.6676, "step": 12668 }, { "epoch": 0.5250528409797339, "grad_norm": 0.43570664525032043, "learning_rate": 2.374943014629699e-06, "loss": 0.7493, "step": 12669 }, { "epoch": 0.5250942848854075, "grad_norm": 0.3945210874080658, "learning_rate": 2.3747357951013304e-06, "loss": 0.6476, "step": 12670 }, { "epoch": 0.5251357287910813, "grad_norm": 0.40418878197669983, "learning_rate": 2.374528575572962e-06, "loss": 0.6658, "step": 12671 }, 
{ "epoch": 0.5251771726967549, "grad_norm": 0.4028280973434448, "learning_rate": 2.374321356044594e-06, "loss": 0.7146, "step": 12672 }, { "epoch": 0.5252186166024286, "grad_norm": 0.41503721475601196, "learning_rate": 2.3741141365162254e-06, "loss": 0.6957, "step": 12673 }, { "epoch": 0.5252600605081023, "grad_norm": 0.4172116219997406, "learning_rate": 2.373906916987857e-06, "loss": 0.6301, "step": 12674 }, { "epoch": 0.525301504413776, "grad_norm": 0.4189743995666504, "learning_rate": 2.3736996974594886e-06, "loss": 0.6731, "step": 12675 }, { "epoch": 0.5253429483194496, "grad_norm": 0.4130241870880127, "learning_rate": 2.3734924779311204e-06, "loss": 0.6619, "step": 12676 }, { "epoch": 0.5253843922251233, "grad_norm": 0.4559548795223236, "learning_rate": 2.373285258402752e-06, "loss": 0.6565, "step": 12677 }, { "epoch": 0.525425836130797, "grad_norm": 0.4367161989212036, "learning_rate": 2.3730780388743836e-06, "loss": 0.6676, "step": 12678 }, { "epoch": 0.5254672800364706, "grad_norm": 0.4063808023929596, "learning_rate": 2.3728708193460154e-06, "loss": 0.689, "step": 12679 }, { "epoch": 0.5255087239421443, "grad_norm": 0.409132719039917, "learning_rate": 2.3726635998176468e-06, "loss": 0.6951, "step": 12680 }, { "epoch": 0.5255501678478179, "grad_norm": 0.3958336412906647, "learning_rate": 2.3724563802892786e-06, "loss": 0.668, "step": 12681 }, { "epoch": 0.5255916117534917, "grad_norm": 0.39782172441482544, "learning_rate": 2.3722491607609104e-06, "loss": 0.7225, "step": 12682 }, { "epoch": 0.5256330556591653, "grad_norm": 0.3923264443874359, "learning_rate": 2.3720419412325418e-06, "loss": 0.6702, "step": 12683 }, { "epoch": 0.525674499564839, "grad_norm": 0.42885860800743103, "learning_rate": 2.3718347217041736e-06, "loss": 0.7006, "step": 12684 }, { "epoch": 0.5257159434705126, "grad_norm": 0.4408898949623108, "learning_rate": 2.3716275021758054e-06, "loss": 0.7454, "step": 12685 }, { "epoch": 0.5257573873761864, "grad_norm": 0.4405261278152466, 
"learning_rate": 2.3714202826474368e-06, "loss": 0.6964, "step": 12686 }, { "epoch": 0.52579883128186, "grad_norm": 0.3965229094028473, "learning_rate": 2.3712130631190686e-06, "loss": 0.674, "step": 12687 }, { "epoch": 0.5258402751875336, "grad_norm": 0.41549956798553467, "learning_rate": 2.3710058435907e-06, "loss": 0.6814, "step": 12688 }, { "epoch": 0.5258817190932074, "grad_norm": 0.40055760741233826, "learning_rate": 2.370798624062332e-06, "loss": 0.697, "step": 12689 }, { "epoch": 0.525923162998881, "grad_norm": 0.40844807028770447, "learning_rate": 2.3705914045339636e-06, "loss": 0.7136, "step": 12690 }, { "epoch": 0.5259646069045547, "grad_norm": 0.4323974549770355, "learning_rate": 2.370384185005595e-06, "loss": 0.6965, "step": 12691 }, { "epoch": 0.5260060508102283, "grad_norm": 0.44082149863243103, "learning_rate": 2.370176965477227e-06, "loss": 0.6786, "step": 12692 }, { "epoch": 0.5260474947159021, "grad_norm": 0.4313060939311981, "learning_rate": 2.3699697459488586e-06, "loss": 0.6973, "step": 12693 }, { "epoch": 0.5260889386215757, "grad_norm": 0.3962545692920685, "learning_rate": 2.36976252642049e-06, "loss": 0.6483, "step": 12694 }, { "epoch": 0.5261303825272494, "grad_norm": 0.42420676350593567, "learning_rate": 2.369555306892122e-06, "loss": 0.6704, "step": 12695 }, { "epoch": 0.526171826432923, "grad_norm": 0.40030965209007263, "learning_rate": 2.369348087363753e-06, "loss": 0.7094, "step": 12696 }, { "epoch": 0.5262132703385967, "grad_norm": 0.4156250059604645, "learning_rate": 2.369140867835385e-06, "loss": 0.6364, "step": 12697 }, { "epoch": 0.5262547142442704, "grad_norm": 0.4453713893890381, "learning_rate": 2.368933648307017e-06, "loss": 0.6234, "step": 12698 }, { "epoch": 0.526296158149944, "grad_norm": 0.47580769658088684, "learning_rate": 2.368726428778648e-06, "loss": 0.7421, "step": 12699 }, { "epoch": 0.5263376020556177, "grad_norm": 0.39875853061676025, "learning_rate": 2.36851920925028e-06, "loss": 0.675, "step": 12700 }, { 
"epoch": 0.5263790459612914, "grad_norm": 0.3920235335826874, "learning_rate": 2.3683119897219114e-06, "loss": 0.6654, "step": 12701 }, { "epoch": 0.5264204898669651, "grad_norm": 0.397657573223114, "learning_rate": 2.368104770193543e-06, "loss": 0.6575, "step": 12702 }, { "epoch": 0.5264619337726387, "grad_norm": 0.3951939642429352, "learning_rate": 2.367897550665175e-06, "loss": 0.6737, "step": 12703 }, { "epoch": 0.5265033776783125, "grad_norm": 0.4291022717952728, "learning_rate": 2.3676903311368064e-06, "loss": 0.6667, "step": 12704 }, { "epoch": 0.5265448215839861, "grad_norm": 0.43796488642692566, "learning_rate": 2.367483111608438e-06, "loss": 0.6949, "step": 12705 }, { "epoch": 0.5265862654896597, "grad_norm": 0.42261624336242676, "learning_rate": 2.36727589208007e-06, "loss": 0.7107, "step": 12706 }, { "epoch": 0.5266277093953334, "grad_norm": 0.41933420300483704, "learning_rate": 2.3670686725517014e-06, "loss": 0.7512, "step": 12707 }, { "epoch": 0.526669153301007, "grad_norm": 0.4342271089553833, "learning_rate": 2.366861453023333e-06, "loss": 0.7194, "step": 12708 }, { "epoch": 0.5267105972066808, "grad_norm": 0.42758506536483765, "learning_rate": 2.3666542334949646e-06, "loss": 0.7561, "step": 12709 }, { "epoch": 0.5267520411123544, "grad_norm": 0.4329321086406708, "learning_rate": 2.3664470139665964e-06, "loss": 0.7002, "step": 12710 }, { "epoch": 0.5267934850180281, "grad_norm": 0.39651286602020264, "learning_rate": 2.366239794438228e-06, "loss": 0.6471, "step": 12711 }, { "epoch": 0.5268349289237018, "grad_norm": 0.44623979926109314, "learning_rate": 2.3660325749098596e-06, "loss": 0.7144, "step": 12712 }, { "epoch": 0.5268763728293755, "grad_norm": 0.41242149472236633, "learning_rate": 2.3658253553814914e-06, "loss": 0.656, "step": 12713 }, { "epoch": 0.5269178167350491, "grad_norm": 0.4180530607700348, "learning_rate": 2.365618135853123e-06, "loss": 0.7018, "step": 12714 }, { "epoch": 0.5269592606407227, "grad_norm": 0.4093191921710968, 
"learning_rate": 2.3654109163247546e-06, "loss": 0.7043, "step": 12715 }, { "epoch": 0.5270007045463965, "grad_norm": 0.4319854974746704, "learning_rate": 2.3652036967963864e-06, "loss": 0.6598, "step": 12716 }, { "epoch": 0.5270421484520701, "grad_norm": 0.3933837115764618, "learning_rate": 2.3649964772680178e-06, "loss": 0.6396, "step": 12717 }, { "epoch": 0.5270835923577438, "grad_norm": 0.40750330686569214, "learning_rate": 2.3647892577396496e-06, "loss": 0.6809, "step": 12718 }, { "epoch": 0.5271250362634174, "grad_norm": 0.40311792492866516, "learning_rate": 2.3645820382112814e-06, "loss": 0.6787, "step": 12719 }, { "epoch": 0.5271664801690912, "grad_norm": 0.4228418469429016, "learning_rate": 2.364374818682913e-06, "loss": 0.6565, "step": 12720 }, { "epoch": 0.5272079240747648, "grad_norm": 0.4425879120826721, "learning_rate": 2.3641675991545446e-06, "loss": 0.6514, "step": 12721 }, { "epoch": 0.5272493679804384, "grad_norm": 0.41079095005989075, "learning_rate": 2.363960379626176e-06, "loss": 0.6942, "step": 12722 }, { "epoch": 0.5272908118861122, "grad_norm": 0.41754844784736633, "learning_rate": 2.363753160097808e-06, "loss": 0.6658, "step": 12723 }, { "epoch": 0.5273322557917858, "grad_norm": 0.41991499066352844, "learning_rate": 2.3635459405694396e-06, "loss": 0.7134, "step": 12724 }, { "epoch": 0.5273736996974595, "grad_norm": 0.4074188470840454, "learning_rate": 2.363338721041071e-06, "loss": 0.6262, "step": 12725 }, { "epoch": 0.5274151436031331, "grad_norm": 0.41648292541503906, "learning_rate": 2.363131501512703e-06, "loss": 0.6594, "step": 12726 }, { "epoch": 0.5274565875088069, "grad_norm": 0.4286017417907715, "learning_rate": 2.362924281984334e-06, "loss": 0.665, "step": 12727 }, { "epoch": 0.5274980314144805, "grad_norm": 0.40575817227363586, "learning_rate": 2.362717062455966e-06, "loss": 0.6743, "step": 12728 }, { "epoch": 0.5275394753201542, "grad_norm": 0.3965182602405548, "learning_rate": 2.362509842927598e-06, "loss": 0.6604, "step": 
12729 }, { "epoch": 0.5275809192258278, "grad_norm": 0.419206440448761, "learning_rate": 2.362302623399229e-06, "loss": 0.6644, "step": 12730 }, { "epoch": 0.5276223631315015, "grad_norm": 0.4203120768070221, "learning_rate": 2.362095403870861e-06, "loss": 0.7168, "step": 12731 }, { "epoch": 0.5276638070371752, "grad_norm": 0.3980722725391388, "learning_rate": 2.361888184342493e-06, "loss": 0.634, "step": 12732 }, { "epoch": 0.5277052509428488, "grad_norm": 0.4063666760921478, "learning_rate": 2.361680964814124e-06, "loss": 0.693, "step": 12733 }, { "epoch": 0.5277466948485225, "grad_norm": 0.43318161368370056, "learning_rate": 2.361473745285756e-06, "loss": 0.7529, "step": 12734 }, { "epoch": 0.5277881387541962, "grad_norm": 0.44466325640678406, "learning_rate": 2.3612665257573874e-06, "loss": 0.7136, "step": 12735 }, { "epoch": 0.5278295826598699, "grad_norm": 0.3778674900531769, "learning_rate": 2.361059306229019e-06, "loss": 0.6633, "step": 12736 }, { "epoch": 0.5278710265655435, "grad_norm": 0.419139564037323, "learning_rate": 2.360852086700651e-06, "loss": 0.6665, "step": 12737 }, { "epoch": 0.5279124704712173, "grad_norm": 0.4465898871421814, "learning_rate": 2.3606448671722824e-06, "loss": 0.7339, "step": 12738 }, { "epoch": 0.5279539143768909, "grad_norm": 0.41023901104927063, "learning_rate": 2.360437647643914e-06, "loss": 0.667, "step": 12739 }, { "epoch": 0.5279953582825645, "grad_norm": 0.4159390330314636, "learning_rate": 2.360230428115546e-06, "loss": 0.6816, "step": 12740 }, { "epoch": 0.5280368021882382, "grad_norm": 0.41323333978652954, "learning_rate": 2.3600232085871774e-06, "loss": 0.7079, "step": 12741 }, { "epoch": 0.5280782460939119, "grad_norm": 0.4255991280078888, "learning_rate": 2.359815989058809e-06, "loss": 0.7102, "step": 12742 }, { "epoch": 0.5281196899995856, "grad_norm": 0.4253883957862854, "learning_rate": 2.3596087695304406e-06, "loss": 0.6848, "step": 12743 }, { "epoch": 0.5281611339052592, "grad_norm": 0.4293748438358307, 
"learning_rate": 2.3594015500020724e-06, "loss": 0.7014, "step": 12744 }, { "epoch": 0.5282025778109329, "grad_norm": 0.43246424198150635, "learning_rate": 2.359194330473704e-06, "loss": 0.6976, "step": 12745 }, { "epoch": 0.5282440217166066, "grad_norm": 0.41858580708503723, "learning_rate": 2.3589871109453356e-06, "loss": 0.6807, "step": 12746 }, { "epoch": 0.5282854656222803, "grad_norm": 0.406931072473526, "learning_rate": 2.3587798914169674e-06, "loss": 0.6676, "step": 12747 }, { "epoch": 0.5283269095279539, "grad_norm": 0.414328396320343, "learning_rate": 2.3585726718885992e-06, "loss": 0.6589, "step": 12748 }, { "epoch": 0.5283683534336275, "grad_norm": 0.4091304838657379, "learning_rate": 2.3583654523602306e-06, "loss": 0.6273, "step": 12749 }, { "epoch": 0.5284097973393013, "grad_norm": 0.44330504536628723, "learning_rate": 2.3581582328318624e-06, "loss": 0.6709, "step": 12750 }, { "epoch": 0.5284512412449749, "grad_norm": 0.3971948027610779, "learning_rate": 2.357951013303494e-06, "loss": 0.6653, "step": 12751 }, { "epoch": 0.5284926851506486, "grad_norm": 0.39212262630462646, "learning_rate": 2.3577437937751256e-06, "loss": 0.6772, "step": 12752 }, { "epoch": 0.5285341290563222, "grad_norm": 0.4163492023944855, "learning_rate": 2.357536574246757e-06, "loss": 0.6704, "step": 12753 }, { "epoch": 0.528575572961996, "grad_norm": 0.4218030869960785, "learning_rate": 2.357329354718389e-06, "loss": 0.7046, "step": 12754 }, { "epoch": 0.5286170168676696, "grad_norm": 0.38087084889411926, "learning_rate": 2.3571221351900206e-06, "loss": 0.6598, "step": 12755 }, { "epoch": 0.5286584607733433, "grad_norm": 0.4195363223552704, "learning_rate": 2.356914915661652e-06, "loss": 0.6904, "step": 12756 }, { "epoch": 0.528699904679017, "grad_norm": 0.4067707657814026, "learning_rate": 2.356707696133284e-06, "loss": 0.6478, "step": 12757 }, { "epoch": 0.5287413485846906, "grad_norm": 0.4406799077987671, "learning_rate": 2.3565004766049156e-06, "loss": 0.6699, "step": 12758 
}, { "epoch": 0.5287827924903643, "grad_norm": 0.38076919317245483, "learning_rate": 2.356293257076547e-06, "loss": 0.681, "step": 12759 }, { "epoch": 0.5288242363960379, "grad_norm": 0.4239473342895508, "learning_rate": 2.356086037548179e-06, "loss": 0.7322, "step": 12760 }, { "epoch": 0.5288656803017117, "grad_norm": 0.40087074041366577, "learning_rate": 2.35587881801981e-06, "loss": 0.6646, "step": 12761 }, { "epoch": 0.5289071242073853, "grad_norm": 0.4074096381664276, "learning_rate": 2.355671598491442e-06, "loss": 0.6765, "step": 12762 }, { "epoch": 0.528948568113059, "grad_norm": 0.4054841697216034, "learning_rate": 2.355464378963074e-06, "loss": 0.7202, "step": 12763 }, { "epoch": 0.5289900120187326, "grad_norm": 0.4648331105709076, "learning_rate": 2.355257159434705e-06, "loss": 0.7073, "step": 12764 }, { "epoch": 0.5290314559244064, "grad_norm": 0.39438101649284363, "learning_rate": 2.355049939906337e-06, "loss": 0.7, "step": 12765 }, { "epoch": 0.52907289983008, "grad_norm": 0.3795521855354309, "learning_rate": 2.354842720377969e-06, "loss": 0.7054, "step": 12766 }, { "epoch": 0.5291143437357536, "grad_norm": 0.39583802223205566, "learning_rate": 2.3546355008496e-06, "loss": 0.6954, "step": 12767 }, { "epoch": 0.5291557876414273, "grad_norm": 0.43929022550582886, "learning_rate": 2.354428281321232e-06, "loss": 0.7345, "step": 12768 }, { "epoch": 0.529197231547101, "grad_norm": 0.41069528460502625, "learning_rate": 2.3542210617928634e-06, "loss": 0.6525, "step": 12769 }, { "epoch": 0.5292386754527747, "grad_norm": 0.4104939103126526, "learning_rate": 2.354013842264495e-06, "loss": 0.6709, "step": 12770 }, { "epoch": 0.5292801193584483, "grad_norm": 0.407794713973999, "learning_rate": 2.353806622736127e-06, "loss": 0.6614, "step": 12771 }, { "epoch": 0.529321563264122, "grad_norm": 0.4212910532951355, "learning_rate": 2.3535994032077584e-06, "loss": 0.6936, "step": 12772 }, { "epoch": 0.5293630071697957, "grad_norm": 0.39720138907432556, "learning_rate": 
2.35339218367939e-06, "loss": 0.7473, "step": 12773 }, { "epoch": 0.5294044510754694, "grad_norm": 0.39428165555000305, "learning_rate": 2.353184964151022e-06, "loss": 0.6509, "step": 12774 }, { "epoch": 0.529445894981143, "grad_norm": 0.39720964431762695, "learning_rate": 2.3529777446226534e-06, "loss": 0.744, "step": 12775 }, { "epoch": 0.5294873388868166, "grad_norm": 0.42615777254104614, "learning_rate": 2.3527705250942852e-06, "loss": 0.7457, "step": 12776 }, { "epoch": 0.5295287827924904, "grad_norm": 0.4502609074115753, "learning_rate": 2.3525633055659166e-06, "loss": 0.6932, "step": 12777 }, { "epoch": 0.529570226698164, "grad_norm": 0.4430423974990845, "learning_rate": 2.3523560860375484e-06, "loss": 0.6794, "step": 12778 }, { "epoch": 0.5296116706038377, "grad_norm": 0.382313996553421, "learning_rate": 2.35214886650918e-06, "loss": 0.7288, "step": 12779 }, { "epoch": 0.5296531145095114, "grad_norm": 0.43854355812072754, "learning_rate": 2.3519416469808116e-06, "loss": 0.6622, "step": 12780 }, { "epoch": 0.5296945584151851, "grad_norm": 0.4560701847076416, "learning_rate": 2.3517344274524434e-06, "loss": 0.7444, "step": 12781 }, { "epoch": 0.5297360023208587, "grad_norm": 0.4166525602340698, "learning_rate": 2.3515272079240752e-06, "loss": 0.7034, "step": 12782 }, { "epoch": 0.5297774462265323, "grad_norm": 0.3945890963077545, "learning_rate": 2.3513199883957066e-06, "loss": 0.6558, "step": 12783 }, { "epoch": 0.5298188901322061, "grad_norm": 0.4028562307357788, "learning_rate": 2.3511127688673384e-06, "loss": 0.7168, "step": 12784 }, { "epoch": 0.5298603340378797, "grad_norm": 0.4088309109210968, "learning_rate": 2.35090554933897e-06, "loss": 0.6787, "step": 12785 }, { "epoch": 0.5299017779435534, "grad_norm": 0.394471675157547, "learning_rate": 2.3506983298106016e-06, "loss": 0.64, "step": 12786 }, { "epoch": 0.529943221849227, "grad_norm": 0.38554680347442627, "learning_rate": 2.350491110282233e-06, "loss": 0.6705, "step": 12787 }, { "epoch": 
0.5299846657549008, "grad_norm": 0.3892744183540344, "learning_rate": 2.350283890753865e-06, "loss": 0.6633, "step": 12788 }, { "epoch": 0.5300261096605744, "grad_norm": 0.3607287108898163, "learning_rate": 2.3500766712254966e-06, "loss": 0.6156, "step": 12789 }, { "epoch": 0.5300675535662481, "grad_norm": 0.4060533940792084, "learning_rate": 2.3498694516971284e-06, "loss": 0.6641, "step": 12790 }, { "epoch": 0.5301089974719218, "grad_norm": 0.4194815456867218, "learning_rate": 2.34966223216876e-06, "loss": 0.7363, "step": 12791 }, { "epoch": 0.5301504413775954, "grad_norm": 0.40061917901039124, "learning_rate": 2.3494550126403916e-06, "loss": 0.7128, "step": 12792 }, { "epoch": 0.5301918852832691, "grad_norm": 0.441508948802948, "learning_rate": 2.349247793112023e-06, "loss": 0.6781, "step": 12793 }, { "epoch": 0.5302333291889427, "grad_norm": 0.39830511808395386, "learning_rate": 2.349040573583655e-06, "loss": 0.6753, "step": 12794 }, { "epoch": 0.5302747730946165, "grad_norm": 0.4329967498779297, "learning_rate": 2.348833354055286e-06, "loss": 0.7028, "step": 12795 }, { "epoch": 0.5303162170002901, "grad_norm": 0.40524542331695557, "learning_rate": 2.348626134526918e-06, "loss": 0.6747, "step": 12796 }, { "epoch": 0.5303576609059638, "grad_norm": 0.3959845304489136, "learning_rate": 2.34841891499855e-06, "loss": 0.7095, "step": 12797 }, { "epoch": 0.5303991048116374, "grad_norm": 0.420844703912735, "learning_rate": 2.348211695470181e-06, "loss": 0.6611, "step": 12798 }, { "epoch": 0.5304405487173112, "grad_norm": 0.4011492431163788, "learning_rate": 2.348004475941813e-06, "loss": 0.7131, "step": 12799 }, { "epoch": 0.5304819926229848, "grad_norm": 0.3901159465312958, "learning_rate": 2.347797256413445e-06, "loss": 0.684, "step": 12800 }, { "epoch": 0.5305234365286584, "grad_norm": 0.409708708524704, "learning_rate": 2.347590036885076e-06, "loss": 0.6779, "step": 12801 }, { "epoch": 0.5305648804343321, "grad_norm": 0.4444390535354614, "learning_rate": 
2.347382817356708e-06, "loss": 0.7104, "step": 12802 }, { "epoch": 0.5306063243400058, "grad_norm": 0.40092501044273376, "learning_rate": 2.3471755978283394e-06, "loss": 0.6616, "step": 12803 }, { "epoch": 0.5306477682456795, "grad_norm": 0.4154167175292969, "learning_rate": 2.346968378299971e-06, "loss": 0.6578, "step": 12804 }, { "epoch": 0.5306892121513531, "grad_norm": 0.4358821213245392, "learning_rate": 2.3467611587716026e-06, "loss": 0.7517, "step": 12805 }, { "epoch": 0.5307306560570269, "grad_norm": 0.4447653591632843, "learning_rate": 2.3465539392432344e-06, "loss": 0.7336, "step": 12806 }, { "epoch": 0.5307720999627005, "grad_norm": 0.3950003385543823, "learning_rate": 2.3463467197148662e-06, "loss": 0.7269, "step": 12807 }, { "epoch": 0.5308135438683742, "grad_norm": 0.37115126848220825, "learning_rate": 2.346139500186498e-06, "loss": 0.6614, "step": 12808 }, { "epoch": 0.5308549877740478, "grad_norm": 0.4073227643966675, "learning_rate": 2.3459322806581294e-06, "loss": 0.7185, "step": 12809 }, { "epoch": 0.5308964316797214, "grad_norm": 0.3942599594593048, "learning_rate": 2.3457250611297612e-06, "loss": 0.6995, "step": 12810 }, { "epoch": 0.5309378755853952, "grad_norm": 0.43371403217315674, "learning_rate": 2.3455178416013926e-06, "loss": 0.7314, "step": 12811 }, { "epoch": 0.5309793194910688, "grad_norm": 0.40421175956726074, "learning_rate": 2.3453106220730244e-06, "loss": 0.6592, "step": 12812 }, { "epoch": 0.5310207633967425, "grad_norm": 0.3891434371471405, "learning_rate": 2.345103402544656e-06, "loss": 0.6693, "step": 12813 }, { "epoch": 0.5310622073024162, "grad_norm": 0.450286865234375, "learning_rate": 2.3448961830162876e-06, "loss": 0.662, "step": 12814 }, { "epoch": 0.5311036512080899, "grad_norm": 0.3891145884990692, "learning_rate": 2.3446889634879194e-06, "loss": 0.6337, "step": 12815 }, { "epoch": 0.5311450951137635, "grad_norm": 0.41346439719200134, "learning_rate": 2.3444817439595512e-06, "loss": 0.6487, "step": 12816 }, { "epoch": 
0.5311865390194372, "grad_norm": 0.4364015758037567, "learning_rate": 2.3442745244311826e-06, "loss": 0.6799, "step": 12817 }, { "epoch": 0.5312279829251109, "grad_norm": 0.4026208221912384, "learning_rate": 2.3440673049028144e-06, "loss": 0.6807, "step": 12818 }, { "epoch": 0.5312694268307845, "grad_norm": 0.410280704498291, "learning_rate": 2.343860085374446e-06, "loss": 0.6699, "step": 12819 }, { "epoch": 0.5313108707364582, "grad_norm": 0.399504691362381, "learning_rate": 2.3436528658460776e-06, "loss": 0.6875, "step": 12820 }, { "epoch": 0.5313523146421318, "grad_norm": 0.44626593589782715, "learning_rate": 2.343445646317709e-06, "loss": 0.6777, "step": 12821 }, { "epoch": 0.5313937585478056, "grad_norm": 0.39921003580093384, "learning_rate": 2.343238426789341e-06, "loss": 0.6747, "step": 12822 }, { "epoch": 0.5314352024534792, "grad_norm": 0.4259141981601715, "learning_rate": 2.343031207260972e-06, "loss": 0.6655, "step": 12823 }, { "epoch": 0.5314766463591529, "grad_norm": 0.38230735063552856, "learning_rate": 2.3428239877326044e-06, "loss": 0.6328, "step": 12824 }, { "epoch": 0.5315180902648265, "grad_norm": 0.4410659670829773, "learning_rate": 2.342616768204236e-06, "loss": 0.6464, "step": 12825 }, { "epoch": 0.5315595341705003, "grad_norm": 0.4279730021953583, "learning_rate": 2.3424095486758676e-06, "loss": 0.635, "step": 12826 }, { "epoch": 0.5316009780761739, "grad_norm": 0.40151217579841614, "learning_rate": 2.342202329147499e-06, "loss": 0.7274, "step": 12827 }, { "epoch": 0.5316424219818475, "grad_norm": 0.44970500469207764, "learning_rate": 2.341995109619131e-06, "loss": 0.6853, "step": 12828 }, { "epoch": 0.5316838658875213, "grad_norm": 0.37925848364830017, "learning_rate": 2.341787890090762e-06, "loss": 0.7417, "step": 12829 }, { "epoch": 0.5317253097931949, "grad_norm": 0.4270873963832855, "learning_rate": 2.341580670562394e-06, "loss": 0.6987, "step": 12830 }, { "epoch": 0.5317667536988686, "grad_norm": 0.4409238398075104, "learning_rate": 
2.3413734510340254e-06, "loss": 0.6285, "step": 12831 }, { "epoch": 0.5318081976045422, "grad_norm": 0.41487544775009155, "learning_rate": 2.341166231505657e-06, "loss": 0.7117, "step": 12832 }, { "epoch": 0.531849641510216, "grad_norm": 0.40720516443252563, "learning_rate": 2.340959011977289e-06, "loss": 0.6917, "step": 12833 }, { "epoch": 0.5318910854158896, "grad_norm": 0.44723257422447205, "learning_rate": 2.340751792448921e-06, "loss": 0.6538, "step": 12834 }, { "epoch": 0.5319325293215633, "grad_norm": 0.4195338189601898, "learning_rate": 2.3405445729205522e-06, "loss": 0.6726, "step": 12835 }, { "epoch": 0.5319739732272369, "grad_norm": 0.4372144043445587, "learning_rate": 2.340337353392184e-06, "loss": 0.6698, "step": 12836 }, { "epoch": 0.5320154171329106, "grad_norm": 0.380083292722702, "learning_rate": 2.3401301338638154e-06, "loss": 0.6514, "step": 12837 }, { "epoch": 0.5320568610385843, "grad_norm": 0.4389864206314087, "learning_rate": 2.3399229143354472e-06, "loss": 0.6982, "step": 12838 }, { "epoch": 0.5320983049442579, "grad_norm": 0.4643430709838867, "learning_rate": 2.3397156948070786e-06, "loss": 0.6929, "step": 12839 }, { "epoch": 0.5321397488499316, "grad_norm": 0.4307169020175934, "learning_rate": 2.3395084752787104e-06, "loss": 0.6705, "step": 12840 }, { "epoch": 0.5321811927556053, "grad_norm": 0.3644630014896393, "learning_rate": 2.3393012557503422e-06, "loss": 0.6692, "step": 12841 }, { "epoch": 0.532222636661279, "grad_norm": 0.39188143610954285, "learning_rate": 2.339094036221974e-06, "loss": 0.6528, "step": 12842 }, { "epoch": 0.5322640805669526, "grad_norm": 0.4374130666255951, "learning_rate": 2.3388868166936054e-06, "loss": 0.7328, "step": 12843 }, { "epoch": 0.5323055244726262, "grad_norm": 0.4222624599933624, "learning_rate": 2.3386795971652372e-06, "loss": 0.6958, "step": 12844 }, { "epoch": 0.5323469683783, "grad_norm": 0.40722373127937317, "learning_rate": 2.3384723776368686e-06, "loss": 0.7096, "step": 12845 }, { "epoch": 
0.5323884122839736, "grad_norm": 0.43191152811050415, "learning_rate": 2.3382651581085004e-06, "loss": 0.7136, "step": 12846 }, { "epoch": 0.5324298561896473, "grad_norm": 0.41238442063331604, "learning_rate": 2.338057938580132e-06, "loss": 0.6904, "step": 12847 }, { "epoch": 0.532471300095321, "grad_norm": 0.39594894647598267, "learning_rate": 2.3378507190517636e-06, "loss": 0.6998, "step": 12848 }, { "epoch": 0.5325127440009947, "grad_norm": 0.3915773630142212, "learning_rate": 2.337643499523395e-06, "loss": 0.6641, "step": 12849 }, { "epoch": 0.5325541879066683, "grad_norm": 0.4006320536136627, "learning_rate": 2.3374362799950272e-06, "loss": 0.6722, "step": 12850 }, { "epoch": 0.532595631812342, "grad_norm": 0.4360206723213196, "learning_rate": 2.3372290604666586e-06, "loss": 0.699, "step": 12851 }, { "epoch": 0.5326370757180157, "grad_norm": 0.4406997263431549, "learning_rate": 2.3370218409382904e-06, "loss": 0.683, "step": 12852 }, { "epoch": 0.5326785196236893, "grad_norm": 0.41712766885757446, "learning_rate": 2.336814621409922e-06, "loss": 0.6932, "step": 12853 }, { "epoch": 0.532719963529363, "grad_norm": 0.4048572778701782, "learning_rate": 2.3366074018815536e-06, "loss": 0.7043, "step": 12854 }, { "epoch": 0.5327614074350366, "grad_norm": 0.41389012336730957, "learning_rate": 2.336400182353185e-06, "loss": 0.6814, "step": 12855 }, { "epoch": 0.5328028513407104, "grad_norm": 0.4192982017993927, "learning_rate": 2.336192962824817e-06, "loss": 0.6752, "step": 12856 }, { "epoch": 0.532844295246384, "grad_norm": 0.42561307549476624, "learning_rate": 2.335985743296448e-06, "loss": 0.7051, "step": 12857 }, { "epoch": 0.5328857391520577, "grad_norm": 0.3976946473121643, "learning_rate": 2.3357785237680804e-06, "loss": 0.6765, "step": 12858 }, { "epoch": 0.5329271830577313, "grad_norm": 0.4183340072631836, "learning_rate": 2.335571304239712e-06, "loss": 0.6635, "step": 12859 }, { "epoch": 0.5329686269634051, "grad_norm": 0.4339468479156494, "learning_rate": 
2.3353640847113436e-06, "loss": 0.6851, "step": 12860 }, { "epoch": 0.5330100708690787, "grad_norm": 0.4078211784362793, "learning_rate": 2.335156865182975e-06, "loss": 0.6407, "step": 12861 }, { "epoch": 0.5330515147747523, "grad_norm": 0.4416934549808502, "learning_rate": 2.334949645654607e-06, "loss": 0.7, "step": 12862 }, { "epoch": 0.533092958680426, "grad_norm": 0.43863627314567566, "learning_rate": 2.334742426126238e-06, "loss": 0.6904, "step": 12863 }, { "epoch": 0.5331344025860997, "grad_norm": 0.3993176817893982, "learning_rate": 2.33453520659787e-06, "loss": 0.6235, "step": 12864 }, { "epoch": 0.5331758464917734, "grad_norm": 0.3826536536216736, "learning_rate": 2.3343279870695014e-06, "loss": 0.6876, "step": 12865 }, { "epoch": 0.533217290397447, "grad_norm": 0.3884011507034302, "learning_rate": 2.3341207675411332e-06, "loss": 0.652, "step": 12866 }, { "epoch": 0.5332587343031208, "grad_norm": 0.4264260530471802, "learning_rate": 2.333913548012765e-06, "loss": 0.7207, "step": 12867 }, { "epoch": 0.5333001782087944, "grad_norm": 0.4118197560310364, "learning_rate": 2.333706328484397e-06, "loss": 0.6823, "step": 12868 }, { "epoch": 0.5333416221144681, "grad_norm": 0.3943627178668976, "learning_rate": 2.3334991089560282e-06, "loss": 0.7007, "step": 12869 }, { "epoch": 0.5333830660201417, "grad_norm": 0.42532142996788025, "learning_rate": 2.33329188942766e-06, "loss": 0.7336, "step": 12870 }, { "epoch": 0.5334245099258154, "grad_norm": 0.4259392023086548, "learning_rate": 2.3330846698992914e-06, "loss": 0.679, "step": 12871 }, { "epoch": 0.5334659538314891, "grad_norm": 0.43316420912742615, "learning_rate": 2.3328774503709232e-06, "loss": 0.6692, "step": 12872 }, { "epoch": 0.5335073977371627, "grad_norm": 0.39270442724227905, "learning_rate": 2.3326702308425546e-06, "loss": 0.6836, "step": 12873 }, { "epoch": 0.5335488416428364, "grad_norm": 0.39156314730644226, "learning_rate": 2.3324630113141864e-06, "loss": 0.7212, "step": 12874 }, { "epoch": 
0.5335902855485101, "grad_norm": 0.4253687858581543, "learning_rate": 2.332255791785818e-06, "loss": 0.6375, "step": 12875 }, { "epoch": 0.5336317294541838, "grad_norm": 0.38181617856025696, "learning_rate": 2.33204857225745e-06, "loss": 0.6726, "step": 12876 }, { "epoch": 0.5336731733598574, "grad_norm": 0.4157556891441345, "learning_rate": 2.3318413527290814e-06, "loss": 0.7169, "step": 12877 }, { "epoch": 0.5337146172655312, "grad_norm": 0.41273123025894165, "learning_rate": 2.3316341332007132e-06, "loss": 0.6705, "step": 12878 }, { "epoch": 0.5337560611712048, "grad_norm": 0.42082834243774414, "learning_rate": 2.3314269136723446e-06, "loss": 0.6857, "step": 12879 }, { "epoch": 0.5337975050768784, "grad_norm": 0.41242021322250366, "learning_rate": 2.3312196941439764e-06, "loss": 0.6882, "step": 12880 }, { "epoch": 0.5338389489825521, "grad_norm": 0.3927420973777771, "learning_rate": 2.331012474615608e-06, "loss": 0.6721, "step": 12881 }, { "epoch": 0.5338803928882258, "grad_norm": 0.4605309069156647, "learning_rate": 2.3308052550872396e-06, "loss": 0.7207, "step": 12882 }, { "epoch": 0.5339218367938995, "grad_norm": 0.45265206694602966, "learning_rate": 2.330598035558871e-06, "loss": 0.6671, "step": 12883 }, { "epoch": 0.5339632806995731, "grad_norm": 0.42870867252349854, "learning_rate": 2.330390816030503e-06, "loss": 0.7317, "step": 12884 }, { "epoch": 0.5340047246052468, "grad_norm": 0.410745769739151, "learning_rate": 2.3301835965021346e-06, "loss": 0.7073, "step": 12885 }, { "epoch": 0.5340461685109205, "grad_norm": 0.3888387382030487, "learning_rate": 2.3299763769737664e-06, "loss": 0.6732, "step": 12886 }, { "epoch": 0.5340876124165942, "grad_norm": 0.390068382024765, "learning_rate": 2.329769157445398e-06, "loss": 0.6716, "step": 12887 }, { "epoch": 0.5341290563222678, "grad_norm": 0.39509692788124084, "learning_rate": 2.3295619379170296e-06, "loss": 0.7114, "step": 12888 }, { "epoch": 0.5341705002279414, "grad_norm": 0.4518096148967743, "learning_rate": 
2.329354718388661e-06, "loss": 0.7043, "step": 12889 }, { "epoch": 0.5342119441336152, "grad_norm": 0.4201490879058838, "learning_rate": 2.329147498860293e-06, "loss": 0.6642, "step": 12890 }, { "epoch": 0.5342533880392888, "grad_norm": 0.39589551091194153, "learning_rate": 2.328940279331924e-06, "loss": 0.6792, "step": 12891 }, { "epoch": 0.5342948319449625, "grad_norm": 0.38695114850997925, "learning_rate": 2.328733059803556e-06, "loss": 0.7246, "step": 12892 }, { "epoch": 0.5343362758506361, "grad_norm": 0.4321115016937256, "learning_rate": 2.328525840275188e-06, "loss": 0.6835, "step": 12893 }, { "epoch": 0.5343777197563099, "grad_norm": 0.4100632071495056, "learning_rate": 2.3283186207468196e-06, "loss": 0.6437, "step": 12894 }, { "epoch": 0.5344191636619835, "grad_norm": 0.4007258117198944, "learning_rate": 2.328111401218451e-06, "loss": 0.6823, "step": 12895 }, { "epoch": 0.5344606075676572, "grad_norm": 0.4309125542640686, "learning_rate": 2.327904181690083e-06, "loss": 0.6941, "step": 12896 }, { "epoch": 0.5345020514733309, "grad_norm": 0.3988111913204193, "learning_rate": 2.3276969621617142e-06, "loss": 0.6477, "step": 12897 }, { "epoch": 0.5345434953790045, "grad_norm": 0.3994053602218628, "learning_rate": 2.327489742633346e-06, "loss": 0.6633, "step": 12898 }, { "epoch": 0.5345849392846782, "grad_norm": 0.4092133045196533, "learning_rate": 2.3272825231049774e-06, "loss": 0.6558, "step": 12899 }, { "epoch": 0.5346263831903518, "grad_norm": 0.3984118103981018, "learning_rate": 2.3270753035766092e-06, "loss": 0.6448, "step": 12900 }, { "epoch": 0.5346678270960256, "grad_norm": 0.4333925247192383, "learning_rate": 2.3268680840482406e-06, "loss": 0.6746, "step": 12901 }, { "epoch": 0.5347092710016992, "grad_norm": 0.39333856105804443, "learning_rate": 2.326660864519873e-06, "loss": 0.6797, "step": 12902 }, { "epoch": 0.5347507149073729, "grad_norm": 0.42644229531288147, "learning_rate": 2.3264536449915042e-06, "loss": 0.7175, "step": 12903 }, { "epoch": 
0.5347921588130465, "grad_norm": 0.45764684677124023, "learning_rate": 2.326246425463136e-06, "loss": 0.6582, "step": 12904 }, { "epoch": 0.5348336027187202, "grad_norm": 0.3673115074634552, "learning_rate": 2.3260392059347674e-06, "loss": 0.6013, "step": 12905 }, { "epoch": 0.5348750466243939, "grad_norm": 0.4003862738609314, "learning_rate": 2.3258319864063992e-06, "loss": 0.6985, "step": 12906 }, { "epoch": 0.5349164905300675, "grad_norm": 0.3869233727455139, "learning_rate": 2.3256247668780306e-06, "loss": 0.6875, "step": 12907 }, { "epoch": 0.5349579344357412, "grad_norm": 0.4463527798652649, "learning_rate": 2.3254175473496624e-06, "loss": 0.7288, "step": 12908 }, { "epoch": 0.5349993783414149, "grad_norm": 0.40733829140663147, "learning_rate": 2.325210327821294e-06, "loss": 0.6958, "step": 12909 }, { "epoch": 0.5350408222470886, "grad_norm": 0.4178887903690338, "learning_rate": 2.3250031082929256e-06, "loss": 0.7068, "step": 12910 }, { "epoch": 0.5350822661527622, "grad_norm": 0.43788233399391174, "learning_rate": 2.3247958887645574e-06, "loss": 0.7014, "step": 12911 }, { "epoch": 0.535123710058436, "grad_norm": 0.3919357359409332, "learning_rate": 2.3245886692361892e-06, "loss": 0.6715, "step": 12912 }, { "epoch": 0.5351651539641096, "grad_norm": 0.40420424938201904, "learning_rate": 2.3243814497078206e-06, "loss": 0.7185, "step": 12913 }, { "epoch": 0.5352065978697832, "grad_norm": 0.41175714135169983, "learning_rate": 2.3241742301794524e-06, "loss": 0.6589, "step": 12914 }, { "epoch": 0.5352480417754569, "grad_norm": 0.4010791778564453, "learning_rate": 2.323967010651084e-06, "loss": 0.6362, "step": 12915 }, { "epoch": 0.5352894856811306, "grad_norm": 0.3885844945907593, "learning_rate": 2.3237597911227156e-06, "loss": 0.688, "step": 12916 }, { "epoch": 0.5353309295868043, "grad_norm": 0.44161322712898254, "learning_rate": 2.323552571594347e-06, "loss": 0.7446, "step": 12917 }, { "epoch": 0.5353723734924779, "grad_norm": 0.4468418061733246, 
"learning_rate": 2.323345352065979e-06, "loss": 0.6985, "step": 12918 }, { "epoch": 0.5354138173981516, "grad_norm": 0.4260421097278595, "learning_rate": 2.3231381325376106e-06, "loss": 0.656, "step": 12919 }, { "epoch": 0.5354552613038253, "grad_norm": 0.36235854029655457, "learning_rate": 2.3229309130092424e-06, "loss": 0.6028, "step": 12920 }, { "epoch": 0.535496705209499, "grad_norm": 0.4143977165222168, "learning_rate": 2.322723693480874e-06, "loss": 0.7085, "step": 12921 }, { "epoch": 0.5355381491151726, "grad_norm": 0.40639543533325195, "learning_rate": 2.3225164739525056e-06, "loss": 0.688, "step": 12922 }, { "epoch": 0.5355795930208462, "grad_norm": 0.40311139822006226, "learning_rate": 2.322309254424137e-06, "loss": 0.6824, "step": 12923 }, { "epoch": 0.53562103692652, "grad_norm": 0.43853384256362915, "learning_rate": 2.322102034895769e-06, "loss": 0.7562, "step": 12924 }, { "epoch": 0.5356624808321936, "grad_norm": 0.4531860053539276, "learning_rate": 2.3218948153674002e-06, "loss": 0.7606, "step": 12925 }, { "epoch": 0.5357039247378673, "grad_norm": 0.40941768884658813, "learning_rate": 2.321687595839032e-06, "loss": 0.7014, "step": 12926 }, { "epoch": 0.5357453686435409, "grad_norm": 0.40554338693618774, "learning_rate": 2.3214803763106634e-06, "loss": 0.7173, "step": 12927 }, { "epoch": 0.5357868125492147, "grad_norm": 0.40341389179229736, "learning_rate": 2.3212731567822956e-06, "loss": 0.6444, "step": 12928 }, { "epoch": 0.5358282564548883, "grad_norm": 0.44897788763046265, "learning_rate": 2.321065937253927e-06, "loss": 0.6959, "step": 12929 }, { "epoch": 0.535869700360562, "grad_norm": 0.420905739068985, "learning_rate": 2.320858717725559e-06, "loss": 0.6906, "step": 12930 }, { "epoch": 0.5359111442662357, "grad_norm": 0.4090760350227356, "learning_rate": 2.3206514981971902e-06, "loss": 0.6523, "step": 12931 }, { "epoch": 0.5359525881719093, "grad_norm": 0.40057849884033203, "learning_rate": 2.320444278668822e-06, "loss": 0.6506, "step": 12932 }, 
{ "epoch": 0.535994032077583, "grad_norm": 0.4215586185455322, "learning_rate": 2.3202370591404534e-06, "loss": 0.6974, "step": 12933 }, { "epoch": 0.5360354759832566, "grad_norm": 0.400571346282959, "learning_rate": 2.3200298396120852e-06, "loss": 0.6593, "step": 12934 }, { "epoch": 0.5360769198889304, "grad_norm": 0.36831507086753845, "learning_rate": 2.3198226200837166e-06, "loss": 0.5818, "step": 12935 }, { "epoch": 0.536118363794604, "grad_norm": 0.4073820114135742, "learning_rate": 2.3196154005553484e-06, "loss": 0.6714, "step": 12936 }, { "epoch": 0.5361598077002777, "grad_norm": 0.3813076615333557, "learning_rate": 2.3194081810269802e-06, "loss": 0.645, "step": 12937 }, { "epoch": 0.5362012516059513, "grad_norm": 0.39923980832099915, "learning_rate": 2.319200961498612e-06, "loss": 0.646, "step": 12938 }, { "epoch": 0.5362426955116251, "grad_norm": 0.42261260747909546, "learning_rate": 2.3189937419702434e-06, "loss": 0.6794, "step": 12939 }, { "epoch": 0.5362841394172987, "grad_norm": 0.4347153604030609, "learning_rate": 2.3187865224418752e-06, "loss": 0.7045, "step": 12940 }, { "epoch": 0.5363255833229723, "grad_norm": 0.43233221769332886, "learning_rate": 2.3185793029135066e-06, "loss": 0.6848, "step": 12941 }, { "epoch": 0.536367027228646, "grad_norm": 0.4340842664241791, "learning_rate": 2.3183720833851384e-06, "loss": 0.7057, "step": 12942 }, { "epoch": 0.5364084711343197, "grad_norm": 0.3948730528354645, "learning_rate": 2.31816486385677e-06, "loss": 0.5989, "step": 12943 }, { "epoch": 0.5364499150399934, "grad_norm": 0.42701467871665955, "learning_rate": 2.3179576443284016e-06, "loss": 0.7151, "step": 12944 }, { "epoch": 0.536491358945667, "grad_norm": 0.42283880710601807, "learning_rate": 2.3177504248000334e-06, "loss": 0.6971, "step": 12945 }, { "epoch": 0.5365328028513408, "grad_norm": 0.44455471634864807, "learning_rate": 2.3175432052716652e-06, "loss": 0.7498, "step": 12946 }, { "epoch": 0.5365742467570144, "grad_norm": 0.42121943831443787, 
"learning_rate": 2.3173359857432966e-06, "loss": 0.6836, "step": 12947 }, { "epoch": 0.5366156906626881, "grad_norm": 0.4126746952533722, "learning_rate": 2.3171287662149284e-06, "loss": 0.6343, "step": 12948 }, { "epoch": 0.5366571345683617, "grad_norm": 0.43476393818855286, "learning_rate": 2.31692154668656e-06, "loss": 0.6781, "step": 12949 }, { "epoch": 0.5366985784740353, "grad_norm": 0.3706677556037903, "learning_rate": 2.3167143271581916e-06, "loss": 0.6541, "step": 12950 }, { "epoch": 0.5367400223797091, "grad_norm": 0.43021148443222046, "learning_rate": 2.316507107629823e-06, "loss": 0.7, "step": 12951 }, { "epoch": 0.5367814662853827, "grad_norm": 0.4301229417324066, "learning_rate": 2.316299888101455e-06, "loss": 0.7422, "step": 12952 }, { "epoch": 0.5368229101910564, "grad_norm": 0.3899882137775421, "learning_rate": 2.3160926685730862e-06, "loss": 0.636, "step": 12953 }, { "epoch": 0.5368643540967301, "grad_norm": 0.3972240388393402, "learning_rate": 2.3158854490447184e-06, "loss": 0.6757, "step": 12954 }, { "epoch": 0.5369057980024038, "grad_norm": 0.39857980608940125, "learning_rate": 2.31567822951635e-06, "loss": 0.6572, "step": 12955 }, { "epoch": 0.5369472419080774, "grad_norm": 0.42433953285217285, "learning_rate": 2.3154710099879816e-06, "loss": 0.6927, "step": 12956 }, { "epoch": 0.5369886858137511, "grad_norm": 0.4580519497394562, "learning_rate": 2.315263790459613e-06, "loss": 0.7014, "step": 12957 }, { "epoch": 0.5370301297194248, "grad_norm": 0.38678693771362305, "learning_rate": 2.315056570931245e-06, "loss": 0.6218, "step": 12958 }, { "epoch": 0.5370715736250984, "grad_norm": 0.45386382937431335, "learning_rate": 2.3148493514028762e-06, "loss": 0.7103, "step": 12959 }, { "epoch": 0.5371130175307721, "grad_norm": 0.3997993767261505, "learning_rate": 2.314642131874508e-06, "loss": 0.6548, "step": 12960 }, { "epoch": 0.5371544614364457, "grad_norm": 0.40142855048179626, "learning_rate": 2.3144349123461394e-06, "loss": 0.6785, "step": 12961 }, 
{ "epoch": 0.5371959053421195, "grad_norm": 0.4283410310745239, "learning_rate": 2.3142276928177712e-06, "loss": 0.6876, "step": 12962 }, { "epoch": 0.5372373492477931, "grad_norm": 0.3978479504585266, "learning_rate": 2.314020473289403e-06, "loss": 0.6908, "step": 12963 }, { "epoch": 0.5372787931534668, "grad_norm": 0.40223902463912964, "learning_rate": 2.313813253761035e-06, "loss": 0.6826, "step": 12964 }, { "epoch": 0.5373202370591404, "grad_norm": 0.41629084944725037, "learning_rate": 2.3136060342326662e-06, "loss": 0.7134, "step": 12965 }, { "epoch": 0.5373616809648141, "grad_norm": 0.4367891252040863, "learning_rate": 2.313398814704298e-06, "loss": 0.7087, "step": 12966 }, { "epoch": 0.5374031248704878, "grad_norm": 0.39007118344306946, "learning_rate": 2.3131915951759294e-06, "loss": 0.6567, "step": 12967 }, { "epoch": 0.5374445687761614, "grad_norm": 0.3793109357357025, "learning_rate": 2.3129843756475612e-06, "loss": 0.6497, "step": 12968 }, { "epoch": 0.5374860126818352, "grad_norm": 0.39812222123146057, "learning_rate": 2.3127771561191926e-06, "loss": 0.6345, "step": 12969 }, { "epoch": 0.5375274565875088, "grad_norm": 0.4116498827934265, "learning_rate": 2.3125699365908244e-06, "loss": 0.67, "step": 12970 }, { "epoch": 0.5375689004931825, "grad_norm": 0.45759615302085876, "learning_rate": 2.312362717062456e-06, "loss": 0.7313, "step": 12971 }, { "epoch": 0.5376103443988561, "grad_norm": 0.42657917737960815, "learning_rate": 2.312155497534088e-06, "loss": 0.666, "step": 12972 }, { "epoch": 0.5376517883045299, "grad_norm": 0.3863115906715393, "learning_rate": 2.3119482780057194e-06, "loss": 0.6904, "step": 12973 }, { "epoch": 0.5376932322102035, "grad_norm": 0.41953253746032715, "learning_rate": 2.3117410584773512e-06, "loss": 0.7051, "step": 12974 }, { "epoch": 0.5377346761158771, "grad_norm": 0.41634392738342285, "learning_rate": 2.3115338389489826e-06, "loss": 0.6764, "step": 12975 }, { "epoch": 0.5377761200215508, "grad_norm": 0.4194655120372772, 
"learning_rate": 2.3113266194206144e-06, "loss": 0.6388, "step": 12976 }, { "epoch": 0.5378175639272245, "grad_norm": 0.44342562556266785, "learning_rate": 2.311119399892246e-06, "loss": 0.6992, "step": 12977 }, { "epoch": 0.5378590078328982, "grad_norm": 0.3915787637233734, "learning_rate": 2.3109121803638776e-06, "loss": 0.66, "step": 12978 }, { "epoch": 0.5379004517385718, "grad_norm": 0.4389720857143402, "learning_rate": 2.310704960835509e-06, "loss": 0.7151, "step": 12979 }, { "epoch": 0.5379418956442455, "grad_norm": 0.4177413582801819, "learning_rate": 2.3104977413071412e-06, "loss": 0.71, "step": 12980 }, { "epoch": 0.5379833395499192, "grad_norm": 0.3874242603778839, "learning_rate": 2.3102905217787726e-06, "loss": 0.6283, "step": 12981 }, { "epoch": 0.5380247834555929, "grad_norm": 0.42663660645484924, "learning_rate": 2.3100833022504044e-06, "loss": 0.7231, "step": 12982 }, { "epoch": 0.5380662273612665, "grad_norm": 0.40798890590667725, "learning_rate": 2.309876082722036e-06, "loss": 0.6487, "step": 12983 }, { "epoch": 0.5381076712669401, "grad_norm": 0.4156686067581177, "learning_rate": 2.3096688631936676e-06, "loss": 0.6783, "step": 12984 }, { "epoch": 0.5381491151726139, "grad_norm": 0.42959898710250854, "learning_rate": 2.309461643665299e-06, "loss": 0.7039, "step": 12985 }, { "epoch": 0.5381905590782875, "grad_norm": 0.3906037211418152, "learning_rate": 2.309254424136931e-06, "loss": 0.636, "step": 12986 }, { "epoch": 0.5382320029839612, "grad_norm": 0.4188729226589203, "learning_rate": 2.3090472046085622e-06, "loss": 0.6715, "step": 12987 }, { "epoch": 0.5382734468896349, "grad_norm": 0.4583487808704376, "learning_rate": 2.308839985080194e-06, "loss": 0.7466, "step": 12988 }, { "epoch": 0.5383148907953086, "grad_norm": 0.3973032236099243, "learning_rate": 2.308632765551826e-06, "loss": 0.6965, "step": 12989 }, { "epoch": 0.5383563347009822, "grad_norm": 0.3715957999229431, "learning_rate": 2.3084255460234576e-06, "loss": 0.6655, "step": 12990 }, { 
"epoch": 0.5383977786066559, "grad_norm": 0.4440552294254303, "learning_rate": 2.308218326495089e-06, "loss": 0.6726, "step": 12991 }, { "epoch": 0.5384392225123296, "grad_norm": 0.3799477517604828, "learning_rate": 2.308011106966721e-06, "loss": 0.6692, "step": 12992 }, { "epoch": 0.5384806664180032, "grad_norm": 0.4284555912017822, "learning_rate": 2.3078038874383522e-06, "loss": 0.7047, "step": 12993 }, { "epoch": 0.5385221103236769, "grad_norm": 0.41752544045448303, "learning_rate": 2.307596667909984e-06, "loss": 0.6764, "step": 12994 }, { "epoch": 0.5385635542293505, "grad_norm": 0.4005182981491089, "learning_rate": 2.3073894483816154e-06, "loss": 0.6997, "step": 12995 }, { "epoch": 0.5386049981350243, "grad_norm": 0.42117759585380554, "learning_rate": 2.3071822288532472e-06, "loss": 0.6832, "step": 12996 }, { "epoch": 0.5386464420406979, "grad_norm": 0.4225638210773468, "learning_rate": 2.306975009324879e-06, "loss": 0.6997, "step": 12997 }, { "epoch": 0.5386878859463716, "grad_norm": 0.4211042821407318, "learning_rate": 2.306767789796511e-06, "loss": 0.7388, "step": 12998 }, { "epoch": 0.5387293298520452, "grad_norm": 0.43044334650039673, "learning_rate": 2.3065605702681422e-06, "loss": 0.6791, "step": 12999 }, { "epoch": 0.538770773757719, "grad_norm": 0.3610936105251312, "learning_rate": 2.306353350739774e-06, "loss": 0.6117, "step": 13000 }, { "epoch": 0.5388122176633926, "grad_norm": 0.41754060983657837, "learning_rate": 2.3061461312114054e-06, "loss": 0.6658, "step": 13001 }, { "epoch": 0.5388536615690662, "grad_norm": 0.4171181917190552, "learning_rate": 2.3059389116830372e-06, "loss": 0.6783, "step": 13002 }, { "epoch": 0.53889510547474, "grad_norm": 0.43441829085350037, "learning_rate": 2.3057316921546686e-06, "loss": 0.6483, "step": 13003 }, { "epoch": 0.5389365493804136, "grad_norm": 0.4103885591030121, "learning_rate": 2.3055244726263004e-06, "loss": 0.7065, "step": 13004 }, { "epoch": 0.5389779932860873, "grad_norm": 0.4269576370716095, 
"learning_rate": 2.305317253097932e-06, "loss": 0.6709, "step": 13005 }, { "epoch": 0.5390194371917609, "grad_norm": 0.4174657166004181, "learning_rate": 2.305110033569564e-06, "loss": 0.6273, "step": 13006 }, { "epoch": 0.5390608810974347, "grad_norm": 0.43940219283103943, "learning_rate": 2.3049028140411954e-06, "loss": 0.717, "step": 13007 }, { "epoch": 0.5391023250031083, "grad_norm": 0.4273514747619629, "learning_rate": 2.3046955945128272e-06, "loss": 0.7415, "step": 13008 }, { "epoch": 0.539143768908782, "grad_norm": 0.42901578545570374, "learning_rate": 2.3044883749844586e-06, "loss": 0.6608, "step": 13009 }, { "epoch": 0.5391852128144556, "grad_norm": 0.4408095180988312, "learning_rate": 2.3042811554560904e-06, "loss": 0.6877, "step": 13010 }, { "epoch": 0.5392266567201293, "grad_norm": 0.4260903596878052, "learning_rate": 2.304073935927722e-06, "loss": 0.6853, "step": 13011 }, { "epoch": 0.539268100625803, "grad_norm": 0.4214371144771576, "learning_rate": 2.3038667163993536e-06, "loss": 0.7219, "step": 13012 }, { "epoch": 0.5393095445314766, "grad_norm": 0.3758764863014221, "learning_rate": 2.303659496870985e-06, "loss": 0.6512, "step": 13013 }, { "epoch": 0.5393509884371503, "grad_norm": 0.4344061315059662, "learning_rate": 2.303452277342617e-06, "loss": 0.7063, "step": 13014 }, { "epoch": 0.539392432342824, "grad_norm": 0.4331554174423218, "learning_rate": 2.3032450578142486e-06, "loss": 0.7026, "step": 13015 }, { "epoch": 0.5394338762484977, "grad_norm": 0.4216785430908203, "learning_rate": 2.3030378382858804e-06, "loss": 0.6227, "step": 13016 }, { "epoch": 0.5394753201541713, "grad_norm": 0.4197157919406891, "learning_rate": 2.302830618757512e-06, "loss": 0.666, "step": 13017 }, { "epoch": 0.5395167640598449, "grad_norm": 0.3976689279079437, "learning_rate": 2.3026233992291436e-06, "loss": 0.6589, "step": 13018 }, { "epoch": 0.5395582079655187, "grad_norm": 0.39127814769744873, "learning_rate": 2.302416179700775e-06, "loss": 0.7114, "step": 13019 }, { 
"epoch": 0.5395996518711923, "grad_norm": 0.4111287593841553, "learning_rate": 2.302208960172407e-06, "loss": 0.6707, "step": 13020 }, { "epoch": 0.539641095776866, "grad_norm": 0.42271688580513, "learning_rate": 2.3020017406440382e-06, "loss": 0.7271, "step": 13021 }, { "epoch": 0.5396825396825397, "grad_norm": 0.41561102867126465, "learning_rate": 2.30179452111567e-06, "loss": 0.6709, "step": 13022 }, { "epoch": 0.5397239835882134, "grad_norm": 0.39351606369018555, "learning_rate": 2.301587301587302e-06, "loss": 0.6759, "step": 13023 }, { "epoch": 0.539765427493887, "grad_norm": 0.3768216371536255, "learning_rate": 2.3013800820589337e-06, "loss": 0.6565, "step": 13024 }, { "epoch": 0.5398068713995607, "grad_norm": 0.4016602337360382, "learning_rate": 2.301172862530565e-06, "loss": 0.6904, "step": 13025 }, { "epoch": 0.5398483153052344, "grad_norm": 0.395116925239563, "learning_rate": 2.300965643002197e-06, "loss": 0.6925, "step": 13026 }, { "epoch": 0.539889759210908, "grad_norm": 0.3856351375579834, "learning_rate": 2.3007584234738282e-06, "loss": 0.644, "step": 13027 }, { "epoch": 0.5399312031165817, "grad_norm": 0.4040999412536621, "learning_rate": 2.30055120394546e-06, "loss": 0.7013, "step": 13028 }, { "epoch": 0.5399726470222553, "grad_norm": 0.3965044319629669, "learning_rate": 2.3003439844170914e-06, "loss": 0.6924, "step": 13029 }, { "epoch": 0.5400140909279291, "grad_norm": 0.40864816308021545, "learning_rate": 2.3001367648887232e-06, "loss": 0.7048, "step": 13030 }, { "epoch": 0.5400555348336027, "grad_norm": 0.4118366241455078, "learning_rate": 2.299929545360355e-06, "loss": 0.6825, "step": 13031 }, { "epoch": 0.5400969787392764, "grad_norm": 0.42553189396858215, "learning_rate": 2.2997223258319864e-06, "loss": 0.7012, "step": 13032 }, { "epoch": 0.54013842264495, "grad_norm": 0.45001623034477234, "learning_rate": 2.2995151063036182e-06, "loss": 0.6578, "step": 13033 }, { "epoch": 0.5401798665506238, "grad_norm": 0.43337246775627136, "learning_rate": 
2.29930788677525e-06, "loss": 0.6746, "step": 13034 }, { "epoch": 0.5402213104562974, "grad_norm": 0.4214021563529968, "learning_rate": 2.2991006672468814e-06, "loss": 0.7346, "step": 13035 }, { "epoch": 0.540262754361971, "grad_norm": 0.456148236989975, "learning_rate": 2.2988934477185132e-06, "loss": 0.7015, "step": 13036 }, { "epoch": 0.5403041982676448, "grad_norm": 0.47132354974746704, "learning_rate": 2.2986862281901446e-06, "loss": 0.7229, "step": 13037 }, { "epoch": 0.5403456421733184, "grad_norm": 0.4156130850315094, "learning_rate": 2.2984790086617764e-06, "loss": 0.6733, "step": 13038 }, { "epoch": 0.5403870860789921, "grad_norm": 0.4088706076145172, "learning_rate": 2.2982717891334082e-06, "loss": 0.6875, "step": 13039 }, { "epoch": 0.5404285299846657, "grad_norm": 0.3998040556907654, "learning_rate": 2.2980645696050396e-06, "loss": 0.7097, "step": 13040 }, { "epoch": 0.5404699738903395, "grad_norm": 0.39529040455818176, "learning_rate": 2.2978573500766714e-06, "loss": 0.6826, "step": 13041 }, { "epoch": 0.5405114177960131, "grad_norm": 0.37041836977005005, "learning_rate": 2.2976501305483033e-06, "loss": 0.6418, "step": 13042 }, { "epoch": 0.5405528617016868, "grad_norm": 0.4389377236366272, "learning_rate": 2.2974429110199346e-06, "loss": 0.7157, "step": 13043 }, { "epoch": 0.5405943056073604, "grad_norm": 0.427168071269989, "learning_rate": 2.2972356914915664e-06, "loss": 0.7134, "step": 13044 }, { "epoch": 0.5406357495130341, "grad_norm": 0.3949218988418579, "learning_rate": 2.297028471963198e-06, "loss": 0.6924, "step": 13045 }, { "epoch": 0.5406771934187078, "grad_norm": 0.3986227810382843, "learning_rate": 2.2968212524348296e-06, "loss": 0.6403, "step": 13046 }, { "epoch": 0.5407186373243814, "grad_norm": 0.3988339304924011, "learning_rate": 2.296614032906461e-06, "loss": 0.6826, "step": 13047 }, { "epoch": 0.5407600812300551, "grad_norm": 0.43006977438926697, "learning_rate": 2.296406813378093e-06, "loss": 0.7433, "step": 13048 }, { "epoch": 
0.5408015251357288, "grad_norm": 0.47492456436157227, "learning_rate": 2.2961995938497246e-06, "loss": 0.7815, "step": 13049 }, { "epoch": 0.5408429690414025, "grad_norm": 0.4300835132598877, "learning_rate": 2.2959923743213565e-06, "loss": 0.6898, "step": 13050 }, { "epoch": 0.5408844129470761, "grad_norm": 0.3882294297218323, "learning_rate": 2.295785154792988e-06, "loss": 0.6832, "step": 13051 }, { "epoch": 0.5409258568527499, "grad_norm": 0.43443813920021057, "learning_rate": 2.2955779352646197e-06, "loss": 0.6836, "step": 13052 }, { "epoch": 0.5409673007584235, "grad_norm": 0.4253323972225189, "learning_rate": 2.295370715736251e-06, "loss": 0.7162, "step": 13053 }, { "epoch": 0.5410087446640971, "grad_norm": 0.41642314195632935, "learning_rate": 2.295163496207883e-06, "loss": 0.7063, "step": 13054 }, { "epoch": 0.5410501885697708, "grad_norm": 0.40942347049713135, "learning_rate": 2.2949562766795142e-06, "loss": 0.7388, "step": 13055 }, { "epoch": 0.5410916324754445, "grad_norm": 0.4091856777667999, "learning_rate": 2.294749057151146e-06, "loss": 0.6875, "step": 13056 }, { "epoch": 0.5411330763811182, "grad_norm": 0.43276774883270264, "learning_rate": 2.294541837622778e-06, "loss": 0.7205, "step": 13057 }, { "epoch": 0.5411745202867918, "grad_norm": 0.4142039716243744, "learning_rate": 2.2943346180944092e-06, "loss": 0.6831, "step": 13058 }, { "epoch": 0.5412159641924655, "grad_norm": 0.41540977358818054, "learning_rate": 2.294127398566041e-06, "loss": 0.6842, "step": 13059 }, { "epoch": 0.5412574080981392, "grad_norm": 0.412065327167511, "learning_rate": 2.293920179037673e-06, "loss": 0.6858, "step": 13060 }, { "epoch": 0.5412988520038129, "grad_norm": 0.39477625489234924, "learning_rate": 2.2937129595093042e-06, "loss": 0.6562, "step": 13061 }, { "epoch": 0.5413402959094865, "grad_norm": 0.39126113057136536, "learning_rate": 2.293505739980936e-06, "loss": 0.6746, "step": 13062 }, { "epoch": 0.5413817398151601, "grad_norm": 0.43605178594589233, 
"learning_rate": 2.2932985204525674e-06, "loss": 0.6919, "step": 13063 }, { "epoch": 0.5414231837208339, "grad_norm": 0.4126887023448944, "learning_rate": 2.2930913009241992e-06, "loss": 0.6813, "step": 13064 }, { "epoch": 0.5414646276265075, "grad_norm": 0.4009850323200226, "learning_rate": 2.292884081395831e-06, "loss": 0.6904, "step": 13065 }, { "epoch": 0.5415060715321812, "grad_norm": 0.43473395705223083, "learning_rate": 2.2926768618674624e-06, "loss": 0.6543, "step": 13066 }, { "epoch": 0.5415475154378548, "grad_norm": 0.4203380346298218, "learning_rate": 2.2924696423390942e-06, "loss": 0.701, "step": 13067 }, { "epoch": 0.5415889593435286, "grad_norm": 0.3945593237876892, "learning_rate": 2.292262422810726e-06, "loss": 0.6633, "step": 13068 }, { "epoch": 0.5416304032492022, "grad_norm": 0.4337891936302185, "learning_rate": 2.2920552032823574e-06, "loss": 0.6993, "step": 13069 }, { "epoch": 0.5416718471548759, "grad_norm": 0.3984043300151825, "learning_rate": 2.2918479837539893e-06, "loss": 0.6451, "step": 13070 }, { "epoch": 0.5417132910605496, "grad_norm": 0.39748844504356384, "learning_rate": 2.2916407642256206e-06, "loss": 0.6968, "step": 13071 }, { "epoch": 0.5417547349662232, "grad_norm": 0.40924501419067383, "learning_rate": 2.2914335446972524e-06, "loss": 0.7, "step": 13072 }, { "epoch": 0.5417961788718969, "grad_norm": 0.4150150716304779, "learning_rate": 2.2912263251688843e-06, "loss": 0.6799, "step": 13073 }, { "epoch": 0.5418376227775705, "grad_norm": 0.40606558322906494, "learning_rate": 2.2910191056405156e-06, "loss": 0.6582, "step": 13074 }, { "epoch": 0.5418790666832443, "grad_norm": 0.39317601919174194, "learning_rate": 2.2908118861121474e-06, "loss": 0.6757, "step": 13075 }, { "epoch": 0.5419205105889179, "grad_norm": 0.3806534707546234, "learning_rate": 2.2906046665837793e-06, "loss": 0.6742, "step": 13076 }, { "epoch": 0.5419619544945916, "grad_norm": 0.44157591462135315, "learning_rate": 2.2903974470554106e-06, "loss": 0.6985, "step": 
13077 }, { "epoch": 0.5420033984002652, "grad_norm": 0.46515941619873047, "learning_rate": 2.2901902275270425e-06, "loss": 0.7184, "step": 13078 }, { "epoch": 0.5420448423059389, "grad_norm": 0.45570215582847595, "learning_rate": 2.289983007998674e-06, "loss": 0.6932, "step": 13079 }, { "epoch": 0.5420862862116126, "grad_norm": 0.4363085925579071, "learning_rate": 2.2897757884703056e-06, "loss": 0.6932, "step": 13080 }, { "epoch": 0.5421277301172862, "grad_norm": 0.38323503732681274, "learning_rate": 2.289568568941937e-06, "loss": 0.6365, "step": 13081 }, { "epoch": 0.5421691740229599, "grad_norm": 0.4592910706996918, "learning_rate": 2.289361349413569e-06, "loss": 0.7134, "step": 13082 }, { "epoch": 0.5422106179286336, "grad_norm": 0.4227254092693329, "learning_rate": 2.2891541298852007e-06, "loss": 0.6667, "step": 13083 }, { "epoch": 0.5422520618343073, "grad_norm": 0.4155708849430084, "learning_rate": 2.288946910356832e-06, "loss": 0.6907, "step": 13084 }, { "epoch": 0.5422935057399809, "grad_norm": 0.42054152488708496, "learning_rate": 2.288739690828464e-06, "loss": 0.6886, "step": 13085 }, { "epoch": 0.5423349496456547, "grad_norm": 0.41738632321357727, "learning_rate": 2.2885324713000957e-06, "loss": 0.682, "step": 13086 }, { "epoch": 0.5423763935513283, "grad_norm": 0.4195530414581299, "learning_rate": 2.288325251771727e-06, "loss": 0.7346, "step": 13087 }, { "epoch": 0.5424178374570019, "grad_norm": 0.38131043314933777, "learning_rate": 2.288118032243359e-06, "loss": 0.6802, "step": 13088 }, { "epoch": 0.5424592813626756, "grad_norm": 0.3956884741783142, "learning_rate": 2.2879108127149902e-06, "loss": 0.6836, "step": 13089 }, { "epoch": 0.5425007252683492, "grad_norm": 0.4074039161205292, "learning_rate": 2.287703593186622e-06, "loss": 0.7024, "step": 13090 }, { "epoch": 0.542542169174023, "grad_norm": 0.41772302985191345, "learning_rate": 2.287496373658254e-06, "loss": 0.6877, "step": 13091 }, { "epoch": 0.5425836130796966, "grad_norm": 
0.4184700846672058, "learning_rate": 2.2872891541298852e-06, "loss": 0.6973, "step": 13092 }, { "epoch": 0.5426250569853703, "grad_norm": 0.3956819772720337, "learning_rate": 2.287081934601517e-06, "loss": 0.7009, "step": 13093 }, { "epoch": 0.542666500891044, "grad_norm": 0.4205375909805298, "learning_rate": 2.286874715073149e-06, "loss": 0.6975, "step": 13094 }, { "epoch": 0.5427079447967177, "grad_norm": 0.42318397760391235, "learning_rate": 2.2866674955447802e-06, "loss": 0.6733, "step": 13095 }, { "epoch": 0.5427493887023913, "grad_norm": 0.38692793250083923, "learning_rate": 2.286460276016412e-06, "loss": 0.6378, "step": 13096 }, { "epoch": 0.5427908326080649, "grad_norm": 0.4002832770347595, "learning_rate": 2.2862530564880434e-06, "loss": 0.6361, "step": 13097 }, { "epoch": 0.5428322765137387, "grad_norm": 0.40159061551094055, "learning_rate": 2.2860458369596752e-06, "loss": 0.7712, "step": 13098 }, { "epoch": 0.5428737204194123, "grad_norm": 0.40783387422561646, "learning_rate": 2.285838617431307e-06, "loss": 0.6782, "step": 13099 }, { "epoch": 0.542915164325086, "grad_norm": 0.38862863183021545, "learning_rate": 2.2856313979029384e-06, "loss": 0.6938, "step": 13100 }, { "epoch": 0.5429566082307596, "grad_norm": 0.42051997780799866, "learning_rate": 2.2854241783745703e-06, "loss": 0.6851, "step": 13101 }, { "epoch": 0.5429980521364334, "grad_norm": 0.3822377920150757, "learning_rate": 2.285216958846202e-06, "loss": 0.6453, "step": 13102 }, { "epoch": 0.543039496042107, "grad_norm": 0.42325931787490845, "learning_rate": 2.2850097393178334e-06, "loss": 0.6246, "step": 13103 }, { "epoch": 0.5430809399477807, "grad_norm": 0.4015417695045471, "learning_rate": 2.2848025197894653e-06, "loss": 0.6528, "step": 13104 }, { "epoch": 0.5431223838534543, "grad_norm": 0.37978196144104004, "learning_rate": 2.2845953002610966e-06, "loss": 0.6548, "step": 13105 }, { "epoch": 0.543163827759128, "grad_norm": 0.4474363923072815, "learning_rate": 2.2843880807327285e-06, "loss": 
0.7147, "step": 13106 }, { "epoch": 0.5432052716648017, "grad_norm": 0.38809752464294434, "learning_rate": 2.2841808612043603e-06, "loss": 0.6755, "step": 13107 }, { "epoch": 0.5432467155704753, "grad_norm": 0.40150949358940125, "learning_rate": 2.2839736416759916e-06, "loss": 0.6897, "step": 13108 }, { "epoch": 0.5432881594761491, "grad_norm": 0.3872639834880829, "learning_rate": 2.2837664221476235e-06, "loss": 0.7091, "step": 13109 }, { "epoch": 0.5433296033818227, "grad_norm": 0.4326481819152832, "learning_rate": 2.283559202619255e-06, "loss": 0.7122, "step": 13110 }, { "epoch": 0.5433710472874964, "grad_norm": 0.40404340624809265, "learning_rate": 2.2833519830908867e-06, "loss": 0.6909, "step": 13111 }, { "epoch": 0.54341249119317, "grad_norm": 0.4216577112674713, "learning_rate": 2.2831447635625185e-06, "loss": 0.689, "step": 13112 }, { "epoch": 0.5434539350988438, "grad_norm": 0.39716944098472595, "learning_rate": 2.28293754403415e-06, "loss": 0.67, "step": 13113 }, { "epoch": 0.5434953790045174, "grad_norm": 0.4619634449481964, "learning_rate": 2.2827303245057817e-06, "loss": 0.6847, "step": 13114 }, { "epoch": 0.543536822910191, "grad_norm": 0.40128597617149353, "learning_rate": 2.282523104977413e-06, "loss": 0.6565, "step": 13115 }, { "epoch": 0.5435782668158647, "grad_norm": 0.4360540807247162, "learning_rate": 2.282315885449045e-06, "loss": 0.6587, "step": 13116 }, { "epoch": 0.5436197107215384, "grad_norm": 0.40819254517555237, "learning_rate": 2.2821086659206767e-06, "loss": 0.7219, "step": 13117 }, { "epoch": 0.5436611546272121, "grad_norm": 0.4125180244445801, "learning_rate": 2.281901446392308e-06, "loss": 0.6675, "step": 13118 }, { "epoch": 0.5437025985328857, "grad_norm": 0.430863618850708, "learning_rate": 2.28169422686394e-06, "loss": 0.6963, "step": 13119 }, { "epoch": 0.5437440424385594, "grad_norm": 0.35979706048965454, "learning_rate": 2.2814870073355717e-06, "loss": 0.6492, "step": 13120 }, { "epoch": 0.5437854863442331, "grad_norm": 
0.43866249918937683, "learning_rate": 2.281279787807203e-06, "loss": 0.7395, "step": 13121 }, { "epoch": 0.5438269302499068, "grad_norm": 0.4313240647315979, "learning_rate": 2.281072568278835e-06, "loss": 0.6444, "step": 13122 }, { "epoch": 0.5438683741555804, "grad_norm": 0.4026164412498474, "learning_rate": 2.2808653487504662e-06, "loss": 0.6144, "step": 13123 }, { "epoch": 0.543909818061254, "grad_norm": 0.3814598321914673, "learning_rate": 2.280658129222098e-06, "loss": 0.6433, "step": 13124 }, { "epoch": 0.5439512619669278, "grad_norm": 0.4032510221004486, "learning_rate": 2.28045090969373e-06, "loss": 0.6511, "step": 13125 }, { "epoch": 0.5439927058726014, "grad_norm": 0.3973713517189026, "learning_rate": 2.2802436901653612e-06, "loss": 0.677, "step": 13126 }, { "epoch": 0.5440341497782751, "grad_norm": 0.39668890833854675, "learning_rate": 2.280036470636993e-06, "loss": 0.6393, "step": 13127 }, { "epoch": 0.5440755936839488, "grad_norm": 0.42134392261505127, "learning_rate": 2.279829251108625e-06, "loss": 0.718, "step": 13128 }, { "epoch": 0.5441170375896225, "grad_norm": 0.38493219017982483, "learning_rate": 2.2796220315802563e-06, "loss": 0.6833, "step": 13129 }, { "epoch": 0.5441584814952961, "grad_norm": 0.40522825717926025, "learning_rate": 2.279414812051888e-06, "loss": 0.7354, "step": 13130 }, { "epoch": 0.5441999254009698, "grad_norm": 0.43816831707954407, "learning_rate": 2.2792075925235194e-06, "loss": 0.7561, "step": 13131 }, { "epoch": 0.5442413693066435, "grad_norm": 0.4227769076824188, "learning_rate": 2.2790003729951513e-06, "loss": 0.6897, "step": 13132 }, { "epoch": 0.5442828132123171, "grad_norm": 0.4518188238143921, "learning_rate": 2.278793153466783e-06, "loss": 0.7461, "step": 13133 }, { "epoch": 0.5443242571179908, "grad_norm": 0.5409524440765381, "learning_rate": 2.2785859339384144e-06, "loss": 0.6655, "step": 13134 }, { "epoch": 0.5443657010236644, "grad_norm": 0.4639953374862671, "learning_rate": 2.2783787144100463e-06, "loss": 
0.7366, "step": 13135 }, { "epoch": 0.5444071449293382, "grad_norm": 0.41584667563438416, "learning_rate": 2.2781714948816776e-06, "loss": 0.6802, "step": 13136 }, { "epoch": 0.5444485888350118, "grad_norm": 0.4627249538898468, "learning_rate": 2.2779642753533095e-06, "loss": 0.7185, "step": 13137 }, { "epoch": 0.5444900327406855, "grad_norm": 0.40552303194999695, "learning_rate": 2.2777570558249413e-06, "loss": 0.6664, "step": 13138 }, { "epoch": 0.5445314766463591, "grad_norm": 0.4061635136604309, "learning_rate": 2.2775498362965726e-06, "loss": 0.6912, "step": 13139 }, { "epoch": 0.5445729205520328, "grad_norm": 0.4271814525127411, "learning_rate": 2.2773426167682045e-06, "loss": 0.7148, "step": 13140 }, { "epoch": 0.5446143644577065, "grad_norm": 0.44705790281295776, "learning_rate": 2.2771353972398363e-06, "loss": 0.707, "step": 13141 }, { "epoch": 0.5446558083633801, "grad_norm": 0.39973461627960205, "learning_rate": 2.2769281777114677e-06, "loss": 0.7122, "step": 13142 }, { "epoch": 0.5446972522690539, "grad_norm": 0.4686422049999237, "learning_rate": 2.2767209581830995e-06, "loss": 0.7623, "step": 13143 }, { "epoch": 0.5447386961747275, "grad_norm": 0.41152653098106384, "learning_rate": 2.276513738654731e-06, "loss": 0.6787, "step": 13144 }, { "epoch": 0.5447801400804012, "grad_norm": 0.4192121922969818, "learning_rate": 2.2763065191263627e-06, "loss": 0.6924, "step": 13145 }, { "epoch": 0.5448215839860748, "grad_norm": 0.4294746518135071, "learning_rate": 2.2760992995979945e-06, "loss": 0.7168, "step": 13146 }, { "epoch": 0.5448630278917486, "grad_norm": 0.37345921993255615, "learning_rate": 2.275892080069626e-06, "loss": 0.6898, "step": 13147 }, { "epoch": 0.5449044717974222, "grad_norm": 0.42853042483329773, "learning_rate": 2.2756848605412577e-06, "loss": 0.6921, "step": 13148 }, { "epoch": 0.5449459157030958, "grad_norm": 0.43928271532058716, "learning_rate": 2.2754776410128895e-06, "loss": 0.657, "step": 13149 }, { "epoch": 0.5449873596087695, 
"grad_norm": 0.3801010549068451, "learning_rate": 2.275270421484521e-06, "loss": 0.6858, "step": 13150 }, { "epoch": 0.5450288035144432, "grad_norm": 0.41162487864494324, "learning_rate": 2.2750632019561527e-06, "loss": 0.6892, "step": 13151 }, { "epoch": 0.5450702474201169, "grad_norm": 0.4198351502418518, "learning_rate": 2.274855982427784e-06, "loss": 0.6488, "step": 13152 }, { "epoch": 0.5451116913257905, "grad_norm": 0.446866512298584, "learning_rate": 2.274648762899416e-06, "loss": 0.6951, "step": 13153 }, { "epoch": 0.5451531352314642, "grad_norm": 0.4092423915863037, "learning_rate": 2.2744415433710477e-06, "loss": 0.6873, "step": 13154 }, { "epoch": 0.5451945791371379, "grad_norm": 0.4121929705142975, "learning_rate": 2.274234323842679e-06, "loss": 0.6512, "step": 13155 }, { "epoch": 0.5452360230428116, "grad_norm": 0.41316282749176025, "learning_rate": 2.274027104314311e-06, "loss": 0.6753, "step": 13156 }, { "epoch": 0.5452774669484852, "grad_norm": 0.4032987654209137, "learning_rate": 2.2738198847859422e-06, "loss": 0.7125, "step": 13157 }, { "epoch": 0.5453189108541588, "grad_norm": 0.391738623380661, "learning_rate": 2.273612665257574e-06, "loss": 0.6499, "step": 13158 }, { "epoch": 0.5453603547598326, "grad_norm": 0.41248148679733276, "learning_rate": 2.273405445729206e-06, "loss": 0.6793, "step": 13159 }, { "epoch": 0.5454017986655062, "grad_norm": 0.45113813877105713, "learning_rate": 2.2731982262008373e-06, "loss": 0.7449, "step": 13160 }, { "epoch": 0.5454432425711799, "grad_norm": 0.4089980125427246, "learning_rate": 2.272991006672469e-06, "loss": 0.6821, "step": 13161 }, { "epoch": 0.5454846864768536, "grad_norm": 0.39982855319976807, "learning_rate": 2.2727837871441004e-06, "loss": 0.6772, "step": 13162 }, { "epoch": 0.5455261303825273, "grad_norm": 0.4435079097747803, "learning_rate": 2.2725765676157323e-06, "loss": 0.6919, "step": 13163 }, { "epoch": 0.5455675742882009, "grad_norm": 0.4188969135284424, "learning_rate": 2.272369348087364e-06, 
"loss": 0.6659, "step": 13164 }, { "epoch": 0.5456090181938746, "grad_norm": 0.38747015595436096, "learning_rate": 2.2721621285589955e-06, "loss": 0.6436, "step": 13165 }, { "epoch": 0.5456504620995483, "grad_norm": 0.40399423241615295, "learning_rate": 2.2719549090306273e-06, "loss": 0.6725, "step": 13166 }, { "epoch": 0.5456919060052219, "grad_norm": 0.4200986325740814, "learning_rate": 2.271747689502259e-06, "loss": 0.7147, "step": 13167 }, { "epoch": 0.5457333499108956, "grad_norm": 0.4272199869155884, "learning_rate": 2.2715404699738905e-06, "loss": 0.7373, "step": 13168 }, { "epoch": 0.5457747938165692, "grad_norm": 0.5077555179595947, "learning_rate": 2.2713332504455223e-06, "loss": 0.738, "step": 13169 }, { "epoch": 0.545816237722243, "grad_norm": 0.3806942403316498, "learning_rate": 2.2711260309171537e-06, "loss": 0.6208, "step": 13170 }, { "epoch": 0.5458576816279166, "grad_norm": 0.3827495574951172, "learning_rate": 2.2709188113887855e-06, "loss": 0.6858, "step": 13171 }, { "epoch": 0.5458991255335903, "grad_norm": 0.3979736566543579, "learning_rate": 2.2707115918604173e-06, "loss": 0.6787, "step": 13172 }, { "epoch": 0.545940569439264, "grad_norm": 0.4549827575683594, "learning_rate": 2.2705043723320487e-06, "loss": 0.7212, "step": 13173 }, { "epoch": 0.5459820133449377, "grad_norm": 0.4281477928161621, "learning_rate": 2.2702971528036805e-06, "loss": 0.7002, "step": 13174 }, { "epoch": 0.5460234572506113, "grad_norm": 0.4416317343711853, "learning_rate": 2.2700899332753123e-06, "loss": 0.6508, "step": 13175 }, { "epoch": 0.5460649011562849, "grad_norm": 0.4436587989330292, "learning_rate": 2.2698827137469437e-06, "loss": 0.6719, "step": 13176 }, { "epoch": 0.5461063450619587, "grad_norm": 0.4420471489429474, "learning_rate": 2.2696754942185755e-06, "loss": 0.6978, "step": 13177 }, { "epoch": 0.5461477889676323, "grad_norm": 0.40397176146507263, "learning_rate": 2.269468274690207e-06, "loss": 0.6769, "step": 13178 }, { "epoch": 0.546189232873306, 
"grad_norm": 0.4033898115158081, "learning_rate": 2.2692610551618387e-06, "loss": 0.6987, "step": 13179 }, { "epoch": 0.5462306767789796, "grad_norm": 0.4174647927284241, "learning_rate": 2.26905383563347e-06, "loss": 0.6802, "step": 13180 }, { "epoch": 0.5462721206846534, "grad_norm": 0.45528751611709595, "learning_rate": 2.268846616105102e-06, "loss": 0.6853, "step": 13181 }, { "epoch": 0.546313564590327, "grad_norm": 0.4134081304073334, "learning_rate": 2.2686393965767337e-06, "loss": 0.7244, "step": 13182 }, { "epoch": 0.5463550084960007, "grad_norm": 0.377729207277298, "learning_rate": 2.2684321770483655e-06, "loss": 0.6685, "step": 13183 }, { "epoch": 0.5463964524016743, "grad_norm": 0.3915073871612549, "learning_rate": 2.268224957519997e-06, "loss": 0.6958, "step": 13184 }, { "epoch": 0.546437896307348, "grad_norm": 0.38253268599510193, "learning_rate": 2.2680177379916287e-06, "loss": 0.6853, "step": 13185 }, { "epoch": 0.5464793402130217, "grad_norm": 0.41328439116477966, "learning_rate": 2.26781051846326e-06, "loss": 0.6653, "step": 13186 }, { "epoch": 0.5465207841186953, "grad_norm": 0.4042830765247345, "learning_rate": 2.267603298934892e-06, "loss": 0.6847, "step": 13187 }, { "epoch": 0.546562228024369, "grad_norm": 0.4443589746952057, "learning_rate": 2.2673960794065233e-06, "loss": 0.7046, "step": 13188 }, { "epoch": 0.5466036719300427, "grad_norm": 0.41989824175834656, "learning_rate": 2.267188859878155e-06, "loss": 0.6438, "step": 13189 }, { "epoch": 0.5466451158357164, "grad_norm": 0.4113078713417053, "learning_rate": 2.266981640349787e-06, "loss": 0.6528, "step": 13190 }, { "epoch": 0.54668655974139, "grad_norm": 0.42277461290359497, "learning_rate": 2.2667744208214183e-06, "loss": 0.6592, "step": 13191 }, { "epoch": 0.5467280036470638, "grad_norm": 0.4090423583984375, "learning_rate": 2.26656720129305e-06, "loss": 0.6849, "step": 13192 }, { "epoch": 0.5467694475527374, "grad_norm": 0.43814489245414734, "learning_rate": 2.266359981764682e-06, 
"loss": 0.662, "step": 13193 }, { "epoch": 0.546810891458411, "grad_norm": 0.4450767934322357, "learning_rate": 2.2661527622363133e-06, "loss": 0.7466, "step": 13194 }, { "epoch": 0.5468523353640847, "grad_norm": 0.4039347767829895, "learning_rate": 2.265945542707945e-06, "loss": 0.6963, "step": 13195 }, { "epoch": 0.5468937792697584, "grad_norm": 0.39635682106018066, "learning_rate": 2.2657383231795765e-06, "loss": 0.7201, "step": 13196 }, { "epoch": 0.5469352231754321, "grad_norm": 0.41493159532546997, "learning_rate": 2.2655311036512083e-06, "loss": 0.6906, "step": 13197 }, { "epoch": 0.5469766670811057, "grad_norm": 0.43690812587738037, "learning_rate": 2.26532388412284e-06, "loss": 0.7153, "step": 13198 }, { "epoch": 0.5470181109867794, "grad_norm": 0.44056639075279236, "learning_rate": 2.2651166645944715e-06, "loss": 0.6562, "step": 13199 }, { "epoch": 0.5470595548924531, "grad_norm": 0.41806933283805847, "learning_rate": 2.2649094450661033e-06, "loss": 0.6848, "step": 13200 }, { "epoch": 0.5471009987981267, "grad_norm": 0.3945106565952301, "learning_rate": 2.264702225537735e-06, "loss": 0.6643, "step": 13201 }, { "epoch": 0.5471424427038004, "grad_norm": 0.4359179735183716, "learning_rate": 2.2644950060093665e-06, "loss": 0.7153, "step": 13202 }, { "epoch": 0.547183886609474, "grad_norm": 0.43853121995925903, "learning_rate": 2.2642877864809983e-06, "loss": 0.6973, "step": 13203 }, { "epoch": 0.5472253305151478, "grad_norm": 0.44614842534065247, "learning_rate": 2.2640805669526297e-06, "loss": 0.6396, "step": 13204 }, { "epoch": 0.5472667744208214, "grad_norm": 0.44142046570777893, "learning_rate": 2.2638733474242615e-06, "loss": 0.6945, "step": 13205 }, { "epoch": 0.5473082183264951, "grad_norm": 0.8966847658157349, "learning_rate": 2.263666127895893e-06, "loss": 0.6576, "step": 13206 }, { "epoch": 0.5473496622321687, "grad_norm": 0.39809247851371765, "learning_rate": 2.2634589083675247e-06, "loss": 0.7126, "step": 13207 }, { "epoch": 0.5473911061378425, 
"grad_norm": 0.431170791387558, "learning_rate": 2.2632516888391565e-06, "loss": 0.6633, "step": 13208 }, { "epoch": 0.5474325500435161, "grad_norm": 0.41954490542411804, "learning_rate": 2.2630444693107883e-06, "loss": 0.6902, "step": 13209 }, { "epoch": 0.5474739939491897, "grad_norm": 0.452528178691864, "learning_rate": 2.2628372497824197e-06, "loss": 0.6694, "step": 13210 }, { "epoch": 0.5475154378548635, "grad_norm": 0.4167577028274536, "learning_rate": 2.2626300302540515e-06, "loss": 0.627, "step": 13211 }, { "epoch": 0.5475568817605371, "grad_norm": 0.41400668025016785, "learning_rate": 2.262422810725683e-06, "loss": 0.6821, "step": 13212 }, { "epoch": 0.5475983256662108, "grad_norm": 0.3875214755535126, "learning_rate": 2.2622155911973147e-06, "loss": 0.6517, "step": 13213 }, { "epoch": 0.5476397695718844, "grad_norm": 0.38791581988334656, "learning_rate": 2.262008371668946e-06, "loss": 0.6417, "step": 13214 }, { "epoch": 0.5476812134775582, "grad_norm": 0.39880916476249695, "learning_rate": 2.261801152140578e-06, "loss": 0.6558, "step": 13215 }, { "epoch": 0.5477226573832318, "grad_norm": 0.3855287432670593, "learning_rate": 2.2615939326122097e-06, "loss": 0.7129, "step": 13216 }, { "epoch": 0.5477641012889055, "grad_norm": 0.39640384912490845, "learning_rate": 2.2613867130838415e-06, "loss": 0.6631, "step": 13217 }, { "epoch": 0.5478055451945791, "grad_norm": 0.4335428476333618, "learning_rate": 2.261179493555473e-06, "loss": 0.6565, "step": 13218 }, { "epoch": 0.5478469891002528, "grad_norm": 0.4185199737548828, "learning_rate": 2.2609722740271047e-06, "loss": 0.6544, "step": 13219 }, { "epoch": 0.5478884330059265, "grad_norm": 0.44557300209999084, "learning_rate": 2.260765054498736e-06, "loss": 0.6923, "step": 13220 }, { "epoch": 0.5479298769116001, "grad_norm": 0.4069538116455078, "learning_rate": 2.260557834970368e-06, "loss": 0.6913, "step": 13221 }, { "epoch": 0.5479713208172738, "grad_norm": 0.39331233501434326, "learning_rate": 
2.2603506154419993e-06, "loss": 0.6794, "step": 13222 }, { "epoch": 0.5480127647229475, "grad_norm": 0.3724033236503601, "learning_rate": 2.260143395913631e-06, "loss": 0.6251, "step": 13223 }, { "epoch": 0.5480542086286212, "grad_norm": 0.4439789354801178, "learning_rate": 2.259936176385263e-06, "loss": 0.7126, "step": 13224 }, { "epoch": 0.5480956525342948, "grad_norm": 0.443485289812088, "learning_rate": 2.2597289568568947e-06, "loss": 0.6924, "step": 13225 }, { "epoch": 0.5481370964399686, "grad_norm": 0.4384426474571228, "learning_rate": 2.259521737328526e-06, "loss": 0.7039, "step": 13226 }, { "epoch": 0.5481785403456422, "grad_norm": 0.44989538192749023, "learning_rate": 2.259314517800158e-06, "loss": 0.7468, "step": 13227 }, { "epoch": 0.5482199842513158, "grad_norm": 0.4054316282272339, "learning_rate": 2.2591072982717893e-06, "loss": 0.6938, "step": 13228 }, { "epoch": 0.5482614281569895, "grad_norm": 0.42390158772468567, "learning_rate": 2.258900078743421e-06, "loss": 0.7091, "step": 13229 }, { "epoch": 0.5483028720626631, "grad_norm": 0.4133685827255249, "learning_rate": 2.2586928592150525e-06, "loss": 0.7507, "step": 13230 }, { "epoch": 0.5483443159683369, "grad_norm": 0.41284993290901184, "learning_rate": 2.2584856396866843e-06, "loss": 0.7224, "step": 13231 }, { "epoch": 0.5483857598740105, "grad_norm": 0.4839017689228058, "learning_rate": 2.2582784201583157e-06, "loss": 0.6425, "step": 13232 }, { "epoch": 0.5484272037796842, "grad_norm": 0.42022016644477844, "learning_rate": 2.2580712006299475e-06, "loss": 0.6659, "step": 13233 }, { "epoch": 0.5484686476853579, "grad_norm": 0.39305222034454346, "learning_rate": 2.2578639811015793e-06, "loss": 0.6298, "step": 13234 }, { "epoch": 0.5485100915910316, "grad_norm": 0.4334964454174042, "learning_rate": 2.257656761573211e-06, "loss": 0.6958, "step": 13235 }, { "epoch": 0.5485515354967052, "grad_norm": 0.4476449489593506, "learning_rate": 2.2574495420448425e-06, "loss": 0.7383, "step": 13236 }, { "epoch": 
0.5485929794023788, "grad_norm": 0.4088231921195984, "learning_rate": 2.2572423225164743e-06, "loss": 0.6968, "step": 13237 }, { "epoch": 0.5486344233080526, "grad_norm": 0.40412062406539917, "learning_rate": 2.2570351029881057e-06, "loss": 0.6982, "step": 13238 }, { "epoch": 0.5486758672137262, "grad_norm": 0.4167626202106476, "learning_rate": 2.2568278834597375e-06, "loss": 0.673, "step": 13239 }, { "epoch": 0.5487173111193999, "grad_norm": 0.4079391062259674, "learning_rate": 2.256620663931369e-06, "loss": 0.6562, "step": 13240 }, { "epoch": 0.5487587550250735, "grad_norm": 0.4207518994808197, "learning_rate": 2.2564134444030007e-06, "loss": 0.6934, "step": 13241 }, { "epoch": 0.5488001989307473, "grad_norm": 0.4155116081237793, "learning_rate": 2.2562062248746325e-06, "loss": 0.7029, "step": 13242 }, { "epoch": 0.5488416428364209, "grad_norm": 0.40218666195869446, "learning_rate": 2.2559990053462643e-06, "loss": 0.6495, "step": 13243 }, { "epoch": 0.5488830867420946, "grad_norm": 0.3839605450630188, "learning_rate": 2.2557917858178957e-06, "loss": 0.6687, "step": 13244 }, { "epoch": 0.5489245306477682, "grad_norm": 0.4050431549549103, "learning_rate": 2.2555845662895275e-06, "loss": 0.696, "step": 13245 }, { "epoch": 0.5489659745534419, "grad_norm": 0.41162705421447754, "learning_rate": 2.255377346761159e-06, "loss": 0.6327, "step": 13246 }, { "epoch": 0.5490074184591156, "grad_norm": 0.4483102560043335, "learning_rate": 2.2551701272327907e-06, "loss": 0.6671, "step": 13247 }, { "epoch": 0.5490488623647892, "grad_norm": 0.3960977792739868, "learning_rate": 2.254962907704422e-06, "loss": 0.6934, "step": 13248 }, { "epoch": 0.549090306270463, "grad_norm": 0.4219824969768524, "learning_rate": 2.254755688176054e-06, "loss": 0.6908, "step": 13249 }, { "epoch": 0.5491317501761366, "grad_norm": 0.3973199129104614, "learning_rate": 2.2545484686476857e-06, "loss": 0.645, "step": 13250 }, { "epoch": 0.5491731940818103, "grad_norm": 0.4612855017185211, "learning_rate": 
2.2543412491193175e-06, "loss": 0.6764, "step": 13251 }, { "epoch": 0.5492146379874839, "grad_norm": 0.384584903717041, "learning_rate": 2.254134029590949e-06, "loss": 0.6985, "step": 13252 }, { "epoch": 0.5492560818931577, "grad_norm": 0.45565611124038696, "learning_rate": 2.2539268100625807e-06, "loss": 0.7134, "step": 13253 }, { "epoch": 0.5492975257988313, "grad_norm": 0.4270487427711487, "learning_rate": 2.253719590534212e-06, "loss": 0.7317, "step": 13254 }, { "epoch": 0.5493389697045049, "grad_norm": 0.4032677412033081, "learning_rate": 2.253512371005844e-06, "loss": 0.6855, "step": 13255 }, { "epoch": 0.5493804136101786, "grad_norm": 0.38839104771614075, "learning_rate": 2.2533051514774753e-06, "loss": 0.6567, "step": 13256 }, { "epoch": 0.5494218575158523, "grad_norm": 0.4111059606075287, "learning_rate": 2.253097931949107e-06, "loss": 0.6907, "step": 13257 }, { "epoch": 0.549463301421526, "grad_norm": 0.4253048598766327, "learning_rate": 2.2528907124207385e-06, "loss": 0.7207, "step": 13258 }, { "epoch": 0.5495047453271996, "grad_norm": 0.4157562553882599, "learning_rate": 2.2526834928923707e-06, "loss": 0.6483, "step": 13259 }, { "epoch": 0.5495461892328733, "grad_norm": 0.42656418681144714, "learning_rate": 2.252476273364002e-06, "loss": 0.6869, "step": 13260 }, { "epoch": 0.549587633138547, "grad_norm": 0.36805260181427, "learning_rate": 2.252269053835634e-06, "loss": 0.6427, "step": 13261 }, { "epoch": 0.5496290770442206, "grad_norm": 0.41254109144210815, "learning_rate": 2.2520618343072653e-06, "loss": 0.6276, "step": 13262 }, { "epoch": 0.5496705209498943, "grad_norm": 0.3962934911251068, "learning_rate": 2.251854614778897e-06, "loss": 0.67, "step": 13263 }, { "epoch": 0.549711964855568, "grad_norm": 0.39917755126953125, "learning_rate": 2.2516473952505285e-06, "loss": 0.6926, "step": 13264 }, { "epoch": 0.5497534087612417, "grad_norm": 0.43411973118782043, "learning_rate": 2.2514401757221603e-06, "loss": 0.6863, "step": 13265 }, { "epoch": 
0.5497948526669153, "grad_norm": 0.4580281674861908, "learning_rate": 2.2512329561937917e-06, "loss": 0.7317, "step": 13266 }, { "epoch": 0.549836296572589, "grad_norm": 0.40566927194595337, "learning_rate": 2.2510257366654235e-06, "loss": 0.6594, "step": 13267 }, { "epoch": 0.5498777404782627, "grad_norm": 0.4104520380496979, "learning_rate": 2.2508185171370553e-06, "loss": 0.7013, "step": 13268 }, { "epoch": 0.5499191843839364, "grad_norm": 0.4234115481376648, "learning_rate": 2.250611297608687e-06, "loss": 0.7188, "step": 13269 }, { "epoch": 0.54996062828961, "grad_norm": 0.40120241045951843, "learning_rate": 2.2504040780803185e-06, "loss": 0.7178, "step": 13270 }, { "epoch": 0.5500020721952836, "grad_norm": 0.4030784070491791, "learning_rate": 2.2501968585519503e-06, "loss": 0.6787, "step": 13271 }, { "epoch": 0.5500435161009574, "grad_norm": 0.40126433968544006, "learning_rate": 2.2499896390235817e-06, "loss": 0.6936, "step": 13272 }, { "epoch": 0.550084960006631, "grad_norm": 0.3900946080684662, "learning_rate": 2.2497824194952135e-06, "loss": 0.6633, "step": 13273 }, { "epoch": 0.5501264039123047, "grad_norm": 0.39549195766448975, "learning_rate": 2.249575199966845e-06, "loss": 0.6417, "step": 13274 }, { "epoch": 0.5501678478179783, "grad_norm": 0.3908202350139618, "learning_rate": 2.2493679804384767e-06, "loss": 0.6134, "step": 13275 }, { "epoch": 0.5502092917236521, "grad_norm": 0.39231616258621216, "learning_rate": 2.2491607609101085e-06, "loss": 0.6418, "step": 13276 }, { "epoch": 0.5502507356293257, "grad_norm": 0.39186495542526245, "learning_rate": 2.2489535413817403e-06, "loss": 0.6826, "step": 13277 }, { "epoch": 0.5502921795349994, "grad_norm": 0.4371165335178375, "learning_rate": 2.2487463218533717e-06, "loss": 0.71, "step": 13278 }, { "epoch": 0.550333623440673, "grad_norm": 0.42693352699279785, "learning_rate": 2.2485391023250035e-06, "loss": 0.6573, "step": 13279 }, { "epoch": 0.5503750673463467, "grad_norm": 0.44119614362716675, 
"learning_rate": 2.248331882796635e-06, "loss": 0.7068, "step": 13280 }, { "epoch": 0.5504165112520204, "grad_norm": 0.39260008931159973, "learning_rate": 2.2481246632682667e-06, "loss": 0.7062, "step": 13281 }, { "epoch": 0.550457955157694, "grad_norm": 0.41641637682914734, "learning_rate": 2.247917443739898e-06, "loss": 0.6481, "step": 13282 }, { "epoch": 0.5504993990633678, "grad_norm": 0.49975037574768066, "learning_rate": 2.24771022421153e-06, "loss": 0.6426, "step": 13283 }, { "epoch": 0.5505408429690414, "grad_norm": 0.3906380534172058, "learning_rate": 2.2475030046831613e-06, "loss": 0.7134, "step": 13284 }, { "epoch": 0.5505822868747151, "grad_norm": 0.4052414894104004, "learning_rate": 2.2472957851547935e-06, "loss": 0.6818, "step": 13285 }, { "epoch": 0.5506237307803887, "grad_norm": 0.4188137650489807, "learning_rate": 2.247088565626425e-06, "loss": 0.7053, "step": 13286 }, { "epoch": 0.5506651746860625, "grad_norm": 0.4122617840766907, "learning_rate": 2.2468813460980567e-06, "loss": 0.6829, "step": 13287 }, { "epoch": 0.5507066185917361, "grad_norm": 0.4464053213596344, "learning_rate": 2.246674126569688e-06, "loss": 0.7141, "step": 13288 }, { "epoch": 0.5507480624974097, "grad_norm": 0.428396999835968, "learning_rate": 2.24646690704132e-06, "loss": 0.7168, "step": 13289 }, { "epoch": 0.5507895064030834, "grad_norm": 0.40052929520606995, "learning_rate": 2.2462596875129513e-06, "loss": 0.6953, "step": 13290 }, { "epoch": 0.5508309503087571, "grad_norm": 0.4280436635017395, "learning_rate": 2.246052467984583e-06, "loss": 0.6721, "step": 13291 }, { "epoch": 0.5508723942144308, "grad_norm": 0.41729235649108887, "learning_rate": 2.2458452484562145e-06, "loss": 0.7041, "step": 13292 }, { "epoch": 0.5509138381201044, "grad_norm": 0.42635929584503174, "learning_rate": 2.2456380289278463e-06, "loss": 0.7134, "step": 13293 }, { "epoch": 0.5509552820257781, "grad_norm": 0.4178532361984253, "learning_rate": 2.245430809399478e-06, "loss": 0.7241, "step": 13294 }, 
{ "epoch": 0.5509967259314518, "grad_norm": 0.39530080556869507, "learning_rate": 2.24522358987111e-06, "loss": 0.688, "step": 13295 }, { "epoch": 0.5510381698371255, "grad_norm": 0.4052293598651886, "learning_rate": 2.2450163703427413e-06, "loss": 0.6963, "step": 13296 }, { "epoch": 0.5510796137427991, "grad_norm": 0.4075140357017517, "learning_rate": 2.244809150814373e-06, "loss": 0.677, "step": 13297 }, { "epoch": 0.5511210576484727, "grad_norm": 0.40253809094429016, "learning_rate": 2.2446019312860045e-06, "loss": 0.6547, "step": 13298 }, { "epoch": 0.5511625015541465, "grad_norm": 0.4387081265449524, "learning_rate": 2.2443947117576363e-06, "loss": 0.6918, "step": 13299 }, { "epoch": 0.5512039454598201, "grad_norm": 0.40122395753860474, "learning_rate": 2.2441874922292677e-06, "loss": 0.6665, "step": 13300 }, { "epoch": 0.5512453893654938, "grad_norm": 0.40657299757003784, "learning_rate": 2.2439802727008995e-06, "loss": 0.708, "step": 13301 }, { "epoch": 0.5512868332711675, "grad_norm": 0.38221287727355957, "learning_rate": 2.2437730531725313e-06, "loss": 0.6476, "step": 13302 }, { "epoch": 0.5513282771768412, "grad_norm": 0.39684078097343445, "learning_rate": 2.243565833644163e-06, "loss": 0.7073, "step": 13303 }, { "epoch": 0.5513697210825148, "grad_norm": 0.42343392968177795, "learning_rate": 2.2433586141157945e-06, "loss": 0.7365, "step": 13304 }, { "epoch": 0.5514111649881885, "grad_norm": 0.4186185300350189, "learning_rate": 2.2431513945874263e-06, "loss": 0.6666, "step": 13305 }, { "epoch": 0.5514526088938622, "grad_norm": 0.4141731858253479, "learning_rate": 2.2429441750590577e-06, "loss": 0.666, "step": 13306 }, { "epoch": 0.5514940527995358, "grad_norm": 0.4046260416507721, "learning_rate": 2.2427369555306895e-06, "loss": 0.6678, "step": 13307 }, { "epoch": 0.5515354967052095, "grad_norm": 0.40718501806259155, "learning_rate": 2.242529736002321e-06, "loss": 0.6548, "step": 13308 }, { "epoch": 0.5515769406108831, "grad_norm": 0.4141186475753784, 
"learning_rate": 2.2423225164739527e-06, "loss": 0.6973, "step": 13309 }, { "epoch": 0.5516183845165569, "grad_norm": 0.3941904604434967, "learning_rate": 2.242115296945584e-06, "loss": 0.6531, "step": 13310 }, { "epoch": 0.5516598284222305, "grad_norm": 0.37318119406700134, "learning_rate": 2.2419080774172163e-06, "loss": 0.6586, "step": 13311 }, { "epoch": 0.5517012723279042, "grad_norm": 0.40590736269950867, "learning_rate": 2.2417008578888477e-06, "loss": 0.6931, "step": 13312 }, { "epoch": 0.5517427162335778, "grad_norm": 0.43241578340530396, "learning_rate": 2.2414936383604795e-06, "loss": 0.738, "step": 13313 }, { "epoch": 0.5517841601392516, "grad_norm": 0.430996298789978, "learning_rate": 2.241286418832111e-06, "loss": 0.6921, "step": 13314 }, { "epoch": 0.5518256040449252, "grad_norm": 0.4031265079975128, "learning_rate": 2.2410791993037427e-06, "loss": 0.693, "step": 13315 }, { "epoch": 0.5518670479505988, "grad_norm": 0.43074992299079895, "learning_rate": 2.240871979775374e-06, "loss": 0.6172, "step": 13316 }, { "epoch": 0.5519084918562726, "grad_norm": 0.4218733608722687, "learning_rate": 2.240664760247006e-06, "loss": 0.7051, "step": 13317 }, { "epoch": 0.5519499357619462, "grad_norm": 0.38145074248313904, "learning_rate": 2.2404575407186373e-06, "loss": 0.6907, "step": 13318 }, { "epoch": 0.5519913796676199, "grad_norm": 0.40226516127586365, "learning_rate": 2.240250321190269e-06, "loss": 0.7002, "step": 13319 }, { "epoch": 0.5520328235732935, "grad_norm": 0.4473637640476227, "learning_rate": 2.240043101661901e-06, "loss": 0.7427, "step": 13320 }, { "epoch": 0.5520742674789673, "grad_norm": 0.3917519152164459, "learning_rate": 2.2398358821335327e-06, "loss": 0.6997, "step": 13321 }, { "epoch": 0.5521157113846409, "grad_norm": 0.3805406391620636, "learning_rate": 2.239628662605164e-06, "loss": 0.6067, "step": 13322 }, { "epoch": 0.5521571552903145, "grad_norm": 0.38390281796455383, "learning_rate": 2.239421443076796e-06, "loss": 0.6407, "step": 13323 
}, { "epoch": 0.5521985991959882, "grad_norm": 0.395438551902771, "learning_rate": 2.2392142235484273e-06, "loss": 0.6602, "step": 13324 }, { "epoch": 0.5522400431016619, "grad_norm": 0.39681094884872437, "learning_rate": 2.239007004020059e-06, "loss": 0.6621, "step": 13325 }, { "epoch": 0.5522814870073356, "grad_norm": 0.4080069065093994, "learning_rate": 2.2387997844916905e-06, "loss": 0.6978, "step": 13326 }, { "epoch": 0.5523229309130092, "grad_norm": 0.41217124462127686, "learning_rate": 2.2385925649633223e-06, "loss": 0.7383, "step": 13327 }, { "epoch": 0.552364374818683, "grad_norm": 0.40536928176879883, "learning_rate": 2.2383853454349537e-06, "loss": 0.6564, "step": 13328 }, { "epoch": 0.5524058187243566, "grad_norm": 0.40895333886146545, "learning_rate": 2.238178125906586e-06, "loss": 0.7205, "step": 13329 }, { "epoch": 0.5524472626300303, "grad_norm": 0.39871978759765625, "learning_rate": 2.2379709063782173e-06, "loss": 0.6755, "step": 13330 }, { "epoch": 0.5524887065357039, "grad_norm": 0.41123121976852417, "learning_rate": 2.237763686849849e-06, "loss": 0.7473, "step": 13331 }, { "epoch": 0.5525301504413775, "grad_norm": 0.40804433822631836, "learning_rate": 2.2375564673214805e-06, "loss": 0.6782, "step": 13332 }, { "epoch": 0.5525715943470513, "grad_norm": 0.4030790627002716, "learning_rate": 2.2373492477931123e-06, "loss": 0.6848, "step": 13333 }, { "epoch": 0.5526130382527249, "grad_norm": 0.3813791275024414, "learning_rate": 2.2371420282647437e-06, "loss": 0.6738, "step": 13334 }, { "epoch": 0.5526544821583986, "grad_norm": 0.43365219235420227, "learning_rate": 2.2369348087363755e-06, "loss": 0.6936, "step": 13335 }, { "epoch": 0.5526959260640723, "grad_norm": 0.43022313714027405, "learning_rate": 2.236727589208007e-06, "loss": 0.7285, "step": 13336 }, { "epoch": 0.552737369969746, "grad_norm": 0.4141666293144226, "learning_rate": 2.236520369679639e-06, "loss": 0.6554, "step": 13337 }, { "epoch": 0.5527788138754196, "grad_norm": 
0.39094865322113037, "learning_rate": 2.2363131501512705e-06, "loss": 0.6609, "step": 13338 }, { "epoch": 0.5528202577810933, "grad_norm": 0.40162554383277893, "learning_rate": 2.2361059306229023e-06, "loss": 0.613, "step": 13339 }, { "epoch": 0.552861701686767, "grad_norm": 0.4159516990184784, "learning_rate": 2.2358987110945337e-06, "loss": 0.6792, "step": 13340 }, { "epoch": 0.5529031455924406, "grad_norm": 0.4038596749305725, "learning_rate": 2.2356914915661655e-06, "loss": 0.6833, "step": 13341 }, { "epoch": 0.5529445894981143, "grad_norm": 0.3969716429710388, "learning_rate": 2.235484272037797e-06, "loss": 0.6929, "step": 13342 }, { "epoch": 0.5529860334037879, "grad_norm": 0.4287538528442383, "learning_rate": 2.2352770525094287e-06, "loss": 0.7012, "step": 13343 }, { "epoch": 0.5530274773094617, "grad_norm": 0.4244667887687683, "learning_rate": 2.23506983298106e-06, "loss": 0.672, "step": 13344 }, { "epoch": 0.5530689212151353, "grad_norm": 0.41264766454696655, "learning_rate": 2.234862613452692e-06, "loss": 0.6907, "step": 13345 }, { "epoch": 0.553110365120809, "grad_norm": 0.375772088766098, "learning_rate": 2.2346553939243237e-06, "loss": 0.6744, "step": 13346 }, { "epoch": 0.5531518090264826, "grad_norm": 0.4508207440376282, "learning_rate": 2.2344481743959555e-06, "loss": 0.7206, "step": 13347 }, { "epoch": 0.5531932529321564, "grad_norm": 0.4071665108203888, "learning_rate": 2.234240954867587e-06, "loss": 0.6775, "step": 13348 }, { "epoch": 0.55323469683783, "grad_norm": 0.4171450734138489, "learning_rate": 2.2340337353392187e-06, "loss": 0.6787, "step": 13349 }, { "epoch": 0.5532761407435036, "grad_norm": 0.4202789068222046, "learning_rate": 2.23382651581085e-06, "loss": 0.6863, "step": 13350 }, { "epoch": 0.5533175846491774, "grad_norm": 0.43829914927482605, "learning_rate": 2.233619296282482e-06, "loss": 0.6877, "step": 13351 }, { "epoch": 0.553359028554851, "grad_norm": 0.40467071533203125, "learning_rate": 2.2334120767541133e-06, "loss": 0.6951, 
"step": 13352 }, { "epoch": 0.5534004724605247, "grad_norm": 0.3977621793746948, "learning_rate": 2.233204857225745e-06, "loss": 0.676, "step": 13353 }, { "epoch": 0.5534419163661983, "grad_norm": 0.38484591245651245, "learning_rate": 2.2329976376973765e-06, "loss": 0.6423, "step": 13354 }, { "epoch": 0.5534833602718721, "grad_norm": 0.44045478105545044, "learning_rate": 2.2327904181690087e-06, "loss": 0.7419, "step": 13355 }, { "epoch": 0.5535248041775457, "grad_norm": 0.38302960991859436, "learning_rate": 2.23258319864064e-06, "loss": 0.6158, "step": 13356 }, { "epoch": 0.5535662480832194, "grad_norm": 0.4010436236858368, "learning_rate": 2.232375979112272e-06, "loss": 0.6357, "step": 13357 }, { "epoch": 0.553607691988893, "grad_norm": 0.42946529388427734, "learning_rate": 2.2321687595839033e-06, "loss": 0.6854, "step": 13358 }, { "epoch": 0.5536491358945667, "grad_norm": 0.4361298084259033, "learning_rate": 2.231961540055535e-06, "loss": 0.6263, "step": 13359 }, { "epoch": 0.5536905798002404, "grad_norm": 0.43426117300987244, "learning_rate": 2.2317543205271665e-06, "loss": 0.6722, "step": 13360 }, { "epoch": 0.553732023705914, "grad_norm": 0.43082141876220703, "learning_rate": 2.2315471009987983e-06, "loss": 0.7673, "step": 13361 }, { "epoch": 0.5537734676115877, "grad_norm": 0.4068543314933777, "learning_rate": 2.2313398814704297e-06, "loss": 0.6659, "step": 13362 }, { "epoch": 0.5538149115172614, "grad_norm": 0.40030407905578613, "learning_rate": 2.231132661942062e-06, "loss": 0.6687, "step": 13363 }, { "epoch": 0.5538563554229351, "grad_norm": 0.40095648169517517, "learning_rate": 2.2309254424136933e-06, "loss": 0.6782, "step": 13364 }, { "epoch": 0.5538977993286087, "grad_norm": 0.49406012892723083, "learning_rate": 2.230718222885325e-06, "loss": 0.7488, "step": 13365 }, { "epoch": 0.5539392432342825, "grad_norm": 0.3838699460029602, "learning_rate": 2.2305110033569565e-06, "loss": 0.6638, "step": 13366 }, { "epoch": 0.5539806871399561, "grad_norm": 
0.4086756110191345, "learning_rate": 2.2303037838285883e-06, "loss": 0.6797, "step": 13367 }, { "epoch": 0.5540221310456297, "grad_norm": 0.39085254073143005, "learning_rate": 2.2300965643002197e-06, "loss": 0.6222, "step": 13368 }, { "epoch": 0.5540635749513034, "grad_norm": 0.41899600625038147, "learning_rate": 2.2298893447718515e-06, "loss": 0.6958, "step": 13369 }, { "epoch": 0.554105018856977, "grad_norm": 0.4285520315170288, "learning_rate": 2.229682125243483e-06, "loss": 0.6976, "step": 13370 }, { "epoch": 0.5541464627626508, "grad_norm": 0.39842110872268677, "learning_rate": 2.2294749057151147e-06, "loss": 0.6702, "step": 13371 }, { "epoch": 0.5541879066683244, "grad_norm": 0.38456428050994873, "learning_rate": 2.2292676861867465e-06, "loss": 0.6259, "step": 13372 }, { "epoch": 0.5542293505739981, "grad_norm": 0.3744107484817505, "learning_rate": 2.2290604666583783e-06, "loss": 0.6565, "step": 13373 }, { "epoch": 0.5542707944796718, "grad_norm": 0.4025529623031616, "learning_rate": 2.2288532471300097e-06, "loss": 0.7324, "step": 13374 }, { "epoch": 0.5543122383853455, "grad_norm": 0.4216817021369934, "learning_rate": 2.2286460276016415e-06, "loss": 0.7095, "step": 13375 }, { "epoch": 0.5543536822910191, "grad_norm": 0.3994043171405792, "learning_rate": 2.228438808073273e-06, "loss": 0.6736, "step": 13376 }, { "epoch": 0.5543951261966927, "grad_norm": 0.38003724813461304, "learning_rate": 2.2282315885449047e-06, "loss": 0.7078, "step": 13377 }, { "epoch": 0.5544365701023665, "grad_norm": 0.4104180634021759, "learning_rate": 2.228024369016536e-06, "loss": 0.6694, "step": 13378 }, { "epoch": 0.5544780140080401, "grad_norm": 0.43352511525154114, "learning_rate": 2.227817149488168e-06, "loss": 0.7068, "step": 13379 }, { "epoch": 0.5545194579137138, "grad_norm": 0.414272278547287, "learning_rate": 2.2276099299597993e-06, "loss": 0.6548, "step": 13380 }, { "epoch": 0.5545609018193874, "grad_norm": 0.40755319595336914, "learning_rate": 2.2274027104314315e-06, 
"loss": 0.7058, "step": 13381 }, { "epoch": 0.5546023457250612, "grad_norm": 0.4305060803890228, "learning_rate": 2.227195490903063e-06, "loss": 0.7056, "step": 13382 }, { "epoch": 0.5546437896307348, "grad_norm": 0.3956022262573242, "learning_rate": 2.2269882713746947e-06, "loss": 0.6588, "step": 13383 }, { "epoch": 0.5546852335364084, "grad_norm": 0.4256497025489807, "learning_rate": 2.226781051846326e-06, "loss": 0.649, "step": 13384 }, { "epoch": 0.5547266774420822, "grad_norm": 0.4398941695690155, "learning_rate": 2.226573832317958e-06, "loss": 0.719, "step": 13385 }, { "epoch": 0.5547681213477558, "grad_norm": 0.39232495427131653, "learning_rate": 2.2263666127895893e-06, "loss": 0.6964, "step": 13386 }, { "epoch": 0.5548095652534295, "grad_norm": 0.4180092215538025, "learning_rate": 2.226159393261221e-06, "loss": 0.6829, "step": 13387 }, { "epoch": 0.5548510091591031, "grad_norm": 0.4017854332923889, "learning_rate": 2.2259521737328525e-06, "loss": 0.6421, "step": 13388 }, { "epoch": 0.5548924530647769, "grad_norm": 0.3935394585132599, "learning_rate": 2.2257449542044843e-06, "loss": 0.6541, "step": 13389 }, { "epoch": 0.5549338969704505, "grad_norm": 0.4190226197242737, "learning_rate": 2.225537734676116e-06, "loss": 0.7004, "step": 13390 }, { "epoch": 0.5549753408761242, "grad_norm": 0.4204018712043762, "learning_rate": 2.225330515147748e-06, "loss": 0.6387, "step": 13391 }, { "epoch": 0.5550167847817978, "grad_norm": 0.42090368270874023, "learning_rate": 2.2251232956193793e-06, "loss": 0.6611, "step": 13392 }, { "epoch": 0.5550582286874715, "grad_norm": 0.4075533449649811, "learning_rate": 2.224916076091011e-06, "loss": 0.6379, "step": 13393 }, { "epoch": 0.5550996725931452, "grad_norm": 0.431539386510849, "learning_rate": 2.2247088565626425e-06, "loss": 0.6959, "step": 13394 }, { "epoch": 0.5551411164988188, "grad_norm": 0.4098464250564575, "learning_rate": 2.2245016370342743e-06, "loss": 0.6372, "step": 13395 }, { "epoch": 0.5551825604044925, 
"grad_norm": 0.4535987377166748, "learning_rate": 2.2242944175059057e-06, "loss": 0.7329, "step": 13396 }, { "epoch": 0.5552240043101662, "grad_norm": 0.4095035791397095, "learning_rate": 2.2240871979775375e-06, "loss": 0.6621, "step": 13397 }, { "epoch": 0.5552654482158399, "grad_norm": 0.38087335228919983, "learning_rate": 2.2238799784491693e-06, "loss": 0.6787, "step": 13398 }, { "epoch": 0.5553068921215135, "grad_norm": 0.42278924584388733, "learning_rate": 2.223672758920801e-06, "loss": 0.7124, "step": 13399 }, { "epoch": 0.5553483360271873, "grad_norm": 0.425914466381073, "learning_rate": 2.2234655393924325e-06, "loss": 0.7185, "step": 13400 }, { "epoch": 0.5553897799328609, "grad_norm": 0.4198751151561737, "learning_rate": 2.2232583198640643e-06, "loss": 0.696, "step": 13401 }, { "epoch": 0.5554312238385345, "grad_norm": 0.4298326075077057, "learning_rate": 2.2230511003356957e-06, "loss": 0.6584, "step": 13402 }, { "epoch": 0.5554726677442082, "grad_norm": 0.4123218357563019, "learning_rate": 2.2228438808073275e-06, "loss": 0.7009, "step": 13403 }, { "epoch": 0.5555141116498818, "grad_norm": 0.4475235939025879, "learning_rate": 2.222636661278959e-06, "loss": 0.6807, "step": 13404 }, { "epoch": 0.5555555555555556, "grad_norm": 0.44484037160873413, "learning_rate": 2.2224294417505907e-06, "loss": 0.6478, "step": 13405 }, { "epoch": 0.5555969994612292, "grad_norm": 0.45554664731025696, "learning_rate": 2.222222222222222e-06, "loss": 0.7017, "step": 13406 }, { "epoch": 0.5556384433669029, "grad_norm": 0.4124221205711365, "learning_rate": 2.2220150026938543e-06, "loss": 0.6881, "step": 13407 }, { "epoch": 0.5556798872725766, "grad_norm": 0.4102628529071808, "learning_rate": 2.2218077831654857e-06, "loss": 0.7053, "step": 13408 }, { "epoch": 0.5557213311782503, "grad_norm": 0.4004441499710083, "learning_rate": 2.2216005636371175e-06, "loss": 0.6232, "step": 13409 }, { "epoch": 0.5557627750839239, "grad_norm": 0.3827805519104004, "learning_rate": 
2.221393344108749e-06, "loss": 0.6565, "step": 13410 }, { "epoch": 0.5558042189895975, "grad_norm": 0.4330747127532959, "learning_rate": 2.2211861245803807e-06, "loss": 0.6655, "step": 13411 }, { "epoch": 0.5558456628952713, "grad_norm": 0.42457517981529236, "learning_rate": 2.220978905052012e-06, "loss": 0.7222, "step": 13412 }, { "epoch": 0.5558871068009449, "grad_norm": 0.42491310834884644, "learning_rate": 2.220771685523644e-06, "loss": 0.6335, "step": 13413 }, { "epoch": 0.5559285507066186, "grad_norm": 0.3926023244857788, "learning_rate": 2.2205644659952753e-06, "loss": 0.6913, "step": 13414 }, { "epoch": 0.5559699946122922, "grad_norm": 0.39806604385375977, "learning_rate": 2.220357246466907e-06, "loss": 0.6642, "step": 13415 }, { "epoch": 0.556011438517966, "grad_norm": 0.3912261426448822, "learning_rate": 2.220150026938539e-06, "loss": 0.694, "step": 13416 }, { "epoch": 0.5560528824236396, "grad_norm": 0.4065779447555542, "learning_rate": 2.2199428074101707e-06, "loss": 0.6438, "step": 13417 }, { "epoch": 0.5560943263293133, "grad_norm": 0.41474640369415283, "learning_rate": 2.219735587881802e-06, "loss": 0.6702, "step": 13418 }, { "epoch": 0.556135770234987, "grad_norm": 0.40799984335899353, "learning_rate": 2.219528368353434e-06, "loss": 0.6831, "step": 13419 }, { "epoch": 0.5561772141406606, "grad_norm": 0.386981725692749, "learning_rate": 2.2193211488250653e-06, "loss": 0.6487, "step": 13420 }, { "epoch": 0.5562186580463343, "grad_norm": 0.4154641330242157, "learning_rate": 2.219113929296697e-06, "loss": 0.647, "step": 13421 }, { "epoch": 0.5562601019520079, "grad_norm": 0.4300985038280487, "learning_rate": 2.2189067097683285e-06, "loss": 0.6721, "step": 13422 }, { "epoch": 0.5563015458576817, "grad_norm": 0.4128133952617645, "learning_rate": 2.2186994902399603e-06, "loss": 0.6978, "step": 13423 }, { "epoch": 0.5563429897633553, "grad_norm": 0.3875913619995117, "learning_rate": 2.218492270711592e-06, "loss": 0.6897, "step": 13424 }, { "epoch": 
0.556384433669029, "grad_norm": 0.3978937566280365, "learning_rate": 2.218285051183224e-06, "loss": 0.7024, "step": 13425 }, { "epoch": 0.5564258775747026, "grad_norm": 0.4331439435482025, "learning_rate": 2.2180778316548553e-06, "loss": 0.7061, "step": 13426 }, { "epoch": 0.5564673214803764, "grad_norm": 0.4399000108242035, "learning_rate": 2.217870612126487e-06, "loss": 0.6729, "step": 13427 }, { "epoch": 0.55650876538605, "grad_norm": 0.40281373262405396, "learning_rate": 2.2176633925981185e-06, "loss": 0.6663, "step": 13428 }, { "epoch": 0.5565502092917236, "grad_norm": 0.3918147385120392, "learning_rate": 2.2174561730697503e-06, "loss": 0.6843, "step": 13429 }, { "epoch": 0.5565916531973973, "grad_norm": 0.43212953209877014, "learning_rate": 2.2172489535413817e-06, "loss": 0.6829, "step": 13430 }, { "epoch": 0.556633097103071, "grad_norm": 0.3771352767944336, "learning_rate": 2.2170417340130135e-06, "loss": 0.6494, "step": 13431 }, { "epoch": 0.5566745410087447, "grad_norm": 0.3982904255390167, "learning_rate": 2.2168345144846453e-06, "loss": 0.7012, "step": 13432 }, { "epoch": 0.5567159849144183, "grad_norm": 0.3984405994415283, "learning_rate": 2.216627294956277e-06, "loss": 0.6512, "step": 13433 }, { "epoch": 0.556757428820092, "grad_norm": 0.4121124744415283, "learning_rate": 2.2164200754279085e-06, "loss": 0.6899, "step": 13434 }, { "epoch": 0.5567988727257657, "grad_norm": 0.4117922782897949, "learning_rate": 2.2162128558995403e-06, "loss": 0.6898, "step": 13435 }, { "epoch": 0.5568403166314393, "grad_norm": 0.4423362612724304, "learning_rate": 2.2160056363711717e-06, "loss": 0.6992, "step": 13436 }, { "epoch": 0.556881760537113, "grad_norm": 0.4063716232776642, "learning_rate": 2.2157984168428035e-06, "loss": 0.6932, "step": 13437 }, { "epoch": 0.5569232044427866, "grad_norm": 0.40465348958969116, "learning_rate": 2.215591197314435e-06, "loss": 0.6439, "step": 13438 }, { "epoch": 0.5569646483484604, "grad_norm": 0.4145529270172119, "learning_rate": 
2.2153839777860667e-06, "loss": 0.6752, "step": 13439 }, { "epoch": 0.557006092254134, "grad_norm": 0.49542340636253357, "learning_rate": 2.215176758257698e-06, "loss": 0.7131, "step": 13440 }, { "epoch": 0.5570475361598077, "grad_norm": 0.43253093957901, "learning_rate": 2.21496953872933e-06, "loss": 0.7047, "step": 13441 }, { "epoch": 0.5570889800654814, "grad_norm": 0.4001549184322357, "learning_rate": 2.2147623192009617e-06, "loss": 0.6738, "step": 13442 }, { "epoch": 0.5571304239711551, "grad_norm": 0.3856462836265564, "learning_rate": 2.2145550996725935e-06, "loss": 0.6427, "step": 13443 }, { "epoch": 0.5571718678768287, "grad_norm": 0.36404281854629517, "learning_rate": 2.214347880144225e-06, "loss": 0.6241, "step": 13444 }, { "epoch": 0.5572133117825023, "grad_norm": 0.39453092217445374, "learning_rate": 2.2141406606158567e-06, "loss": 0.7024, "step": 13445 }, { "epoch": 0.5572547556881761, "grad_norm": 0.41613492369651794, "learning_rate": 2.213933441087488e-06, "loss": 0.6711, "step": 13446 }, { "epoch": 0.5572961995938497, "grad_norm": 0.4494464695453644, "learning_rate": 2.21372622155912e-06, "loss": 0.7129, "step": 13447 }, { "epoch": 0.5573376434995234, "grad_norm": 0.4255913496017456, "learning_rate": 2.2135190020307513e-06, "loss": 0.6926, "step": 13448 }, { "epoch": 0.557379087405197, "grad_norm": 0.407270222902298, "learning_rate": 2.213311782502383e-06, "loss": 0.6613, "step": 13449 }, { "epoch": 0.5574205313108708, "grad_norm": 0.40119943022727966, "learning_rate": 2.213104562974015e-06, "loss": 0.678, "step": 13450 }, { "epoch": 0.5574619752165444, "grad_norm": 0.37802988290786743, "learning_rate": 2.2128973434456467e-06, "loss": 0.6285, "step": 13451 }, { "epoch": 0.5575034191222181, "grad_norm": 0.40623581409454346, "learning_rate": 2.212690123917278e-06, "loss": 0.6956, "step": 13452 }, { "epoch": 0.5575448630278917, "grad_norm": 0.44938820600509644, "learning_rate": 2.21248290438891e-06, "loss": 0.7327, "step": 13453 }, { "epoch": 
0.5575863069335654, "grad_norm": 0.3923630118370056, "learning_rate": 2.2122756848605413e-06, "loss": 0.6724, "step": 13454 }, { "epoch": 0.5576277508392391, "grad_norm": 0.46525418758392334, "learning_rate": 2.212068465332173e-06, "loss": 0.7512, "step": 13455 }, { "epoch": 0.5576691947449127, "grad_norm": 0.3949797451496124, "learning_rate": 2.2118612458038045e-06, "loss": 0.6484, "step": 13456 }, { "epoch": 0.5577106386505865, "grad_norm": 0.4021584689617157, "learning_rate": 2.2116540262754363e-06, "loss": 0.7385, "step": 13457 }, { "epoch": 0.5577520825562601, "grad_norm": 0.4018990695476532, "learning_rate": 2.211446806747068e-06, "loss": 0.6512, "step": 13458 }, { "epoch": 0.5577935264619338, "grad_norm": 0.4004579484462738, "learning_rate": 2.2112395872187e-06, "loss": 0.7473, "step": 13459 }, { "epoch": 0.5578349703676074, "grad_norm": 0.40786340832710266, "learning_rate": 2.2110323676903313e-06, "loss": 0.7065, "step": 13460 }, { "epoch": 0.5578764142732812, "grad_norm": 0.4096389412879944, "learning_rate": 2.210825148161963e-06, "loss": 0.6504, "step": 13461 }, { "epoch": 0.5579178581789548, "grad_norm": 0.40300965309143066, "learning_rate": 2.2106179286335945e-06, "loss": 0.7253, "step": 13462 }, { "epoch": 0.5579593020846284, "grad_norm": 0.4301587641239166, "learning_rate": 2.2104107091052263e-06, "loss": 0.7092, "step": 13463 }, { "epoch": 0.5580007459903021, "grad_norm": 0.4605104625225067, "learning_rate": 2.2102034895768577e-06, "loss": 0.7021, "step": 13464 }, { "epoch": 0.5580421898959758, "grad_norm": 0.3844835162162781, "learning_rate": 2.2099962700484895e-06, "loss": 0.6652, "step": 13465 }, { "epoch": 0.5580836338016495, "grad_norm": 0.39506399631500244, "learning_rate": 2.2097890505201213e-06, "loss": 0.7017, "step": 13466 }, { "epoch": 0.5581250777073231, "grad_norm": 0.431597501039505, "learning_rate": 2.2095818309917527e-06, "loss": 0.6343, "step": 13467 }, { "epoch": 0.5581665216129968, "grad_norm": 0.4331507980823517, "learning_rate": 
2.2093746114633845e-06, "loss": 0.7197, "step": 13468 }, { "epoch": 0.5582079655186705, "grad_norm": 0.41448405385017395, "learning_rate": 2.2091673919350163e-06, "loss": 0.6917, "step": 13469 }, { "epoch": 0.5582494094243442, "grad_norm": 0.42382121086120605, "learning_rate": 2.2089601724066477e-06, "loss": 0.6398, "step": 13470 }, { "epoch": 0.5582908533300178, "grad_norm": 0.390537828207016, "learning_rate": 2.2087529528782795e-06, "loss": 0.6421, "step": 13471 }, { "epoch": 0.5583322972356914, "grad_norm": 0.41863149404525757, "learning_rate": 2.208545733349911e-06, "loss": 0.6694, "step": 13472 }, { "epoch": 0.5583737411413652, "grad_norm": 0.44366341829299927, "learning_rate": 2.2083385138215427e-06, "loss": 0.6776, "step": 13473 }, { "epoch": 0.5584151850470388, "grad_norm": 0.41243746876716614, "learning_rate": 2.2081312942931745e-06, "loss": 0.7064, "step": 13474 }, { "epoch": 0.5584566289527125, "grad_norm": 0.3989080488681793, "learning_rate": 2.207924074764806e-06, "loss": 0.6655, "step": 13475 }, { "epoch": 0.5584980728583862, "grad_norm": 0.43861666321754456, "learning_rate": 2.2077168552364377e-06, "loss": 0.6903, "step": 13476 }, { "epoch": 0.5585395167640599, "grad_norm": 0.39581409096717834, "learning_rate": 2.2075096357080695e-06, "loss": 0.671, "step": 13477 }, { "epoch": 0.5585809606697335, "grad_norm": 0.40590617060661316, "learning_rate": 2.207302416179701e-06, "loss": 0.6526, "step": 13478 }, { "epoch": 0.5586224045754072, "grad_norm": 0.41925570368766785, "learning_rate": 2.2070951966513327e-06, "loss": 0.6869, "step": 13479 }, { "epoch": 0.5586638484810809, "grad_norm": 0.40661245584487915, "learning_rate": 2.206887977122964e-06, "loss": 0.6802, "step": 13480 }, { "epoch": 0.5587052923867545, "grad_norm": 0.40267014503479004, "learning_rate": 2.206680757594596e-06, "loss": 0.6696, "step": 13481 }, { "epoch": 0.5587467362924282, "grad_norm": 0.4519302248954773, "learning_rate": 2.2064735380662273e-06, "loss": 0.717, "step": 13482 }, { 
"epoch": 0.5587881801981018, "grad_norm": 0.3907933235168457, "learning_rate": 2.206266318537859e-06, "loss": 0.6992, "step": 13483 }, { "epoch": 0.5588296241037756, "grad_norm": 0.4434550106525421, "learning_rate": 2.206059099009491e-06, "loss": 0.6979, "step": 13484 }, { "epoch": 0.5588710680094492, "grad_norm": 0.4492528438568115, "learning_rate": 2.2058518794811227e-06, "loss": 0.7117, "step": 13485 }, { "epoch": 0.5589125119151229, "grad_norm": 0.44632333517074585, "learning_rate": 2.205644659952754e-06, "loss": 0.7374, "step": 13486 }, { "epoch": 0.5589539558207965, "grad_norm": 0.39630189538002014, "learning_rate": 2.205437440424386e-06, "loss": 0.6914, "step": 13487 }, { "epoch": 0.5589953997264703, "grad_norm": 0.426482230424881, "learning_rate": 2.2052302208960173e-06, "loss": 0.6332, "step": 13488 }, { "epoch": 0.5590368436321439, "grad_norm": 0.4317478835582733, "learning_rate": 2.205023001367649e-06, "loss": 0.7627, "step": 13489 }, { "epoch": 0.5590782875378175, "grad_norm": 0.4025958180427551, "learning_rate": 2.2048157818392805e-06, "loss": 0.6543, "step": 13490 }, { "epoch": 0.5591197314434913, "grad_norm": 0.4113334119319916, "learning_rate": 2.2046085623109123e-06, "loss": 0.6853, "step": 13491 }, { "epoch": 0.5591611753491649, "grad_norm": 0.3804090917110443, "learning_rate": 2.204401342782544e-06, "loss": 0.6903, "step": 13492 }, { "epoch": 0.5592026192548386, "grad_norm": 0.42987972497940063, "learning_rate": 2.2041941232541755e-06, "loss": 0.7095, "step": 13493 }, { "epoch": 0.5592440631605122, "grad_norm": 0.39299386739730835, "learning_rate": 2.2039869037258073e-06, "loss": 0.6493, "step": 13494 }, { "epoch": 0.559285507066186, "grad_norm": 0.39975452423095703, "learning_rate": 2.203779684197439e-06, "loss": 0.6755, "step": 13495 }, { "epoch": 0.5593269509718596, "grad_norm": 0.4169398844242096, "learning_rate": 2.2035724646690705e-06, "loss": 0.6431, "step": 13496 }, { "epoch": 0.5593683948775332, "grad_norm": 0.3925769329071045, 
"learning_rate": 2.2033652451407023e-06, "loss": 0.6604, "step": 13497 }, { "epoch": 0.5594098387832069, "grad_norm": 0.3608100712299347, "learning_rate": 2.2031580256123337e-06, "loss": 0.6648, "step": 13498 }, { "epoch": 0.5594512826888806, "grad_norm": 0.415886789560318, "learning_rate": 2.2029508060839655e-06, "loss": 0.6871, "step": 13499 }, { "epoch": 0.5594927265945543, "grad_norm": 0.4178953170776367, "learning_rate": 2.2027435865555973e-06, "loss": 0.7006, "step": 13500 }, { "epoch": 0.5595341705002279, "grad_norm": 0.39562898874282837, "learning_rate": 2.2025363670272287e-06, "loss": 0.6331, "step": 13501 }, { "epoch": 0.5595756144059016, "grad_norm": 0.4548182785511017, "learning_rate": 2.2023291474988605e-06, "loss": 0.7021, "step": 13502 }, { "epoch": 0.5596170583115753, "grad_norm": 0.42953452467918396, "learning_rate": 2.2021219279704923e-06, "loss": 0.739, "step": 13503 }, { "epoch": 0.559658502217249, "grad_norm": 0.3848244547843933, "learning_rate": 2.2019147084421237e-06, "loss": 0.6604, "step": 13504 }, { "epoch": 0.5596999461229226, "grad_norm": 0.45081838965415955, "learning_rate": 2.2017074889137555e-06, "loss": 0.6697, "step": 13505 }, { "epoch": 0.5597413900285962, "grad_norm": 0.38033729791641235, "learning_rate": 2.201500269385387e-06, "loss": 0.6462, "step": 13506 }, { "epoch": 0.55978283393427, "grad_norm": 0.42892688512802124, "learning_rate": 2.2012930498570187e-06, "loss": 0.6722, "step": 13507 }, { "epoch": 0.5598242778399436, "grad_norm": 0.3980858623981476, "learning_rate": 2.2010858303286505e-06, "loss": 0.7052, "step": 13508 }, { "epoch": 0.5598657217456173, "grad_norm": 0.42280325293540955, "learning_rate": 2.200878610800282e-06, "loss": 0.7065, "step": 13509 }, { "epoch": 0.559907165651291, "grad_norm": 0.38851016759872437, "learning_rate": 2.2006713912719137e-06, "loss": 0.7131, "step": 13510 }, { "epoch": 0.5599486095569647, "grad_norm": 0.43656688928604126, "learning_rate": 2.2004641717435455e-06, "loss": 0.6797, "step": 
13511 }, { "epoch": 0.5599900534626383, "grad_norm": 0.41167059540748596, "learning_rate": 2.200256952215177e-06, "loss": 0.6598, "step": 13512 }, { "epoch": 0.560031497368312, "grad_norm": 0.48567354679107666, "learning_rate": 2.2000497326868087e-06, "loss": 0.6899, "step": 13513 }, { "epoch": 0.5600729412739857, "grad_norm": 0.4317682087421417, "learning_rate": 2.19984251315844e-06, "loss": 0.6748, "step": 13514 }, { "epoch": 0.5601143851796593, "grad_norm": 0.47222238779067993, "learning_rate": 2.199635293630072e-06, "loss": 0.6973, "step": 13515 }, { "epoch": 0.560155829085333, "grad_norm": 0.4293147921562195, "learning_rate": 2.1994280741017033e-06, "loss": 0.6655, "step": 13516 }, { "epoch": 0.5601972729910066, "grad_norm": 0.404299259185791, "learning_rate": 2.199220854573335e-06, "loss": 0.6638, "step": 13517 }, { "epoch": 0.5602387168966804, "grad_norm": 0.399644672870636, "learning_rate": 2.199013635044967e-06, "loss": 0.6677, "step": 13518 }, { "epoch": 0.560280160802354, "grad_norm": 0.4088263213634491, "learning_rate": 2.1988064155165983e-06, "loss": 0.7126, "step": 13519 }, { "epoch": 0.5603216047080277, "grad_norm": 0.38518163561820984, "learning_rate": 2.19859919598823e-06, "loss": 0.6458, "step": 13520 }, { "epoch": 0.5603630486137013, "grad_norm": 0.4290962219238281, "learning_rate": 2.198391976459862e-06, "loss": 0.6698, "step": 13521 }, { "epoch": 0.5604044925193751, "grad_norm": 0.44641464948654175, "learning_rate": 2.1981847569314933e-06, "loss": 0.71, "step": 13522 }, { "epoch": 0.5604459364250487, "grad_norm": 0.40030407905578613, "learning_rate": 2.197977537403125e-06, "loss": 0.6549, "step": 13523 }, { "epoch": 0.5604873803307223, "grad_norm": 0.43866628408432007, "learning_rate": 2.1977703178747565e-06, "loss": 0.7046, "step": 13524 }, { "epoch": 0.560528824236396, "grad_norm": 0.3886450231075287, "learning_rate": 2.1975630983463883e-06, "loss": 0.6985, "step": 13525 }, { "epoch": 0.5605702681420697, "grad_norm": 0.3982860743999481, 
"learning_rate": 2.19735587881802e-06, "loss": 0.6184, "step": 13526 }, { "epoch": 0.5606117120477434, "grad_norm": 0.40546050667762756, "learning_rate": 2.1971486592896515e-06, "loss": 0.6575, "step": 13527 }, { "epoch": 0.560653155953417, "grad_norm": 0.43203985691070557, "learning_rate": 2.1969414397612833e-06, "loss": 0.6145, "step": 13528 }, { "epoch": 0.5606945998590908, "grad_norm": 0.45873114466667175, "learning_rate": 2.196734220232915e-06, "loss": 0.7437, "step": 13529 }, { "epoch": 0.5607360437647644, "grad_norm": 0.3934037387371063, "learning_rate": 2.1965270007045465e-06, "loss": 0.6167, "step": 13530 }, { "epoch": 0.5607774876704381, "grad_norm": 0.42633056640625, "learning_rate": 2.1963197811761783e-06, "loss": 0.7183, "step": 13531 }, { "epoch": 0.5608189315761117, "grad_norm": 0.37371826171875, "learning_rate": 2.1961125616478097e-06, "loss": 0.6919, "step": 13532 }, { "epoch": 0.5608603754817854, "grad_norm": 0.40359827876091003, "learning_rate": 2.1959053421194415e-06, "loss": 0.712, "step": 13533 }, { "epoch": 0.5609018193874591, "grad_norm": 0.3969515264034271, "learning_rate": 2.1956981225910733e-06, "loss": 0.6078, "step": 13534 }, { "epoch": 0.5609432632931327, "grad_norm": 0.39733344316482544, "learning_rate": 2.1954909030627047e-06, "loss": 0.6575, "step": 13535 }, { "epoch": 0.5609847071988064, "grad_norm": 0.43039190769195557, "learning_rate": 2.1952836835343365e-06, "loss": 0.6627, "step": 13536 }, { "epoch": 0.5610261511044801, "grad_norm": 0.42951494455337524, "learning_rate": 2.195076464005968e-06, "loss": 0.7037, "step": 13537 }, { "epoch": 0.5610675950101538, "grad_norm": 0.40532562136650085, "learning_rate": 2.1948692444775997e-06, "loss": 0.7158, "step": 13538 }, { "epoch": 0.5611090389158274, "grad_norm": 0.4063150882720947, "learning_rate": 2.1946620249492315e-06, "loss": 0.6858, "step": 13539 }, { "epoch": 0.5611504828215012, "grad_norm": 0.38967394828796387, "learning_rate": 2.194454805420863e-06, "loss": 0.6356, "step": 
13540 }, { "epoch": 0.5611919267271748, "grad_norm": 0.3996219336986542, "learning_rate": 2.1942475858924947e-06, "loss": 0.728, "step": 13541 }, { "epoch": 0.5612333706328484, "grad_norm": 0.37087124586105347, "learning_rate": 2.1940403663641265e-06, "loss": 0.7058, "step": 13542 }, { "epoch": 0.5612748145385221, "grad_norm": 0.41605815291404724, "learning_rate": 2.193833146835758e-06, "loss": 0.6902, "step": 13543 }, { "epoch": 0.5613162584441957, "grad_norm": 0.41409724950790405, "learning_rate": 2.1936259273073897e-06, "loss": 0.6687, "step": 13544 }, { "epoch": 0.5613577023498695, "grad_norm": 0.39960646629333496, "learning_rate": 2.193418707779021e-06, "loss": 0.6833, "step": 13545 }, { "epoch": 0.5613991462555431, "grad_norm": 0.4606805443763733, "learning_rate": 2.193211488250653e-06, "loss": 0.7052, "step": 13546 }, { "epoch": 0.5614405901612168, "grad_norm": 0.4342947006225586, "learning_rate": 2.1930042687222847e-06, "loss": 0.6829, "step": 13547 }, { "epoch": 0.5614820340668905, "grad_norm": 0.39312323927879333, "learning_rate": 2.192797049193916e-06, "loss": 0.6843, "step": 13548 }, { "epoch": 0.5615234779725642, "grad_norm": 0.4197835326194763, "learning_rate": 2.192589829665548e-06, "loss": 0.7122, "step": 13549 }, { "epoch": 0.5615649218782378, "grad_norm": 0.4010487496852875, "learning_rate": 2.1923826101371793e-06, "loss": 0.6394, "step": 13550 }, { "epoch": 0.5616063657839114, "grad_norm": 0.38023194670677185, "learning_rate": 2.192175390608811e-06, "loss": 0.6494, "step": 13551 }, { "epoch": 0.5616478096895852, "grad_norm": 0.3523508906364441, "learning_rate": 2.191968171080443e-06, "loss": 0.6522, "step": 13552 }, { "epoch": 0.5616892535952588, "grad_norm": 0.40138936042785645, "learning_rate": 2.1917609515520743e-06, "loss": 0.6376, "step": 13553 }, { "epoch": 0.5617306975009325, "grad_norm": 0.41854575276374817, "learning_rate": 2.191553732023706e-06, "loss": 0.6841, "step": 13554 }, { "epoch": 0.5617721414066061, "grad_norm": 
0.3792282044887543, "learning_rate": 2.191346512495338e-06, "loss": 0.686, "step": 13555 }, { "epoch": 0.5618135853122799, "grad_norm": 0.4282967448234558, "learning_rate": 2.1911392929669693e-06, "loss": 0.7009, "step": 13556 }, { "epoch": 0.5618550292179535, "grad_norm": 0.41302335262298584, "learning_rate": 2.190932073438601e-06, "loss": 0.691, "step": 13557 }, { "epoch": 0.5618964731236271, "grad_norm": 0.39084434509277344, "learning_rate": 2.1907248539102325e-06, "loss": 0.6556, "step": 13558 }, { "epoch": 0.5619379170293008, "grad_norm": 0.4302116632461548, "learning_rate": 2.1905176343818643e-06, "loss": 0.7017, "step": 13559 }, { "epoch": 0.5619793609349745, "grad_norm": 0.4295697510242462, "learning_rate": 2.190310414853496e-06, "loss": 0.6951, "step": 13560 }, { "epoch": 0.5620208048406482, "grad_norm": 0.4126332402229309, "learning_rate": 2.1901031953251275e-06, "loss": 0.6764, "step": 13561 }, { "epoch": 0.5620622487463218, "grad_norm": 0.38885870575904846, "learning_rate": 2.1898959757967593e-06, "loss": 0.7075, "step": 13562 }, { "epoch": 0.5621036926519956, "grad_norm": 0.46495676040649414, "learning_rate": 2.1896887562683907e-06, "loss": 0.7583, "step": 13563 }, { "epoch": 0.5621451365576692, "grad_norm": 0.4231015741825104, "learning_rate": 2.1894815367400225e-06, "loss": 0.6581, "step": 13564 }, { "epoch": 0.5621865804633429, "grad_norm": 0.39117616415023804, "learning_rate": 2.1892743172116543e-06, "loss": 0.6722, "step": 13565 }, { "epoch": 0.5622280243690165, "grad_norm": 0.42484748363494873, "learning_rate": 2.1890670976832857e-06, "loss": 0.6843, "step": 13566 }, { "epoch": 0.5622694682746902, "grad_norm": 0.4442344009876251, "learning_rate": 2.1888598781549175e-06, "loss": 0.6982, "step": 13567 }, { "epoch": 0.5623109121803639, "grad_norm": 0.4921358525753021, "learning_rate": 2.1886526586265493e-06, "loss": 0.7661, "step": 13568 }, { "epoch": 0.5623523560860375, "grad_norm": 0.4076802134513855, "learning_rate": 2.1884454390981807e-06, 
"loss": 0.6641, "step": 13569 }, { "epoch": 0.5623937999917112, "grad_norm": 0.38109737634658813, "learning_rate": 2.1882382195698125e-06, "loss": 0.6285, "step": 13570 }, { "epoch": 0.5624352438973849, "grad_norm": 0.4086001217365265, "learning_rate": 2.188031000041444e-06, "loss": 0.6786, "step": 13571 }, { "epoch": 0.5624766878030586, "grad_norm": 0.39237698912620544, "learning_rate": 2.1878237805130757e-06, "loss": 0.6426, "step": 13572 }, { "epoch": 0.5625181317087322, "grad_norm": 0.43000200390815735, "learning_rate": 2.1876165609847075e-06, "loss": 0.7117, "step": 13573 }, { "epoch": 0.562559575614406, "grad_norm": 0.3930050730705261, "learning_rate": 2.187409341456339e-06, "loss": 0.7026, "step": 13574 }, { "epoch": 0.5626010195200796, "grad_norm": 0.42346909642219543, "learning_rate": 2.1872021219279707e-06, "loss": 0.7075, "step": 13575 }, { "epoch": 0.5626424634257532, "grad_norm": 0.3909718990325928, "learning_rate": 2.1869949023996025e-06, "loss": 0.6968, "step": 13576 }, { "epoch": 0.5626839073314269, "grad_norm": 0.43160635232925415, "learning_rate": 2.186787682871234e-06, "loss": 0.6934, "step": 13577 }, { "epoch": 0.5627253512371005, "grad_norm": 0.4162079989910126, "learning_rate": 2.1865804633428657e-06, "loss": 0.7036, "step": 13578 }, { "epoch": 0.5627667951427743, "grad_norm": 0.4165841042995453, "learning_rate": 2.186373243814497e-06, "loss": 0.6854, "step": 13579 }, { "epoch": 0.5628082390484479, "grad_norm": 0.40297171473503113, "learning_rate": 2.186166024286129e-06, "loss": 0.6818, "step": 13580 }, { "epoch": 0.5628496829541216, "grad_norm": 0.44012030959129333, "learning_rate": 2.1859588047577607e-06, "loss": 0.696, "step": 13581 }, { "epoch": 0.5628911268597953, "grad_norm": 0.40899658203125, "learning_rate": 2.185751585229392e-06, "loss": 0.6912, "step": 13582 }, { "epoch": 0.562932570765469, "grad_norm": 0.438486784696579, "learning_rate": 2.185544365701024e-06, "loss": 0.6721, "step": 13583 }, { "epoch": 0.5629740146711426, 
"grad_norm": 0.4379560351371765, "learning_rate": 2.1853371461726557e-06, "loss": 0.6831, "step": 13584 }, { "epoch": 0.5630154585768162, "grad_norm": 0.43165647983551025, "learning_rate": 2.185129926644287e-06, "loss": 0.7225, "step": 13585 }, { "epoch": 0.56305690248249, "grad_norm": 0.43417444825172424, "learning_rate": 2.184922707115919e-06, "loss": 0.7329, "step": 13586 }, { "epoch": 0.5630983463881636, "grad_norm": 0.41414275765419006, "learning_rate": 2.1847154875875503e-06, "loss": 0.6658, "step": 13587 }, { "epoch": 0.5631397902938373, "grad_norm": 0.36921992897987366, "learning_rate": 2.184508268059182e-06, "loss": 0.6631, "step": 13588 }, { "epoch": 0.5631812341995109, "grad_norm": 0.4376254379749298, "learning_rate": 2.1843010485308135e-06, "loss": 0.7074, "step": 13589 }, { "epoch": 0.5632226781051847, "grad_norm": 0.3795234262943268, "learning_rate": 2.1840938290024453e-06, "loss": 0.6577, "step": 13590 }, { "epoch": 0.5632641220108583, "grad_norm": 0.41531500220298767, "learning_rate": 2.183886609474077e-06, "loss": 0.7317, "step": 13591 }, { "epoch": 0.563305565916532, "grad_norm": 0.4200824499130249, "learning_rate": 2.1836793899457085e-06, "loss": 0.6783, "step": 13592 }, { "epoch": 0.5633470098222056, "grad_norm": 0.4127451777458191, "learning_rate": 2.1834721704173403e-06, "loss": 0.6967, "step": 13593 }, { "epoch": 0.5633884537278793, "grad_norm": 0.432097464799881, "learning_rate": 2.183264950888972e-06, "loss": 0.697, "step": 13594 }, { "epoch": 0.563429897633553, "grad_norm": 0.419119268655777, "learning_rate": 2.1830577313606035e-06, "loss": 0.6628, "step": 13595 }, { "epoch": 0.5634713415392266, "grad_norm": 0.46469399333000183, "learning_rate": 2.1828505118322353e-06, "loss": 0.7124, "step": 13596 }, { "epoch": 0.5635127854449004, "grad_norm": 0.42279893159866333, "learning_rate": 2.1826432923038667e-06, "loss": 0.7249, "step": 13597 }, { "epoch": 0.563554229350574, "grad_norm": 0.42545968294143677, "learning_rate": 
2.1824360727754985e-06, "loss": 0.739, "step": 13598 }, { "epoch": 0.5635956732562477, "grad_norm": 0.417981892824173, "learning_rate": 2.1822288532471303e-06, "loss": 0.7002, "step": 13599 }, { "epoch": 0.5636371171619213, "grad_norm": 0.41238218545913696, "learning_rate": 2.1820216337187617e-06, "loss": 0.636, "step": 13600 }, { "epoch": 0.5636785610675951, "grad_norm": 0.41787609457969666, "learning_rate": 2.1818144141903935e-06, "loss": 0.6713, "step": 13601 }, { "epoch": 0.5637200049732687, "grad_norm": 0.4048405885696411, "learning_rate": 2.1816071946620253e-06, "loss": 0.6621, "step": 13602 }, { "epoch": 0.5637614488789423, "grad_norm": 0.3973867893218994, "learning_rate": 2.1813999751336567e-06, "loss": 0.6768, "step": 13603 }, { "epoch": 0.563802892784616, "grad_norm": 0.4192880094051361, "learning_rate": 2.1811927556052885e-06, "loss": 0.6827, "step": 13604 }, { "epoch": 0.5638443366902897, "grad_norm": 0.40680718421936035, "learning_rate": 2.18098553607692e-06, "loss": 0.6987, "step": 13605 }, { "epoch": 0.5638857805959634, "grad_norm": 0.3910078704357147, "learning_rate": 2.1807783165485517e-06, "loss": 0.6764, "step": 13606 }, { "epoch": 0.563927224501637, "grad_norm": 0.39003387093544006, "learning_rate": 2.1805710970201835e-06, "loss": 0.6926, "step": 13607 }, { "epoch": 0.5639686684073107, "grad_norm": 0.409464955329895, "learning_rate": 2.180363877491815e-06, "loss": 0.7141, "step": 13608 }, { "epoch": 0.5640101123129844, "grad_norm": 0.3894730806350708, "learning_rate": 2.1801566579634467e-06, "loss": 0.6536, "step": 13609 }, { "epoch": 0.5640515562186581, "grad_norm": 0.4156385362148285, "learning_rate": 2.1799494384350785e-06, "loss": 0.7189, "step": 13610 }, { "epoch": 0.5640930001243317, "grad_norm": 0.3780062198638916, "learning_rate": 2.17974221890671e-06, "loss": 0.6509, "step": 13611 }, { "epoch": 0.5641344440300053, "grad_norm": 0.3846317231655121, "learning_rate": 2.1795349993783417e-06, "loss": 0.6589, "step": 13612 }, { "epoch": 
0.5641758879356791, "grad_norm": 0.45072177052497864, "learning_rate": 2.179327779849973e-06, "loss": 0.6807, "step": 13613 }, { "epoch": 0.5642173318413527, "grad_norm": 0.42618077993392944, "learning_rate": 2.179120560321605e-06, "loss": 0.7559, "step": 13614 }, { "epoch": 0.5642587757470264, "grad_norm": 0.42198190093040466, "learning_rate": 2.1789133407932363e-06, "loss": 0.7437, "step": 13615 }, { "epoch": 0.5643002196527, "grad_norm": 0.42946889996528625, "learning_rate": 2.178706121264868e-06, "loss": 0.6769, "step": 13616 }, { "epoch": 0.5643416635583738, "grad_norm": 0.41309380531311035, "learning_rate": 2.1784989017365e-06, "loss": 0.6541, "step": 13617 }, { "epoch": 0.5643831074640474, "grad_norm": 0.41217321157455444, "learning_rate": 2.1782916822081317e-06, "loss": 0.7053, "step": 13618 }, { "epoch": 0.564424551369721, "grad_norm": 0.4197849929332733, "learning_rate": 2.178084462679763e-06, "loss": 0.6619, "step": 13619 }, { "epoch": 0.5644659952753948, "grad_norm": 0.3985639214515686, "learning_rate": 2.177877243151395e-06, "loss": 0.6697, "step": 13620 }, { "epoch": 0.5645074391810684, "grad_norm": 0.5045851469039917, "learning_rate": 2.1776700236230263e-06, "loss": 0.7589, "step": 13621 }, { "epoch": 0.5645488830867421, "grad_norm": 0.3928382992744446, "learning_rate": 2.177462804094658e-06, "loss": 0.6602, "step": 13622 }, { "epoch": 0.5645903269924157, "grad_norm": 0.40125736594200134, "learning_rate": 2.1772555845662895e-06, "loss": 0.6973, "step": 13623 }, { "epoch": 0.5646317708980895, "grad_norm": 0.38337793946266174, "learning_rate": 2.1770483650379213e-06, "loss": 0.6484, "step": 13624 }, { "epoch": 0.5646732148037631, "grad_norm": 0.41787585616111755, "learning_rate": 2.176841145509553e-06, "loss": 0.6863, "step": 13625 }, { "epoch": 0.5647146587094368, "grad_norm": 0.4291447699069977, "learning_rate": 2.1766339259811845e-06, "loss": 0.653, "step": 13626 }, { "epoch": 0.5647561026151104, "grad_norm": 0.4179460108280182, "learning_rate": 
2.1764267064528163e-06, "loss": 0.7097, "step": 13627 }, { "epoch": 0.5647975465207841, "grad_norm": 0.4421672821044922, "learning_rate": 2.176219486924448e-06, "loss": 0.6846, "step": 13628 }, { "epoch": 0.5648389904264578, "grad_norm": 0.43097344040870667, "learning_rate": 2.1760122673960795e-06, "loss": 0.6749, "step": 13629 }, { "epoch": 0.5648804343321314, "grad_norm": 0.3987502157688141, "learning_rate": 2.1758050478677113e-06, "loss": 0.6821, "step": 13630 }, { "epoch": 0.5649218782378052, "grad_norm": 0.41023147106170654, "learning_rate": 2.1755978283393427e-06, "loss": 0.6837, "step": 13631 }, { "epoch": 0.5649633221434788, "grad_norm": 0.4206581115722656, "learning_rate": 2.1753906088109745e-06, "loss": 0.72, "step": 13632 }, { "epoch": 0.5650047660491525, "grad_norm": 0.38721954822540283, "learning_rate": 2.1751833892826063e-06, "loss": 0.6873, "step": 13633 }, { "epoch": 0.5650462099548261, "grad_norm": 0.4084838926792145, "learning_rate": 2.1749761697542377e-06, "loss": 0.7052, "step": 13634 }, { "epoch": 0.5650876538604999, "grad_norm": 0.4202449321746826, "learning_rate": 2.1747689502258695e-06, "loss": 0.6937, "step": 13635 }, { "epoch": 0.5651290977661735, "grad_norm": 0.42408743500709534, "learning_rate": 2.1745617306975013e-06, "loss": 0.7532, "step": 13636 }, { "epoch": 0.5651705416718471, "grad_norm": 0.43342190980911255, "learning_rate": 2.1743545111691327e-06, "loss": 0.6487, "step": 13637 }, { "epoch": 0.5652119855775208, "grad_norm": 0.40214189887046814, "learning_rate": 2.1741472916407645e-06, "loss": 0.6683, "step": 13638 }, { "epoch": 0.5652534294831945, "grad_norm": 0.43389853835105896, "learning_rate": 2.173940072112396e-06, "loss": 0.6746, "step": 13639 }, { "epoch": 0.5652948733888682, "grad_norm": 0.40097397565841675, "learning_rate": 2.1737328525840277e-06, "loss": 0.6161, "step": 13640 }, { "epoch": 0.5653363172945418, "grad_norm": 0.42159661650657654, "learning_rate": 2.173525633055659e-06, "loss": 0.6465, "step": 13641 }, { 
"epoch": 0.5653777612002155, "grad_norm": 0.3988941013813019, "learning_rate": 2.173318413527291e-06, "loss": 0.6567, "step": 13642 }, { "epoch": 0.5654192051058892, "grad_norm": 0.4075339138507843, "learning_rate": 2.1731111939989227e-06, "loss": 0.6696, "step": 13643 }, { "epoch": 0.5654606490115629, "grad_norm": 0.4084129333496094, "learning_rate": 2.1729039744705545e-06, "loss": 0.6882, "step": 13644 }, { "epoch": 0.5655020929172365, "grad_norm": 0.4367200434207916, "learning_rate": 2.172696754942186e-06, "loss": 0.7271, "step": 13645 }, { "epoch": 0.5655435368229101, "grad_norm": 0.4443969428539276, "learning_rate": 2.1724895354138177e-06, "loss": 0.7247, "step": 13646 }, { "epoch": 0.5655849807285839, "grad_norm": 0.399844765663147, "learning_rate": 2.172282315885449e-06, "loss": 0.6094, "step": 13647 }, { "epoch": 0.5656264246342575, "grad_norm": 0.43936195969581604, "learning_rate": 2.172075096357081e-06, "loss": 0.6501, "step": 13648 }, { "epoch": 0.5656678685399312, "grad_norm": 0.44922569394111633, "learning_rate": 2.1718678768287123e-06, "loss": 0.77, "step": 13649 }, { "epoch": 0.5657093124456049, "grad_norm": 0.403138130903244, "learning_rate": 2.171660657300344e-06, "loss": 0.7223, "step": 13650 }, { "epoch": 0.5657507563512786, "grad_norm": 0.3982759416103363, "learning_rate": 2.171453437771976e-06, "loss": 0.6179, "step": 13651 }, { "epoch": 0.5657922002569522, "grad_norm": 0.47598057985305786, "learning_rate": 2.1712462182436077e-06, "loss": 0.7263, "step": 13652 }, { "epoch": 0.5658336441626259, "grad_norm": 0.40538671612739563, "learning_rate": 2.171038998715239e-06, "loss": 0.6686, "step": 13653 }, { "epoch": 0.5658750880682996, "grad_norm": 0.3769107162952423, "learning_rate": 2.170831779186871e-06, "loss": 0.666, "step": 13654 }, { "epoch": 0.5659165319739732, "grad_norm": 0.4285000264644623, "learning_rate": 2.1706245596585023e-06, "loss": 0.7588, "step": 13655 }, { "epoch": 0.5659579758796469, "grad_norm": 0.42035356163978577, 
"learning_rate": 2.170417340130134e-06, "loss": 0.7063, "step": 13656 }, { "epoch": 0.5659994197853205, "grad_norm": 0.409315288066864, "learning_rate": 2.1702101206017655e-06, "loss": 0.6619, "step": 13657 }, { "epoch": 0.5660408636909943, "grad_norm": 0.41570279002189636, "learning_rate": 2.1700029010733973e-06, "loss": 0.6914, "step": 13658 }, { "epoch": 0.5660823075966679, "grad_norm": 0.4277610182762146, "learning_rate": 2.1697956815450287e-06, "loss": 0.6931, "step": 13659 }, { "epoch": 0.5661237515023416, "grad_norm": 0.45485353469848633, "learning_rate": 2.169588462016661e-06, "loss": 0.7485, "step": 13660 }, { "epoch": 0.5661651954080152, "grad_norm": 0.41995564103126526, "learning_rate": 2.1693812424882923e-06, "loss": 0.6598, "step": 13661 }, { "epoch": 0.566206639313689, "grad_norm": 0.3717178404331207, "learning_rate": 2.169174022959924e-06, "loss": 0.6459, "step": 13662 }, { "epoch": 0.5662480832193626, "grad_norm": 0.45705389976501465, "learning_rate": 2.1689668034315555e-06, "loss": 0.7139, "step": 13663 }, { "epoch": 0.5662895271250362, "grad_norm": 0.39327868819236755, "learning_rate": 2.1687595839031873e-06, "loss": 0.6796, "step": 13664 }, { "epoch": 0.56633097103071, "grad_norm": 0.4053371250629425, "learning_rate": 2.1685523643748187e-06, "loss": 0.7068, "step": 13665 }, { "epoch": 0.5663724149363836, "grad_norm": 0.39311107993125916, "learning_rate": 2.1683451448464505e-06, "loss": 0.666, "step": 13666 }, { "epoch": 0.5664138588420573, "grad_norm": 0.4119820296764374, "learning_rate": 2.168137925318082e-06, "loss": 0.6135, "step": 13667 }, { "epoch": 0.5664553027477309, "grad_norm": 0.42161619663238525, "learning_rate": 2.1679307057897137e-06, "loss": 0.7018, "step": 13668 }, { "epoch": 0.5664967466534047, "grad_norm": 0.40414151549339294, "learning_rate": 2.1677234862613455e-06, "loss": 0.705, "step": 13669 }, { "epoch": 0.5665381905590783, "grad_norm": 0.41660094261169434, "learning_rate": 2.1675162667329773e-06, "loss": 0.698, "step": 
13670 }, { "epoch": 0.566579634464752, "grad_norm": 0.4019080102443695, "learning_rate": 2.1673090472046087e-06, "loss": 0.6747, "step": 13671 }, { "epoch": 0.5666210783704256, "grad_norm": 0.42354291677474976, "learning_rate": 2.1671018276762405e-06, "loss": 0.6637, "step": 13672 }, { "epoch": 0.5666625222760993, "grad_norm": 0.39122623205184937, "learning_rate": 2.166894608147872e-06, "loss": 0.6497, "step": 13673 }, { "epoch": 0.566703966181773, "grad_norm": 0.3951494097709656, "learning_rate": 2.1666873886195037e-06, "loss": 0.6309, "step": 13674 }, { "epoch": 0.5667454100874466, "grad_norm": 0.41508620977401733, "learning_rate": 2.166480169091135e-06, "loss": 0.6573, "step": 13675 }, { "epoch": 0.5667868539931203, "grad_norm": 0.42675304412841797, "learning_rate": 2.166272949562767e-06, "loss": 0.7111, "step": 13676 }, { "epoch": 0.566828297898794, "grad_norm": 0.3958274722099304, "learning_rate": 2.1660657300343987e-06, "loss": 0.6514, "step": 13677 }, { "epoch": 0.5668697418044677, "grad_norm": 0.4230049252510071, "learning_rate": 2.1658585105060305e-06, "loss": 0.671, "step": 13678 }, { "epoch": 0.5669111857101413, "grad_norm": 0.3982155919075012, "learning_rate": 2.165651290977662e-06, "loss": 0.7126, "step": 13679 }, { "epoch": 0.5669526296158149, "grad_norm": 0.39908599853515625, "learning_rate": 2.1654440714492937e-06, "loss": 0.6863, "step": 13680 }, { "epoch": 0.5669940735214887, "grad_norm": 0.38187214732170105, "learning_rate": 2.165236851920925e-06, "loss": 0.6233, "step": 13681 }, { "epoch": 0.5670355174271623, "grad_norm": 0.4256102442741394, "learning_rate": 2.165029632392557e-06, "loss": 0.6489, "step": 13682 }, { "epoch": 0.567076961332836, "grad_norm": 0.40588271617889404, "learning_rate": 2.1648224128641883e-06, "loss": 0.6528, "step": 13683 }, { "epoch": 0.5671184052385096, "grad_norm": 0.459468275308609, "learning_rate": 2.16461519333582e-06, "loss": 0.7408, "step": 13684 }, { "epoch": 0.5671598491441834, "grad_norm": 0.40608805418014526, 
"learning_rate": 2.1644079738074515e-06, "loss": 0.7405, "step": 13685 }, { "epoch": 0.567201293049857, "grad_norm": 0.4137836694717407, "learning_rate": 2.1642007542790837e-06, "loss": 0.6968, "step": 13686 }, { "epoch": 0.5672427369555307, "grad_norm": 0.40074464678764343, "learning_rate": 2.163993534750715e-06, "loss": 0.6981, "step": 13687 }, { "epoch": 0.5672841808612044, "grad_norm": 0.4191500246524811, "learning_rate": 2.163786315222347e-06, "loss": 0.7041, "step": 13688 }, { "epoch": 0.567325624766878, "grad_norm": 0.39591097831726074, "learning_rate": 2.1635790956939783e-06, "loss": 0.6797, "step": 13689 }, { "epoch": 0.5673670686725517, "grad_norm": 0.42156359553337097, "learning_rate": 2.16337187616561e-06, "loss": 0.7107, "step": 13690 }, { "epoch": 0.5674085125782253, "grad_norm": 0.4297883212566376, "learning_rate": 2.1631646566372415e-06, "loss": 0.7062, "step": 13691 }, { "epoch": 0.5674499564838991, "grad_norm": 0.39979955554008484, "learning_rate": 2.1629574371088733e-06, "loss": 0.6594, "step": 13692 }, { "epoch": 0.5674914003895727, "grad_norm": 0.3831852376461029, "learning_rate": 2.1627502175805047e-06, "loss": 0.6766, "step": 13693 }, { "epoch": 0.5675328442952464, "grad_norm": 0.4017987549304962, "learning_rate": 2.162542998052137e-06, "loss": 0.6586, "step": 13694 }, { "epoch": 0.56757428820092, "grad_norm": 0.39799532294273376, "learning_rate": 2.1623357785237683e-06, "loss": 0.6877, "step": 13695 }, { "epoch": 0.5676157321065938, "grad_norm": 0.4323866665363312, "learning_rate": 2.1621285589954e-06, "loss": 0.6799, "step": 13696 }, { "epoch": 0.5676571760122674, "grad_norm": 0.45041754841804504, "learning_rate": 2.1619213394670315e-06, "loss": 0.7537, "step": 13697 }, { "epoch": 0.567698619917941, "grad_norm": 0.4311353862285614, "learning_rate": 2.1617141199386633e-06, "loss": 0.6831, "step": 13698 }, { "epoch": 0.5677400638236147, "grad_norm": 0.391366571187973, "learning_rate": 2.1615069004102947e-06, "loss": 0.6667, "step": 13699 }, { 
"epoch": 0.5677815077292884, "grad_norm": 0.41093045473098755, "learning_rate": 2.1612996808819265e-06, "loss": 0.6666, "step": 13700 }, { "epoch": 0.5678229516349621, "grad_norm": 0.40989986062049866, "learning_rate": 2.161092461353558e-06, "loss": 0.673, "step": 13701 }, { "epoch": 0.5678643955406357, "grad_norm": 0.41437962651252747, "learning_rate": 2.1608852418251897e-06, "loss": 0.6793, "step": 13702 }, { "epoch": 0.5679058394463095, "grad_norm": 0.426816463470459, "learning_rate": 2.1606780222968215e-06, "loss": 0.7134, "step": 13703 }, { "epoch": 0.5679472833519831, "grad_norm": 0.408489853143692, "learning_rate": 2.1604708027684534e-06, "loss": 0.7332, "step": 13704 }, { "epoch": 0.5679887272576568, "grad_norm": 0.39941567182540894, "learning_rate": 2.1602635832400847e-06, "loss": 0.6719, "step": 13705 }, { "epoch": 0.5680301711633304, "grad_norm": 0.4368651509284973, "learning_rate": 2.1600563637117165e-06, "loss": 0.6829, "step": 13706 }, { "epoch": 0.568071615069004, "grad_norm": 0.4045748710632324, "learning_rate": 2.159849144183348e-06, "loss": 0.7046, "step": 13707 }, { "epoch": 0.5681130589746778, "grad_norm": 0.39960777759552, "learning_rate": 2.1596419246549797e-06, "loss": 0.739, "step": 13708 }, { "epoch": 0.5681545028803514, "grad_norm": 0.4345571994781494, "learning_rate": 2.159434705126611e-06, "loss": 0.7012, "step": 13709 }, { "epoch": 0.5681959467860251, "grad_norm": 0.41364234685897827, "learning_rate": 2.159227485598243e-06, "loss": 0.6837, "step": 13710 }, { "epoch": 0.5682373906916988, "grad_norm": 0.4114598035812378, "learning_rate": 2.1590202660698743e-06, "loss": 0.7018, "step": 13711 }, { "epoch": 0.5682788345973725, "grad_norm": 0.40810200572013855, "learning_rate": 2.1588130465415066e-06, "loss": 0.6921, "step": 13712 }, { "epoch": 0.5683202785030461, "grad_norm": 0.41379567980766296, "learning_rate": 2.158605827013138e-06, "loss": 0.6821, "step": 13713 }, { "epoch": 0.5683617224087198, "grad_norm": 0.42374250292778015, 
"learning_rate": 2.1583986074847697e-06, "loss": 0.6943, "step": 13714 }, { "epoch": 0.5684031663143935, "grad_norm": 0.44670814275741577, "learning_rate": 2.158191387956401e-06, "loss": 0.6809, "step": 13715 }, { "epoch": 0.5684446102200671, "grad_norm": 0.40701714158058167, "learning_rate": 2.157984168428033e-06, "loss": 0.6671, "step": 13716 }, { "epoch": 0.5684860541257408, "grad_norm": 0.3853479325771332, "learning_rate": 2.1577769488996643e-06, "loss": 0.7104, "step": 13717 }, { "epoch": 0.5685274980314144, "grad_norm": 0.46662768721580505, "learning_rate": 2.157569729371296e-06, "loss": 0.6747, "step": 13718 }, { "epoch": 0.5685689419370882, "grad_norm": 0.39216598868370056, "learning_rate": 2.1573625098429275e-06, "loss": 0.6648, "step": 13719 }, { "epoch": 0.5686103858427618, "grad_norm": 0.4249568283557892, "learning_rate": 2.1571552903145598e-06, "loss": 0.7056, "step": 13720 }, { "epoch": 0.5686518297484355, "grad_norm": 0.4172024428844452, "learning_rate": 2.156948070786191e-06, "loss": 0.6733, "step": 13721 }, { "epoch": 0.5686932736541092, "grad_norm": 0.4098511338233948, "learning_rate": 2.156740851257823e-06, "loss": 0.6484, "step": 13722 }, { "epoch": 0.5687347175597829, "grad_norm": 0.3991549015045166, "learning_rate": 2.1565336317294543e-06, "loss": 0.6499, "step": 13723 }, { "epoch": 0.5687761614654565, "grad_norm": 0.4105137586593628, "learning_rate": 2.156326412201086e-06, "loss": 0.6633, "step": 13724 }, { "epoch": 0.5688176053711301, "grad_norm": 0.45932409167289734, "learning_rate": 2.1561191926727175e-06, "loss": 0.701, "step": 13725 }, { "epoch": 0.5688590492768039, "grad_norm": 0.41389814019203186, "learning_rate": 2.1559119731443493e-06, "loss": 0.655, "step": 13726 }, { "epoch": 0.5689004931824775, "grad_norm": 0.40950700640678406, "learning_rate": 2.1557047536159807e-06, "loss": 0.681, "step": 13727 }, { "epoch": 0.5689419370881512, "grad_norm": 0.4194731116294861, "learning_rate": 2.1554975340876125e-06, "loss": 0.7085, "step": 
13728 }, { "epoch": 0.5689833809938248, "grad_norm": 0.4191286861896515, "learning_rate": 2.1552903145592443e-06, "loss": 0.6691, "step": 13729 }, { "epoch": 0.5690248248994986, "grad_norm": 0.4002556800842285, "learning_rate": 2.155083095030876e-06, "loss": 0.7159, "step": 13730 }, { "epoch": 0.5690662688051722, "grad_norm": 0.4023125469684601, "learning_rate": 2.1548758755025075e-06, "loss": 0.6554, "step": 13731 }, { "epoch": 0.5691077127108459, "grad_norm": 0.445223867893219, "learning_rate": 2.1546686559741393e-06, "loss": 0.6445, "step": 13732 }, { "epoch": 0.5691491566165195, "grad_norm": 0.4298916459083557, "learning_rate": 2.1544614364457707e-06, "loss": 0.7014, "step": 13733 }, { "epoch": 0.5691906005221932, "grad_norm": 0.42163944244384766, "learning_rate": 2.1542542169174025e-06, "loss": 0.6787, "step": 13734 }, { "epoch": 0.5692320444278669, "grad_norm": 0.41559505462646484, "learning_rate": 2.154046997389034e-06, "loss": 0.6796, "step": 13735 }, { "epoch": 0.5692734883335405, "grad_norm": 0.39828386902809143, "learning_rate": 2.1538397778606657e-06, "loss": 0.728, "step": 13736 }, { "epoch": 0.5693149322392143, "grad_norm": 0.3913366496562958, "learning_rate": 2.153632558332297e-06, "loss": 0.6708, "step": 13737 }, { "epoch": 0.5693563761448879, "grad_norm": 0.4238438904285431, "learning_rate": 2.1534253388039294e-06, "loss": 0.7235, "step": 13738 }, { "epoch": 0.5693978200505616, "grad_norm": 0.42004093527793884, "learning_rate": 2.1532181192755607e-06, "loss": 0.6587, "step": 13739 }, { "epoch": 0.5694392639562352, "grad_norm": 0.41418296098709106, "learning_rate": 2.1530108997471926e-06, "loss": 0.6498, "step": 13740 }, { "epoch": 0.5694807078619089, "grad_norm": 0.3728832006454468, "learning_rate": 2.152803680218824e-06, "loss": 0.5989, "step": 13741 }, { "epoch": 0.5695221517675826, "grad_norm": 0.37687769532203674, "learning_rate": 2.1525964606904557e-06, "loss": 0.6122, "step": 13742 }, { "epoch": 0.5695635956732562, "grad_norm": 
0.4326122999191284, "learning_rate": 2.152389241162087e-06, "loss": 0.6514, "step": 13743 }, { "epoch": 0.5696050395789299, "grad_norm": 0.41060319542884827, "learning_rate": 2.152182021633719e-06, "loss": 0.6667, "step": 13744 }, { "epoch": 0.5696464834846036, "grad_norm": 0.38674196600914, "learning_rate": 2.1519748021053503e-06, "loss": 0.6929, "step": 13745 }, { "epoch": 0.5696879273902773, "grad_norm": 0.4441470801830292, "learning_rate": 2.151767582576982e-06, "loss": 0.6776, "step": 13746 }, { "epoch": 0.5697293712959509, "grad_norm": 0.38636910915374756, "learning_rate": 2.151560363048614e-06, "loss": 0.6777, "step": 13747 }, { "epoch": 0.5697708152016246, "grad_norm": 0.44262778759002686, "learning_rate": 2.1513531435202458e-06, "loss": 0.672, "step": 13748 }, { "epoch": 0.5698122591072983, "grad_norm": 0.4004232883453369, "learning_rate": 2.151145923991877e-06, "loss": 0.6592, "step": 13749 }, { "epoch": 0.5698537030129719, "grad_norm": 0.3744255304336548, "learning_rate": 2.150938704463509e-06, "loss": 0.6207, "step": 13750 }, { "epoch": 0.5698951469186456, "grad_norm": 0.3982994258403778, "learning_rate": 2.1507314849351403e-06, "loss": 0.6565, "step": 13751 }, { "epoch": 0.5699365908243192, "grad_norm": 0.3812541961669922, "learning_rate": 2.150524265406772e-06, "loss": 0.657, "step": 13752 }, { "epoch": 0.569978034729993, "grad_norm": 0.44724100828170776, "learning_rate": 2.1503170458784035e-06, "loss": 0.7054, "step": 13753 }, { "epoch": 0.5700194786356666, "grad_norm": 0.3762550354003906, "learning_rate": 2.1501098263500353e-06, "loss": 0.668, "step": 13754 }, { "epoch": 0.5700609225413403, "grad_norm": 0.3970949947834015, "learning_rate": 2.149902606821667e-06, "loss": 0.6481, "step": 13755 }, { "epoch": 0.570102366447014, "grad_norm": 0.47364190220832825, "learning_rate": 2.149695387293299e-06, "loss": 0.6887, "step": 13756 }, { "epoch": 0.5701438103526877, "grad_norm": 0.41700872778892517, "learning_rate": 2.1494881677649303e-06, "loss": 0.6436, 
"step": 13757 }, { "epoch": 0.5701852542583613, "grad_norm": 0.43467146158218384, "learning_rate": 2.149280948236562e-06, "loss": 0.678, "step": 13758 }, { "epoch": 0.5702266981640349, "grad_norm": 0.38637763261795044, "learning_rate": 2.1490737287081935e-06, "loss": 0.6899, "step": 13759 }, { "epoch": 0.5702681420697087, "grad_norm": 0.43201443552970886, "learning_rate": 2.1488665091798253e-06, "loss": 0.6729, "step": 13760 }, { "epoch": 0.5703095859753823, "grad_norm": 0.4037129282951355, "learning_rate": 2.1486592896514567e-06, "loss": 0.6642, "step": 13761 }, { "epoch": 0.570351029881056, "grad_norm": 0.41157811880111694, "learning_rate": 2.1484520701230885e-06, "loss": 0.6854, "step": 13762 }, { "epoch": 0.5703924737867296, "grad_norm": 0.44422221183776855, "learning_rate": 2.14824485059472e-06, "loss": 0.7046, "step": 13763 }, { "epoch": 0.5704339176924034, "grad_norm": 0.4180927574634552, "learning_rate": 2.148037631066352e-06, "loss": 0.6779, "step": 13764 }, { "epoch": 0.570475361598077, "grad_norm": 0.40046486258506775, "learning_rate": 2.1478304115379835e-06, "loss": 0.6335, "step": 13765 }, { "epoch": 0.5705168055037507, "grad_norm": 0.41191163659095764, "learning_rate": 2.1476231920096154e-06, "loss": 0.6509, "step": 13766 }, { "epoch": 0.5705582494094243, "grad_norm": 0.379047155380249, "learning_rate": 2.1474159724812467e-06, "loss": 0.7106, "step": 13767 }, { "epoch": 0.570599693315098, "grad_norm": 0.39221251010894775, "learning_rate": 2.1472087529528785e-06, "loss": 0.6707, "step": 13768 }, { "epoch": 0.5706411372207717, "grad_norm": 0.4121342599391937, "learning_rate": 2.14700153342451e-06, "loss": 0.6826, "step": 13769 }, { "epoch": 0.5706825811264453, "grad_norm": 0.41001054644584656, "learning_rate": 2.1467943138961417e-06, "loss": 0.6635, "step": 13770 }, { "epoch": 0.570724025032119, "grad_norm": 0.40033721923828125, "learning_rate": 2.146587094367773e-06, "loss": 0.6334, "step": 13771 }, { "epoch": 0.5707654689377927, "grad_norm": 
0.4297398030757904, "learning_rate": 2.146379874839405e-06, "loss": 0.7141, "step": 13772 }, { "epoch": 0.5708069128434664, "grad_norm": 0.4179666042327881, "learning_rate": 2.1461726553110367e-06, "loss": 0.6857, "step": 13773 }, { "epoch": 0.57084835674914, "grad_norm": 0.41863933205604553, "learning_rate": 2.1459654357826686e-06, "loss": 0.6777, "step": 13774 }, { "epoch": 0.5708898006548138, "grad_norm": 0.39949366450309753, "learning_rate": 2.1457582162543e-06, "loss": 0.702, "step": 13775 }, { "epoch": 0.5709312445604874, "grad_norm": 0.4511047601699829, "learning_rate": 2.1455509967259318e-06, "loss": 0.7673, "step": 13776 }, { "epoch": 0.570972688466161, "grad_norm": 0.4202826917171478, "learning_rate": 2.145343777197563e-06, "loss": 0.7074, "step": 13777 }, { "epoch": 0.5710141323718347, "grad_norm": 0.38963907957077026, "learning_rate": 2.145136557669195e-06, "loss": 0.7131, "step": 13778 }, { "epoch": 0.5710555762775084, "grad_norm": 0.400794118642807, "learning_rate": 2.1449293381408263e-06, "loss": 0.6281, "step": 13779 }, { "epoch": 0.5710970201831821, "grad_norm": 0.41980409622192383, "learning_rate": 2.144722118612458e-06, "loss": 0.6696, "step": 13780 }, { "epoch": 0.5711384640888557, "grad_norm": 0.4336474537849426, "learning_rate": 2.14451489908409e-06, "loss": 0.6592, "step": 13781 }, { "epoch": 0.5711799079945294, "grad_norm": 0.41137534379959106, "learning_rate": 2.1443076795557218e-06, "loss": 0.7068, "step": 13782 }, { "epoch": 0.5712213519002031, "grad_norm": 0.4011869430541992, "learning_rate": 2.144100460027353e-06, "loss": 0.6423, "step": 13783 }, { "epoch": 0.5712627958058768, "grad_norm": 0.40804818272590637, "learning_rate": 2.143893240498985e-06, "loss": 0.6698, "step": 13784 }, { "epoch": 0.5713042397115504, "grad_norm": 0.38665348291397095, "learning_rate": 2.1436860209706163e-06, "loss": 0.7109, "step": 13785 }, { "epoch": 0.571345683617224, "grad_norm": 0.4442654252052307, "learning_rate": 2.143478801442248e-06, "loss": 0.6573, 
"step": 13786 }, { "epoch": 0.5713871275228978, "grad_norm": 0.42189088463783264, "learning_rate": 2.1432715819138795e-06, "loss": 0.6826, "step": 13787 }, { "epoch": 0.5714285714285714, "grad_norm": 0.40870416164398193, "learning_rate": 2.1430643623855113e-06, "loss": 0.6851, "step": 13788 }, { "epoch": 0.5714700153342451, "grad_norm": 0.4245125353336334, "learning_rate": 2.1428571428571427e-06, "loss": 0.6802, "step": 13789 }, { "epoch": 0.5715114592399188, "grad_norm": 0.39853158593177795, "learning_rate": 2.142649923328775e-06, "loss": 0.6565, "step": 13790 }, { "epoch": 0.5715529031455925, "grad_norm": 0.41548410058021545, "learning_rate": 2.1424427038004063e-06, "loss": 0.6812, "step": 13791 }, { "epoch": 0.5715943470512661, "grad_norm": 0.45132410526275635, "learning_rate": 2.142235484272038e-06, "loss": 0.7573, "step": 13792 }, { "epoch": 0.5716357909569397, "grad_norm": 0.42637890577316284, "learning_rate": 2.1420282647436695e-06, "loss": 0.687, "step": 13793 }, { "epoch": 0.5716772348626135, "grad_norm": 0.41940370202064514, "learning_rate": 2.1418210452153014e-06, "loss": 0.6538, "step": 13794 }, { "epoch": 0.5717186787682871, "grad_norm": 0.4022725224494934, "learning_rate": 2.1416138256869327e-06, "loss": 0.6652, "step": 13795 }, { "epoch": 0.5717601226739608, "grad_norm": 0.4475555121898651, "learning_rate": 2.1414066061585645e-06, "loss": 0.7107, "step": 13796 }, { "epoch": 0.5718015665796344, "grad_norm": 0.4290507137775421, "learning_rate": 2.141199386630196e-06, "loss": 0.7019, "step": 13797 }, { "epoch": 0.5718430104853082, "grad_norm": 0.4369785189628601, "learning_rate": 2.1409921671018277e-06, "loss": 0.6628, "step": 13798 }, { "epoch": 0.5718844543909818, "grad_norm": 0.4405632019042969, "learning_rate": 2.1407849475734596e-06, "loss": 0.6472, "step": 13799 }, { "epoch": 0.5719258982966555, "grad_norm": 0.4329068958759308, "learning_rate": 2.1405777280450914e-06, "loss": 0.693, "step": 13800 }, { "epoch": 0.5719673422023291, "grad_norm": 
0.4378667175769806, "learning_rate": 2.1403705085167227e-06, "loss": 0.7378, "step": 13801 }, { "epoch": 0.5720087861080028, "grad_norm": 0.4579119086265564, "learning_rate": 2.1401632889883546e-06, "loss": 0.699, "step": 13802 }, { "epoch": 0.5720502300136765, "grad_norm": 0.42940330505371094, "learning_rate": 2.139956069459986e-06, "loss": 0.6899, "step": 13803 }, { "epoch": 0.5720916739193501, "grad_norm": 0.4073435068130493, "learning_rate": 2.1397488499316177e-06, "loss": 0.6431, "step": 13804 }, { "epoch": 0.5721331178250239, "grad_norm": 0.40492361783981323, "learning_rate": 2.139541630403249e-06, "loss": 0.6919, "step": 13805 }, { "epoch": 0.5721745617306975, "grad_norm": 0.39874666929244995, "learning_rate": 2.139334410874881e-06, "loss": 0.6517, "step": 13806 }, { "epoch": 0.5722160056363712, "grad_norm": 0.4402739107608795, "learning_rate": 2.1391271913465123e-06, "loss": 0.707, "step": 13807 }, { "epoch": 0.5722574495420448, "grad_norm": 0.4273405075073242, "learning_rate": 2.1389199718181446e-06, "loss": 0.7058, "step": 13808 }, { "epoch": 0.5722988934477186, "grad_norm": 0.4061214327812195, "learning_rate": 2.138712752289776e-06, "loss": 0.6553, "step": 13809 }, { "epoch": 0.5723403373533922, "grad_norm": 0.3824925422668457, "learning_rate": 2.1385055327614078e-06, "loss": 0.5905, "step": 13810 }, { "epoch": 0.5723817812590658, "grad_norm": 0.4068290889263153, "learning_rate": 2.138298313233039e-06, "loss": 0.6754, "step": 13811 }, { "epoch": 0.5724232251647395, "grad_norm": 0.40650874376296997, "learning_rate": 2.138091093704671e-06, "loss": 0.649, "step": 13812 }, { "epoch": 0.5724646690704132, "grad_norm": 0.40890997648239136, "learning_rate": 2.1378838741763023e-06, "loss": 0.6941, "step": 13813 }, { "epoch": 0.5725061129760869, "grad_norm": 0.44699788093566895, "learning_rate": 2.137676654647934e-06, "loss": 0.7012, "step": 13814 }, { "epoch": 0.5725475568817605, "grad_norm": 0.4492000937461853, "learning_rate": 2.1374694351195655e-06, "loss": 
0.7224, "step": 13815 }, { "epoch": 0.5725890007874342, "grad_norm": 0.4396720230579376, "learning_rate": 2.1372622155911978e-06, "loss": 0.6687, "step": 13816 }, { "epoch": 0.5726304446931079, "grad_norm": 0.42881765961647034, "learning_rate": 2.137054996062829e-06, "loss": 0.6665, "step": 13817 }, { "epoch": 0.5726718885987816, "grad_norm": 0.47013214230537415, "learning_rate": 2.136847776534461e-06, "loss": 0.7179, "step": 13818 }, { "epoch": 0.5727133325044552, "grad_norm": 0.424792617559433, "learning_rate": 2.1366405570060923e-06, "loss": 0.7047, "step": 13819 }, { "epoch": 0.5727547764101288, "grad_norm": 0.40681713819503784, "learning_rate": 2.136433337477724e-06, "loss": 0.6531, "step": 13820 }, { "epoch": 0.5727962203158026, "grad_norm": 0.4198211431503296, "learning_rate": 2.1362261179493555e-06, "loss": 0.7065, "step": 13821 }, { "epoch": 0.5728376642214762, "grad_norm": 0.42295414209365845, "learning_rate": 2.1360188984209874e-06, "loss": 0.6975, "step": 13822 }, { "epoch": 0.5728791081271499, "grad_norm": 0.4182445704936981, "learning_rate": 2.1358116788926187e-06, "loss": 0.7019, "step": 13823 }, { "epoch": 0.5729205520328235, "grad_norm": 0.43964332342147827, "learning_rate": 2.1356044593642505e-06, "loss": 0.7148, "step": 13824 }, { "epoch": 0.5729619959384973, "grad_norm": 0.46299123764038086, "learning_rate": 2.1353972398358824e-06, "loss": 0.72, "step": 13825 }, { "epoch": 0.5730034398441709, "grad_norm": 0.44356223940849304, "learning_rate": 2.135190020307514e-06, "loss": 0.6831, "step": 13826 }, { "epoch": 0.5730448837498446, "grad_norm": 0.3931635320186615, "learning_rate": 2.1349828007791455e-06, "loss": 0.6936, "step": 13827 }, { "epoch": 0.5730863276555183, "grad_norm": 0.4080120325088501, "learning_rate": 2.1347755812507774e-06, "loss": 0.6963, "step": 13828 }, { "epoch": 0.5731277715611919, "grad_norm": 0.411540687084198, "learning_rate": 2.1345683617224087e-06, "loss": 0.6394, "step": 13829 }, { "epoch": 0.5731692154668656, "grad_norm": 
0.40596064925193787, "learning_rate": 2.1343611421940406e-06, "loss": 0.6508, "step": 13830 }, { "epoch": 0.5732106593725392, "grad_norm": 0.3929804265499115, "learning_rate": 2.134153922665672e-06, "loss": 0.6572, "step": 13831 }, { "epoch": 0.573252103278213, "grad_norm": 0.4054909944534302, "learning_rate": 2.1339467031373037e-06, "loss": 0.667, "step": 13832 }, { "epoch": 0.5732935471838866, "grad_norm": 0.4164449870586395, "learning_rate": 2.1337394836089356e-06, "loss": 0.6875, "step": 13833 }, { "epoch": 0.5733349910895603, "grad_norm": 0.4241515100002289, "learning_rate": 2.1335322640805674e-06, "loss": 0.6956, "step": 13834 }, { "epoch": 0.5733764349952339, "grad_norm": 0.39518874883651733, "learning_rate": 2.1333250445521988e-06, "loss": 0.6748, "step": 13835 }, { "epoch": 0.5734178789009077, "grad_norm": 0.438277006149292, "learning_rate": 2.1331178250238306e-06, "loss": 0.7003, "step": 13836 }, { "epoch": 0.5734593228065813, "grad_norm": 0.41692548990249634, "learning_rate": 2.132910605495462e-06, "loss": 0.7333, "step": 13837 }, { "epoch": 0.5735007667122549, "grad_norm": 0.392770379781723, "learning_rate": 2.1327033859670938e-06, "loss": 0.6005, "step": 13838 }, { "epoch": 0.5735422106179286, "grad_norm": 0.4963643550872803, "learning_rate": 2.132496166438725e-06, "loss": 0.7476, "step": 13839 }, { "epoch": 0.5735836545236023, "grad_norm": 0.46543484926223755, "learning_rate": 2.132288946910357e-06, "loss": 0.6995, "step": 13840 }, { "epoch": 0.573625098429276, "grad_norm": 0.42931169271469116, "learning_rate": 2.1320817273819883e-06, "loss": 0.7004, "step": 13841 }, { "epoch": 0.5736665423349496, "grad_norm": 0.442949503660202, "learning_rate": 2.1318745078536206e-06, "loss": 0.7385, "step": 13842 }, { "epoch": 0.5737079862406234, "grad_norm": 0.4072267711162567, "learning_rate": 2.131667288325252e-06, "loss": 0.6479, "step": 13843 }, { "epoch": 0.573749430146297, "grad_norm": 0.4108417332172394, "learning_rate": 2.1314600687968838e-06, "loss": 
0.7329, "step": 13844 }, { "epoch": 0.5737908740519707, "grad_norm": 0.4182269275188446, "learning_rate": 2.131252849268515e-06, "loss": 0.6821, "step": 13845 }, { "epoch": 0.5738323179576443, "grad_norm": 0.4258277714252472, "learning_rate": 2.131045629740147e-06, "loss": 0.6788, "step": 13846 }, { "epoch": 0.573873761863318, "grad_norm": 0.4154553711414337, "learning_rate": 2.1308384102117783e-06, "loss": 0.6749, "step": 13847 }, { "epoch": 0.5739152057689917, "grad_norm": 0.42243313789367676, "learning_rate": 2.13063119068341e-06, "loss": 0.6995, "step": 13848 }, { "epoch": 0.5739566496746653, "grad_norm": 0.430134117603302, "learning_rate": 2.1304239711550415e-06, "loss": 0.6849, "step": 13849 }, { "epoch": 0.573998093580339, "grad_norm": 0.40155595541000366, "learning_rate": 2.1302167516266733e-06, "loss": 0.6785, "step": 13850 }, { "epoch": 0.5740395374860127, "grad_norm": 0.44085079431533813, "learning_rate": 2.130009532098305e-06, "loss": 0.6726, "step": 13851 }, { "epoch": 0.5740809813916864, "grad_norm": 0.41348183155059814, "learning_rate": 2.129802312569937e-06, "loss": 0.7339, "step": 13852 }, { "epoch": 0.57412242529736, "grad_norm": 0.4230993092060089, "learning_rate": 2.1295950930415684e-06, "loss": 0.7393, "step": 13853 }, { "epoch": 0.5741638692030336, "grad_norm": 0.39222705364227295, "learning_rate": 2.1293878735132e-06, "loss": 0.6124, "step": 13854 }, { "epoch": 0.5742053131087074, "grad_norm": 0.4003104865550995, "learning_rate": 2.1291806539848315e-06, "loss": 0.662, "step": 13855 }, { "epoch": 0.574246757014381, "grad_norm": 0.43984320759773254, "learning_rate": 2.1289734344564634e-06, "loss": 0.7003, "step": 13856 }, { "epoch": 0.5742882009200547, "grad_norm": 0.41393032670021057, "learning_rate": 2.1287662149280947e-06, "loss": 0.656, "step": 13857 }, { "epoch": 0.5743296448257283, "grad_norm": 0.40789929032325745, "learning_rate": 2.1285589953997266e-06, "loss": 0.649, "step": 13858 }, { "epoch": 0.5743710887314021, "grad_norm": 
0.4211776554584503, "learning_rate": 2.1283517758713584e-06, "loss": 0.6843, "step": 13859 }, { "epoch": 0.5744125326370757, "grad_norm": 0.4022696018218994, "learning_rate": 2.12814455634299e-06, "loss": 0.6799, "step": 13860 }, { "epoch": 0.5744539765427494, "grad_norm": 0.4194873869419098, "learning_rate": 2.1279373368146216e-06, "loss": 0.7008, "step": 13861 }, { "epoch": 0.574495420448423, "grad_norm": 0.45870786905288696, "learning_rate": 2.1277301172862534e-06, "loss": 0.6429, "step": 13862 }, { "epoch": 0.5745368643540967, "grad_norm": 0.4796798825263977, "learning_rate": 2.1275228977578848e-06, "loss": 0.6804, "step": 13863 }, { "epoch": 0.5745783082597704, "grad_norm": 0.42008277773857117, "learning_rate": 2.1273156782295166e-06, "loss": 0.6816, "step": 13864 }, { "epoch": 0.574619752165444, "grad_norm": 0.5288835167884827, "learning_rate": 2.127108458701148e-06, "loss": 0.7085, "step": 13865 }, { "epoch": 0.5746611960711178, "grad_norm": 0.37532493472099304, "learning_rate": 2.1269012391727798e-06, "loss": 0.655, "step": 13866 }, { "epoch": 0.5747026399767914, "grad_norm": 0.42376279830932617, "learning_rate": 2.1266940196444116e-06, "loss": 0.7367, "step": 13867 }, { "epoch": 0.5747440838824651, "grad_norm": 0.41587236523628235, "learning_rate": 2.1264868001160434e-06, "loss": 0.6924, "step": 13868 }, { "epoch": 0.5747855277881387, "grad_norm": 0.4085468649864197, "learning_rate": 2.1262795805876748e-06, "loss": 0.674, "step": 13869 }, { "epoch": 0.5748269716938125, "grad_norm": 0.42116639018058777, "learning_rate": 2.1260723610593066e-06, "loss": 0.6665, "step": 13870 }, { "epoch": 0.5748684155994861, "grad_norm": 0.40271204710006714, "learning_rate": 2.125865141530938e-06, "loss": 0.6528, "step": 13871 }, { "epoch": 0.5749098595051597, "grad_norm": 0.3918752670288086, "learning_rate": 2.1256579220025698e-06, "loss": 0.6481, "step": 13872 }, { "epoch": 0.5749513034108334, "grad_norm": 0.42959052324295044, "learning_rate": 2.125450702474201e-06, "loss": 
0.6517, "step": 13873 }, { "epoch": 0.5749927473165071, "grad_norm": 0.429431289434433, "learning_rate": 2.125243482945833e-06, "loss": 0.6992, "step": 13874 }, { "epoch": 0.5750341912221808, "grad_norm": 0.4196905195713043, "learning_rate": 2.1250362634174643e-06, "loss": 0.689, "step": 13875 }, { "epoch": 0.5750756351278544, "grad_norm": 0.3998565077781677, "learning_rate": 2.124829043889096e-06, "loss": 0.6993, "step": 13876 }, { "epoch": 0.5751170790335282, "grad_norm": 0.39429956674575806, "learning_rate": 2.124621824360728e-06, "loss": 0.6664, "step": 13877 }, { "epoch": 0.5751585229392018, "grad_norm": 0.40669935941696167, "learning_rate": 2.1244146048323598e-06, "loss": 0.6694, "step": 13878 }, { "epoch": 0.5751999668448755, "grad_norm": 0.41118210554122925, "learning_rate": 2.124207385303991e-06, "loss": 0.6812, "step": 13879 }, { "epoch": 0.5752414107505491, "grad_norm": 0.38151469826698303, "learning_rate": 2.124000165775623e-06, "loss": 0.6501, "step": 13880 }, { "epoch": 0.5752828546562228, "grad_norm": 0.42089179158210754, "learning_rate": 2.1237929462472544e-06, "loss": 0.6371, "step": 13881 }, { "epoch": 0.5753242985618965, "grad_norm": 0.37730225920677185, "learning_rate": 2.123585726718886e-06, "loss": 0.6096, "step": 13882 }, { "epoch": 0.5753657424675701, "grad_norm": 0.39145150780677795, "learning_rate": 2.1233785071905175e-06, "loss": 0.6379, "step": 13883 }, { "epoch": 0.5754071863732438, "grad_norm": 0.38824498653411865, "learning_rate": 2.1231712876621494e-06, "loss": 0.6388, "step": 13884 }, { "epoch": 0.5754486302789175, "grad_norm": 0.4227767884731293, "learning_rate": 2.122964068133781e-06, "loss": 0.7136, "step": 13885 }, { "epoch": 0.5754900741845912, "grad_norm": 0.42485684156417847, "learning_rate": 2.122756848605413e-06, "loss": 0.7224, "step": 13886 }, { "epoch": 0.5755315180902648, "grad_norm": 0.423724502325058, "learning_rate": 2.1225496290770444e-06, "loss": 0.6672, "step": 13887 }, { "epoch": 0.5755729619959385, "grad_norm": 
0.41348734498023987, "learning_rate": 2.122342409548676e-06, "loss": 0.66, "step": 13888 }, { "epoch": 0.5756144059016122, "grad_norm": 0.4288453161716461, "learning_rate": 2.1221351900203076e-06, "loss": 0.6956, "step": 13889 }, { "epoch": 0.5756558498072858, "grad_norm": 0.4332231283187866, "learning_rate": 2.1219279704919394e-06, "loss": 0.6665, "step": 13890 }, { "epoch": 0.5756972937129595, "grad_norm": 0.424470454454422, "learning_rate": 2.1217207509635707e-06, "loss": 0.7085, "step": 13891 }, { "epoch": 0.5757387376186331, "grad_norm": 0.39516445994377136, "learning_rate": 2.1215135314352026e-06, "loss": 0.6473, "step": 13892 }, { "epoch": 0.5757801815243069, "grad_norm": 0.3949078917503357, "learning_rate": 2.1213063119068344e-06, "loss": 0.7131, "step": 13893 }, { "epoch": 0.5758216254299805, "grad_norm": 0.40617242455482483, "learning_rate": 2.1210990923784658e-06, "loss": 0.6826, "step": 13894 }, { "epoch": 0.5758630693356542, "grad_norm": 0.4483875632286072, "learning_rate": 2.1208918728500976e-06, "loss": 0.698, "step": 13895 }, { "epoch": 0.5759045132413279, "grad_norm": 0.40919235348701477, "learning_rate": 2.1206846533217294e-06, "loss": 0.6431, "step": 13896 }, { "epoch": 0.5759459571470016, "grad_norm": 0.40146422386169434, "learning_rate": 2.1204774337933608e-06, "loss": 0.7385, "step": 13897 }, { "epoch": 0.5759874010526752, "grad_norm": 0.39921867847442627, "learning_rate": 2.1202702142649926e-06, "loss": 0.6965, "step": 13898 }, { "epoch": 0.5760288449583488, "grad_norm": 0.36559563875198364, "learning_rate": 2.120062994736624e-06, "loss": 0.6464, "step": 13899 }, { "epoch": 0.5760702888640226, "grad_norm": 0.3731408417224884, "learning_rate": 2.1198557752082558e-06, "loss": 0.6831, "step": 13900 }, { "epoch": 0.5761117327696962, "grad_norm": 0.4240875542163849, "learning_rate": 2.1196485556798876e-06, "loss": 0.7101, "step": 13901 }, { "epoch": 0.5761531766753699, "grad_norm": 0.4716005325317383, "learning_rate": 2.119441336151519e-06, 
"loss": 0.7327, "step": 13902 }, { "epoch": 0.5761946205810435, "grad_norm": 0.442240834236145, "learning_rate": 2.1192341166231508e-06, "loss": 0.7054, "step": 13903 }, { "epoch": 0.5762360644867173, "grad_norm": 0.3637550473213196, "learning_rate": 2.1190268970947826e-06, "loss": 0.6183, "step": 13904 }, { "epoch": 0.5762775083923909, "grad_norm": 0.3845212459564209, "learning_rate": 2.118819677566414e-06, "loss": 0.6786, "step": 13905 }, { "epoch": 0.5763189522980646, "grad_norm": 0.3772569000720978, "learning_rate": 2.1186124580380458e-06, "loss": 0.6543, "step": 13906 }, { "epoch": 0.5763603962037382, "grad_norm": 0.411650151014328, "learning_rate": 2.118405238509677e-06, "loss": 0.7021, "step": 13907 }, { "epoch": 0.5764018401094119, "grad_norm": 0.40500912070274353, "learning_rate": 2.118198018981309e-06, "loss": 0.6726, "step": 13908 }, { "epoch": 0.5764432840150856, "grad_norm": 0.377911776304245, "learning_rate": 2.1179907994529403e-06, "loss": 0.6035, "step": 13909 }, { "epoch": 0.5764847279207592, "grad_norm": 0.38176068663597107, "learning_rate": 2.117783579924572e-06, "loss": 0.7012, "step": 13910 }, { "epoch": 0.576526171826433, "grad_norm": 0.47060608863830566, "learning_rate": 2.117576360396204e-06, "loss": 0.7239, "step": 13911 }, { "epoch": 0.5765676157321066, "grad_norm": 0.3953971862792969, "learning_rate": 2.1173691408678358e-06, "loss": 0.6682, "step": 13912 }, { "epoch": 0.5766090596377803, "grad_norm": 0.40225908160209656, "learning_rate": 2.117161921339467e-06, "loss": 0.6663, "step": 13913 }, { "epoch": 0.5766505035434539, "grad_norm": 0.46893617510795593, "learning_rate": 2.116954701811099e-06, "loss": 0.7341, "step": 13914 }, { "epoch": 0.5766919474491276, "grad_norm": 0.4166678190231323, "learning_rate": 2.1167474822827304e-06, "loss": 0.7495, "step": 13915 }, { "epoch": 0.5767333913548013, "grad_norm": 0.4028873145580292, "learning_rate": 2.116540262754362e-06, "loss": 0.6929, "step": 13916 }, { "epoch": 0.5767748352604749, 
"grad_norm": 0.396801620721817, "learning_rate": 2.1163330432259936e-06, "loss": 0.7092, "step": 13917 }, { "epoch": 0.5768162791661486, "grad_norm": 0.4214009940624237, "learning_rate": 2.1161258236976254e-06, "loss": 0.6917, "step": 13918 }, { "epoch": 0.5768577230718223, "grad_norm": 0.38364648818969727, "learning_rate": 2.115918604169257e-06, "loss": 0.6406, "step": 13919 }, { "epoch": 0.576899166977496, "grad_norm": 0.37897318601608276, "learning_rate": 2.1157113846408886e-06, "loss": 0.6926, "step": 13920 }, { "epoch": 0.5769406108831696, "grad_norm": 0.39252403378486633, "learning_rate": 2.1155041651125204e-06, "loss": 0.6772, "step": 13921 }, { "epoch": 0.5769820547888433, "grad_norm": 0.39853599667549133, "learning_rate": 2.115296945584152e-06, "loss": 0.6996, "step": 13922 }, { "epoch": 0.577023498694517, "grad_norm": 0.41421592235565186, "learning_rate": 2.1150897260557836e-06, "loss": 0.6614, "step": 13923 }, { "epoch": 0.5770649426001906, "grad_norm": 0.414110004901886, "learning_rate": 2.1148825065274154e-06, "loss": 0.7024, "step": 13924 }, { "epoch": 0.5771063865058643, "grad_norm": 0.38315826654434204, "learning_rate": 2.1146752869990468e-06, "loss": 0.6564, "step": 13925 }, { "epoch": 0.5771478304115379, "grad_norm": 0.42277082800865173, "learning_rate": 2.1144680674706786e-06, "loss": 0.6936, "step": 13926 }, { "epoch": 0.5771892743172117, "grad_norm": 0.42121320962905884, "learning_rate": 2.1142608479423104e-06, "loss": 0.6827, "step": 13927 }, { "epoch": 0.5772307182228853, "grad_norm": 0.39676469564437866, "learning_rate": 2.1140536284139418e-06, "loss": 0.6777, "step": 13928 }, { "epoch": 0.577272162128559, "grad_norm": 0.4426287114620209, "learning_rate": 2.1138464088855736e-06, "loss": 0.6866, "step": 13929 }, { "epoch": 0.5773136060342327, "grad_norm": 0.445340096950531, "learning_rate": 2.1136391893572054e-06, "loss": 0.7344, "step": 13930 }, { "epoch": 0.5773550499399064, "grad_norm": 0.4261067509651184, "learning_rate": 
2.1134319698288368e-06, "loss": 0.6808, "step": 13931 }, { "epoch": 0.57739649384558, "grad_norm": 0.4126306176185608, "learning_rate": 2.1132247503004686e-06, "loss": 0.6567, "step": 13932 }, { "epoch": 0.5774379377512536, "grad_norm": 0.40023842453956604, "learning_rate": 2.1130175307721e-06, "loss": 0.6672, "step": 13933 }, { "epoch": 0.5774793816569274, "grad_norm": 0.42071107029914856, "learning_rate": 2.1128103112437318e-06, "loss": 0.6779, "step": 13934 }, { "epoch": 0.577520825562601, "grad_norm": 0.4039819538593292, "learning_rate": 2.1126030917153636e-06, "loss": 0.6672, "step": 13935 }, { "epoch": 0.5775622694682747, "grad_norm": 0.37775951623916626, "learning_rate": 2.112395872186995e-06, "loss": 0.6362, "step": 13936 }, { "epoch": 0.5776037133739483, "grad_norm": 0.392974317073822, "learning_rate": 2.1121886526586268e-06, "loss": 0.7028, "step": 13937 }, { "epoch": 0.5776451572796221, "grad_norm": 0.40517041087150574, "learning_rate": 2.1119814331302586e-06, "loss": 0.6847, "step": 13938 }, { "epoch": 0.5776866011852957, "grad_norm": 0.42223483324050903, "learning_rate": 2.11177421360189e-06, "loss": 0.6447, "step": 13939 }, { "epoch": 0.5777280450909694, "grad_norm": 0.41729047894477844, "learning_rate": 2.1115669940735218e-06, "loss": 0.6838, "step": 13940 }, { "epoch": 0.577769488996643, "grad_norm": 0.37146613001823425, "learning_rate": 2.111359774545153e-06, "loss": 0.6379, "step": 13941 }, { "epoch": 0.5778109329023167, "grad_norm": 0.4196239113807678, "learning_rate": 2.111152555016785e-06, "loss": 0.7222, "step": 13942 }, { "epoch": 0.5778523768079904, "grad_norm": 0.4621763527393341, "learning_rate": 2.1109453354884168e-06, "loss": 0.7058, "step": 13943 }, { "epoch": 0.577893820713664, "grad_norm": 0.4035604000091553, "learning_rate": 2.110738115960048e-06, "loss": 0.6885, "step": 13944 }, { "epoch": 0.5779352646193378, "grad_norm": 0.4347308576107025, "learning_rate": 2.11053089643168e-06, "loss": 0.6709, "step": 13945 }, { "epoch": 
0.5779767085250114, "grad_norm": 0.40753793716430664, "learning_rate": 2.1103236769033114e-06, "loss": 0.6503, "step": 13946 }, { "epoch": 0.5780181524306851, "grad_norm": 0.4089842140674591, "learning_rate": 2.110116457374943e-06, "loss": 0.6884, "step": 13947 }, { "epoch": 0.5780595963363587, "grad_norm": 0.3859905004501343, "learning_rate": 2.109909237846575e-06, "loss": 0.7175, "step": 13948 }, { "epoch": 0.5781010402420325, "grad_norm": 0.39515092968940735, "learning_rate": 2.1097020183182064e-06, "loss": 0.673, "step": 13949 }, { "epoch": 0.5781424841477061, "grad_norm": 0.3759388029575348, "learning_rate": 2.109494798789838e-06, "loss": 0.6719, "step": 13950 }, { "epoch": 0.5781839280533797, "grad_norm": 0.39402976632118225, "learning_rate": 2.1092875792614696e-06, "loss": 0.7001, "step": 13951 }, { "epoch": 0.5782253719590534, "grad_norm": 0.44954344630241394, "learning_rate": 2.1090803597331014e-06, "loss": 0.7028, "step": 13952 }, { "epoch": 0.5782668158647271, "grad_norm": 0.41271165013313293, "learning_rate": 2.108873140204733e-06, "loss": 0.7, "step": 13953 }, { "epoch": 0.5783082597704008, "grad_norm": 0.4078815281391144, "learning_rate": 2.1086659206763646e-06, "loss": 0.6899, "step": 13954 }, { "epoch": 0.5783497036760744, "grad_norm": 0.41494059562683105, "learning_rate": 2.1084587011479964e-06, "loss": 0.6602, "step": 13955 }, { "epoch": 0.5783911475817481, "grad_norm": 0.41329336166381836, "learning_rate": 2.108251481619628e-06, "loss": 0.7163, "step": 13956 }, { "epoch": 0.5784325914874218, "grad_norm": 0.4477458894252777, "learning_rate": 2.1080442620912596e-06, "loss": 0.658, "step": 13957 }, { "epoch": 0.5784740353930955, "grad_norm": 0.41085171699523926, "learning_rate": 2.1078370425628914e-06, "loss": 0.6288, "step": 13958 }, { "epoch": 0.5785154792987691, "grad_norm": 0.4114709794521332, "learning_rate": 2.1076298230345228e-06, "loss": 0.6348, "step": 13959 }, { "epoch": 0.5785569232044427, "grad_norm": 0.41593390703201294, 
"learning_rate": 2.1074226035061546e-06, "loss": 0.7344, "step": 13960 }, { "epoch": 0.5785983671101165, "grad_norm": 0.4081556797027588, "learning_rate": 2.1072153839777864e-06, "loss": 0.6809, "step": 13961 }, { "epoch": 0.5786398110157901, "grad_norm": 0.4164953827857971, "learning_rate": 2.1070081644494178e-06, "loss": 0.7029, "step": 13962 }, { "epoch": 0.5786812549214638, "grad_norm": 0.39169254899024963, "learning_rate": 2.1068009449210496e-06, "loss": 0.6732, "step": 13963 }, { "epoch": 0.5787226988271374, "grad_norm": 0.43679386377334595, "learning_rate": 2.1065937253926814e-06, "loss": 0.658, "step": 13964 }, { "epoch": 0.5787641427328112, "grad_norm": 0.3995024561882019, "learning_rate": 2.1063865058643128e-06, "loss": 0.6558, "step": 13965 }, { "epoch": 0.5788055866384848, "grad_norm": 0.39450815320014954, "learning_rate": 2.1061792863359446e-06, "loss": 0.6512, "step": 13966 }, { "epoch": 0.5788470305441585, "grad_norm": 0.39770016074180603, "learning_rate": 2.105972066807576e-06, "loss": 0.644, "step": 13967 }, { "epoch": 0.5788884744498322, "grad_norm": 0.4254592955112457, "learning_rate": 2.1057648472792078e-06, "loss": 0.661, "step": 13968 }, { "epoch": 0.5789299183555058, "grad_norm": 0.4329374432563782, "learning_rate": 2.1055576277508396e-06, "loss": 0.7463, "step": 13969 }, { "epoch": 0.5789713622611795, "grad_norm": 0.411597341299057, "learning_rate": 2.105350408222471e-06, "loss": 0.6606, "step": 13970 }, { "epoch": 0.5790128061668531, "grad_norm": 0.4047931432723999, "learning_rate": 2.1051431886941028e-06, "loss": 0.7437, "step": 13971 }, { "epoch": 0.5790542500725269, "grad_norm": 0.43294963240623474, "learning_rate": 2.104935969165734e-06, "loss": 0.7292, "step": 13972 }, { "epoch": 0.5790956939782005, "grad_norm": 0.37362104654312134, "learning_rate": 2.104728749637366e-06, "loss": 0.636, "step": 13973 }, { "epoch": 0.5791371378838742, "grad_norm": 0.4424699544906616, "learning_rate": 2.1045215301089978e-06, "loss": 0.6877, "step": 13974 
}, { "epoch": 0.5791785817895478, "grad_norm": 0.42981305718421936, "learning_rate": 2.104314310580629e-06, "loss": 0.6904, "step": 13975 }, { "epoch": 0.5792200256952215, "grad_norm": 0.44261765480041504, "learning_rate": 2.104107091052261e-06, "loss": 0.661, "step": 13976 }, { "epoch": 0.5792614696008952, "grad_norm": 0.3819520175457001, "learning_rate": 2.1038998715238928e-06, "loss": 0.6655, "step": 13977 }, { "epoch": 0.5793029135065688, "grad_norm": 0.41112220287323, "learning_rate": 2.103692651995524e-06, "loss": 0.6726, "step": 13978 }, { "epoch": 0.5793443574122425, "grad_norm": 0.4099596440792084, "learning_rate": 2.103485432467156e-06, "loss": 0.6781, "step": 13979 }, { "epoch": 0.5793858013179162, "grad_norm": 0.42493852972984314, "learning_rate": 2.1032782129387874e-06, "loss": 0.7344, "step": 13980 }, { "epoch": 0.5794272452235899, "grad_norm": 0.414516419172287, "learning_rate": 2.103070993410419e-06, "loss": 0.7002, "step": 13981 }, { "epoch": 0.5794686891292635, "grad_norm": 0.4188615381717682, "learning_rate": 2.102863773882051e-06, "loss": 0.7136, "step": 13982 }, { "epoch": 0.5795101330349373, "grad_norm": 0.4281579256057739, "learning_rate": 2.1026565543536824e-06, "loss": 0.639, "step": 13983 }, { "epoch": 0.5795515769406109, "grad_norm": 0.39307263493537903, "learning_rate": 2.102449334825314e-06, "loss": 0.6086, "step": 13984 }, { "epoch": 0.5795930208462845, "grad_norm": 0.4173050820827484, "learning_rate": 2.1022421152969456e-06, "loss": 0.707, "step": 13985 }, { "epoch": 0.5796344647519582, "grad_norm": 0.41683340072631836, "learning_rate": 2.1020348957685774e-06, "loss": 0.6954, "step": 13986 }, { "epoch": 0.5796759086576319, "grad_norm": 0.41456449031829834, "learning_rate": 2.101827676240209e-06, "loss": 0.6599, "step": 13987 }, { "epoch": 0.5797173525633056, "grad_norm": 0.43093299865722656, "learning_rate": 2.1016204567118406e-06, "loss": 0.6927, "step": 13988 }, { "epoch": 0.5797587964689792, "grad_norm": 0.3923068940639496, 
"learning_rate": 2.1014132371834724e-06, "loss": 0.6962, "step": 13989 }, { "epoch": 0.5798002403746529, "grad_norm": 0.4108723998069763, "learning_rate": 2.101206017655104e-06, "loss": 0.6964, "step": 13990 }, { "epoch": 0.5798416842803266, "grad_norm": 0.3935836851596832, "learning_rate": 2.1009987981267356e-06, "loss": 0.7115, "step": 13991 }, { "epoch": 0.5798831281860003, "grad_norm": 0.46721214056015015, "learning_rate": 2.1007915785983674e-06, "loss": 0.7024, "step": 13992 }, { "epoch": 0.5799245720916739, "grad_norm": 0.4238993525505066, "learning_rate": 2.1005843590699988e-06, "loss": 0.6588, "step": 13993 }, { "epoch": 0.5799660159973475, "grad_norm": 0.4162410795688629, "learning_rate": 2.1003771395416306e-06, "loss": 0.6853, "step": 13994 }, { "epoch": 0.5800074599030213, "grad_norm": 0.4014846086502075, "learning_rate": 2.1001699200132624e-06, "loss": 0.7139, "step": 13995 }, { "epoch": 0.5800489038086949, "grad_norm": 0.3958783745765686, "learning_rate": 2.0999627004848938e-06, "loss": 0.6814, "step": 13996 }, { "epoch": 0.5800903477143686, "grad_norm": 0.42397916316986084, "learning_rate": 2.0997554809565256e-06, "loss": 0.6693, "step": 13997 }, { "epoch": 0.5801317916200422, "grad_norm": 0.4234289228916168, "learning_rate": 2.099548261428157e-06, "loss": 0.6841, "step": 13998 }, { "epoch": 0.580173235525716, "grad_norm": 0.4102562665939331, "learning_rate": 2.0993410418997888e-06, "loss": 0.661, "step": 13999 }, { "epoch": 0.5802146794313896, "grad_norm": 0.4328835904598236, "learning_rate": 2.0991338223714206e-06, "loss": 0.6965, "step": 14000 }, { "epoch": 0.5802561233370633, "grad_norm": 0.40245530009269714, "learning_rate": 2.098926602843052e-06, "loss": 0.668, "step": 14001 }, { "epoch": 0.580297567242737, "grad_norm": 0.4202079474925995, "learning_rate": 2.0987193833146838e-06, "loss": 0.6792, "step": 14002 }, { "epoch": 0.5803390111484106, "grad_norm": 0.4056127071380615, "learning_rate": 2.0985121637863156e-06, "loss": 0.6166, "step": 14003 
}, { "epoch": 0.5803804550540843, "grad_norm": 0.3893952965736389, "learning_rate": 2.098304944257947e-06, "loss": 0.7041, "step": 14004 }, { "epoch": 0.5804218989597579, "grad_norm": 0.4257561266422272, "learning_rate": 2.0980977247295788e-06, "loss": 0.7095, "step": 14005 }, { "epoch": 0.5804633428654317, "grad_norm": 0.40364140272140503, "learning_rate": 2.09789050520121e-06, "loss": 0.6917, "step": 14006 }, { "epoch": 0.5805047867711053, "grad_norm": 0.42216676473617554, "learning_rate": 2.097683285672842e-06, "loss": 0.6649, "step": 14007 }, { "epoch": 0.580546230676779, "grad_norm": 0.42590832710266113, "learning_rate": 2.0974760661444738e-06, "loss": 0.6851, "step": 14008 }, { "epoch": 0.5805876745824526, "grad_norm": 0.43560436367988586, "learning_rate": 2.097268846616105e-06, "loss": 0.6949, "step": 14009 }, { "epoch": 0.5806291184881264, "grad_norm": 0.46034926176071167, "learning_rate": 2.097061627087737e-06, "loss": 0.7258, "step": 14010 }, { "epoch": 0.5806705623938, "grad_norm": 0.39238816499710083, "learning_rate": 2.096854407559369e-06, "loss": 0.6702, "step": 14011 }, { "epoch": 0.5807120062994736, "grad_norm": 0.458793967962265, "learning_rate": 2.096647188031e-06, "loss": 0.6923, "step": 14012 }, { "epoch": 0.5807534502051473, "grad_norm": 0.44276076555252075, "learning_rate": 2.096439968502632e-06, "loss": 0.7288, "step": 14013 }, { "epoch": 0.580794894110821, "grad_norm": 0.43984249234199524, "learning_rate": 2.0962327489742634e-06, "loss": 0.6885, "step": 14014 }, { "epoch": 0.5808363380164947, "grad_norm": 0.4300515353679657, "learning_rate": 2.096025529445895e-06, "loss": 0.6884, "step": 14015 }, { "epoch": 0.5808777819221683, "grad_norm": 0.38118037581443787, "learning_rate": 2.0958183099175266e-06, "loss": 0.6621, "step": 14016 }, { "epoch": 0.5809192258278421, "grad_norm": 0.4052985906600952, "learning_rate": 2.0956110903891584e-06, "loss": 0.6935, "step": 14017 }, { "epoch": 0.5809606697335157, "grad_norm": 0.4254544973373413, 
"learning_rate": 2.09540387086079e-06, "loss": 0.7144, "step": 14018 }, { "epoch": 0.5810021136391894, "grad_norm": 0.43935781717300415, "learning_rate": 2.095196651332422e-06, "loss": 0.646, "step": 14019 }, { "epoch": 0.581043557544863, "grad_norm": 0.4152074456214905, "learning_rate": 2.0949894318040534e-06, "loss": 0.6792, "step": 14020 }, { "epoch": 0.5810850014505367, "grad_norm": 0.4134312570095062, "learning_rate": 2.094782212275685e-06, "loss": 0.7014, "step": 14021 }, { "epoch": 0.5811264453562104, "grad_norm": 0.41419047117233276, "learning_rate": 2.0945749927473166e-06, "loss": 0.6827, "step": 14022 }, { "epoch": 0.581167889261884, "grad_norm": 0.4122260808944702, "learning_rate": 2.0943677732189484e-06, "loss": 0.6721, "step": 14023 }, { "epoch": 0.5812093331675577, "grad_norm": 0.43012627959251404, "learning_rate": 2.0941605536905798e-06, "loss": 0.7273, "step": 14024 }, { "epoch": 0.5812507770732314, "grad_norm": 0.4375002086162567, "learning_rate": 2.0939533341622116e-06, "loss": 0.7246, "step": 14025 }, { "epoch": 0.5812922209789051, "grad_norm": 0.426711767911911, "learning_rate": 2.0937461146338434e-06, "loss": 0.6973, "step": 14026 }, { "epoch": 0.5813336648845787, "grad_norm": 0.4303346872329712, "learning_rate": 2.0935388951054748e-06, "loss": 0.6963, "step": 14027 }, { "epoch": 0.5813751087902524, "grad_norm": 0.4281289279460907, "learning_rate": 2.0933316755771066e-06, "loss": 0.709, "step": 14028 }, { "epoch": 0.5814165526959261, "grad_norm": 0.40809744596481323, "learning_rate": 2.0931244560487384e-06, "loss": 0.6946, "step": 14029 }, { "epoch": 0.5814579966015997, "grad_norm": 0.41791069507598877, "learning_rate": 2.0929172365203698e-06, "loss": 0.6626, "step": 14030 }, { "epoch": 0.5814994405072734, "grad_norm": 0.3951282799243927, "learning_rate": 2.0927100169920016e-06, "loss": 0.636, "step": 14031 }, { "epoch": 0.581540884412947, "grad_norm": 0.4155261814594269, "learning_rate": 2.092502797463633e-06, "loss": 0.6512, "step": 14032 }, 
{ "epoch": 0.5815823283186208, "grad_norm": 0.41479218006134033, "learning_rate": 2.0922955779352648e-06, "loss": 0.6729, "step": 14033 }, { "epoch": 0.5816237722242944, "grad_norm": 0.44920557737350464, "learning_rate": 2.0920883584068966e-06, "loss": 0.647, "step": 14034 }, { "epoch": 0.5816652161299681, "grad_norm": 0.40076035261154175, "learning_rate": 2.091881138878528e-06, "loss": 0.7493, "step": 14035 }, { "epoch": 0.5817066600356418, "grad_norm": 0.3887951076030731, "learning_rate": 2.0916739193501598e-06, "loss": 0.6597, "step": 14036 }, { "epoch": 0.5817481039413154, "grad_norm": 0.377981573343277, "learning_rate": 2.0914666998217916e-06, "loss": 0.6577, "step": 14037 }, { "epoch": 0.5817895478469891, "grad_norm": 0.3933688700199127, "learning_rate": 2.091259480293423e-06, "loss": 0.6609, "step": 14038 }, { "epoch": 0.5818309917526627, "grad_norm": 0.4089418947696686, "learning_rate": 2.091052260765055e-06, "loss": 0.696, "step": 14039 }, { "epoch": 0.5818724356583365, "grad_norm": 0.44607532024383545, "learning_rate": 2.090845041236686e-06, "loss": 0.7231, "step": 14040 }, { "epoch": 0.5819138795640101, "grad_norm": 0.40787380933761597, "learning_rate": 2.090637821708318e-06, "loss": 0.7097, "step": 14041 }, { "epoch": 0.5819553234696838, "grad_norm": 0.3894723653793335, "learning_rate": 2.0904306021799494e-06, "loss": 0.7095, "step": 14042 }, { "epoch": 0.5819967673753574, "grad_norm": 0.412477046251297, "learning_rate": 2.090223382651581e-06, "loss": 0.6914, "step": 14043 }, { "epoch": 0.5820382112810312, "grad_norm": 0.4269634187221527, "learning_rate": 2.090016163123213e-06, "loss": 0.7019, "step": 14044 }, { "epoch": 0.5820796551867048, "grad_norm": 0.39182087779045105, "learning_rate": 2.089808943594845e-06, "loss": 0.6982, "step": 14045 }, { "epoch": 0.5821210990923784, "grad_norm": 0.421166330575943, "learning_rate": 2.089601724066476e-06, "loss": 0.6863, "step": 14046 }, { "epoch": 0.5821625429980521, "grad_norm": 0.3929734528064728, 
"learning_rate": 2.089394504538108e-06, "loss": 0.675, "step": 14047 }, { "epoch": 0.5822039869037258, "grad_norm": 0.38887181878089905, "learning_rate": 2.0891872850097394e-06, "loss": 0.6443, "step": 14048 }, { "epoch": 0.5822454308093995, "grad_norm": 0.36460423469543457, "learning_rate": 2.088980065481371e-06, "loss": 0.6357, "step": 14049 }, { "epoch": 0.5822868747150731, "grad_norm": 0.40347644686698914, "learning_rate": 2.0887728459530026e-06, "loss": 0.6467, "step": 14050 }, { "epoch": 0.5823283186207469, "grad_norm": 0.4432804584503174, "learning_rate": 2.0885656264246344e-06, "loss": 0.6707, "step": 14051 }, { "epoch": 0.5823697625264205, "grad_norm": 0.45212048292160034, "learning_rate": 2.088358406896266e-06, "loss": 0.6661, "step": 14052 }, { "epoch": 0.5824112064320942, "grad_norm": 0.4463343024253845, "learning_rate": 2.088151187367898e-06, "loss": 0.6898, "step": 14053 }, { "epoch": 0.5824526503377678, "grad_norm": 0.391155868768692, "learning_rate": 2.0879439678395294e-06, "loss": 0.6729, "step": 14054 }, { "epoch": 0.5824940942434415, "grad_norm": 0.4303790032863617, "learning_rate": 2.087736748311161e-06, "loss": 0.709, "step": 14055 }, { "epoch": 0.5825355381491152, "grad_norm": 0.41908955574035645, "learning_rate": 2.0875295287827926e-06, "loss": 0.7419, "step": 14056 }, { "epoch": 0.5825769820547888, "grad_norm": 0.3932686150074005, "learning_rate": 2.0873223092544244e-06, "loss": 0.6476, "step": 14057 }, { "epoch": 0.5826184259604625, "grad_norm": 0.4087761640548706, "learning_rate": 2.0871150897260558e-06, "loss": 0.6523, "step": 14058 }, { "epoch": 0.5826598698661362, "grad_norm": 0.392632395029068, "learning_rate": 2.0869078701976876e-06, "loss": 0.6705, "step": 14059 }, { "epoch": 0.5827013137718099, "grad_norm": 0.43129754066467285, "learning_rate": 2.0867006506693194e-06, "loss": 0.7214, "step": 14060 }, { "epoch": 0.5827427576774835, "grad_norm": 0.4126093089580536, "learning_rate": 2.0864934311409508e-06, "loss": 0.6641, "step": 14061 
}, { "epoch": 0.5827842015831572, "grad_norm": 0.42566001415252686, "learning_rate": 2.0862862116125826e-06, "loss": 0.6702, "step": 14062 }, { "epoch": 0.5828256454888309, "grad_norm": 0.4286690950393677, "learning_rate": 2.0860789920842144e-06, "loss": 0.6797, "step": 14063 }, { "epoch": 0.5828670893945045, "grad_norm": 0.39859017729759216, "learning_rate": 2.0858717725558458e-06, "loss": 0.5922, "step": 14064 }, { "epoch": 0.5829085333001782, "grad_norm": 0.4190301299095154, "learning_rate": 2.0856645530274776e-06, "loss": 0.665, "step": 14065 }, { "epoch": 0.5829499772058518, "grad_norm": 0.39661139249801636, "learning_rate": 2.085457333499109e-06, "loss": 0.6641, "step": 14066 }, { "epoch": 0.5829914211115256, "grad_norm": 0.42437002062797546, "learning_rate": 2.0852501139707408e-06, "loss": 0.6414, "step": 14067 }, { "epoch": 0.5830328650171992, "grad_norm": 0.4082057476043701, "learning_rate": 2.085042894442372e-06, "loss": 0.7206, "step": 14068 }, { "epoch": 0.5830743089228729, "grad_norm": 0.39763063192367554, "learning_rate": 2.084835674914004e-06, "loss": 0.6653, "step": 14069 }, { "epoch": 0.5831157528285466, "grad_norm": 0.4525987505912781, "learning_rate": 2.084628455385636e-06, "loss": 0.7134, "step": 14070 }, { "epoch": 0.5831571967342203, "grad_norm": 0.4134485423564911, "learning_rate": 2.0844212358572676e-06, "loss": 0.6985, "step": 14071 }, { "epoch": 0.5831986406398939, "grad_norm": 0.4152374565601349, "learning_rate": 2.084214016328899e-06, "loss": 0.7048, "step": 14072 }, { "epoch": 0.5832400845455675, "grad_norm": 0.4049505293369293, "learning_rate": 2.084006796800531e-06, "loss": 0.7151, "step": 14073 }, { "epoch": 0.5832815284512413, "grad_norm": 0.43901824951171875, "learning_rate": 2.083799577272162e-06, "loss": 0.6777, "step": 14074 }, { "epoch": 0.5833229723569149, "grad_norm": 0.4292544424533844, "learning_rate": 2.083592357743794e-06, "loss": 0.7249, "step": 14075 }, { "epoch": 0.5833644162625886, "grad_norm": 0.4330682158470154, 
"learning_rate": 2.0833851382154254e-06, "loss": 0.696, "step": 14076 }, { "epoch": 0.5834058601682622, "grad_norm": 0.4338514506816864, "learning_rate": 2.083177918687057e-06, "loss": 0.6658, "step": 14077 }, { "epoch": 0.583447304073936, "grad_norm": 0.4213811755180359, "learning_rate": 2.082970699158689e-06, "loss": 0.6731, "step": 14078 }, { "epoch": 0.5834887479796096, "grad_norm": 0.41014501452445984, "learning_rate": 2.082763479630321e-06, "loss": 0.703, "step": 14079 }, { "epoch": 0.5835301918852833, "grad_norm": 0.41408291459083557, "learning_rate": 2.082556260101952e-06, "loss": 0.7018, "step": 14080 }, { "epoch": 0.5835716357909569, "grad_norm": 0.40027403831481934, "learning_rate": 2.082349040573584e-06, "loss": 0.7004, "step": 14081 }, { "epoch": 0.5836130796966306, "grad_norm": 0.4236155152320862, "learning_rate": 2.0821418210452154e-06, "loss": 0.662, "step": 14082 }, { "epoch": 0.5836545236023043, "grad_norm": 0.46726009249687195, "learning_rate": 2.081934601516847e-06, "loss": 0.7764, "step": 14083 }, { "epoch": 0.5836959675079779, "grad_norm": 0.3876621425151825, "learning_rate": 2.0817273819884786e-06, "loss": 0.6401, "step": 14084 }, { "epoch": 0.5837374114136517, "grad_norm": 0.4253304898738861, "learning_rate": 2.0815201624601104e-06, "loss": 0.6727, "step": 14085 }, { "epoch": 0.5837788553193253, "grad_norm": 0.40640705823898315, "learning_rate": 2.081312942931742e-06, "loss": 0.688, "step": 14086 }, { "epoch": 0.583820299224999, "grad_norm": 0.37806129455566406, "learning_rate": 2.081105723403374e-06, "loss": 0.7046, "step": 14087 }, { "epoch": 0.5838617431306726, "grad_norm": 0.4474796950817108, "learning_rate": 2.0808985038750054e-06, "loss": 0.7021, "step": 14088 }, { "epoch": 0.5839031870363464, "grad_norm": 0.42185741662979126, "learning_rate": 2.080691284346637e-06, "loss": 0.7468, "step": 14089 }, { "epoch": 0.58394463094202, "grad_norm": 0.4272300899028778, "learning_rate": 2.0804840648182686e-06, "loss": 0.707, "step": 14090 }, { 
"epoch": 0.5839860748476936, "grad_norm": 0.4150744378566742, "learning_rate": 2.0802768452899004e-06, "loss": 0.6898, "step": 14091 }, { "epoch": 0.5840275187533673, "grad_norm": 0.42361462116241455, "learning_rate": 2.0800696257615318e-06, "loss": 0.6718, "step": 14092 }, { "epoch": 0.584068962659041, "grad_norm": 0.4008960723876953, "learning_rate": 2.0798624062331636e-06, "loss": 0.7017, "step": 14093 }, { "epoch": 0.5841104065647147, "grad_norm": 0.4131130576133728, "learning_rate": 2.079655186704795e-06, "loss": 0.6704, "step": 14094 }, { "epoch": 0.5841518504703883, "grad_norm": 0.4030848741531372, "learning_rate": 2.0794479671764268e-06, "loss": 0.6887, "step": 14095 }, { "epoch": 0.584193294376062, "grad_norm": 0.44151321053504944, "learning_rate": 2.0792407476480586e-06, "loss": 0.7126, "step": 14096 }, { "epoch": 0.5842347382817357, "grad_norm": 0.4266393482685089, "learning_rate": 2.0790335281196904e-06, "loss": 0.7125, "step": 14097 }, { "epoch": 0.5842761821874093, "grad_norm": 0.39625313878059387, "learning_rate": 2.078826308591322e-06, "loss": 0.6713, "step": 14098 }, { "epoch": 0.584317626093083, "grad_norm": 0.4193464517593384, "learning_rate": 2.0786190890629536e-06, "loss": 0.6829, "step": 14099 }, { "epoch": 0.5843590699987566, "grad_norm": 0.38401955366134644, "learning_rate": 2.078411869534585e-06, "loss": 0.6537, "step": 14100 }, { "epoch": 0.5844005139044304, "grad_norm": 0.41671890020370483, "learning_rate": 2.078204650006217e-06, "loss": 0.7051, "step": 14101 }, { "epoch": 0.584441957810104, "grad_norm": 0.4694873094558716, "learning_rate": 2.077997430477848e-06, "loss": 0.7837, "step": 14102 }, { "epoch": 0.5844834017157777, "grad_norm": 0.39645621180534363, "learning_rate": 2.07779021094948e-06, "loss": 0.6787, "step": 14103 }, { "epoch": 0.5845248456214513, "grad_norm": 0.43636074662208557, "learning_rate": 2.077582991421112e-06, "loss": 0.6642, "step": 14104 }, { "epoch": 0.5845662895271251, "grad_norm": 0.4149452745914459, 
"learning_rate": 2.0773757718927436e-06, "loss": 0.7064, "step": 14105 }, { "epoch": 0.5846077334327987, "grad_norm": 0.3590863347053528, "learning_rate": 2.077168552364375e-06, "loss": 0.6582, "step": 14106 }, { "epoch": 0.5846491773384723, "grad_norm": 0.3875824213027954, "learning_rate": 2.076961332836007e-06, "loss": 0.6342, "step": 14107 }, { "epoch": 0.5846906212441461, "grad_norm": 0.3891555964946747, "learning_rate": 2.076754113307638e-06, "loss": 0.6814, "step": 14108 }, { "epoch": 0.5847320651498197, "grad_norm": 0.4402729868888855, "learning_rate": 2.07654689377927e-06, "loss": 0.73, "step": 14109 }, { "epoch": 0.5847735090554934, "grad_norm": 0.4177851378917694, "learning_rate": 2.0763396742509014e-06, "loss": 0.6543, "step": 14110 }, { "epoch": 0.584814952961167, "grad_norm": 0.4017607569694519, "learning_rate": 2.076132454722533e-06, "loss": 0.6793, "step": 14111 }, { "epoch": 0.5848563968668408, "grad_norm": 0.39510414004325867, "learning_rate": 2.075925235194165e-06, "loss": 0.6372, "step": 14112 }, { "epoch": 0.5848978407725144, "grad_norm": 0.39579302072525024, "learning_rate": 2.075718015665797e-06, "loss": 0.6782, "step": 14113 }, { "epoch": 0.5849392846781881, "grad_norm": 0.43034523725509644, "learning_rate": 2.075510796137428e-06, "loss": 0.6582, "step": 14114 }, { "epoch": 0.5849807285838617, "grad_norm": 0.39743393659591675, "learning_rate": 2.07530357660906e-06, "loss": 0.6073, "step": 14115 }, { "epoch": 0.5850221724895354, "grad_norm": 0.45189177989959717, "learning_rate": 2.0750963570806914e-06, "loss": 0.7058, "step": 14116 }, { "epoch": 0.5850636163952091, "grad_norm": 0.4064171612262726, "learning_rate": 2.074889137552323e-06, "loss": 0.6562, "step": 14117 }, { "epoch": 0.5851050603008827, "grad_norm": 0.37119773030281067, "learning_rate": 2.0746819180239546e-06, "loss": 0.6084, "step": 14118 }, { "epoch": 0.5851465042065565, "grad_norm": 0.40333500504493713, "learning_rate": 2.0744746984955864e-06, "loss": 0.6833, "step": 14119 }, { 
"epoch": 0.5851879481122301, "grad_norm": 0.4300459623336792, "learning_rate": 2.0742674789672178e-06, "loss": 0.6809, "step": 14120 }, { "epoch": 0.5852293920179038, "grad_norm": 0.3845449388027191, "learning_rate": 2.07406025943885e-06, "loss": 0.6061, "step": 14121 }, { "epoch": 0.5852708359235774, "grad_norm": 0.37670525908470154, "learning_rate": 2.0738530399104814e-06, "loss": 0.6179, "step": 14122 }, { "epoch": 0.5853122798292512, "grad_norm": 0.3845716118812561, "learning_rate": 2.073645820382113e-06, "loss": 0.6604, "step": 14123 }, { "epoch": 0.5853537237349248, "grad_norm": 0.4287910759449005, "learning_rate": 2.0734386008537446e-06, "loss": 0.6799, "step": 14124 }, { "epoch": 0.5853951676405984, "grad_norm": 0.41032832860946655, "learning_rate": 2.0732313813253764e-06, "loss": 0.6327, "step": 14125 }, { "epoch": 0.5854366115462721, "grad_norm": 0.4158497750759125, "learning_rate": 2.0730241617970078e-06, "loss": 0.6594, "step": 14126 }, { "epoch": 0.5854780554519458, "grad_norm": 0.4010414481163025, "learning_rate": 2.0728169422686396e-06, "loss": 0.6772, "step": 14127 }, { "epoch": 0.5855194993576195, "grad_norm": 0.4181547462940216, "learning_rate": 2.072609722740271e-06, "loss": 0.6838, "step": 14128 }, { "epoch": 0.5855609432632931, "grad_norm": 0.4231301248073578, "learning_rate": 2.072402503211903e-06, "loss": 0.6812, "step": 14129 }, { "epoch": 0.5856023871689668, "grad_norm": 0.39424529671669006, "learning_rate": 2.0721952836835346e-06, "loss": 0.6697, "step": 14130 }, { "epoch": 0.5856438310746405, "grad_norm": 0.3838825821876526, "learning_rate": 2.0719880641551664e-06, "loss": 0.6622, "step": 14131 }, { "epoch": 0.5856852749803142, "grad_norm": 0.42421436309814453, "learning_rate": 2.071780844626798e-06, "loss": 0.6764, "step": 14132 }, { "epoch": 0.5857267188859878, "grad_norm": 0.4287389814853668, "learning_rate": 2.0715736250984296e-06, "loss": 0.7095, "step": 14133 }, { "epoch": 0.5857681627916614, "grad_norm": 0.4140169620513916, 
"learning_rate": 2.071366405570061e-06, "loss": 0.6687, "step": 14134 }, { "epoch": 0.5858096066973352, "grad_norm": 0.4443388283252716, "learning_rate": 2.071159186041693e-06, "loss": 0.6719, "step": 14135 }, { "epoch": 0.5858510506030088, "grad_norm": 0.4401494562625885, "learning_rate": 2.070951966513324e-06, "loss": 0.7019, "step": 14136 }, { "epoch": 0.5858924945086825, "grad_norm": 0.4076954424381256, "learning_rate": 2.070744746984956e-06, "loss": 0.7429, "step": 14137 }, { "epoch": 0.5859339384143561, "grad_norm": 0.37651047110557556, "learning_rate": 2.070537527456588e-06, "loss": 0.6368, "step": 14138 }, { "epoch": 0.5859753823200299, "grad_norm": 0.39002805948257446, "learning_rate": 2.0703303079282196e-06, "loss": 0.6721, "step": 14139 }, { "epoch": 0.5860168262257035, "grad_norm": 0.4409191608428955, "learning_rate": 2.070123088399851e-06, "loss": 0.6902, "step": 14140 }, { "epoch": 0.5860582701313772, "grad_norm": 0.4271860718727112, "learning_rate": 2.069915868871483e-06, "loss": 0.6837, "step": 14141 }, { "epoch": 0.5860997140370509, "grad_norm": 0.42550286650657654, "learning_rate": 2.069708649343114e-06, "loss": 0.6964, "step": 14142 }, { "epoch": 0.5861411579427245, "grad_norm": 0.4464608430862427, "learning_rate": 2.069501429814746e-06, "loss": 0.6812, "step": 14143 }, { "epoch": 0.5861826018483982, "grad_norm": 0.3930616080760956, "learning_rate": 2.0692942102863774e-06, "loss": 0.6404, "step": 14144 }, { "epoch": 0.5862240457540718, "grad_norm": 0.39899328351020813, "learning_rate": 2.069086990758009e-06, "loss": 0.7302, "step": 14145 }, { "epoch": 0.5862654896597456, "grad_norm": 0.43091467022895813, "learning_rate": 2.0688797712296406e-06, "loss": 0.6946, "step": 14146 }, { "epoch": 0.5863069335654192, "grad_norm": 0.4289466142654419, "learning_rate": 2.068672551701273e-06, "loss": 0.6608, "step": 14147 }, { "epoch": 0.5863483774710929, "grad_norm": 0.4070889949798584, "learning_rate": 2.068465332172904e-06, "loss": 0.6678, "step": 14148 }, 
{ "epoch": 0.5863898213767665, "grad_norm": 0.3978257477283478, "learning_rate": 2.068258112644536e-06, "loss": 0.6858, "step": 14149 }, { "epoch": 0.5864312652824402, "grad_norm": 0.388560950756073, "learning_rate": 2.0680508931161674e-06, "loss": 0.7251, "step": 14150 }, { "epoch": 0.5864727091881139, "grad_norm": 0.4146293103694916, "learning_rate": 2.067843673587799e-06, "loss": 0.7009, "step": 14151 }, { "epoch": 0.5865141530937875, "grad_norm": 0.41662558913230896, "learning_rate": 2.0676364540594306e-06, "loss": 0.6726, "step": 14152 }, { "epoch": 0.5865555969994612, "grad_norm": 0.423968106508255, "learning_rate": 2.0674292345310624e-06, "loss": 0.6919, "step": 14153 }, { "epoch": 0.5865970409051349, "grad_norm": 0.46350449323654175, "learning_rate": 2.0672220150026938e-06, "loss": 0.6415, "step": 14154 }, { "epoch": 0.5866384848108086, "grad_norm": 0.41528844833374023, "learning_rate": 2.0670147954743256e-06, "loss": 0.6743, "step": 14155 }, { "epoch": 0.5866799287164822, "grad_norm": 0.4210805296897888, "learning_rate": 2.0668075759459574e-06, "loss": 0.6669, "step": 14156 }, { "epoch": 0.586721372622156, "grad_norm": 0.44741693139076233, "learning_rate": 2.0666003564175892e-06, "loss": 0.6649, "step": 14157 }, { "epoch": 0.5867628165278296, "grad_norm": 0.3907623887062073, "learning_rate": 2.0663931368892206e-06, "loss": 0.6217, "step": 14158 }, { "epoch": 0.5868042604335032, "grad_norm": 0.4113163948059082, "learning_rate": 2.0661859173608524e-06, "loss": 0.6947, "step": 14159 }, { "epoch": 0.5868457043391769, "grad_norm": 0.3909599184989929, "learning_rate": 2.065978697832484e-06, "loss": 0.6433, "step": 14160 }, { "epoch": 0.5868871482448506, "grad_norm": 0.4082099199295044, "learning_rate": 2.0657714783041156e-06, "loss": 0.6646, "step": 14161 }, { "epoch": 0.5869285921505243, "grad_norm": 0.4048808813095093, "learning_rate": 2.065564258775747e-06, "loss": 0.6425, "step": 14162 }, { "epoch": 0.5869700360561979, "grad_norm": 0.4205514192581177, 
"learning_rate": 2.065357039247379e-06, "loss": 0.6774, "step": 14163 }, { "epoch": 0.5870114799618716, "grad_norm": 0.42061248421669006, "learning_rate": 2.06514981971901e-06, "loss": 0.7256, "step": 14164 }, { "epoch": 0.5870529238675453, "grad_norm": 0.4397643506526947, "learning_rate": 2.0649426001906424e-06, "loss": 0.7102, "step": 14165 }, { "epoch": 0.587094367773219, "grad_norm": 0.41509339213371277, "learning_rate": 2.064735380662274e-06, "loss": 0.7024, "step": 14166 }, { "epoch": 0.5871358116788926, "grad_norm": 0.41387051343917847, "learning_rate": 2.0645281611339056e-06, "loss": 0.668, "step": 14167 }, { "epoch": 0.5871772555845662, "grad_norm": 0.40030667185783386, "learning_rate": 2.064320941605537e-06, "loss": 0.6914, "step": 14168 }, { "epoch": 0.58721869949024, "grad_norm": 0.4140687882900238, "learning_rate": 2.064113722077169e-06, "loss": 0.6741, "step": 14169 }, { "epoch": 0.5872601433959136, "grad_norm": 0.40622541308403015, "learning_rate": 2.0639065025488e-06, "loss": 0.6731, "step": 14170 }, { "epoch": 0.5873015873015873, "grad_norm": 0.41607508063316345, "learning_rate": 2.063699283020432e-06, "loss": 0.6686, "step": 14171 }, { "epoch": 0.587343031207261, "grad_norm": 0.38662946224212646, "learning_rate": 2.0634920634920634e-06, "loss": 0.668, "step": 14172 }, { "epoch": 0.5873844751129347, "grad_norm": 0.44469600915908813, "learning_rate": 2.0632848439636956e-06, "loss": 0.7322, "step": 14173 }, { "epoch": 0.5874259190186083, "grad_norm": 0.40012022852897644, "learning_rate": 2.063077624435327e-06, "loss": 0.6555, "step": 14174 }, { "epoch": 0.587467362924282, "grad_norm": 0.4459618926048279, "learning_rate": 2.062870404906959e-06, "loss": 0.7169, "step": 14175 }, { "epoch": 0.5875088068299557, "grad_norm": 0.42265668511390686, "learning_rate": 2.06266318537859e-06, "loss": 0.6715, "step": 14176 }, { "epoch": 0.5875502507356293, "grad_norm": 0.40955111384391785, "learning_rate": 2.062455965850222e-06, "loss": 0.6659, "step": 14177 }, { 
"epoch": 0.587591694641303, "grad_norm": 0.39265507459640503, "learning_rate": 2.0622487463218534e-06, "loss": 0.6783, "step": 14178 }, { "epoch": 0.5876331385469766, "grad_norm": 0.4169926643371582, "learning_rate": 2.062041526793485e-06, "loss": 0.6649, "step": 14179 }, { "epoch": 0.5876745824526504, "grad_norm": 0.3616180419921875, "learning_rate": 2.0618343072651166e-06, "loss": 0.6393, "step": 14180 }, { "epoch": 0.587716026358324, "grad_norm": 0.3996827304363251, "learning_rate": 2.0616270877367484e-06, "loss": 0.6549, "step": 14181 }, { "epoch": 0.5877574702639977, "grad_norm": 0.4011535048484802, "learning_rate": 2.06141986820838e-06, "loss": 0.7051, "step": 14182 }, { "epoch": 0.5877989141696713, "grad_norm": 0.44790586829185486, "learning_rate": 2.061212648680012e-06, "loss": 0.7067, "step": 14183 }, { "epoch": 0.5878403580753451, "grad_norm": 0.4208473861217499, "learning_rate": 2.0610054291516434e-06, "loss": 0.6904, "step": 14184 }, { "epoch": 0.5878818019810187, "grad_norm": 0.43749651312828064, "learning_rate": 2.060798209623275e-06, "loss": 0.7058, "step": 14185 }, { "epoch": 0.5879232458866923, "grad_norm": 0.48676642775535583, "learning_rate": 2.0605909900949066e-06, "loss": 0.7285, "step": 14186 }, { "epoch": 0.587964689792366, "grad_norm": 0.4017510414123535, "learning_rate": 2.0603837705665384e-06, "loss": 0.6812, "step": 14187 }, { "epoch": 0.5880061336980397, "grad_norm": 0.42195677757263184, "learning_rate": 2.06017655103817e-06, "loss": 0.6521, "step": 14188 }, { "epoch": 0.5880475776037134, "grad_norm": 0.416572242975235, "learning_rate": 2.0599693315098016e-06, "loss": 0.6793, "step": 14189 }, { "epoch": 0.588089021509387, "grad_norm": 0.3998049199581146, "learning_rate": 2.059762111981433e-06, "loss": 0.717, "step": 14190 }, { "epoch": 0.5881304654150608, "grad_norm": 0.4125073254108429, "learning_rate": 2.0595548924530652e-06, "loss": 0.6288, "step": 14191 }, { "epoch": 0.5881719093207344, "grad_norm": 0.39791011810302734, 
"learning_rate": 2.0593476729246966e-06, "loss": 0.7048, "step": 14192 }, { "epoch": 0.5882133532264081, "grad_norm": 0.43792593479156494, "learning_rate": 2.0591404533963284e-06, "loss": 0.6848, "step": 14193 }, { "epoch": 0.5882547971320817, "grad_norm": 0.4008736312389374, "learning_rate": 2.05893323386796e-06, "loss": 0.6805, "step": 14194 }, { "epoch": 0.5882962410377554, "grad_norm": 0.43492022156715393, "learning_rate": 2.0587260143395916e-06, "loss": 0.7092, "step": 14195 }, { "epoch": 0.5883376849434291, "grad_norm": 0.41053488850593567, "learning_rate": 2.058518794811223e-06, "loss": 0.6882, "step": 14196 }, { "epoch": 0.5883791288491027, "grad_norm": 0.4040428400039673, "learning_rate": 2.058311575282855e-06, "loss": 0.7021, "step": 14197 }, { "epoch": 0.5884205727547764, "grad_norm": 0.3852170705795288, "learning_rate": 2.058104355754486e-06, "loss": 0.6099, "step": 14198 }, { "epoch": 0.5884620166604501, "grad_norm": 0.42070117592811584, "learning_rate": 2.0578971362261184e-06, "loss": 0.6958, "step": 14199 }, { "epoch": 0.5885034605661238, "grad_norm": 0.38195353746414185, "learning_rate": 2.05768991669775e-06, "loss": 0.6316, "step": 14200 }, { "epoch": 0.5885449044717974, "grad_norm": 0.4075537919998169, "learning_rate": 2.0574826971693816e-06, "loss": 0.688, "step": 14201 }, { "epoch": 0.5885863483774711, "grad_norm": 0.418608158826828, "learning_rate": 2.057275477641013e-06, "loss": 0.7302, "step": 14202 }, { "epoch": 0.5886277922831448, "grad_norm": 0.42793625593185425, "learning_rate": 2.057068258112645e-06, "loss": 0.7595, "step": 14203 }, { "epoch": 0.5886692361888184, "grad_norm": 0.5078359842300415, "learning_rate": 2.056861038584276e-06, "loss": 0.6714, "step": 14204 }, { "epoch": 0.5887106800944921, "grad_norm": 0.4163070619106293, "learning_rate": 2.056653819055908e-06, "loss": 0.6855, "step": 14205 }, { "epoch": 0.5887521240001657, "grad_norm": 0.4410068988800049, "learning_rate": 2.0564465995275394e-06, "loss": 0.6711, "step": 14206 }, 
{ "epoch": 0.5887935679058395, "grad_norm": 0.4828309714794159, "learning_rate": 2.056239379999171e-06, "loss": 0.6816, "step": 14207 }, { "epoch": 0.5888350118115131, "grad_norm": 0.4332621991634369, "learning_rate": 2.056032160470803e-06, "loss": 0.739, "step": 14208 }, { "epoch": 0.5888764557171868, "grad_norm": 0.3900606036186218, "learning_rate": 2.055824940942435e-06, "loss": 0.6449, "step": 14209 }, { "epoch": 0.5889178996228605, "grad_norm": 0.39068275690078735, "learning_rate": 2.055617721414066e-06, "loss": 0.6165, "step": 14210 }, { "epoch": 0.5889593435285341, "grad_norm": 0.39331457018852234, "learning_rate": 2.055410501885698e-06, "loss": 0.7122, "step": 14211 }, { "epoch": 0.5890007874342078, "grad_norm": 0.43049222230911255, "learning_rate": 2.0552032823573294e-06, "loss": 0.6737, "step": 14212 }, { "epoch": 0.5890422313398814, "grad_norm": 0.4973275661468506, "learning_rate": 2.054996062828961e-06, "loss": 0.6823, "step": 14213 }, { "epoch": 0.5890836752455552, "grad_norm": 0.4248104393482208, "learning_rate": 2.0547888433005926e-06, "loss": 0.6973, "step": 14214 }, { "epoch": 0.5891251191512288, "grad_norm": 0.4152061343193054, "learning_rate": 2.0545816237722244e-06, "loss": 0.6758, "step": 14215 }, { "epoch": 0.5891665630569025, "grad_norm": 0.42476075887680054, "learning_rate": 2.054374404243856e-06, "loss": 0.7295, "step": 14216 }, { "epoch": 0.5892080069625761, "grad_norm": 0.43457522988319397, "learning_rate": 2.054167184715488e-06, "loss": 0.7045, "step": 14217 }, { "epoch": 0.5892494508682499, "grad_norm": 0.43032944202423096, "learning_rate": 2.0539599651871194e-06, "loss": 0.6405, "step": 14218 }, { "epoch": 0.5892908947739235, "grad_norm": 0.4209657907485962, "learning_rate": 2.0537527456587512e-06, "loss": 0.6852, "step": 14219 }, { "epoch": 0.5893323386795971, "grad_norm": 0.4044792950153351, "learning_rate": 2.0535455261303826e-06, "loss": 0.6681, "step": 14220 }, { "epoch": 0.5893737825852708, "grad_norm": 0.4333532452583313, 
"learning_rate": 2.0533383066020144e-06, "loss": 0.7072, "step": 14221 }, { "epoch": 0.5894152264909445, "grad_norm": 0.3966088593006134, "learning_rate": 2.053131087073646e-06, "loss": 0.6648, "step": 14222 }, { "epoch": 0.5894566703966182, "grad_norm": 0.4488344192504883, "learning_rate": 2.0529238675452776e-06, "loss": 0.7272, "step": 14223 }, { "epoch": 0.5894981143022918, "grad_norm": 0.40928444266319275, "learning_rate": 2.052716648016909e-06, "loss": 0.6787, "step": 14224 }, { "epoch": 0.5895395582079656, "grad_norm": 0.39797958731651306, "learning_rate": 2.052509428488541e-06, "loss": 0.6606, "step": 14225 }, { "epoch": 0.5895810021136392, "grad_norm": 0.4518476724624634, "learning_rate": 2.0523022089601726e-06, "loss": 0.7538, "step": 14226 }, { "epoch": 0.5896224460193129, "grad_norm": 0.3876931071281433, "learning_rate": 2.0520949894318044e-06, "loss": 0.632, "step": 14227 }, { "epoch": 0.5896638899249865, "grad_norm": 0.40297946333885193, "learning_rate": 2.051887769903436e-06, "loss": 0.6755, "step": 14228 }, { "epoch": 0.5897053338306601, "grad_norm": 0.38023218512535095, "learning_rate": 2.0516805503750676e-06, "loss": 0.6863, "step": 14229 }, { "epoch": 0.5897467777363339, "grad_norm": 0.3974701762199402, "learning_rate": 2.051473330846699e-06, "loss": 0.665, "step": 14230 }, { "epoch": 0.5897882216420075, "grad_norm": 0.3757016062736511, "learning_rate": 2.051266111318331e-06, "loss": 0.6733, "step": 14231 }, { "epoch": 0.5898296655476812, "grad_norm": 0.42480897903442383, "learning_rate": 2.051058891789962e-06, "loss": 0.6477, "step": 14232 }, { "epoch": 0.5898711094533549, "grad_norm": 0.3969673216342926, "learning_rate": 2.050851672261594e-06, "loss": 0.6265, "step": 14233 }, { "epoch": 0.5899125533590286, "grad_norm": 0.4334273934364319, "learning_rate": 2.050644452733226e-06, "loss": 0.6522, "step": 14234 }, { "epoch": 0.5899539972647022, "grad_norm": 0.38328373432159424, "learning_rate": 2.0504372332048576e-06, "loss": 0.6353, "step": 14235 
}, { "epoch": 0.589995441170376, "grad_norm": 0.4175913631916046, "learning_rate": 2.050230013676489e-06, "loss": 0.6569, "step": 14236 }, { "epoch": 0.5900368850760496, "grad_norm": 0.4121232032775879, "learning_rate": 2.050022794148121e-06, "loss": 0.6967, "step": 14237 }, { "epoch": 0.5900783289817232, "grad_norm": 0.4246392548084259, "learning_rate": 2.049815574619752e-06, "loss": 0.6494, "step": 14238 }, { "epoch": 0.5901197728873969, "grad_norm": 0.4085242748260498, "learning_rate": 2.049608355091384e-06, "loss": 0.6484, "step": 14239 }, { "epoch": 0.5901612167930705, "grad_norm": 0.3751453757286072, "learning_rate": 2.0494011355630154e-06, "loss": 0.6704, "step": 14240 }, { "epoch": 0.5902026606987443, "grad_norm": 0.4172389507293701, "learning_rate": 2.049193916034647e-06, "loss": 0.6914, "step": 14241 }, { "epoch": 0.5902441046044179, "grad_norm": 0.38432854413986206, "learning_rate": 2.0489866965062786e-06, "loss": 0.6768, "step": 14242 }, { "epoch": 0.5902855485100916, "grad_norm": 0.4108058214187622, "learning_rate": 2.048779476977911e-06, "loss": 0.6688, "step": 14243 }, { "epoch": 0.5903269924157653, "grad_norm": 0.3812786340713501, "learning_rate": 2.048572257449542e-06, "loss": 0.6836, "step": 14244 }, { "epoch": 0.590368436321439, "grad_norm": 0.40980976819992065, "learning_rate": 2.048365037921174e-06, "loss": 0.6814, "step": 14245 }, { "epoch": 0.5904098802271126, "grad_norm": 0.42524954676628113, "learning_rate": 2.0481578183928054e-06, "loss": 0.6675, "step": 14246 }, { "epoch": 0.5904513241327862, "grad_norm": 0.38327327370643616, "learning_rate": 2.0479505988644372e-06, "loss": 0.6255, "step": 14247 }, { "epoch": 0.59049276803846, "grad_norm": 0.4526336193084717, "learning_rate": 2.0477433793360686e-06, "loss": 0.699, "step": 14248 }, { "epoch": 0.5905342119441336, "grad_norm": 0.39851605892181396, "learning_rate": 2.0475361598077004e-06, "loss": 0.6843, "step": 14249 }, { "epoch": 0.5905756558498073, "grad_norm": 0.42890194058418274, 
"learning_rate": 2.047328940279332e-06, "loss": 0.6862, "step": 14250 }, { "epoch": 0.5906170997554809, "grad_norm": 0.4282785952091217, "learning_rate": 2.0471217207509636e-06, "loss": 0.7461, "step": 14251 }, { "epoch": 0.5906585436611547, "grad_norm": 0.38665252923965454, "learning_rate": 2.0469145012225954e-06, "loss": 0.641, "step": 14252 }, { "epoch": 0.5906999875668283, "grad_norm": 0.43085816502571106, "learning_rate": 2.0467072816942272e-06, "loss": 0.6613, "step": 14253 }, { "epoch": 0.590741431472502, "grad_norm": 0.4050680994987488, "learning_rate": 2.0465000621658586e-06, "loss": 0.6207, "step": 14254 }, { "epoch": 0.5907828753781756, "grad_norm": 0.4099087119102478, "learning_rate": 2.0462928426374904e-06, "loss": 0.6528, "step": 14255 }, { "epoch": 0.5908243192838493, "grad_norm": 0.4211459457874298, "learning_rate": 2.046085623109122e-06, "loss": 0.6531, "step": 14256 }, { "epoch": 0.590865763189523, "grad_norm": 0.4365153908729553, "learning_rate": 2.0458784035807536e-06, "loss": 0.6667, "step": 14257 }, { "epoch": 0.5909072070951966, "grad_norm": 0.42215296626091003, "learning_rate": 2.045671184052385e-06, "loss": 0.6401, "step": 14258 }, { "epoch": 0.5909486510008704, "grad_norm": 0.4347688555717468, "learning_rate": 2.045463964524017e-06, "loss": 0.7175, "step": 14259 }, { "epoch": 0.590990094906544, "grad_norm": 0.39373746514320374, "learning_rate": 2.0452567449956486e-06, "loss": 0.6174, "step": 14260 }, { "epoch": 0.5910315388122177, "grad_norm": 0.4205906093120575, "learning_rate": 2.0450495254672804e-06, "loss": 0.6654, "step": 14261 }, { "epoch": 0.5910729827178913, "grad_norm": 0.44716185331344604, "learning_rate": 2.044842305938912e-06, "loss": 0.7771, "step": 14262 }, { "epoch": 0.5911144266235651, "grad_norm": 0.3970456123352051, "learning_rate": 2.0446350864105436e-06, "loss": 0.5822, "step": 14263 }, { "epoch": 0.5911558705292387, "grad_norm": 0.4157412052154541, "learning_rate": 2.044427866882175e-06, "loss": 0.7002, "step": 14264 
}, { "epoch": 0.5911973144349123, "grad_norm": 0.42289999127388, "learning_rate": 2.044220647353807e-06, "loss": 0.6569, "step": 14265 }, { "epoch": 0.591238758340586, "grad_norm": 0.3884695768356323, "learning_rate": 2.044013427825438e-06, "loss": 0.6506, "step": 14266 }, { "epoch": 0.5912802022462597, "grad_norm": 0.4208281636238098, "learning_rate": 2.04380620829707e-06, "loss": 0.6958, "step": 14267 }, { "epoch": 0.5913216461519334, "grad_norm": 0.41270431876182556, "learning_rate": 2.043598988768702e-06, "loss": 0.6796, "step": 14268 }, { "epoch": 0.591363090057607, "grad_norm": 0.37933364510536194, "learning_rate": 2.0433917692403336e-06, "loss": 0.6858, "step": 14269 }, { "epoch": 0.5914045339632807, "grad_norm": 0.4286758601665497, "learning_rate": 2.043184549711965e-06, "loss": 0.6788, "step": 14270 }, { "epoch": 0.5914459778689544, "grad_norm": 0.39950719475746155, "learning_rate": 2.042977330183597e-06, "loss": 0.6906, "step": 14271 }, { "epoch": 0.591487421774628, "grad_norm": 0.41800302267074585, "learning_rate": 2.042770110655228e-06, "loss": 0.6768, "step": 14272 }, { "epoch": 0.5915288656803017, "grad_norm": 0.4239901006221771, "learning_rate": 2.04256289112686e-06, "loss": 0.6614, "step": 14273 }, { "epoch": 0.5915703095859753, "grad_norm": 0.40485909581184387, "learning_rate": 2.0423556715984914e-06, "loss": 0.6812, "step": 14274 }, { "epoch": 0.5916117534916491, "grad_norm": 0.379594624042511, "learning_rate": 2.0421484520701232e-06, "loss": 0.6519, "step": 14275 }, { "epoch": 0.5916531973973227, "grad_norm": 0.42538881301879883, "learning_rate": 2.0419412325417546e-06, "loss": 0.6735, "step": 14276 }, { "epoch": 0.5916946413029964, "grad_norm": 0.37060701847076416, "learning_rate": 2.0417340130133864e-06, "loss": 0.5886, "step": 14277 }, { "epoch": 0.59173608520867, "grad_norm": 0.427942156791687, "learning_rate": 2.0415267934850182e-06, "loss": 0.6492, "step": 14278 }, { "epoch": 0.5917775291143438, "grad_norm": 0.42558640241622925, 
"learning_rate": 2.04131957395665e-06, "loss": 0.6973, "step": 14279 }, { "epoch": 0.5918189730200174, "grad_norm": 0.42972731590270996, "learning_rate": 2.0411123544282814e-06, "loss": 0.6775, "step": 14280 }, { "epoch": 0.591860416925691, "grad_norm": 0.40738192200660706, "learning_rate": 2.0409051348999132e-06, "loss": 0.7073, "step": 14281 }, { "epoch": 0.5919018608313648, "grad_norm": 0.405779629945755, "learning_rate": 2.0406979153715446e-06, "loss": 0.6504, "step": 14282 }, { "epoch": 0.5919433047370384, "grad_norm": 0.4060389995574951, "learning_rate": 2.0404906958431764e-06, "loss": 0.6484, "step": 14283 }, { "epoch": 0.5919847486427121, "grad_norm": 0.4020824134349823, "learning_rate": 2.040283476314808e-06, "loss": 0.6729, "step": 14284 }, { "epoch": 0.5920261925483857, "grad_norm": 0.4039686620235443, "learning_rate": 2.0400762567864396e-06, "loss": 0.7025, "step": 14285 }, { "epoch": 0.5920676364540595, "grad_norm": 0.3836396634578705, "learning_rate": 2.0398690372580714e-06, "loss": 0.6731, "step": 14286 }, { "epoch": 0.5921090803597331, "grad_norm": 0.3903673589229584, "learning_rate": 2.0396618177297032e-06, "loss": 0.6946, "step": 14287 }, { "epoch": 0.5921505242654068, "grad_norm": 0.4101863503456116, "learning_rate": 2.0394545982013346e-06, "loss": 0.7, "step": 14288 }, { "epoch": 0.5921919681710804, "grad_norm": 0.4006079435348511, "learning_rate": 2.0392473786729664e-06, "loss": 0.6334, "step": 14289 }, { "epoch": 0.5922334120767541, "grad_norm": 0.43651461601257324, "learning_rate": 2.039040159144598e-06, "loss": 0.7045, "step": 14290 }, { "epoch": 0.5922748559824278, "grad_norm": 0.39669862389564514, "learning_rate": 2.0388329396162296e-06, "loss": 0.7063, "step": 14291 }, { "epoch": 0.5923162998881014, "grad_norm": 0.3962988555431366, "learning_rate": 2.038625720087861e-06, "loss": 0.699, "step": 14292 }, { "epoch": 0.5923577437937751, "grad_norm": 0.4120485186576843, "learning_rate": 2.038418500559493e-06, "loss": 0.6752, "step": 14293 }, { 
"epoch": 0.5923991876994488, "grad_norm": 0.39418941736221313, "learning_rate": 2.0382112810311246e-06, "loss": 0.6908, "step": 14294 }, { "epoch": 0.5924406316051225, "grad_norm": 0.4442867934703827, "learning_rate": 2.0380040615027564e-06, "loss": 0.7605, "step": 14295 }, { "epoch": 0.5924820755107961, "grad_norm": 0.3840016722679138, "learning_rate": 2.037796841974388e-06, "loss": 0.6521, "step": 14296 }, { "epoch": 0.5925235194164699, "grad_norm": 0.3866652846336365, "learning_rate": 2.0375896224460196e-06, "loss": 0.6837, "step": 14297 }, { "epoch": 0.5925649633221435, "grad_norm": 0.4449007213115692, "learning_rate": 2.037382402917651e-06, "loss": 0.7148, "step": 14298 }, { "epoch": 0.5926064072278171, "grad_norm": 0.40215498208999634, "learning_rate": 2.037175183389283e-06, "loss": 0.682, "step": 14299 }, { "epoch": 0.5926478511334908, "grad_norm": 0.40515628457069397, "learning_rate": 2.036967963860914e-06, "loss": 0.6914, "step": 14300 }, { "epoch": 0.5926892950391645, "grad_norm": 0.403740257024765, "learning_rate": 2.036760744332546e-06, "loss": 0.6713, "step": 14301 }, { "epoch": 0.5927307389448382, "grad_norm": 0.4088153541088104, "learning_rate": 2.036553524804178e-06, "loss": 0.6692, "step": 14302 }, { "epoch": 0.5927721828505118, "grad_norm": 0.4251941442489624, "learning_rate": 2.036346305275809e-06, "loss": 0.6888, "step": 14303 }, { "epoch": 0.5928136267561855, "grad_norm": 0.4285546839237213, "learning_rate": 2.036139085747441e-06, "loss": 0.7031, "step": 14304 }, { "epoch": 0.5928550706618592, "grad_norm": 0.37559422850608826, "learning_rate": 2.035931866219073e-06, "loss": 0.6919, "step": 14305 }, { "epoch": 0.5928965145675329, "grad_norm": 0.37371811270713806, "learning_rate": 2.0357246466907042e-06, "loss": 0.6575, "step": 14306 }, { "epoch": 0.5929379584732065, "grad_norm": 0.40498480200767517, "learning_rate": 2.035517427162336e-06, "loss": 0.6324, "step": 14307 }, { "epoch": 0.5929794023788801, "grad_norm": 0.435937762260437, 
"learning_rate": 2.0353102076339674e-06, "loss": 0.6826, "step": 14308 }, { "epoch": 0.5930208462845539, "grad_norm": 0.3992414176464081, "learning_rate": 2.0351029881055992e-06, "loss": 0.6014, "step": 14309 }, { "epoch": 0.5930622901902275, "grad_norm": 0.4087291657924652, "learning_rate": 2.0348957685772306e-06, "loss": 0.6705, "step": 14310 }, { "epoch": 0.5931037340959012, "grad_norm": 0.41874590516090393, "learning_rate": 2.0346885490488624e-06, "loss": 0.7043, "step": 14311 }, { "epoch": 0.5931451780015748, "grad_norm": 0.40523335337638855, "learning_rate": 2.0344813295204942e-06, "loss": 0.6589, "step": 14312 }, { "epoch": 0.5931866219072486, "grad_norm": 0.4835190176963806, "learning_rate": 2.034274109992126e-06, "loss": 0.7689, "step": 14313 }, { "epoch": 0.5932280658129222, "grad_norm": 0.41533568501472473, "learning_rate": 2.0340668904637574e-06, "loss": 0.7424, "step": 14314 }, { "epoch": 0.5932695097185959, "grad_norm": 0.46231433749198914, "learning_rate": 2.0338596709353892e-06, "loss": 0.7261, "step": 14315 }, { "epoch": 0.5933109536242696, "grad_norm": 0.4329625964164734, "learning_rate": 2.0336524514070206e-06, "loss": 0.7162, "step": 14316 }, { "epoch": 0.5933523975299432, "grad_norm": 0.4204372465610504, "learning_rate": 2.0334452318786524e-06, "loss": 0.7168, "step": 14317 }, { "epoch": 0.5933938414356169, "grad_norm": 0.4126313328742981, "learning_rate": 2.033238012350284e-06, "loss": 0.6572, "step": 14318 }, { "epoch": 0.5934352853412905, "grad_norm": 0.3935967981815338, "learning_rate": 2.0330307928219156e-06, "loss": 0.6528, "step": 14319 }, { "epoch": 0.5934767292469643, "grad_norm": 0.42331936955451965, "learning_rate": 2.0328235732935474e-06, "loss": 0.6707, "step": 14320 }, { "epoch": 0.5935181731526379, "grad_norm": 0.46018701791763306, "learning_rate": 2.0326163537651792e-06, "loss": 0.6904, "step": 14321 }, { "epoch": 0.5935596170583116, "grad_norm": 0.40509653091430664, "learning_rate": 2.0324091342368106e-06, "loss": 0.6567, 
"step": 14322 }, { "epoch": 0.5936010609639852, "grad_norm": 0.39532938599586487, "learning_rate": 2.0322019147084424e-06, "loss": 0.6555, "step": 14323 }, { "epoch": 0.593642504869659, "grad_norm": 0.38313978910446167, "learning_rate": 2.031994695180074e-06, "loss": 0.7109, "step": 14324 }, { "epoch": 0.5936839487753326, "grad_norm": 0.44797050952911377, "learning_rate": 2.0317874756517056e-06, "loss": 0.674, "step": 14325 }, { "epoch": 0.5937253926810062, "grad_norm": 0.41121363639831543, "learning_rate": 2.031580256123337e-06, "loss": 0.7063, "step": 14326 }, { "epoch": 0.59376683658668, "grad_norm": 0.40165460109710693, "learning_rate": 2.031373036594969e-06, "loss": 0.6633, "step": 14327 }, { "epoch": 0.5938082804923536, "grad_norm": 0.3795650601387024, "learning_rate": 2.0311658170666006e-06, "loss": 0.6321, "step": 14328 }, { "epoch": 0.5938497243980273, "grad_norm": 0.417758047580719, "learning_rate": 2.030958597538232e-06, "loss": 0.6343, "step": 14329 }, { "epoch": 0.5938911683037009, "grad_norm": 0.4302422106266022, "learning_rate": 2.030751378009864e-06, "loss": 0.6797, "step": 14330 }, { "epoch": 0.5939326122093747, "grad_norm": 0.4040430784225464, "learning_rate": 2.0305441584814956e-06, "loss": 0.6449, "step": 14331 }, { "epoch": 0.5939740561150483, "grad_norm": 0.43822744488716125, "learning_rate": 2.030336938953127e-06, "loss": 0.6913, "step": 14332 }, { "epoch": 0.5940155000207219, "grad_norm": 0.39323458075523376, "learning_rate": 2.030129719424759e-06, "loss": 0.6752, "step": 14333 }, { "epoch": 0.5940569439263956, "grad_norm": 0.4040904641151428, "learning_rate": 2.0299224998963902e-06, "loss": 0.6654, "step": 14334 }, { "epoch": 0.5940983878320693, "grad_norm": 0.4468677341938019, "learning_rate": 2.029715280368022e-06, "loss": 0.6488, "step": 14335 }, { "epoch": 0.594139831737743, "grad_norm": 0.3944384455680847, "learning_rate": 2.029508060839654e-06, "loss": 0.6292, "step": 14336 }, { "epoch": 0.5941812756434166, "grad_norm": 
0.39719298481941223, "learning_rate": 2.0293008413112852e-06, "loss": 0.6509, "step": 14337 }, { "epoch": 0.5942227195490903, "grad_norm": 0.42398104071617126, "learning_rate": 2.029093621782917e-06, "loss": 0.653, "step": 14338 }, { "epoch": 0.594264163454764, "grad_norm": 0.43199828267097473, "learning_rate": 2.028886402254549e-06, "loss": 0.6903, "step": 14339 }, { "epoch": 0.5943056073604377, "grad_norm": 0.41490867733955383, "learning_rate": 2.0286791827261802e-06, "loss": 0.7046, "step": 14340 }, { "epoch": 0.5943470512661113, "grad_norm": 0.3960203528404236, "learning_rate": 2.028471963197812e-06, "loss": 0.6527, "step": 14341 }, { "epoch": 0.5943884951717849, "grad_norm": 0.39756324887275696, "learning_rate": 2.0282647436694434e-06, "loss": 0.6592, "step": 14342 }, { "epoch": 0.5944299390774587, "grad_norm": 0.4263734221458435, "learning_rate": 2.0280575241410752e-06, "loss": 0.6831, "step": 14343 }, { "epoch": 0.5944713829831323, "grad_norm": 0.43112796545028687, "learning_rate": 2.0278503046127066e-06, "loss": 0.6477, "step": 14344 }, { "epoch": 0.594512826888806, "grad_norm": 0.4024803042411804, "learning_rate": 2.0276430850843384e-06, "loss": 0.6594, "step": 14345 }, { "epoch": 0.5945542707944796, "grad_norm": 0.43984800577163696, "learning_rate": 2.0274358655559702e-06, "loss": 0.7222, "step": 14346 }, { "epoch": 0.5945957147001534, "grad_norm": 0.4023248255252838, "learning_rate": 2.027228646027602e-06, "loss": 0.6915, "step": 14347 }, { "epoch": 0.594637158605827, "grad_norm": 0.38585853576660156, "learning_rate": 2.0270214264992334e-06, "loss": 0.6384, "step": 14348 }, { "epoch": 0.5946786025115007, "grad_norm": 0.40259602665901184, "learning_rate": 2.0268142069708652e-06, "loss": 0.7063, "step": 14349 }, { "epoch": 0.5947200464171744, "grad_norm": 0.3822415769100189, "learning_rate": 2.0266069874424966e-06, "loss": 0.6473, "step": 14350 }, { "epoch": 0.594761490322848, "grad_norm": 0.38178327679634094, "learning_rate": 2.0263997679141284e-06, 
"loss": 0.6323, "step": 14351 }, { "epoch": 0.5948029342285217, "grad_norm": 0.4423728287220001, "learning_rate": 2.02619254838576e-06, "loss": 0.733, "step": 14352 }, { "epoch": 0.5948443781341953, "grad_norm": 0.4118400812149048, "learning_rate": 2.0259853288573916e-06, "loss": 0.6709, "step": 14353 }, { "epoch": 0.5948858220398691, "grad_norm": 0.3957553803920746, "learning_rate": 2.0257781093290234e-06, "loss": 0.657, "step": 14354 }, { "epoch": 0.5949272659455427, "grad_norm": 0.4174366891384125, "learning_rate": 2.025570889800655e-06, "loss": 0.6953, "step": 14355 }, { "epoch": 0.5949687098512164, "grad_norm": 0.4107711613178253, "learning_rate": 2.0253636702722866e-06, "loss": 0.6799, "step": 14356 }, { "epoch": 0.59501015375689, "grad_norm": 0.4277287721633911, "learning_rate": 2.0251564507439184e-06, "loss": 0.6824, "step": 14357 }, { "epoch": 0.5950515976625638, "grad_norm": 0.38754233717918396, "learning_rate": 2.02494923121555e-06, "loss": 0.6927, "step": 14358 }, { "epoch": 0.5950930415682374, "grad_norm": 0.402620404958725, "learning_rate": 2.0247420116871816e-06, "loss": 0.6436, "step": 14359 }, { "epoch": 0.595134485473911, "grad_norm": 0.41613373160362244, "learning_rate": 2.024534792158813e-06, "loss": 0.6388, "step": 14360 }, { "epoch": 0.5951759293795847, "grad_norm": 0.40306299924850464, "learning_rate": 2.024327572630445e-06, "loss": 0.6335, "step": 14361 }, { "epoch": 0.5952173732852584, "grad_norm": 0.4143815040588379, "learning_rate": 2.0241203531020766e-06, "loss": 0.6877, "step": 14362 }, { "epoch": 0.5952588171909321, "grad_norm": 0.4262343645095825, "learning_rate": 2.023913133573708e-06, "loss": 0.729, "step": 14363 }, { "epoch": 0.5953002610966057, "grad_norm": 0.4170742332935333, "learning_rate": 2.02370591404534e-06, "loss": 0.6917, "step": 14364 }, { "epoch": 0.5953417050022795, "grad_norm": 0.3892902433872223, "learning_rate": 2.0234986945169716e-06, "loss": 0.6572, "step": 14365 }, { "epoch": 0.5953831489079531, "grad_norm": 
0.410569965839386, "learning_rate": 2.023291474988603e-06, "loss": 0.6975, "step": 14366 }, { "epoch": 0.5954245928136268, "grad_norm": 0.4237006902694702, "learning_rate": 2.023084255460235e-06, "loss": 0.6448, "step": 14367 }, { "epoch": 0.5954660367193004, "grad_norm": 0.4280565083026886, "learning_rate": 2.0228770359318662e-06, "loss": 0.6605, "step": 14368 }, { "epoch": 0.595507480624974, "grad_norm": 0.40148481726646423, "learning_rate": 2.022669816403498e-06, "loss": 0.6316, "step": 14369 }, { "epoch": 0.5955489245306478, "grad_norm": 0.4305514097213745, "learning_rate": 2.02246259687513e-06, "loss": 0.6743, "step": 14370 }, { "epoch": 0.5955903684363214, "grad_norm": 0.3879026770591736, "learning_rate": 2.0222553773467612e-06, "loss": 0.6876, "step": 14371 }, { "epoch": 0.5956318123419951, "grad_norm": 0.4012109637260437, "learning_rate": 2.022048157818393e-06, "loss": 0.6307, "step": 14372 }, { "epoch": 0.5956732562476688, "grad_norm": 0.4105861485004425, "learning_rate": 2.0218409382900244e-06, "loss": 0.6792, "step": 14373 }, { "epoch": 0.5957147001533425, "grad_norm": 0.43092086911201477, "learning_rate": 2.0216337187616562e-06, "loss": 0.7158, "step": 14374 }, { "epoch": 0.5957561440590161, "grad_norm": 0.42445626854896545, "learning_rate": 2.021426499233288e-06, "loss": 0.6763, "step": 14375 }, { "epoch": 0.5957975879646898, "grad_norm": 0.3859517276287079, "learning_rate": 2.0212192797049194e-06, "loss": 0.6846, "step": 14376 }, { "epoch": 0.5958390318703635, "grad_norm": 0.4226225018501282, "learning_rate": 2.0210120601765512e-06, "loss": 0.6646, "step": 14377 }, { "epoch": 0.5958804757760371, "grad_norm": 0.4132973551750183, "learning_rate": 2.020804840648183e-06, "loss": 0.7074, "step": 14378 }, { "epoch": 0.5959219196817108, "grad_norm": 0.3989001512527466, "learning_rate": 2.0205976211198144e-06, "loss": 0.6801, "step": 14379 }, { "epoch": 0.5959633635873844, "grad_norm": 0.38745129108428955, "learning_rate": 2.0203904015914462e-06, "loss": 
0.6508, "step": 14380 }, { "epoch": 0.5960048074930582, "grad_norm": 0.4824681282043457, "learning_rate": 2.0201831820630776e-06, "loss": 0.7175, "step": 14381 }, { "epoch": 0.5960462513987318, "grad_norm": 0.37309813499450684, "learning_rate": 2.0199759625347094e-06, "loss": 0.6705, "step": 14382 }, { "epoch": 0.5960876953044055, "grad_norm": 0.4019153118133545, "learning_rate": 2.0197687430063412e-06, "loss": 0.72, "step": 14383 }, { "epoch": 0.5961291392100792, "grad_norm": 0.39924439787864685, "learning_rate": 2.0195615234779726e-06, "loss": 0.665, "step": 14384 }, { "epoch": 0.5961705831157529, "grad_norm": 0.41613510251045227, "learning_rate": 2.0193543039496044e-06, "loss": 0.675, "step": 14385 }, { "epoch": 0.5962120270214265, "grad_norm": 0.43853211402893066, "learning_rate": 2.019147084421236e-06, "loss": 0.7218, "step": 14386 }, { "epoch": 0.5962534709271001, "grad_norm": 0.42514365911483765, "learning_rate": 2.0189398648928676e-06, "loss": 0.6516, "step": 14387 }, { "epoch": 0.5962949148327739, "grad_norm": 0.3866899907588959, "learning_rate": 2.0187326453644994e-06, "loss": 0.6561, "step": 14388 }, { "epoch": 0.5963363587384475, "grad_norm": 0.4057970345020294, "learning_rate": 2.018525425836131e-06, "loss": 0.7063, "step": 14389 }, { "epoch": 0.5963778026441212, "grad_norm": 0.40204766392707825, "learning_rate": 2.0183182063077626e-06, "loss": 0.6464, "step": 14390 }, { "epoch": 0.5964192465497948, "grad_norm": 0.45571938157081604, "learning_rate": 2.0181109867793944e-06, "loss": 0.7185, "step": 14391 }, { "epoch": 0.5964606904554686, "grad_norm": 0.41326677799224854, "learning_rate": 2.017903767251026e-06, "loss": 0.7009, "step": 14392 }, { "epoch": 0.5965021343611422, "grad_norm": 0.4094535708427429, "learning_rate": 2.0176965477226576e-06, "loss": 0.7144, "step": 14393 }, { "epoch": 0.5965435782668158, "grad_norm": 0.4272117614746094, "learning_rate": 2.017489328194289e-06, "loss": 0.6992, "step": 14394 }, { "epoch": 0.5965850221724895, 
"grad_norm": 0.4230491518974304, "learning_rate": 2.017282108665921e-06, "loss": 0.6652, "step": 14395 }, { "epoch": 0.5966264660781632, "grad_norm": 0.43777748942375183, "learning_rate": 2.0170748891375526e-06, "loss": 0.7042, "step": 14396 }, { "epoch": 0.5966679099838369, "grad_norm": 0.40034741163253784, "learning_rate": 2.016867669609184e-06, "loss": 0.6714, "step": 14397 }, { "epoch": 0.5967093538895105, "grad_norm": 0.4250170886516571, "learning_rate": 2.016660450080816e-06, "loss": 0.6992, "step": 14398 }, { "epoch": 0.5967507977951843, "grad_norm": 0.41979166865348816, "learning_rate": 2.0164532305524472e-06, "loss": 0.6688, "step": 14399 }, { "epoch": 0.5967922417008579, "grad_norm": 0.42394569516181946, "learning_rate": 2.016246011024079e-06, "loss": 0.7651, "step": 14400 }, { "epoch": 0.5968336856065316, "grad_norm": 0.4051402807235718, "learning_rate": 2.016038791495711e-06, "loss": 0.6473, "step": 14401 }, { "epoch": 0.5968751295122052, "grad_norm": 0.40897953510284424, "learning_rate": 2.0158315719673422e-06, "loss": 0.7021, "step": 14402 }, { "epoch": 0.5969165734178788, "grad_norm": 0.447746604681015, "learning_rate": 2.015624352438974e-06, "loss": 0.7339, "step": 14403 }, { "epoch": 0.5969580173235526, "grad_norm": 0.41407710313796997, "learning_rate": 2.015417132910606e-06, "loss": 0.6638, "step": 14404 }, { "epoch": 0.5969994612292262, "grad_norm": 0.3679262697696686, "learning_rate": 2.0152099133822372e-06, "loss": 0.6106, "step": 14405 }, { "epoch": 0.5970409051348999, "grad_norm": 0.3960670530796051, "learning_rate": 2.015002693853869e-06, "loss": 0.6482, "step": 14406 }, { "epoch": 0.5970823490405736, "grad_norm": 0.40117040276527405, "learning_rate": 2.0147954743255004e-06, "loss": 0.6354, "step": 14407 }, { "epoch": 0.5971237929462473, "grad_norm": 0.39229118824005127, "learning_rate": 2.0145882547971322e-06, "loss": 0.6498, "step": 14408 }, { "epoch": 0.5971652368519209, "grad_norm": 0.37588411569595337, "learning_rate": 
2.014381035268764e-06, "loss": 0.6459, "step": 14409 }, { "epoch": 0.5972066807575946, "grad_norm": 0.40859487652778625, "learning_rate": 2.0141738157403954e-06, "loss": 0.6667, "step": 14410 }, { "epoch": 0.5972481246632683, "grad_norm": 0.3877080976963043, "learning_rate": 2.0139665962120272e-06, "loss": 0.7, "step": 14411 }, { "epoch": 0.5972895685689419, "grad_norm": 0.4293051064014435, "learning_rate": 2.013759376683659e-06, "loss": 0.6704, "step": 14412 }, { "epoch": 0.5973310124746156, "grad_norm": 0.40750938653945923, "learning_rate": 2.0135521571552904e-06, "loss": 0.7, "step": 14413 }, { "epoch": 0.5973724563802892, "grad_norm": 0.38827887177467346, "learning_rate": 2.0133449376269222e-06, "loss": 0.6345, "step": 14414 }, { "epoch": 0.597413900285963, "grad_norm": 0.40926864743232727, "learning_rate": 2.0131377180985536e-06, "loss": 0.7163, "step": 14415 }, { "epoch": 0.5974553441916366, "grad_norm": 0.3905308246612549, "learning_rate": 2.0129304985701854e-06, "loss": 0.6689, "step": 14416 }, { "epoch": 0.5974967880973103, "grad_norm": 0.3963407874107361, "learning_rate": 2.0127232790418172e-06, "loss": 0.6273, "step": 14417 }, { "epoch": 0.597538232002984, "grad_norm": 0.40965408086776733, "learning_rate": 2.0125160595134486e-06, "loss": 0.6429, "step": 14418 }, { "epoch": 0.5975796759086577, "grad_norm": 0.7684953808784485, "learning_rate": 2.0123088399850804e-06, "loss": 0.6788, "step": 14419 }, { "epoch": 0.5976211198143313, "grad_norm": 0.43025216460227966, "learning_rate": 2.012101620456712e-06, "loss": 0.6809, "step": 14420 }, { "epoch": 0.5976625637200049, "grad_norm": 0.4062935709953308, "learning_rate": 2.0118944009283436e-06, "loss": 0.6544, "step": 14421 }, { "epoch": 0.5977040076256787, "grad_norm": 0.4146495461463928, "learning_rate": 2.0116871813999754e-06, "loss": 0.6593, "step": 14422 }, { "epoch": 0.5977454515313523, "grad_norm": 0.4446333646774292, "learning_rate": 2.011479961871607e-06, "loss": 0.7305, "step": 14423 }, { "epoch": 
0.597786895437026, "grad_norm": 0.36641445755958557, "learning_rate": 2.0112727423432386e-06, "loss": 0.6379, "step": 14424 }, { "epoch": 0.5978283393426996, "grad_norm": 0.41936540603637695, "learning_rate": 2.01106552281487e-06, "loss": 0.7214, "step": 14425 }, { "epoch": 0.5978697832483734, "grad_norm": 0.3986991047859192, "learning_rate": 2.010858303286502e-06, "loss": 0.6935, "step": 14426 }, { "epoch": 0.597911227154047, "grad_norm": 0.43886488676071167, "learning_rate": 2.0106510837581336e-06, "loss": 0.6721, "step": 14427 }, { "epoch": 0.5979526710597207, "grad_norm": 0.38111263513565063, "learning_rate": 2.010443864229765e-06, "loss": 0.6093, "step": 14428 }, { "epoch": 0.5979941149653943, "grad_norm": 0.39000430703163147, "learning_rate": 2.010236644701397e-06, "loss": 0.6649, "step": 14429 }, { "epoch": 0.598035558871068, "grad_norm": 0.44159260392189026, "learning_rate": 2.0100294251730286e-06, "loss": 0.6953, "step": 14430 }, { "epoch": 0.5980770027767417, "grad_norm": 0.40329355001449585, "learning_rate": 2.00982220564466e-06, "loss": 0.6765, "step": 14431 }, { "epoch": 0.5981184466824153, "grad_norm": 0.41361838579177856, "learning_rate": 2.009614986116292e-06, "loss": 0.7236, "step": 14432 }, { "epoch": 0.598159890588089, "grad_norm": 0.42035967111587524, "learning_rate": 2.0094077665879232e-06, "loss": 0.6798, "step": 14433 }, { "epoch": 0.5982013344937627, "grad_norm": 0.3935181200504303, "learning_rate": 2.009200547059555e-06, "loss": 0.6393, "step": 14434 }, { "epoch": 0.5982427783994364, "grad_norm": 0.4304755628108978, "learning_rate": 2.008993327531187e-06, "loss": 0.686, "step": 14435 }, { "epoch": 0.59828422230511, "grad_norm": 0.43664875626564026, "learning_rate": 2.0087861080028182e-06, "loss": 0.6815, "step": 14436 }, { "epoch": 0.5983256662107838, "grad_norm": 0.42373546957969666, "learning_rate": 2.00857888847445e-06, "loss": 0.6608, "step": 14437 }, { "epoch": 0.5983671101164574, "grad_norm": 0.4077330529689789, "learning_rate": 
2.008371668946082e-06, "loss": 0.6611, "step": 14438 }, { "epoch": 0.598408554022131, "grad_norm": 0.456106573343277, "learning_rate": 2.0081644494177132e-06, "loss": 0.7551, "step": 14439 }, { "epoch": 0.5984499979278047, "grad_norm": 0.3921237885951996, "learning_rate": 2.007957229889345e-06, "loss": 0.668, "step": 14440 }, { "epoch": 0.5984914418334784, "grad_norm": 0.44003742933273315, "learning_rate": 2.0077500103609764e-06, "loss": 0.7216, "step": 14441 }, { "epoch": 0.5985328857391521, "grad_norm": 0.39004865288734436, "learning_rate": 2.0075427908326082e-06, "loss": 0.6405, "step": 14442 }, { "epoch": 0.5985743296448257, "grad_norm": 0.4339534342288971, "learning_rate": 2.00733557130424e-06, "loss": 0.6891, "step": 14443 }, { "epoch": 0.5986157735504994, "grad_norm": 0.4058506488800049, "learning_rate": 2.0071283517758714e-06, "loss": 0.6937, "step": 14444 }, { "epoch": 0.5986572174561731, "grad_norm": 0.41155415773391724, "learning_rate": 2.0069211322475032e-06, "loss": 0.6907, "step": 14445 }, { "epoch": 0.5986986613618468, "grad_norm": 0.3971969485282898, "learning_rate": 2.006713912719135e-06, "loss": 0.6659, "step": 14446 }, { "epoch": 0.5987401052675204, "grad_norm": 0.4143161177635193, "learning_rate": 2.0065066931907664e-06, "loss": 0.6398, "step": 14447 }, { "epoch": 0.598781549173194, "grad_norm": 0.4407001733779907, "learning_rate": 2.0062994736623982e-06, "loss": 0.6849, "step": 14448 }, { "epoch": 0.5988229930788678, "grad_norm": 0.3922001123428345, "learning_rate": 2.0060922541340296e-06, "loss": 0.698, "step": 14449 }, { "epoch": 0.5988644369845414, "grad_norm": 0.4262036979198456, "learning_rate": 2.0058850346056614e-06, "loss": 0.6666, "step": 14450 }, { "epoch": 0.5989058808902151, "grad_norm": 0.4321430027484894, "learning_rate": 2.005677815077293e-06, "loss": 0.697, "step": 14451 }, { "epoch": 0.5989473247958887, "grad_norm": 0.4366486072540283, "learning_rate": 2.0054705955489246e-06, "loss": 0.682, "step": 14452 }, { "epoch": 
0.5989887687015625, "grad_norm": 0.44760021567344666, "learning_rate": 2.0052633760205564e-06, "loss": 0.713, "step": 14453 }, { "epoch": 0.5990302126072361, "grad_norm": 0.4016120433807373, "learning_rate": 2.0050561564921883e-06, "loss": 0.6594, "step": 14454 }, { "epoch": 0.5990716565129097, "grad_norm": 0.4284817576408386, "learning_rate": 2.0048489369638196e-06, "loss": 0.6284, "step": 14455 }, { "epoch": 0.5991131004185835, "grad_norm": 0.3893854022026062, "learning_rate": 2.0046417174354514e-06, "loss": 0.667, "step": 14456 }, { "epoch": 0.5991545443242571, "grad_norm": 0.5569701790809631, "learning_rate": 2.004434497907083e-06, "loss": 0.6703, "step": 14457 }, { "epoch": 0.5991959882299308, "grad_norm": 0.41954365372657776, "learning_rate": 2.0042272783787146e-06, "loss": 0.6752, "step": 14458 }, { "epoch": 0.5992374321356044, "grad_norm": 0.37531939148902893, "learning_rate": 2.004020058850346e-06, "loss": 0.6366, "step": 14459 }, { "epoch": 0.5992788760412782, "grad_norm": 0.3718879222869873, "learning_rate": 2.003812839321978e-06, "loss": 0.6395, "step": 14460 }, { "epoch": 0.5993203199469518, "grad_norm": 0.46860793232917786, "learning_rate": 2.0036056197936096e-06, "loss": 0.6649, "step": 14461 }, { "epoch": 0.5993617638526255, "grad_norm": 0.4035191535949707, "learning_rate": 2.003398400265241e-06, "loss": 0.6808, "step": 14462 }, { "epoch": 0.5994032077582991, "grad_norm": 0.3986499607563019, "learning_rate": 2.003191180736873e-06, "loss": 0.6819, "step": 14463 }, { "epoch": 0.5994446516639728, "grad_norm": 0.41130176186561584, "learning_rate": 2.0029839612085047e-06, "loss": 0.6858, "step": 14464 }, { "epoch": 0.5994860955696465, "grad_norm": 0.3908282220363617, "learning_rate": 2.002776741680136e-06, "loss": 0.7253, "step": 14465 }, { "epoch": 0.5995275394753201, "grad_norm": 0.3756026029586792, "learning_rate": 2.002569522151768e-06, "loss": 0.6636, "step": 14466 }, { "epoch": 0.5995689833809938, "grad_norm": 0.418349951505661, "learning_rate": 
2.0023623026233992e-06, "loss": 0.7065, "step": 14467 }, { "epoch": 0.5996104272866675, "grad_norm": 0.40787529945373535, "learning_rate": 2.002155083095031e-06, "loss": 0.6614, "step": 14468 }, { "epoch": 0.5996518711923412, "grad_norm": 0.43600380420684814, "learning_rate": 2.001947863566663e-06, "loss": 0.7219, "step": 14469 }, { "epoch": 0.5996933150980148, "grad_norm": 0.436775803565979, "learning_rate": 2.0017406440382942e-06, "loss": 0.6736, "step": 14470 }, { "epoch": 0.5997347590036886, "grad_norm": 0.3963083326816559, "learning_rate": 2.001533424509926e-06, "loss": 0.6956, "step": 14471 }, { "epoch": 0.5997762029093622, "grad_norm": 0.4196145236492157, "learning_rate": 2.001326204981558e-06, "loss": 0.6656, "step": 14472 }, { "epoch": 0.5998176468150358, "grad_norm": 0.3632254898548126, "learning_rate": 2.0011189854531892e-06, "loss": 0.616, "step": 14473 }, { "epoch": 0.5998590907207095, "grad_norm": 0.4482094347476959, "learning_rate": 2.000911765924821e-06, "loss": 0.6941, "step": 14474 }, { "epoch": 0.5999005346263832, "grad_norm": 0.406034916639328, "learning_rate": 2.0007045463964524e-06, "loss": 0.6583, "step": 14475 }, { "epoch": 0.5999419785320569, "grad_norm": 0.39717498421669006, "learning_rate": 2.0004973268680842e-06, "loss": 0.6941, "step": 14476 }, { "epoch": 0.5999834224377305, "grad_norm": 0.41896647214889526, "learning_rate": 2.0002901073397156e-06, "loss": 0.6892, "step": 14477 }, { "epoch": 0.6000248663434042, "grad_norm": 0.4048294126987457, "learning_rate": 2.0000828878113474e-06, "loss": 0.7074, "step": 14478 }, { "epoch": 0.6000663102490779, "grad_norm": 0.4020443260669708, "learning_rate": 1.9998756682829792e-06, "loss": 0.6508, "step": 14479 }, { "epoch": 0.6001077541547516, "grad_norm": 0.40686270594596863, "learning_rate": 1.999668448754611e-06, "loss": 0.7131, "step": 14480 }, { "epoch": 0.6001491980604252, "grad_norm": 0.3927430510520935, "learning_rate": 1.9994612292262424e-06, "loss": 0.6857, "step": 14481 }, { "epoch": 
0.6001906419660988, "grad_norm": 0.440328449010849, "learning_rate": 1.9992540096978743e-06, "loss": 0.7278, "step": 14482 }, { "epoch": 0.6002320858717726, "grad_norm": 0.40897300839424133, "learning_rate": 1.9990467901695056e-06, "loss": 0.642, "step": 14483 }, { "epoch": 0.6002735297774462, "grad_norm": 0.43869850039482117, "learning_rate": 1.9988395706411374e-06, "loss": 0.6395, "step": 14484 }, { "epoch": 0.6003149736831199, "grad_norm": 0.407537579536438, "learning_rate": 1.998632351112769e-06, "loss": 0.6978, "step": 14485 }, { "epoch": 0.6003564175887935, "grad_norm": 0.40908050537109375, "learning_rate": 1.9984251315844006e-06, "loss": 0.6882, "step": 14486 }, { "epoch": 0.6003978614944673, "grad_norm": 0.42139047384262085, "learning_rate": 1.9982179120560325e-06, "loss": 0.6516, "step": 14487 }, { "epoch": 0.6004393054001409, "grad_norm": 0.42272502183914185, "learning_rate": 1.9980106925276643e-06, "loss": 0.7177, "step": 14488 }, { "epoch": 0.6004807493058146, "grad_norm": 0.40550610423088074, "learning_rate": 1.9978034729992956e-06, "loss": 0.7009, "step": 14489 }, { "epoch": 0.6005221932114883, "grad_norm": 0.4227939546108246, "learning_rate": 1.9975962534709275e-06, "loss": 0.7003, "step": 14490 }, { "epoch": 0.6005636371171619, "grad_norm": 0.41016584634780884, "learning_rate": 1.997389033942559e-06, "loss": 0.6743, "step": 14491 }, { "epoch": 0.6006050810228356, "grad_norm": 0.4081968069076538, "learning_rate": 1.9971818144141907e-06, "loss": 0.6488, "step": 14492 }, { "epoch": 0.6006465249285092, "grad_norm": 0.39581453800201416, "learning_rate": 1.996974594885822e-06, "loss": 0.6826, "step": 14493 }, { "epoch": 0.600687968834183, "grad_norm": 0.4807567894458771, "learning_rate": 1.996767375357454e-06, "loss": 0.7126, "step": 14494 }, { "epoch": 0.6007294127398566, "grad_norm": 0.3886212408542633, "learning_rate": 1.9965601558290857e-06, "loss": 0.6493, "step": 14495 }, { "epoch": 0.6007708566455303, "grad_norm": 0.4078727960586548, 
"learning_rate": 1.996352936300717e-06, "loss": 0.6946, "step": 14496 }, { "epoch": 0.6008123005512039, "grad_norm": 0.4337066411972046, "learning_rate": 1.996145716772349e-06, "loss": 0.7166, "step": 14497 }, { "epoch": 0.6008537444568777, "grad_norm": 0.39529719948768616, "learning_rate": 1.9959384972439807e-06, "loss": 0.6611, "step": 14498 }, { "epoch": 0.6008951883625513, "grad_norm": 0.39812585711479187, "learning_rate": 1.995731277715612e-06, "loss": 0.6567, "step": 14499 }, { "epoch": 0.6009366322682249, "grad_norm": 0.37952789664268494, "learning_rate": 1.995524058187244e-06, "loss": 0.6606, "step": 14500 }, { "epoch": 0.6009780761738986, "grad_norm": 0.4111202657222748, "learning_rate": 1.9953168386588752e-06, "loss": 0.6787, "step": 14501 }, { "epoch": 0.6010195200795723, "grad_norm": 0.4247913658618927, "learning_rate": 1.995109619130507e-06, "loss": 0.6807, "step": 14502 }, { "epoch": 0.601060963985246, "grad_norm": 0.3853343427181244, "learning_rate": 1.9949023996021384e-06, "loss": 0.6595, "step": 14503 }, { "epoch": 0.6011024078909196, "grad_norm": 0.4247717261314392, "learning_rate": 1.9946951800737702e-06, "loss": 0.7144, "step": 14504 }, { "epoch": 0.6011438517965934, "grad_norm": 0.4129229784011841, "learning_rate": 1.994487960545402e-06, "loss": 0.6875, "step": 14505 }, { "epoch": 0.601185295702267, "grad_norm": 0.4251071512699127, "learning_rate": 1.994280741017034e-06, "loss": 0.6934, "step": 14506 }, { "epoch": 0.6012267396079407, "grad_norm": 0.41461607813835144, "learning_rate": 1.9940735214886652e-06, "loss": 0.7327, "step": 14507 }, { "epoch": 0.6012681835136143, "grad_norm": 0.40719062089920044, "learning_rate": 1.993866301960297e-06, "loss": 0.6981, "step": 14508 }, { "epoch": 0.601309627419288, "grad_norm": 0.4209258556365967, "learning_rate": 1.9936590824319284e-06, "loss": 0.6876, "step": 14509 }, { "epoch": 0.6013510713249617, "grad_norm": 0.43294888734817505, "learning_rate": 1.9934518629035603e-06, "loss": 0.6602, "step": 14510 
}, { "epoch": 0.6013925152306353, "grad_norm": 0.38607361912727356, "learning_rate": 1.9932446433751916e-06, "loss": 0.6853, "step": 14511 }, { "epoch": 0.601433959136309, "grad_norm": 0.41113215684890747, "learning_rate": 1.9930374238468234e-06, "loss": 0.6787, "step": 14512 }, { "epoch": 0.6014754030419827, "grad_norm": 0.39486944675445557, "learning_rate": 1.9928302043184553e-06, "loss": 0.6849, "step": 14513 }, { "epoch": 0.6015168469476564, "grad_norm": 0.5530759692192078, "learning_rate": 1.992622984790087e-06, "loss": 0.6517, "step": 14514 }, { "epoch": 0.60155829085333, "grad_norm": 0.3923383355140686, "learning_rate": 1.9924157652617185e-06, "loss": 0.7029, "step": 14515 }, { "epoch": 0.6015997347590036, "grad_norm": 0.4193214774131775, "learning_rate": 1.9922085457333503e-06, "loss": 0.6738, "step": 14516 }, { "epoch": 0.6016411786646774, "grad_norm": 0.41727590560913086, "learning_rate": 1.9920013262049816e-06, "loss": 0.6937, "step": 14517 }, { "epoch": 0.601682622570351, "grad_norm": 0.3999008536338806, "learning_rate": 1.9917941066766135e-06, "loss": 0.6741, "step": 14518 }, { "epoch": 0.6017240664760247, "grad_norm": 0.42372938990592957, "learning_rate": 1.991586887148245e-06, "loss": 0.6792, "step": 14519 }, { "epoch": 0.6017655103816983, "grad_norm": 0.4153328835964203, "learning_rate": 1.9913796676198766e-06, "loss": 0.6887, "step": 14520 }, { "epoch": 0.6018069542873721, "grad_norm": 0.43652811646461487, "learning_rate": 1.991172448091508e-06, "loss": 0.6991, "step": 14521 }, { "epoch": 0.6018483981930457, "grad_norm": 0.4087596535682678, "learning_rate": 1.9909652285631403e-06, "loss": 0.6622, "step": 14522 }, { "epoch": 0.6018898420987194, "grad_norm": 0.46677032113075256, "learning_rate": 1.9907580090347717e-06, "loss": 0.6914, "step": 14523 }, { "epoch": 0.601931286004393, "grad_norm": 0.40632516145706177, "learning_rate": 1.9905507895064035e-06, "loss": 0.652, "step": 14524 }, { "epoch": 0.6019727299100667, "grad_norm": 0.4029214382171631, 
"learning_rate": 1.990343569978035e-06, "loss": 0.7244, "step": 14525 }, { "epoch": 0.6020141738157404, "grad_norm": 0.4357822835445404, "learning_rate": 1.9901363504496667e-06, "loss": 0.666, "step": 14526 }, { "epoch": 0.602055617721414, "grad_norm": 0.40490397810935974, "learning_rate": 1.989929130921298e-06, "loss": 0.6598, "step": 14527 }, { "epoch": 0.6020970616270878, "grad_norm": 0.3937639892101288, "learning_rate": 1.98972191139293e-06, "loss": 0.6931, "step": 14528 }, { "epoch": 0.6021385055327614, "grad_norm": 0.4619409143924713, "learning_rate": 1.9895146918645612e-06, "loss": 0.7493, "step": 14529 }, { "epoch": 0.6021799494384351, "grad_norm": 0.42518556118011475, "learning_rate": 1.989307472336193e-06, "loss": 0.7072, "step": 14530 }, { "epoch": 0.6022213933441087, "grad_norm": 0.4330361783504486, "learning_rate": 1.989100252807825e-06, "loss": 0.6902, "step": 14531 }, { "epoch": 0.6022628372497825, "grad_norm": 0.4245862662792206, "learning_rate": 1.9888930332794567e-06, "loss": 0.7644, "step": 14532 }, { "epoch": 0.6023042811554561, "grad_norm": 0.4168750047683716, "learning_rate": 1.988685813751088e-06, "loss": 0.6797, "step": 14533 }, { "epoch": 0.6023457250611297, "grad_norm": 0.3907606303691864, "learning_rate": 1.98847859422272e-06, "loss": 0.6884, "step": 14534 }, { "epoch": 0.6023871689668034, "grad_norm": 0.4206346273422241, "learning_rate": 1.9882713746943512e-06, "loss": 0.7175, "step": 14535 }, { "epoch": 0.6024286128724771, "grad_norm": 0.4371950626373291, "learning_rate": 1.988064155165983e-06, "loss": 0.6782, "step": 14536 }, { "epoch": 0.6024700567781508, "grad_norm": 0.41218826174736023, "learning_rate": 1.9878569356376144e-06, "loss": 0.6677, "step": 14537 }, { "epoch": 0.6025115006838244, "grad_norm": 0.423749178647995, "learning_rate": 1.9876497161092462e-06, "loss": 0.6658, "step": 14538 }, { "epoch": 0.6025529445894982, "grad_norm": 0.4343227446079254, "learning_rate": 1.987442496580878e-06, "loss": 0.698, "step": 14539 }, { 
"epoch": 0.6025943884951718, "grad_norm": 0.39605939388275146, "learning_rate": 1.98723527705251e-06, "loss": 0.738, "step": 14540 }, { "epoch": 0.6026358324008455, "grad_norm": 0.3900437355041504, "learning_rate": 1.9870280575241413e-06, "loss": 0.6831, "step": 14541 }, { "epoch": 0.6026772763065191, "grad_norm": 0.40614962577819824, "learning_rate": 1.986820837995773e-06, "loss": 0.698, "step": 14542 }, { "epoch": 0.6027187202121927, "grad_norm": 0.40726831555366516, "learning_rate": 1.9866136184674044e-06, "loss": 0.7039, "step": 14543 }, { "epoch": 0.6027601641178665, "grad_norm": 0.4281916618347168, "learning_rate": 1.9864063989390363e-06, "loss": 0.663, "step": 14544 }, { "epoch": 0.6028016080235401, "grad_norm": 0.4359636604785919, "learning_rate": 1.9861991794106676e-06, "loss": 0.7024, "step": 14545 }, { "epoch": 0.6028430519292138, "grad_norm": 0.43118205666542053, "learning_rate": 1.9859919598822995e-06, "loss": 0.7151, "step": 14546 }, { "epoch": 0.6028844958348875, "grad_norm": 0.4598493278026581, "learning_rate": 1.985784740353931e-06, "loss": 0.7561, "step": 14547 }, { "epoch": 0.6029259397405612, "grad_norm": 0.43564271926879883, "learning_rate": 1.985577520825563e-06, "loss": 0.6676, "step": 14548 }, { "epoch": 0.6029673836462348, "grad_norm": 0.3962418735027313, "learning_rate": 1.9853703012971945e-06, "loss": 0.6897, "step": 14549 }, { "epoch": 0.6030088275519085, "grad_norm": 0.43822038173675537, "learning_rate": 1.9851630817688263e-06, "loss": 0.6912, "step": 14550 }, { "epoch": 0.6030502714575822, "grad_norm": 0.4078572392463684, "learning_rate": 1.9849558622404577e-06, "loss": 0.6803, "step": 14551 }, { "epoch": 0.6030917153632558, "grad_norm": 0.3871171176433563, "learning_rate": 1.9847486427120895e-06, "loss": 0.6785, "step": 14552 }, { "epoch": 0.6031331592689295, "grad_norm": 0.4164421260356903, "learning_rate": 1.984541423183721e-06, "loss": 0.642, "step": 14553 }, { "epoch": 0.6031746031746031, "grad_norm": 0.4098902642726898, 
"learning_rate": 1.9843342036553527e-06, "loss": 0.632, "step": 14554 }, { "epoch": 0.6032160470802769, "grad_norm": 0.4154486656188965, "learning_rate": 1.984126984126984e-06, "loss": 0.6725, "step": 14555 }, { "epoch": 0.6032574909859505, "grad_norm": 0.47231099009513855, "learning_rate": 1.9839197645986163e-06, "loss": 0.6755, "step": 14556 }, { "epoch": 0.6032989348916242, "grad_norm": 0.5256350636482239, "learning_rate": 1.9837125450702477e-06, "loss": 0.6489, "step": 14557 }, { "epoch": 0.6033403787972978, "grad_norm": 0.3788302540779114, "learning_rate": 1.9835053255418795e-06, "loss": 0.6382, "step": 14558 }, { "epoch": 0.6033818227029716, "grad_norm": 0.399811327457428, "learning_rate": 1.983298106013511e-06, "loss": 0.7139, "step": 14559 }, { "epoch": 0.6034232666086452, "grad_norm": 0.3467707633972168, "learning_rate": 1.9830908864851427e-06, "loss": 0.5829, "step": 14560 }, { "epoch": 0.6034647105143188, "grad_norm": 0.4133845269680023, "learning_rate": 1.982883666956774e-06, "loss": 0.6897, "step": 14561 }, { "epoch": 0.6035061544199926, "grad_norm": 0.423615962266922, "learning_rate": 1.982676447428406e-06, "loss": 0.6881, "step": 14562 }, { "epoch": 0.6035475983256662, "grad_norm": 0.3892373740673065, "learning_rate": 1.9824692279000372e-06, "loss": 0.6481, "step": 14563 }, { "epoch": 0.6035890422313399, "grad_norm": 0.4169505536556244, "learning_rate": 1.982262008371669e-06, "loss": 0.667, "step": 14564 }, { "epoch": 0.6036304861370135, "grad_norm": 0.3884958028793335, "learning_rate": 1.982054788843301e-06, "loss": 0.6415, "step": 14565 }, { "epoch": 0.6036719300426873, "grad_norm": 0.4303205609321594, "learning_rate": 1.9818475693149327e-06, "loss": 0.7024, "step": 14566 }, { "epoch": 0.6037133739483609, "grad_norm": 0.4007004499435425, "learning_rate": 1.981640349786564e-06, "loss": 0.7046, "step": 14567 }, { "epoch": 0.6037548178540345, "grad_norm": 0.4031788110733032, "learning_rate": 1.981433130258196e-06, "loss": 0.7051, "step": 14568 }, { 
"epoch": 0.6037962617597082, "grad_norm": 0.40699395537376404, "learning_rate": 1.9812259107298273e-06, "loss": 0.6539, "step": 14569 }, { "epoch": 0.6038377056653819, "grad_norm": 0.4458667039871216, "learning_rate": 1.981018691201459e-06, "loss": 0.7366, "step": 14570 }, { "epoch": 0.6038791495710556, "grad_norm": 0.44263502955436707, "learning_rate": 1.9808114716730904e-06, "loss": 0.7013, "step": 14571 }, { "epoch": 0.6039205934767292, "grad_norm": 0.4185800552368164, "learning_rate": 1.9806042521447223e-06, "loss": 0.6775, "step": 14572 }, { "epoch": 0.603962037382403, "grad_norm": 0.4165576994419098, "learning_rate": 1.9803970326163536e-06, "loss": 0.6711, "step": 14573 }, { "epoch": 0.6040034812880766, "grad_norm": 0.4288649559020996, "learning_rate": 1.980189813087986e-06, "loss": 0.7314, "step": 14574 }, { "epoch": 0.6040449251937503, "grad_norm": 0.4109044373035431, "learning_rate": 1.9799825935596173e-06, "loss": 0.7043, "step": 14575 }, { "epoch": 0.6040863690994239, "grad_norm": 0.43378946185112, "learning_rate": 1.979775374031249e-06, "loss": 0.6718, "step": 14576 }, { "epoch": 0.6041278130050975, "grad_norm": 0.40689921379089355, "learning_rate": 1.9795681545028805e-06, "loss": 0.682, "step": 14577 }, { "epoch": 0.6041692569107713, "grad_norm": 0.4111175537109375, "learning_rate": 1.9793609349745123e-06, "loss": 0.7153, "step": 14578 }, { "epoch": 0.6042107008164449, "grad_norm": 0.42727532982826233, "learning_rate": 1.9791537154461436e-06, "loss": 0.6909, "step": 14579 }, { "epoch": 0.6042521447221186, "grad_norm": 0.3969823718070984, "learning_rate": 1.9789464959177755e-06, "loss": 0.6895, "step": 14580 }, { "epoch": 0.6042935886277923, "grad_norm": 0.3935965895652771, "learning_rate": 1.978739276389407e-06, "loss": 0.6522, "step": 14581 }, { "epoch": 0.604335032533466, "grad_norm": 0.43941396474838257, "learning_rate": 1.9785320568610387e-06, "loss": 0.6952, "step": 14582 }, { "epoch": 0.6043764764391396, "grad_norm": 0.41477498412132263, 
"learning_rate": 1.9783248373326705e-06, "loss": 0.6844, "step": 14583 }, { "epoch": 0.6044179203448133, "grad_norm": 0.39874598383903503, "learning_rate": 1.9781176178043023e-06, "loss": 0.6285, "step": 14584 }, { "epoch": 0.604459364250487, "grad_norm": 0.39784592390060425, "learning_rate": 1.9779103982759337e-06, "loss": 0.6653, "step": 14585 }, { "epoch": 0.6045008081561606, "grad_norm": 0.4260646104812622, "learning_rate": 1.9777031787475655e-06, "loss": 0.6904, "step": 14586 }, { "epoch": 0.6045422520618343, "grad_norm": 0.430759996175766, "learning_rate": 1.977495959219197e-06, "loss": 0.741, "step": 14587 }, { "epoch": 0.6045836959675079, "grad_norm": 0.42136672139167786, "learning_rate": 1.9772887396908287e-06, "loss": 0.6552, "step": 14588 }, { "epoch": 0.6046251398731817, "grad_norm": 0.40124770998954773, "learning_rate": 1.97708152016246e-06, "loss": 0.6461, "step": 14589 }, { "epoch": 0.6046665837788553, "grad_norm": 0.40887436270713806, "learning_rate": 1.976874300634092e-06, "loss": 0.6331, "step": 14590 }, { "epoch": 0.604708027684529, "grad_norm": 0.43172261118888855, "learning_rate": 1.9766670811057237e-06, "loss": 0.6543, "step": 14591 }, { "epoch": 0.6047494715902026, "grad_norm": 0.43241265416145325, "learning_rate": 1.9764598615773555e-06, "loss": 0.7156, "step": 14592 }, { "epoch": 0.6047909154958764, "grad_norm": 0.42058658599853516, "learning_rate": 1.976252642048987e-06, "loss": 0.7065, "step": 14593 }, { "epoch": 0.60483235940155, "grad_norm": 0.40181276202201843, "learning_rate": 1.9760454225206187e-06, "loss": 0.6285, "step": 14594 }, { "epoch": 0.6048738033072236, "grad_norm": 0.40688827633857727, "learning_rate": 1.97583820299225e-06, "loss": 0.709, "step": 14595 }, { "epoch": 0.6049152472128974, "grad_norm": 0.3901410698890686, "learning_rate": 1.975630983463882e-06, "loss": 0.6766, "step": 14596 }, { "epoch": 0.604956691118571, "grad_norm": 0.4416082799434662, "learning_rate": 1.9754237639355132e-06, "loss": 0.7454, "step": 14597 }, 
{ "epoch": 0.6049981350242447, "grad_norm": 0.40549105405807495, "learning_rate": 1.975216544407145e-06, "loss": 0.7012, "step": 14598 }, { "epoch": 0.6050395789299183, "grad_norm": 0.4303135573863983, "learning_rate": 1.9750093248787764e-06, "loss": 0.689, "step": 14599 }, { "epoch": 0.6050810228355921, "grad_norm": 0.45073187351226807, "learning_rate": 1.9748021053504087e-06, "loss": 0.73, "step": 14600 }, { "epoch": 0.6051224667412657, "grad_norm": 0.3766886591911316, "learning_rate": 1.97459488582204e-06, "loss": 0.6681, "step": 14601 }, { "epoch": 0.6051639106469394, "grad_norm": 0.43008551001548767, "learning_rate": 1.974387666293672e-06, "loss": 0.6895, "step": 14602 }, { "epoch": 0.605205354552613, "grad_norm": 0.4343551695346832, "learning_rate": 1.9741804467653033e-06, "loss": 0.7064, "step": 14603 }, { "epoch": 0.6052467984582867, "grad_norm": 0.4243672788143158, "learning_rate": 1.973973227236935e-06, "loss": 0.6992, "step": 14604 }, { "epoch": 0.6052882423639604, "grad_norm": 0.4331203103065491, "learning_rate": 1.9737660077085665e-06, "loss": 0.6613, "step": 14605 }, { "epoch": 0.605329686269634, "grad_norm": 0.42671963572502136, "learning_rate": 1.9735587881801983e-06, "loss": 0.6786, "step": 14606 }, { "epoch": 0.6053711301753077, "grad_norm": 0.4072897136211395, "learning_rate": 1.9733515686518296e-06, "loss": 0.6954, "step": 14607 }, { "epoch": 0.6054125740809814, "grad_norm": 0.44126808643341064, "learning_rate": 1.9731443491234615e-06, "loss": 0.6709, "step": 14608 }, { "epoch": 0.6054540179866551, "grad_norm": 0.4323432743549347, "learning_rate": 1.9729371295950933e-06, "loss": 0.6956, "step": 14609 }, { "epoch": 0.6054954618923287, "grad_norm": 0.3924527168273926, "learning_rate": 1.972729910066725e-06, "loss": 0.6937, "step": 14610 }, { "epoch": 0.6055369057980025, "grad_norm": 0.4285413920879364, "learning_rate": 1.9725226905383565e-06, "loss": 0.7222, "step": 14611 }, { "epoch": 0.6055783497036761, "grad_norm": 0.43098345398902893, 
"learning_rate": 1.9723154710099883e-06, "loss": 0.6912, "step": 14612 }, { "epoch": 0.6056197936093497, "grad_norm": 0.40945395827293396, "learning_rate": 1.9721082514816197e-06, "loss": 0.6238, "step": 14613 }, { "epoch": 0.6056612375150234, "grad_norm": 0.4322804808616638, "learning_rate": 1.9719010319532515e-06, "loss": 0.6721, "step": 14614 }, { "epoch": 0.605702681420697, "grad_norm": 0.3902067542076111, "learning_rate": 1.971693812424883e-06, "loss": 0.6871, "step": 14615 }, { "epoch": 0.6057441253263708, "grad_norm": 0.4011251628398895, "learning_rate": 1.9714865928965147e-06, "loss": 0.6912, "step": 14616 }, { "epoch": 0.6057855692320444, "grad_norm": 0.3997582495212555, "learning_rate": 1.9712793733681465e-06, "loss": 0.6847, "step": 14617 }, { "epoch": 0.6058270131377181, "grad_norm": 0.42463749647140503, "learning_rate": 1.9710721538397783e-06, "loss": 0.7384, "step": 14618 }, { "epoch": 0.6058684570433918, "grad_norm": 0.38292741775512695, "learning_rate": 1.9708649343114097e-06, "loss": 0.6478, "step": 14619 }, { "epoch": 0.6059099009490655, "grad_norm": 0.4069931209087372, "learning_rate": 1.9706577147830415e-06, "loss": 0.7119, "step": 14620 }, { "epoch": 0.6059513448547391, "grad_norm": 0.41083765029907227, "learning_rate": 1.970450495254673e-06, "loss": 0.641, "step": 14621 }, { "epoch": 0.6059927887604127, "grad_norm": 0.4617087244987488, "learning_rate": 1.9702432757263047e-06, "loss": 0.715, "step": 14622 }, { "epoch": 0.6060342326660865, "grad_norm": 0.44889751076698303, "learning_rate": 1.970036056197936e-06, "loss": 0.6981, "step": 14623 }, { "epoch": 0.6060756765717601, "grad_norm": 0.40447843074798584, "learning_rate": 1.969828836669568e-06, "loss": 0.7297, "step": 14624 }, { "epoch": 0.6061171204774338, "grad_norm": 0.4242262840270996, "learning_rate": 1.9696216171411992e-06, "loss": 0.7227, "step": 14625 }, { "epoch": 0.6061585643831074, "grad_norm": 0.39803069829940796, "learning_rate": 1.9694143976128315e-06, "loss": 0.6542, "step": 
14626 }, { "epoch": 0.6062000082887812, "grad_norm": 0.3962688148021698, "learning_rate": 1.969207178084463e-06, "loss": 0.6766, "step": 14627 }, { "epoch": 0.6062414521944548, "grad_norm": 0.42824968695640564, "learning_rate": 1.9689999585560947e-06, "loss": 0.6677, "step": 14628 }, { "epoch": 0.6062828961001284, "grad_norm": 0.37805047631263733, "learning_rate": 1.968792739027726e-06, "loss": 0.6382, "step": 14629 }, { "epoch": 0.6063243400058022, "grad_norm": 0.39055702090263367, "learning_rate": 1.968585519499358e-06, "loss": 0.66, "step": 14630 }, { "epoch": 0.6063657839114758, "grad_norm": 0.43569448590278625, "learning_rate": 1.9683782999709893e-06, "loss": 0.7026, "step": 14631 }, { "epoch": 0.6064072278171495, "grad_norm": 0.3735261857509613, "learning_rate": 1.968171080442621e-06, "loss": 0.6677, "step": 14632 }, { "epoch": 0.6064486717228231, "grad_norm": 0.3640260696411133, "learning_rate": 1.9679638609142525e-06, "loss": 0.6362, "step": 14633 }, { "epoch": 0.6064901156284969, "grad_norm": 0.4008565843105316, "learning_rate": 1.9677566413858843e-06, "loss": 0.7051, "step": 14634 }, { "epoch": 0.6065315595341705, "grad_norm": 0.36494311690330505, "learning_rate": 1.967549421857516e-06, "loss": 0.6465, "step": 14635 }, { "epoch": 0.6065730034398442, "grad_norm": 0.4230460822582245, "learning_rate": 1.967342202329148e-06, "loss": 0.6949, "step": 14636 }, { "epoch": 0.6066144473455178, "grad_norm": 0.40854892134666443, "learning_rate": 1.9671349828007793e-06, "loss": 0.6903, "step": 14637 }, { "epoch": 0.6066558912511915, "grad_norm": 0.4415653944015503, "learning_rate": 1.966927763272411e-06, "loss": 0.656, "step": 14638 }, { "epoch": 0.6066973351568652, "grad_norm": 0.3898528814315796, "learning_rate": 1.9667205437440425e-06, "loss": 0.6653, "step": 14639 }, { "epoch": 0.6067387790625388, "grad_norm": 0.43967920541763306, "learning_rate": 1.9665133242156743e-06, "loss": 0.7502, "step": 14640 }, { "epoch": 0.6067802229682125, "grad_norm": 
0.38948407769203186, "learning_rate": 1.9663061046873057e-06, "loss": 0.5983, "step": 14641 }, { "epoch": 0.6068216668738862, "grad_norm": 0.40707844495773315, "learning_rate": 1.9660988851589375e-06, "loss": 0.6031, "step": 14642 }, { "epoch": 0.6068631107795599, "grad_norm": 0.41765865683555603, "learning_rate": 1.9658916656305693e-06, "loss": 0.6714, "step": 14643 }, { "epoch": 0.6069045546852335, "grad_norm": 0.37631139159202576, "learning_rate": 1.965684446102201e-06, "loss": 0.6663, "step": 14644 }, { "epoch": 0.6069459985909073, "grad_norm": 0.3965761661529541, "learning_rate": 1.9654772265738325e-06, "loss": 0.7217, "step": 14645 }, { "epoch": 0.6069874424965809, "grad_norm": 0.37779703736305237, "learning_rate": 1.9652700070454643e-06, "loss": 0.6621, "step": 14646 }, { "epoch": 0.6070288864022545, "grad_norm": 0.4053974747657776, "learning_rate": 1.9650627875170957e-06, "loss": 0.6436, "step": 14647 }, { "epoch": 0.6070703303079282, "grad_norm": 0.39378875494003296, "learning_rate": 1.9648555679887275e-06, "loss": 0.6672, "step": 14648 }, { "epoch": 0.6071117742136019, "grad_norm": 0.4237856864929199, "learning_rate": 1.964648348460359e-06, "loss": 0.656, "step": 14649 }, { "epoch": 0.6071532181192756, "grad_norm": 0.39871126413345337, "learning_rate": 1.9644411289319907e-06, "loss": 0.6418, "step": 14650 }, { "epoch": 0.6071946620249492, "grad_norm": 0.4464757740497589, "learning_rate": 1.964233909403622e-06, "loss": 0.73, "step": 14651 }, { "epoch": 0.6072361059306229, "grad_norm": 0.4395763576030731, "learning_rate": 1.9640266898752543e-06, "loss": 0.704, "step": 14652 }, { "epoch": 0.6072775498362966, "grad_norm": 0.4002503752708435, "learning_rate": 1.9638194703468857e-06, "loss": 0.7141, "step": 14653 }, { "epoch": 0.6073189937419703, "grad_norm": 0.43121153116226196, "learning_rate": 1.9636122508185175e-06, "loss": 0.6525, "step": 14654 }, { "epoch": 0.6073604376476439, "grad_norm": 0.3972659111022949, "learning_rate": 1.963405031290149e-06, 
"loss": 0.639, "step": 14655 }, { "epoch": 0.6074018815533175, "grad_norm": 0.4078911244869232, "learning_rate": 1.9631978117617807e-06, "loss": 0.6934, "step": 14656 }, { "epoch": 0.6074433254589913, "grad_norm": 0.4000316262245178, "learning_rate": 1.962990592233412e-06, "loss": 0.7064, "step": 14657 }, { "epoch": 0.6074847693646649, "grad_norm": 0.3996182382106781, "learning_rate": 1.962783372705044e-06, "loss": 0.6946, "step": 14658 }, { "epoch": 0.6075262132703386, "grad_norm": 0.37282899022102356, "learning_rate": 1.9625761531766753e-06, "loss": 0.6648, "step": 14659 }, { "epoch": 0.6075676571760122, "grad_norm": 0.4299367070198059, "learning_rate": 1.962368933648307e-06, "loss": 0.6787, "step": 14660 }, { "epoch": 0.607609101081686, "grad_norm": 0.3983380198478699, "learning_rate": 1.962161714119939e-06, "loss": 0.6683, "step": 14661 }, { "epoch": 0.6076505449873596, "grad_norm": 0.419456422328949, "learning_rate": 1.9619544945915707e-06, "loss": 0.6985, "step": 14662 }, { "epoch": 0.6076919888930333, "grad_norm": 0.4241117537021637, "learning_rate": 1.961747275063202e-06, "loss": 0.739, "step": 14663 }, { "epoch": 0.607733432798707, "grad_norm": 0.3844147324562073, "learning_rate": 1.961540055534834e-06, "loss": 0.6271, "step": 14664 }, { "epoch": 0.6077748767043806, "grad_norm": 0.4263402819633484, "learning_rate": 1.9613328360064653e-06, "loss": 0.6388, "step": 14665 }, { "epoch": 0.6078163206100543, "grad_norm": 0.4393508732318878, "learning_rate": 1.961125616478097e-06, "loss": 0.6653, "step": 14666 }, { "epoch": 0.6078577645157279, "grad_norm": 0.42251884937286377, "learning_rate": 1.9609183969497285e-06, "loss": 0.6652, "step": 14667 }, { "epoch": 0.6078992084214017, "grad_norm": 0.4040488004684448, "learning_rate": 1.9607111774213603e-06, "loss": 0.6877, "step": 14668 }, { "epoch": 0.6079406523270753, "grad_norm": 0.41173335909843445, "learning_rate": 1.9605039578929917e-06, "loss": 0.6636, "step": 14669 }, { "epoch": 0.607982096232749, "grad_norm": 
0.4789021611213684, "learning_rate": 1.960296738364624e-06, "loss": 0.7444, "step": 14670 }, { "epoch": 0.6080235401384226, "grad_norm": 0.40368106961250305, "learning_rate": 1.9600895188362553e-06, "loss": 0.6854, "step": 14671 }, { "epoch": 0.6080649840440964, "grad_norm": 0.40846213698387146, "learning_rate": 1.959882299307887e-06, "loss": 0.6909, "step": 14672 }, { "epoch": 0.60810642794977, "grad_norm": 0.36752641201019287, "learning_rate": 1.9596750797795185e-06, "loss": 0.6768, "step": 14673 }, { "epoch": 0.6081478718554436, "grad_norm": 0.41921013593673706, "learning_rate": 1.9594678602511503e-06, "loss": 0.6769, "step": 14674 }, { "epoch": 0.6081893157611173, "grad_norm": 0.3896467983722687, "learning_rate": 1.9592606407227817e-06, "loss": 0.6337, "step": 14675 }, { "epoch": 0.608230759666791, "grad_norm": 0.4102485775947571, "learning_rate": 1.9590534211944135e-06, "loss": 0.6882, "step": 14676 }, { "epoch": 0.6082722035724647, "grad_norm": 0.45134979486465454, "learning_rate": 1.958846201666045e-06, "loss": 0.6953, "step": 14677 }, { "epoch": 0.6083136474781383, "grad_norm": 0.4052216410636902, "learning_rate": 1.958638982137677e-06, "loss": 0.6211, "step": 14678 }, { "epoch": 0.608355091383812, "grad_norm": 0.4310140311717987, "learning_rate": 1.9584317626093085e-06, "loss": 0.7188, "step": 14679 }, { "epoch": 0.6083965352894857, "grad_norm": 0.4156208634376526, "learning_rate": 1.9582245430809403e-06, "loss": 0.679, "step": 14680 }, { "epoch": 0.6084379791951594, "grad_norm": 0.41505077481269836, "learning_rate": 1.9580173235525717e-06, "loss": 0.7222, "step": 14681 }, { "epoch": 0.608479423100833, "grad_norm": 0.42604413628578186, "learning_rate": 1.9578101040242035e-06, "loss": 0.6813, "step": 14682 }, { "epoch": 0.6085208670065066, "grad_norm": 0.43553459644317627, "learning_rate": 1.957602884495835e-06, "loss": 0.7175, "step": 14683 }, { "epoch": 0.6085623109121804, "grad_norm": 0.3961941599845886, "learning_rate": 1.9573956649674667e-06, "loss": 
0.6522, "step": 14684 }, { "epoch": 0.608603754817854, "grad_norm": 0.4804244339466095, "learning_rate": 1.957188445439098e-06, "loss": 0.689, "step": 14685 }, { "epoch": 0.6086451987235277, "grad_norm": 0.3801030218601227, "learning_rate": 1.95698122591073e-06, "loss": 0.6589, "step": 14686 }, { "epoch": 0.6086866426292014, "grad_norm": 0.40263184905052185, "learning_rate": 1.9567740063823617e-06, "loss": 0.6545, "step": 14687 }, { "epoch": 0.6087280865348751, "grad_norm": 0.40567082166671753, "learning_rate": 1.9565667868539935e-06, "loss": 0.6658, "step": 14688 }, { "epoch": 0.6087695304405487, "grad_norm": 0.41482266783714294, "learning_rate": 1.956359567325625e-06, "loss": 0.6892, "step": 14689 }, { "epoch": 0.6088109743462223, "grad_norm": 0.42148539423942566, "learning_rate": 1.9561523477972567e-06, "loss": 0.6512, "step": 14690 }, { "epoch": 0.6088524182518961, "grad_norm": 0.3934229612350464, "learning_rate": 1.955945128268888e-06, "loss": 0.6576, "step": 14691 }, { "epoch": 0.6088938621575697, "grad_norm": 0.4452611207962036, "learning_rate": 1.95573790874052e-06, "loss": 0.6483, "step": 14692 }, { "epoch": 0.6089353060632434, "grad_norm": 0.39639490842819214, "learning_rate": 1.9555306892121513e-06, "loss": 0.6788, "step": 14693 }, { "epoch": 0.608976749968917, "grad_norm": 0.4151207506656647, "learning_rate": 1.955323469683783e-06, "loss": 0.6626, "step": 14694 }, { "epoch": 0.6090181938745908, "grad_norm": 0.42069268226623535, "learning_rate": 1.955116250155415e-06, "loss": 0.6582, "step": 14695 }, { "epoch": 0.6090596377802644, "grad_norm": 0.4175778329372406, "learning_rate": 1.9549090306270467e-06, "loss": 0.708, "step": 14696 }, { "epoch": 0.6091010816859381, "grad_norm": 0.42149442434310913, "learning_rate": 1.954701811098678e-06, "loss": 0.6527, "step": 14697 }, { "epoch": 0.6091425255916117, "grad_norm": 0.46400123834609985, "learning_rate": 1.95449459157031e-06, "loss": 0.7235, "step": 14698 }, { "epoch": 0.6091839694972854, "grad_norm": 
0.4213758707046509, "learning_rate": 1.9542873720419413e-06, "loss": 0.7017, "step": 14699 }, { "epoch": 0.6092254134029591, "grad_norm": 0.40203556418418884, "learning_rate": 1.954080152513573e-06, "loss": 0.6819, "step": 14700 }, { "epoch": 0.6092668573086327, "grad_norm": 0.42237186431884766, "learning_rate": 1.9538729329852045e-06, "loss": 0.7161, "step": 14701 }, { "epoch": 0.6093083012143065, "grad_norm": 0.39565232396125793, "learning_rate": 1.9536657134568363e-06, "loss": 0.639, "step": 14702 }, { "epoch": 0.6093497451199801, "grad_norm": 0.41250309348106384, "learning_rate": 1.953458493928468e-06, "loss": 0.728, "step": 14703 }, { "epoch": 0.6093911890256538, "grad_norm": 0.3725629448890686, "learning_rate": 1.9532512744001e-06, "loss": 0.6351, "step": 14704 }, { "epoch": 0.6094326329313274, "grad_norm": 0.42929139733314514, "learning_rate": 1.9530440548717313e-06, "loss": 0.7141, "step": 14705 }, { "epoch": 0.6094740768370012, "grad_norm": 0.4003793001174927, "learning_rate": 1.952836835343363e-06, "loss": 0.6859, "step": 14706 }, { "epoch": 0.6095155207426748, "grad_norm": 0.43394553661346436, "learning_rate": 1.9526296158149945e-06, "loss": 0.6421, "step": 14707 }, { "epoch": 0.6095569646483484, "grad_norm": 0.4174637496471405, "learning_rate": 1.9524223962866263e-06, "loss": 0.6929, "step": 14708 }, { "epoch": 0.6095984085540221, "grad_norm": 0.46011701226234436, "learning_rate": 1.9522151767582577e-06, "loss": 0.7133, "step": 14709 }, { "epoch": 0.6096398524596958, "grad_norm": 0.4015706479549408, "learning_rate": 1.9520079572298895e-06, "loss": 0.6746, "step": 14710 }, { "epoch": 0.6096812963653695, "grad_norm": 0.4048427939414978, "learning_rate": 1.951800737701521e-06, "loss": 0.6288, "step": 14711 }, { "epoch": 0.6097227402710431, "grad_norm": 0.42263224720954895, "learning_rate": 1.9515935181731527e-06, "loss": 0.6617, "step": 14712 }, { "epoch": 0.6097641841767169, "grad_norm": 0.4037068486213684, "learning_rate": 1.9513862986447845e-06, "loss": 
0.7188, "step": 14713 }, { "epoch": 0.6098056280823905, "grad_norm": 0.40357834100723267, "learning_rate": 1.9511790791164163e-06, "loss": 0.6393, "step": 14714 }, { "epoch": 0.6098470719880642, "grad_norm": 0.3954143822193146, "learning_rate": 1.9509718595880477e-06, "loss": 0.7173, "step": 14715 }, { "epoch": 0.6098885158937378, "grad_norm": 0.39964383840560913, "learning_rate": 1.9507646400596795e-06, "loss": 0.6263, "step": 14716 }, { "epoch": 0.6099299597994114, "grad_norm": 0.4229471981525421, "learning_rate": 1.950557420531311e-06, "loss": 0.6464, "step": 14717 }, { "epoch": 0.6099714037050852, "grad_norm": 0.3831164836883545, "learning_rate": 1.9503502010029427e-06, "loss": 0.6255, "step": 14718 }, { "epoch": 0.6100128476107588, "grad_norm": 0.39974895119667053, "learning_rate": 1.950142981474574e-06, "loss": 0.6611, "step": 14719 }, { "epoch": 0.6100542915164325, "grad_norm": 0.41141578555107117, "learning_rate": 1.949935761946206e-06, "loss": 0.6562, "step": 14720 }, { "epoch": 0.6100957354221062, "grad_norm": 0.4257267713546753, "learning_rate": 1.9497285424178377e-06, "loss": 0.7202, "step": 14721 }, { "epoch": 0.6101371793277799, "grad_norm": 0.4029546082019806, "learning_rate": 1.9495213228894695e-06, "loss": 0.6764, "step": 14722 }, { "epoch": 0.6101786232334535, "grad_norm": 0.4060714542865753, "learning_rate": 1.949314103361101e-06, "loss": 0.7137, "step": 14723 }, { "epoch": 0.6102200671391272, "grad_norm": 0.4145147204399109, "learning_rate": 1.9491068838327327e-06, "loss": 0.6779, "step": 14724 }, { "epoch": 0.6102615110448009, "grad_norm": 0.4077827036380768, "learning_rate": 1.948899664304364e-06, "loss": 0.6392, "step": 14725 }, { "epoch": 0.6103029549504745, "grad_norm": 0.4191453754901886, "learning_rate": 1.948692444775996e-06, "loss": 0.7249, "step": 14726 }, { "epoch": 0.6103443988561482, "grad_norm": 0.42236271500587463, "learning_rate": 1.9484852252476273e-06, "loss": 0.6591, "step": 14727 }, { "epoch": 0.6103858427618218, "grad_norm": 
0.39032596349716187, "learning_rate": 1.948278005719259e-06, "loss": 0.6349, "step": 14728 }, { "epoch": 0.6104272866674956, "grad_norm": 0.4207191467285156, "learning_rate": 1.948070786190891e-06, "loss": 0.6525, "step": 14729 }, { "epoch": 0.6104687305731692, "grad_norm": 0.41551247239112854, "learning_rate": 1.9478635666625223e-06, "loss": 0.6556, "step": 14730 }, { "epoch": 0.6105101744788429, "grad_norm": 0.3975074887275696, "learning_rate": 1.947656347134154e-06, "loss": 0.6377, "step": 14731 }, { "epoch": 0.6105516183845165, "grad_norm": 0.40849095582962036, "learning_rate": 1.947449127605786e-06, "loss": 0.7202, "step": 14732 }, { "epoch": 0.6105930622901903, "grad_norm": 0.42856234312057495, "learning_rate": 1.9472419080774173e-06, "loss": 0.731, "step": 14733 }, { "epoch": 0.6106345061958639, "grad_norm": 0.3826061487197876, "learning_rate": 1.947034688549049e-06, "loss": 0.6271, "step": 14734 }, { "epoch": 0.6106759501015375, "grad_norm": 0.4518601596355438, "learning_rate": 1.9468274690206805e-06, "loss": 0.6938, "step": 14735 }, { "epoch": 0.6107173940072113, "grad_norm": 0.43080198764801025, "learning_rate": 1.9466202494923123e-06, "loss": 0.6666, "step": 14736 }, { "epoch": 0.6107588379128849, "grad_norm": 0.41246476769447327, "learning_rate": 1.946413029963944e-06, "loss": 0.6385, "step": 14737 }, { "epoch": 0.6108002818185586, "grad_norm": 0.4633418619632721, "learning_rate": 1.9462058104355755e-06, "loss": 0.6709, "step": 14738 }, { "epoch": 0.6108417257242322, "grad_norm": 0.381747305393219, "learning_rate": 1.9459985909072073e-06, "loss": 0.6484, "step": 14739 }, { "epoch": 0.610883169629906, "grad_norm": 0.40145793557167053, "learning_rate": 1.945791371378839e-06, "loss": 0.6951, "step": 14740 }, { "epoch": 0.6109246135355796, "grad_norm": 0.3760605454444885, "learning_rate": 1.9455841518504705e-06, "loss": 0.6711, "step": 14741 }, { "epoch": 0.6109660574412533, "grad_norm": 0.4248223304748535, "learning_rate": 1.9453769323221023e-06, "loss": 
0.6812, "step": 14742 }, { "epoch": 0.6110075013469269, "grad_norm": 0.41525113582611084, "learning_rate": 1.9451697127937337e-06, "loss": 0.6963, "step": 14743 }, { "epoch": 0.6110489452526006, "grad_norm": 0.4122079014778137, "learning_rate": 1.9449624932653655e-06, "loss": 0.6904, "step": 14744 }, { "epoch": 0.6110903891582743, "grad_norm": 0.4312734603881836, "learning_rate": 1.944755273736997e-06, "loss": 0.6884, "step": 14745 }, { "epoch": 0.6111318330639479, "grad_norm": 0.4094502925872803, "learning_rate": 1.9445480542086287e-06, "loss": 0.6588, "step": 14746 }, { "epoch": 0.6111732769696216, "grad_norm": 0.37969970703125, "learning_rate": 1.9443408346802605e-06, "loss": 0.6785, "step": 14747 }, { "epoch": 0.6112147208752953, "grad_norm": 0.43941259384155273, "learning_rate": 1.9441336151518923e-06, "loss": 0.7341, "step": 14748 }, { "epoch": 0.611256164780969, "grad_norm": 0.42629697918891907, "learning_rate": 1.9439263956235237e-06, "loss": 0.6454, "step": 14749 }, { "epoch": 0.6112976086866426, "grad_norm": 0.39709287881851196, "learning_rate": 1.9437191760951555e-06, "loss": 0.6837, "step": 14750 }, { "epoch": 0.6113390525923162, "grad_norm": 0.43552830815315247, "learning_rate": 1.943511956566787e-06, "loss": 0.7227, "step": 14751 }, { "epoch": 0.61138049649799, "grad_norm": 0.4556885063648224, "learning_rate": 1.9433047370384187e-06, "loss": 0.6604, "step": 14752 }, { "epoch": 0.6114219404036636, "grad_norm": 0.4166381359100342, "learning_rate": 1.94309751751005e-06, "loss": 0.6516, "step": 14753 }, { "epoch": 0.6114633843093373, "grad_norm": 0.412934809923172, "learning_rate": 1.942890297981682e-06, "loss": 0.7112, "step": 14754 }, { "epoch": 0.611504828215011, "grad_norm": 0.42269274592399597, "learning_rate": 1.9426830784533137e-06, "loss": 0.7017, "step": 14755 }, { "epoch": 0.6115462721206847, "grad_norm": 0.40668049454689026, "learning_rate": 1.942475858924945e-06, "loss": 0.7124, "step": 14756 }, { "epoch": 0.6115877160263583, "grad_norm": 
0.46549752354621887, "learning_rate": 1.942268639396577e-06, "loss": 0.6997, "step": 14757 }, { "epoch": 0.611629159932032, "grad_norm": 0.43218499422073364, "learning_rate": 1.9420614198682087e-06, "loss": 0.6184, "step": 14758 }, { "epoch": 0.6116706038377057, "grad_norm": 0.47965919971466064, "learning_rate": 1.94185420033984e-06, "loss": 0.7339, "step": 14759 }, { "epoch": 0.6117120477433793, "grad_norm": 0.4407515227794647, "learning_rate": 1.941646980811472e-06, "loss": 0.731, "step": 14760 }, { "epoch": 0.611753491649053, "grad_norm": 0.42993903160095215, "learning_rate": 1.9414397612831033e-06, "loss": 0.683, "step": 14761 }, { "epoch": 0.6117949355547266, "grad_norm": 0.3894854187965393, "learning_rate": 1.941232541754735e-06, "loss": 0.6549, "step": 14762 }, { "epoch": 0.6118363794604004, "grad_norm": 0.4283939003944397, "learning_rate": 1.941025322226367e-06, "loss": 0.6792, "step": 14763 }, { "epoch": 0.611877823366074, "grad_norm": 0.39911699295043945, "learning_rate": 1.9408181026979983e-06, "loss": 0.6931, "step": 14764 }, { "epoch": 0.6119192672717477, "grad_norm": 0.40781930088996887, "learning_rate": 1.94061088316963e-06, "loss": 0.7069, "step": 14765 }, { "epoch": 0.6119607111774213, "grad_norm": 0.4218117296695709, "learning_rate": 1.940403663641262e-06, "loss": 0.694, "step": 14766 }, { "epoch": 0.6120021550830951, "grad_norm": 0.44536203145980835, "learning_rate": 1.9401964441128933e-06, "loss": 0.6798, "step": 14767 }, { "epoch": 0.6120435989887687, "grad_norm": 0.4527055323123932, "learning_rate": 1.939989224584525e-06, "loss": 0.7269, "step": 14768 }, { "epoch": 0.6120850428944423, "grad_norm": 0.4155116677284241, "learning_rate": 1.9397820050561565e-06, "loss": 0.6353, "step": 14769 }, { "epoch": 0.612126486800116, "grad_norm": 0.3994438052177429, "learning_rate": 1.9395747855277883e-06, "loss": 0.675, "step": 14770 }, { "epoch": 0.6121679307057897, "grad_norm": 0.440207302570343, "learning_rate": 1.93936756599942e-06, "loss": 0.6967, 
"step": 14771 }, { "epoch": 0.6122093746114634, "grad_norm": 0.42069166898727417, "learning_rate": 1.9391603464710515e-06, "loss": 0.7585, "step": 14772 }, { "epoch": 0.612250818517137, "grad_norm": 0.41963914036750793, "learning_rate": 1.9389531269426833e-06, "loss": 0.6858, "step": 14773 }, { "epoch": 0.6122922624228108, "grad_norm": 0.43636664748191833, "learning_rate": 1.938745907414315e-06, "loss": 0.6995, "step": 14774 }, { "epoch": 0.6123337063284844, "grad_norm": 0.4307825565338135, "learning_rate": 1.9385386878859465e-06, "loss": 0.6653, "step": 14775 }, { "epoch": 0.6123751502341581, "grad_norm": 0.40891873836517334, "learning_rate": 1.9383314683575783e-06, "loss": 0.702, "step": 14776 }, { "epoch": 0.6124165941398317, "grad_norm": 0.3961033225059509, "learning_rate": 1.9381242488292097e-06, "loss": 0.6343, "step": 14777 }, { "epoch": 0.6124580380455054, "grad_norm": 0.41844549775123596, "learning_rate": 1.9379170293008415e-06, "loss": 0.6797, "step": 14778 }, { "epoch": 0.6124994819511791, "grad_norm": 0.39802807569503784, "learning_rate": 1.937709809772473e-06, "loss": 0.6855, "step": 14779 }, { "epoch": 0.6125409258568527, "grad_norm": 0.41358545422554016, "learning_rate": 1.9375025902441047e-06, "loss": 0.6729, "step": 14780 }, { "epoch": 0.6125823697625264, "grad_norm": 0.4196234941482544, "learning_rate": 1.9372953707157365e-06, "loss": 0.645, "step": 14781 }, { "epoch": 0.6126238136682001, "grad_norm": 0.4037451446056366, "learning_rate": 1.937088151187368e-06, "loss": 0.7357, "step": 14782 }, { "epoch": 0.6126652575738738, "grad_norm": 0.3709436357021332, "learning_rate": 1.9368809316589997e-06, "loss": 0.6184, "step": 14783 }, { "epoch": 0.6127067014795474, "grad_norm": 0.43360579013824463, "learning_rate": 1.9366737121306315e-06, "loss": 0.7009, "step": 14784 }, { "epoch": 0.6127481453852212, "grad_norm": 0.40229877829551697, "learning_rate": 1.936466492602263e-06, "loss": 0.621, "step": 14785 }, { "epoch": 0.6127895892908948, "grad_norm": 
0.4053835868835449, "learning_rate": 1.9362592730738947e-06, "loss": 0.6846, "step": 14786 }, { "epoch": 0.6128310331965684, "grad_norm": 0.41047248244285583, "learning_rate": 1.936052053545526e-06, "loss": 0.6677, "step": 14787 }, { "epoch": 0.6128724771022421, "grad_norm": 0.42960086464881897, "learning_rate": 1.935844834017158e-06, "loss": 0.7406, "step": 14788 }, { "epoch": 0.6129139210079158, "grad_norm": 0.44884783029556274, "learning_rate": 1.9356376144887897e-06, "loss": 0.6868, "step": 14789 }, { "epoch": 0.6129553649135895, "grad_norm": 0.4040799140930176, "learning_rate": 1.935430394960421e-06, "loss": 0.6816, "step": 14790 }, { "epoch": 0.6129968088192631, "grad_norm": 0.4729327857494354, "learning_rate": 1.935223175432053e-06, "loss": 0.771, "step": 14791 }, { "epoch": 0.6130382527249368, "grad_norm": 0.3861408829689026, "learning_rate": 1.9350159559036847e-06, "loss": 0.6472, "step": 14792 }, { "epoch": 0.6130796966306105, "grad_norm": 0.41926270723342896, "learning_rate": 1.934808736375316e-06, "loss": 0.7162, "step": 14793 }, { "epoch": 0.6131211405362842, "grad_norm": 0.4058879315853119, "learning_rate": 1.934601516846948e-06, "loss": 0.6503, "step": 14794 }, { "epoch": 0.6131625844419578, "grad_norm": 0.4022805988788605, "learning_rate": 1.9343942973185793e-06, "loss": 0.6672, "step": 14795 }, { "epoch": 0.6132040283476314, "grad_norm": 0.3895222544670105, "learning_rate": 1.934187077790211e-06, "loss": 0.6167, "step": 14796 }, { "epoch": 0.6132454722533052, "grad_norm": 0.4324630796909332, "learning_rate": 1.933979858261843e-06, "loss": 0.7085, "step": 14797 }, { "epoch": 0.6132869161589788, "grad_norm": 0.38302984833717346, "learning_rate": 1.9337726387334743e-06, "loss": 0.6218, "step": 14798 }, { "epoch": 0.6133283600646525, "grad_norm": 0.38687869906425476, "learning_rate": 1.933565419205106e-06, "loss": 0.6348, "step": 14799 }, { "epoch": 0.6133698039703261, "grad_norm": 0.43772369623184204, "learning_rate": 1.933358199676738e-06, "loss": 
0.7412, "step": 14800 }, { "epoch": 0.6134112478759999, "grad_norm": 0.4139874279499054, "learning_rate": 1.9331509801483693e-06, "loss": 0.6993, "step": 14801 }, { "epoch": 0.6134526917816735, "grad_norm": 0.40119612216949463, "learning_rate": 1.932943760620001e-06, "loss": 0.679, "step": 14802 }, { "epoch": 0.6134941356873472, "grad_norm": 0.411485493183136, "learning_rate": 1.9327365410916325e-06, "loss": 0.6689, "step": 14803 }, { "epoch": 0.6135355795930209, "grad_norm": 0.35470786690711975, "learning_rate": 1.9325293215632643e-06, "loss": 0.678, "step": 14804 }, { "epoch": 0.6135770234986945, "grad_norm": 0.4190214276313782, "learning_rate": 1.932322102034896e-06, "loss": 0.6953, "step": 14805 }, { "epoch": 0.6136184674043682, "grad_norm": 0.415465384721756, "learning_rate": 1.9321148825065275e-06, "loss": 0.6909, "step": 14806 }, { "epoch": 0.6136599113100418, "grad_norm": 0.394703209400177, "learning_rate": 1.9319076629781593e-06, "loss": 0.7031, "step": 14807 }, { "epoch": 0.6137013552157156, "grad_norm": 0.41245317459106445, "learning_rate": 1.9317004434497907e-06, "loss": 0.6533, "step": 14808 }, { "epoch": 0.6137427991213892, "grad_norm": 0.4211778938770294, "learning_rate": 1.9314932239214225e-06, "loss": 0.6455, "step": 14809 }, { "epoch": 0.6137842430270629, "grad_norm": 0.3993346393108368, "learning_rate": 1.9312860043930543e-06, "loss": 0.6135, "step": 14810 }, { "epoch": 0.6138256869327365, "grad_norm": 0.4276604652404785, "learning_rate": 1.9310787848646857e-06, "loss": 0.7346, "step": 14811 }, { "epoch": 0.6138671308384102, "grad_norm": 0.4268699288368225, "learning_rate": 1.9308715653363175e-06, "loss": 0.6561, "step": 14812 }, { "epoch": 0.6139085747440839, "grad_norm": 0.4180394113063812, "learning_rate": 1.9306643458079493e-06, "loss": 0.6792, "step": 14813 }, { "epoch": 0.6139500186497575, "grad_norm": 0.412332147359848, "learning_rate": 1.9304571262795807e-06, "loss": 0.6461, "step": 14814 }, { "epoch": 0.6139914625554312, "grad_norm": 
0.39873701333999634, "learning_rate": 1.9302499067512125e-06, "loss": 0.6033, "step": 14815 }, { "epoch": 0.6140329064611049, "grad_norm": 0.4248320460319519, "learning_rate": 1.930042687222844e-06, "loss": 0.6696, "step": 14816 }, { "epoch": 0.6140743503667786, "grad_norm": 0.42183688282966614, "learning_rate": 1.9298354676944757e-06, "loss": 0.6688, "step": 14817 }, { "epoch": 0.6141157942724522, "grad_norm": 0.4743417501449585, "learning_rate": 1.9296282481661075e-06, "loss": 0.76, "step": 14818 }, { "epoch": 0.614157238178126, "grad_norm": 0.4166763722896576, "learning_rate": 1.929421028637739e-06, "loss": 0.6864, "step": 14819 }, { "epoch": 0.6141986820837996, "grad_norm": 0.44243475794792175, "learning_rate": 1.9292138091093707e-06, "loss": 0.699, "step": 14820 }, { "epoch": 0.6142401259894732, "grad_norm": 0.42346176505088806, "learning_rate": 1.929006589581002e-06, "loss": 0.6879, "step": 14821 }, { "epoch": 0.6142815698951469, "grad_norm": 0.39672473073005676, "learning_rate": 1.928799370052634e-06, "loss": 0.6372, "step": 14822 }, { "epoch": 0.6143230138008205, "grad_norm": 0.40520238876342773, "learning_rate": 1.9285921505242657e-06, "loss": 0.6537, "step": 14823 }, { "epoch": 0.6143644577064943, "grad_norm": 0.41232243180274963, "learning_rate": 1.928384930995897e-06, "loss": 0.674, "step": 14824 }, { "epoch": 0.6144059016121679, "grad_norm": 0.4242294728755951, "learning_rate": 1.928177711467529e-06, "loss": 0.6809, "step": 14825 }, { "epoch": 0.6144473455178416, "grad_norm": 0.42215973138809204, "learning_rate": 1.9279704919391607e-06, "loss": 0.6526, "step": 14826 }, { "epoch": 0.6144887894235153, "grad_norm": 0.39736199378967285, "learning_rate": 1.927763272410792e-06, "loss": 0.7258, "step": 14827 }, { "epoch": 0.614530233329189, "grad_norm": 0.4220333397388458, "learning_rate": 1.927556052882424e-06, "loss": 0.6699, "step": 14828 }, { "epoch": 0.6145716772348626, "grad_norm": 0.37379756569862366, "learning_rate": 1.9273488333540553e-06, "loss": 
0.6736, "step": 14829 }, { "epoch": 0.6146131211405362, "grad_norm": 0.4303602874279022, "learning_rate": 1.927141613825687e-06, "loss": 0.6771, "step": 14830 }, { "epoch": 0.61465456504621, "grad_norm": 0.38644057512283325, "learning_rate": 1.926934394297319e-06, "loss": 0.6578, "step": 14831 }, { "epoch": 0.6146960089518836, "grad_norm": 0.4041668474674225, "learning_rate": 1.9267271747689503e-06, "loss": 0.7058, "step": 14832 }, { "epoch": 0.6147374528575573, "grad_norm": 0.4768451750278473, "learning_rate": 1.926519955240582e-06, "loss": 0.7351, "step": 14833 }, { "epoch": 0.6147788967632309, "grad_norm": 0.45298323035240173, "learning_rate": 1.9263127357122135e-06, "loss": 0.6958, "step": 14834 }, { "epoch": 0.6148203406689047, "grad_norm": 0.4267730414867401, "learning_rate": 1.9261055161838453e-06, "loss": 0.6726, "step": 14835 }, { "epoch": 0.6148617845745783, "grad_norm": 0.3876250982284546, "learning_rate": 1.925898296655477e-06, "loss": 0.6481, "step": 14836 }, { "epoch": 0.614903228480252, "grad_norm": 0.44948717951774597, "learning_rate": 1.9256910771271085e-06, "loss": 0.6877, "step": 14837 }, { "epoch": 0.6149446723859257, "grad_norm": 0.444139301776886, "learning_rate": 1.9254838575987403e-06, "loss": 0.7229, "step": 14838 }, { "epoch": 0.6149861162915993, "grad_norm": 0.38683393597602844, "learning_rate": 1.925276638070372e-06, "loss": 0.6497, "step": 14839 }, { "epoch": 0.615027560197273, "grad_norm": 0.44527480006217957, "learning_rate": 1.9250694185420035e-06, "loss": 0.6726, "step": 14840 }, { "epoch": 0.6150690041029466, "grad_norm": 0.45373091101646423, "learning_rate": 1.9248621990136353e-06, "loss": 0.6779, "step": 14841 }, { "epoch": 0.6151104480086204, "grad_norm": 0.38458240032196045, "learning_rate": 1.9246549794852667e-06, "loss": 0.6538, "step": 14842 }, { "epoch": 0.615151891914294, "grad_norm": 0.4040895104408264, "learning_rate": 1.9244477599568985e-06, "loss": 0.6624, "step": 14843 }, { "epoch": 0.6151933358199677, "grad_norm": 
0.43295571208000183, "learning_rate": 1.9242405404285303e-06, "loss": 0.6943, "step": 14844 }, { "epoch": 0.6152347797256413, "grad_norm": 0.4030100405216217, "learning_rate": 1.9240333209001617e-06, "loss": 0.6733, "step": 14845 }, { "epoch": 0.6152762236313151, "grad_norm": 0.4356665313243866, "learning_rate": 1.9238261013717935e-06, "loss": 0.6639, "step": 14846 }, { "epoch": 0.6153176675369887, "grad_norm": 0.4142886996269226, "learning_rate": 1.9236188818434253e-06, "loss": 0.6759, "step": 14847 }, { "epoch": 0.6153591114426623, "grad_norm": 0.4127577245235443, "learning_rate": 1.9234116623150567e-06, "loss": 0.6764, "step": 14848 }, { "epoch": 0.615400555348336, "grad_norm": 0.42041096091270447, "learning_rate": 1.9232044427866885e-06, "loss": 0.6532, "step": 14849 }, { "epoch": 0.6154419992540097, "grad_norm": 0.4055597484111786, "learning_rate": 1.92299722325832e-06, "loss": 0.7266, "step": 14850 }, { "epoch": 0.6154834431596834, "grad_norm": 0.44921913743019104, "learning_rate": 1.9227900037299517e-06, "loss": 0.6921, "step": 14851 }, { "epoch": 0.615524887065357, "grad_norm": 0.41817083954811096, "learning_rate": 1.9225827842015835e-06, "loss": 0.7317, "step": 14852 }, { "epoch": 0.6155663309710308, "grad_norm": 0.40512707829475403, "learning_rate": 1.922375564673215e-06, "loss": 0.6655, "step": 14853 }, { "epoch": 0.6156077748767044, "grad_norm": 0.41038820147514343, "learning_rate": 1.9221683451448467e-06, "loss": 0.6577, "step": 14854 }, { "epoch": 0.6156492187823781, "grad_norm": 0.4313015341758728, "learning_rate": 1.921961125616478e-06, "loss": 0.7192, "step": 14855 }, { "epoch": 0.6156906626880517, "grad_norm": 0.4224589169025421, "learning_rate": 1.92175390608811e-06, "loss": 0.6978, "step": 14856 }, { "epoch": 0.6157321065937253, "grad_norm": 0.4516347348690033, "learning_rate": 1.9215466865597417e-06, "loss": 0.7161, "step": 14857 }, { "epoch": 0.6157735504993991, "grad_norm": 0.3848966658115387, "learning_rate": 1.921339467031373e-06, "loss": 
0.7047, "step": 14858 }, { "epoch": 0.6158149944050727, "grad_norm": 0.40078938007354736, "learning_rate": 1.921132247503005e-06, "loss": 0.6639, "step": 14859 }, { "epoch": 0.6158564383107464, "grad_norm": 0.4002012312412262, "learning_rate": 1.9209250279746363e-06, "loss": 0.6707, "step": 14860 }, { "epoch": 0.6158978822164201, "grad_norm": 0.41139712929725647, "learning_rate": 1.920717808446268e-06, "loss": 0.6652, "step": 14861 }, { "epoch": 0.6159393261220938, "grad_norm": 0.4415040910243988, "learning_rate": 1.9205105889179e-06, "loss": 0.6973, "step": 14862 }, { "epoch": 0.6159807700277674, "grad_norm": 0.39361584186553955, "learning_rate": 1.9203033693895313e-06, "loss": 0.6699, "step": 14863 }, { "epoch": 0.6160222139334411, "grad_norm": 0.40452995896339417, "learning_rate": 1.920096149861163e-06, "loss": 0.6863, "step": 14864 }, { "epoch": 0.6160636578391148, "grad_norm": 0.4272562861442566, "learning_rate": 1.919888930332795e-06, "loss": 0.688, "step": 14865 }, { "epoch": 0.6161051017447884, "grad_norm": 0.3984299600124359, "learning_rate": 1.9196817108044263e-06, "loss": 0.6877, "step": 14866 }, { "epoch": 0.6161465456504621, "grad_norm": 0.4425066113471985, "learning_rate": 1.919474491276058e-06, "loss": 0.7156, "step": 14867 }, { "epoch": 0.6161879895561357, "grad_norm": 0.4061056673526764, "learning_rate": 1.9192672717476895e-06, "loss": 0.6573, "step": 14868 }, { "epoch": 0.6162294334618095, "grad_norm": 0.4346085786819458, "learning_rate": 1.9190600522193213e-06, "loss": 0.6713, "step": 14869 }, { "epoch": 0.6162708773674831, "grad_norm": 0.40673112869262695, "learning_rate": 1.918852832690953e-06, "loss": 0.6685, "step": 14870 }, { "epoch": 0.6163123212731568, "grad_norm": 0.4579155743122101, "learning_rate": 1.9186456131625845e-06, "loss": 0.6896, "step": 14871 }, { "epoch": 0.6163537651788304, "grad_norm": 0.40163636207580566, "learning_rate": 1.9184383936342163e-06, "loss": 0.6322, "step": 14872 }, { "epoch": 0.6163952090845041, "grad_norm": 
0.41845765709877014, "learning_rate": 1.918231174105848e-06, "loss": 0.6711, "step": 14873 }, { "epoch": 0.6164366529901778, "grad_norm": 0.42747995257377625, "learning_rate": 1.9180239545774795e-06, "loss": 0.696, "step": 14874 }, { "epoch": 0.6164780968958514, "grad_norm": 0.42356839776039124, "learning_rate": 1.9178167350491113e-06, "loss": 0.7166, "step": 14875 }, { "epoch": 0.6165195408015252, "grad_norm": 0.3664929270744324, "learning_rate": 1.9176095155207427e-06, "loss": 0.6681, "step": 14876 }, { "epoch": 0.6165609847071988, "grad_norm": 0.41942086815834045, "learning_rate": 1.9174022959923745e-06, "loss": 0.7156, "step": 14877 }, { "epoch": 0.6166024286128725, "grad_norm": 0.42870232462882996, "learning_rate": 1.917195076464006e-06, "loss": 0.7419, "step": 14878 }, { "epoch": 0.6166438725185461, "grad_norm": 0.3751935660839081, "learning_rate": 1.9169878569356377e-06, "loss": 0.646, "step": 14879 }, { "epoch": 0.6166853164242199, "grad_norm": 0.4315398037433624, "learning_rate": 1.9167806374072695e-06, "loss": 0.6926, "step": 14880 }, { "epoch": 0.6167267603298935, "grad_norm": 0.4243287444114685, "learning_rate": 1.9165734178789013e-06, "loss": 0.7197, "step": 14881 }, { "epoch": 0.6167682042355671, "grad_norm": 0.45076388120651245, "learning_rate": 1.9163661983505327e-06, "loss": 0.6943, "step": 14882 }, { "epoch": 0.6168096481412408, "grad_norm": 0.40727728605270386, "learning_rate": 1.9161589788221645e-06, "loss": 0.6779, "step": 14883 }, { "epoch": 0.6168510920469145, "grad_norm": 0.39620494842529297, "learning_rate": 1.915951759293796e-06, "loss": 0.6665, "step": 14884 }, { "epoch": 0.6168925359525882, "grad_norm": 0.5452051758766174, "learning_rate": 1.9157445397654277e-06, "loss": 0.7035, "step": 14885 }, { "epoch": 0.6169339798582618, "grad_norm": 0.38104408979415894, "learning_rate": 1.915537320237059e-06, "loss": 0.6692, "step": 14886 }, { "epoch": 0.6169754237639355, "grad_norm": 0.4062226116657257, "learning_rate": 1.915330100708691e-06, 
"loss": 0.6772, "step": 14887 }, { "epoch": 0.6170168676696092, "grad_norm": 0.41058653593063354, "learning_rate": 1.9151228811803227e-06, "loss": 0.7271, "step": 14888 }, { "epoch": 0.6170583115752829, "grad_norm": 0.41350728273391724, "learning_rate": 1.9149156616519545e-06, "loss": 0.6797, "step": 14889 }, { "epoch": 0.6170997554809565, "grad_norm": 0.4004344940185547, "learning_rate": 1.914708442123586e-06, "loss": 0.6262, "step": 14890 }, { "epoch": 0.6171411993866301, "grad_norm": 0.38142356276512146, "learning_rate": 1.9145012225952177e-06, "loss": 0.608, "step": 14891 }, { "epoch": 0.6171826432923039, "grad_norm": 0.421258807182312, "learning_rate": 1.914294003066849e-06, "loss": 0.6786, "step": 14892 }, { "epoch": 0.6172240871979775, "grad_norm": 0.40107259154319763, "learning_rate": 1.914086783538481e-06, "loss": 0.6498, "step": 14893 }, { "epoch": 0.6172655311036512, "grad_norm": 0.41604548692703247, "learning_rate": 1.9138795640101123e-06, "loss": 0.662, "step": 14894 }, { "epoch": 0.6173069750093249, "grad_norm": 0.3963643014431, "learning_rate": 1.913672344481744e-06, "loss": 0.6425, "step": 14895 }, { "epoch": 0.6173484189149986, "grad_norm": 0.38042542338371277, "learning_rate": 1.913465124953376e-06, "loss": 0.6752, "step": 14896 }, { "epoch": 0.6173898628206722, "grad_norm": 0.404562383890152, "learning_rate": 1.9132579054250073e-06, "loss": 0.6432, "step": 14897 }, { "epoch": 0.6174313067263459, "grad_norm": 0.4414012134075165, "learning_rate": 1.913050685896639e-06, "loss": 0.7288, "step": 14898 }, { "epoch": 0.6174727506320196, "grad_norm": 0.38333478569984436, "learning_rate": 1.912843466368271e-06, "loss": 0.645, "step": 14899 }, { "epoch": 0.6175141945376932, "grad_norm": 0.40459883213043213, "learning_rate": 1.9126362468399023e-06, "loss": 0.6426, "step": 14900 }, { "epoch": 0.6175556384433669, "grad_norm": 0.37731632590293884, "learning_rate": 1.912429027311534e-06, "loss": 0.6219, "step": 14901 }, { "epoch": 0.6175970823490405, 
"grad_norm": 0.4283466041088104, "learning_rate": 1.9122218077831655e-06, "loss": 0.7366, "step": 14902 }, { "epoch": 0.6176385262547143, "grad_norm": 0.422166645526886, "learning_rate": 1.9120145882547973e-06, "loss": 0.7004, "step": 14903 }, { "epoch": 0.6176799701603879, "grad_norm": 0.4033878445625305, "learning_rate": 1.9118073687264287e-06, "loss": 0.6685, "step": 14904 }, { "epoch": 0.6177214140660616, "grad_norm": 0.41786786913871765, "learning_rate": 1.9116001491980605e-06, "loss": 0.6709, "step": 14905 }, { "epoch": 0.6177628579717352, "grad_norm": 0.41466015577316284, "learning_rate": 1.9113929296696923e-06, "loss": 0.6608, "step": 14906 }, { "epoch": 0.617804301877409, "grad_norm": 0.4175218939781189, "learning_rate": 1.911185710141324e-06, "loss": 0.678, "step": 14907 }, { "epoch": 0.6178457457830826, "grad_norm": 0.3884677588939667, "learning_rate": 1.9109784906129555e-06, "loss": 0.687, "step": 14908 }, { "epoch": 0.6178871896887562, "grad_norm": 0.40910816192626953, "learning_rate": 1.9107712710845873e-06, "loss": 0.6479, "step": 14909 }, { "epoch": 0.61792863359443, "grad_norm": 0.4066471457481384, "learning_rate": 1.9105640515562187e-06, "loss": 0.6575, "step": 14910 }, { "epoch": 0.6179700775001036, "grad_norm": 0.44696930050849915, "learning_rate": 1.9103568320278505e-06, "loss": 0.6727, "step": 14911 }, { "epoch": 0.6180115214057773, "grad_norm": 0.42891427874565125, "learning_rate": 1.910149612499482e-06, "loss": 0.6761, "step": 14912 }, { "epoch": 0.6180529653114509, "grad_norm": 0.40125611424446106, "learning_rate": 1.9099423929711137e-06, "loss": 0.6354, "step": 14913 }, { "epoch": 0.6180944092171247, "grad_norm": 0.4250132739543915, "learning_rate": 1.9097351734427455e-06, "loss": 0.6689, "step": 14914 }, { "epoch": 0.6181358531227983, "grad_norm": 0.4029315114021301, "learning_rate": 1.9095279539143773e-06, "loss": 0.6781, "step": 14915 }, { "epoch": 0.618177297028472, "grad_norm": 0.415741890668869, "learning_rate": 
1.9093207343860087e-06, "loss": 0.6262, "step": 14916 }, { "epoch": 0.6182187409341456, "grad_norm": 0.47684839367866516, "learning_rate": 1.9091135148576405e-06, "loss": 0.7717, "step": 14917 }, { "epoch": 0.6182601848398193, "grad_norm": 0.4134511351585388, "learning_rate": 1.908906295329272e-06, "loss": 0.7207, "step": 14918 }, { "epoch": 0.618301628745493, "grad_norm": 0.42611750960350037, "learning_rate": 1.9086990758009037e-06, "loss": 0.7107, "step": 14919 }, { "epoch": 0.6183430726511666, "grad_norm": 0.41064801812171936, "learning_rate": 1.908491856272535e-06, "loss": 0.7402, "step": 14920 }, { "epoch": 0.6183845165568403, "grad_norm": 0.4163835942745209, "learning_rate": 1.908284636744167e-06, "loss": 0.6388, "step": 14921 }, { "epoch": 0.618425960462514, "grad_norm": 0.43049153685569763, "learning_rate": 1.9080774172157987e-06, "loss": 0.7017, "step": 14922 }, { "epoch": 0.6184674043681877, "grad_norm": 0.42513760924339294, "learning_rate": 1.9078701976874305e-06, "loss": 0.6736, "step": 14923 }, { "epoch": 0.6185088482738613, "grad_norm": 0.4017379879951477, "learning_rate": 1.907662978159062e-06, "loss": 0.634, "step": 14924 }, { "epoch": 0.6185502921795349, "grad_norm": 0.44294729828834534, "learning_rate": 1.9074557586306937e-06, "loss": 0.675, "step": 14925 }, { "epoch": 0.6185917360852087, "grad_norm": 0.42675837874412537, "learning_rate": 1.907248539102325e-06, "loss": 0.6782, "step": 14926 }, { "epoch": 0.6186331799908823, "grad_norm": 0.4348944127559662, "learning_rate": 1.9070413195739567e-06, "loss": 0.6625, "step": 14927 }, { "epoch": 0.618674623896556, "grad_norm": 0.403538316488266, "learning_rate": 1.9068341000455883e-06, "loss": 0.6495, "step": 14928 }, { "epoch": 0.6187160678022297, "grad_norm": 0.3947971761226654, "learning_rate": 1.9066268805172201e-06, "loss": 0.6926, "step": 14929 }, { "epoch": 0.6187575117079034, "grad_norm": 0.38076117634773254, "learning_rate": 1.9064196609888517e-06, "loss": 0.6578, "step": 14930 }, { "epoch": 
0.618798955613577, "grad_norm": 0.40068864822387695, "learning_rate": 1.9062124414604835e-06, "loss": 0.6851, "step": 14931 }, { "epoch": 0.6188403995192507, "grad_norm": 0.40060731768608093, "learning_rate": 1.9060052219321151e-06, "loss": 0.6152, "step": 14932 }, { "epoch": 0.6188818434249244, "grad_norm": 0.4181751310825348, "learning_rate": 1.9057980024037467e-06, "loss": 0.6448, "step": 14933 }, { "epoch": 0.618923287330598, "grad_norm": 0.49117594957351685, "learning_rate": 1.9055907828753783e-06, "loss": 0.6498, "step": 14934 }, { "epoch": 0.6189647312362717, "grad_norm": 0.422296941280365, "learning_rate": 1.90538356334701e-06, "loss": 0.7025, "step": 14935 }, { "epoch": 0.6190061751419453, "grad_norm": 0.4141998887062073, "learning_rate": 1.9051763438186415e-06, "loss": 0.6737, "step": 14936 }, { "epoch": 0.6190476190476191, "grad_norm": 0.4630289375782013, "learning_rate": 1.904969124290273e-06, "loss": 0.749, "step": 14937 }, { "epoch": 0.6190890629532927, "grad_norm": 0.3958395719528198, "learning_rate": 1.904761904761905e-06, "loss": 0.6467, "step": 14938 }, { "epoch": 0.6191305068589664, "grad_norm": 0.4212743639945984, "learning_rate": 1.9045546852335365e-06, "loss": 0.729, "step": 14939 }, { "epoch": 0.61917195076464, "grad_norm": 0.3834397494792938, "learning_rate": 1.9043474657051683e-06, "loss": 0.6473, "step": 14940 }, { "epoch": 0.6192133946703138, "grad_norm": 0.4089038372039795, "learning_rate": 1.9041402461768e-06, "loss": 0.6636, "step": 14941 }, { "epoch": 0.6192548385759874, "grad_norm": 0.4103119373321533, "learning_rate": 1.9039330266484315e-06, "loss": 0.6396, "step": 14942 }, { "epoch": 0.619296282481661, "grad_norm": 0.3904162049293518, "learning_rate": 1.9037258071200631e-06, "loss": 0.6376, "step": 14943 }, { "epoch": 0.6193377263873348, "grad_norm": 0.46241432428359985, "learning_rate": 1.9035185875916947e-06, "loss": 0.6804, "step": 14944 }, { "epoch": 0.6193791702930084, "grad_norm": 0.4125785827636719, "learning_rate": 
1.9033113680633263e-06, "loss": 0.7063, "step": 14945 }, { "epoch": 0.6194206141986821, "grad_norm": 0.42722296714782715, "learning_rate": 1.9031041485349581e-06, "loss": 0.7131, "step": 14946 }, { "epoch": 0.6194620581043557, "grad_norm": 0.39716655015945435, "learning_rate": 1.9028969290065897e-06, "loss": 0.6526, "step": 14947 }, { "epoch": 0.6195035020100295, "grad_norm": 0.44048047065734863, "learning_rate": 1.9026897094782215e-06, "loss": 0.7021, "step": 14948 }, { "epoch": 0.6195449459157031, "grad_norm": 0.43738025426864624, "learning_rate": 1.9024824899498531e-06, "loss": 0.7559, "step": 14949 }, { "epoch": 0.6195863898213768, "grad_norm": 0.4228183329105377, "learning_rate": 1.9022752704214847e-06, "loss": 0.6721, "step": 14950 }, { "epoch": 0.6196278337270504, "grad_norm": 0.3939340114593506, "learning_rate": 1.9020680508931163e-06, "loss": 0.6486, "step": 14951 }, { "epoch": 0.6196692776327241, "grad_norm": 0.4195728600025177, "learning_rate": 1.901860831364748e-06, "loss": 0.6777, "step": 14952 }, { "epoch": 0.6197107215383978, "grad_norm": 0.4379664361476898, "learning_rate": 1.9016536118363795e-06, "loss": 0.6599, "step": 14953 }, { "epoch": 0.6197521654440714, "grad_norm": 0.44955509901046753, "learning_rate": 1.901446392308011e-06, "loss": 0.6971, "step": 14954 }, { "epoch": 0.6197936093497451, "grad_norm": 0.3924221992492676, "learning_rate": 1.901239172779643e-06, "loss": 0.6616, "step": 14955 }, { "epoch": 0.6198350532554188, "grad_norm": 0.8724302649497986, "learning_rate": 1.9010319532512745e-06, "loss": 0.6545, "step": 14956 }, { "epoch": 0.6198764971610925, "grad_norm": 0.3908036947250366, "learning_rate": 1.9008247337229063e-06, "loss": 0.6577, "step": 14957 }, { "epoch": 0.6199179410667661, "grad_norm": 0.4001157581806183, "learning_rate": 1.900617514194538e-06, "loss": 0.6919, "step": 14958 }, { "epoch": 0.6199593849724399, "grad_norm": 0.44106099009513855, "learning_rate": 1.9004102946661695e-06, "loss": 0.6902, "step": 14959 }, { 
"epoch": 0.6200008288781135, "grad_norm": 0.42071396112442017, "learning_rate": 1.9002030751378011e-06, "loss": 0.6913, "step": 14960 }, { "epoch": 0.6200422727837871, "grad_norm": 0.42299771308898926, "learning_rate": 1.8999958556094327e-06, "loss": 0.6606, "step": 14961 }, { "epoch": 0.6200837166894608, "grad_norm": 0.3929431736469269, "learning_rate": 1.8997886360810643e-06, "loss": 0.6619, "step": 14962 }, { "epoch": 0.6201251605951345, "grad_norm": 0.3986915647983551, "learning_rate": 1.8995814165526961e-06, "loss": 0.6357, "step": 14963 }, { "epoch": 0.6201666045008082, "grad_norm": 0.4467551112174988, "learning_rate": 1.8993741970243277e-06, "loss": 0.7078, "step": 14964 }, { "epoch": 0.6202080484064818, "grad_norm": 0.3988366425037384, "learning_rate": 1.8991669774959593e-06, "loss": 0.6376, "step": 14965 }, { "epoch": 0.6202494923121555, "grad_norm": 0.3880581557750702, "learning_rate": 1.8989597579675911e-06, "loss": 0.6836, "step": 14966 }, { "epoch": 0.6202909362178292, "grad_norm": 0.3960559368133545, "learning_rate": 1.8987525384392227e-06, "loss": 0.6541, "step": 14967 }, { "epoch": 0.6203323801235029, "grad_norm": 0.4429948627948761, "learning_rate": 1.8985453189108543e-06, "loss": 0.6792, "step": 14968 }, { "epoch": 0.6203738240291765, "grad_norm": 0.3930130898952484, "learning_rate": 1.898338099382486e-06, "loss": 0.6628, "step": 14969 }, { "epoch": 0.6204152679348501, "grad_norm": 0.4203483462333679, "learning_rate": 1.8981308798541175e-06, "loss": 0.7119, "step": 14970 }, { "epoch": 0.6204567118405239, "grad_norm": 0.38294193148612976, "learning_rate": 1.897923660325749e-06, "loss": 0.6827, "step": 14971 }, { "epoch": 0.6204981557461975, "grad_norm": 0.4226776659488678, "learning_rate": 1.897716440797381e-06, "loss": 0.6859, "step": 14972 }, { "epoch": 0.6205395996518712, "grad_norm": 0.37658610939979553, "learning_rate": 1.8975092212690125e-06, "loss": 0.6184, "step": 14973 }, { "epoch": 0.6205810435575448, "grad_norm": 0.3817873001098633, 
"learning_rate": 1.8973020017406443e-06, "loss": 0.6797, "step": 14974 }, { "epoch": 0.6206224874632186, "grad_norm": 0.3894873857498169, "learning_rate": 1.897094782212276e-06, "loss": 0.653, "step": 14975 }, { "epoch": 0.6206639313688922, "grad_norm": 0.43013253808021545, "learning_rate": 1.8968875626839075e-06, "loss": 0.7168, "step": 14976 }, { "epoch": 0.6207053752745659, "grad_norm": 0.41388076543807983, "learning_rate": 1.8966803431555391e-06, "loss": 0.6772, "step": 14977 }, { "epoch": 0.6207468191802396, "grad_norm": 0.39460816979408264, "learning_rate": 1.8964731236271707e-06, "loss": 0.6904, "step": 14978 }, { "epoch": 0.6207882630859132, "grad_norm": 0.4248746335506439, "learning_rate": 1.8962659040988023e-06, "loss": 0.6145, "step": 14979 }, { "epoch": 0.6208297069915869, "grad_norm": 0.4171145558357239, "learning_rate": 1.8960586845704341e-06, "loss": 0.6592, "step": 14980 }, { "epoch": 0.6208711508972605, "grad_norm": 0.39695125818252563, "learning_rate": 1.8958514650420657e-06, "loss": 0.6548, "step": 14981 }, { "epoch": 0.6209125948029343, "grad_norm": 0.44437888264656067, "learning_rate": 1.8956442455136973e-06, "loss": 0.7383, "step": 14982 }, { "epoch": 0.6209540387086079, "grad_norm": 0.44190120697021484, "learning_rate": 1.8954370259853291e-06, "loss": 0.6953, "step": 14983 }, { "epoch": 0.6209954826142816, "grad_norm": 0.4153902530670166, "learning_rate": 1.8952298064569607e-06, "loss": 0.6688, "step": 14984 }, { "epoch": 0.6210369265199552, "grad_norm": 0.433437317609787, "learning_rate": 1.8950225869285923e-06, "loss": 0.6873, "step": 14985 }, { "epoch": 0.6210783704256289, "grad_norm": 0.4019441306591034, "learning_rate": 1.894815367400224e-06, "loss": 0.6418, "step": 14986 }, { "epoch": 0.6211198143313026, "grad_norm": 0.4344424903392792, "learning_rate": 1.8946081478718555e-06, "loss": 0.667, "step": 14987 }, { "epoch": 0.6211612582369762, "grad_norm": 0.3985518217086792, "learning_rate": 1.8944009283434871e-06, "loss": 0.7131, "step": 
14988 }, { "epoch": 0.6212027021426499, "grad_norm": 0.41022828221321106, "learning_rate": 1.894193708815119e-06, "loss": 0.6017, "step": 14989 }, { "epoch": 0.6212441460483236, "grad_norm": 0.45377737283706665, "learning_rate": 1.8939864892867505e-06, "loss": 0.7004, "step": 14990 }, { "epoch": 0.6212855899539973, "grad_norm": 0.4057566523551941, "learning_rate": 1.8937792697583821e-06, "loss": 0.6775, "step": 14991 }, { "epoch": 0.6213270338596709, "grad_norm": 0.4433775842189789, "learning_rate": 1.893572050230014e-06, "loss": 0.7225, "step": 14992 }, { "epoch": 0.6213684777653447, "grad_norm": 0.39640748500823975, "learning_rate": 1.8933648307016455e-06, "loss": 0.7086, "step": 14993 }, { "epoch": 0.6214099216710183, "grad_norm": 0.41678476333618164, "learning_rate": 1.8931576111732771e-06, "loss": 0.6775, "step": 14994 }, { "epoch": 0.6214513655766919, "grad_norm": 0.39584657549858093, "learning_rate": 1.8929503916449087e-06, "loss": 0.6914, "step": 14995 }, { "epoch": 0.6214928094823656, "grad_norm": 0.43262630701065063, "learning_rate": 1.8927431721165403e-06, "loss": 0.6804, "step": 14996 }, { "epoch": 0.6215342533880392, "grad_norm": 0.4005282521247864, "learning_rate": 1.8925359525881721e-06, "loss": 0.6304, "step": 14997 }, { "epoch": 0.621575697293713, "grad_norm": 0.41645312309265137, "learning_rate": 1.8923287330598037e-06, "loss": 0.7037, "step": 14998 }, { "epoch": 0.6216171411993866, "grad_norm": 0.38411542773246765, "learning_rate": 1.8921215135314353e-06, "loss": 0.6193, "step": 14999 }, { "epoch": 0.6216585851050603, "grad_norm": 0.41326069831848145, "learning_rate": 1.8919142940030671e-06, "loss": 0.6997, "step": 15000 }, { "epoch": 0.621700029010734, "grad_norm": 0.41125306487083435, "learning_rate": 1.8917070744746987e-06, "loss": 0.6681, "step": 15001 }, { "epoch": 0.6217414729164077, "grad_norm": 0.40434789657592773, "learning_rate": 1.8914998549463303e-06, "loss": 0.6708, "step": 15002 }, { "epoch": 0.6217829168220813, "grad_norm": 
0.42135167121887207, "learning_rate": 1.891292635417962e-06, "loss": 0.7191, "step": 15003 }, { "epoch": 0.6218243607277549, "grad_norm": 0.4209967851638794, "learning_rate": 1.8910854158895935e-06, "loss": 0.7266, "step": 15004 }, { "epoch": 0.6218658046334287, "grad_norm": 0.40049102902412415, "learning_rate": 1.8908781963612251e-06, "loss": 0.7117, "step": 15005 }, { "epoch": 0.6219072485391023, "grad_norm": 0.4341377317905426, "learning_rate": 1.890670976832857e-06, "loss": 0.6531, "step": 15006 }, { "epoch": 0.621948692444776, "grad_norm": 0.41807329654693604, "learning_rate": 1.8904637573044885e-06, "loss": 0.6945, "step": 15007 }, { "epoch": 0.6219901363504496, "grad_norm": 0.4158197343349457, "learning_rate": 1.8902565377761201e-06, "loss": 0.6727, "step": 15008 }, { "epoch": 0.6220315802561234, "grad_norm": 0.3971371054649353, "learning_rate": 1.890049318247752e-06, "loss": 0.6394, "step": 15009 }, { "epoch": 0.622073024161797, "grad_norm": 0.4433320462703705, "learning_rate": 1.8898420987193835e-06, "loss": 0.6926, "step": 15010 }, { "epoch": 0.6221144680674707, "grad_norm": 0.40059325098991394, "learning_rate": 1.8896348791910151e-06, "loss": 0.6646, "step": 15011 }, { "epoch": 0.6221559119731443, "grad_norm": 0.42483288049697876, "learning_rate": 1.8894276596626467e-06, "loss": 0.6694, "step": 15012 }, { "epoch": 0.622197355878818, "grad_norm": 0.48396921157836914, "learning_rate": 1.8892204401342783e-06, "loss": 0.6907, "step": 15013 }, { "epoch": 0.6222387997844917, "grad_norm": 0.3992592394351959, "learning_rate": 1.8890132206059101e-06, "loss": 0.6373, "step": 15014 }, { "epoch": 0.6222802436901653, "grad_norm": 0.3913261592388153, "learning_rate": 1.8888060010775417e-06, "loss": 0.6519, "step": 15015 }, { "epoch": 0.6223216875958391, "grad_norm": 0.4558086693286896, "learning_rate": 1.8885987815491733e-06, "loss": 0.6808, "step": 15016 }, { "epoch": 0.6223631315015127, "grad_norm": 0.39488786458969116, "learning_rate": 1.888391562020805e-06, 
"loss": 0.6951, "step": 15017 }, { "epoch": 0.6224045754071864, "grad_norm": 0.41554129123687744, "learning_rate": 1.8881843424924367e-06, "loss": 0.7256, "step": 15018 }, { "epoch": 0.62244601931286, "grad_norm": 0.41894760727882385, "learning_rate": 1.8879771229640683e-06, "loss": 0.7087, "step": 15019 }, { "epoch": 0.6224874632185338, "grad_norm": 0.4097542464733124, "learning_rate": 1.8877699034357e-06, "loss": 0.6152, "step": 15020 }, { "epoch": 0.6225289071242074, "grad_norm": 0.43736496567726135, "learning_rate": 1.8875626839073315e-06, "loss": 0.7388, "step": 15021 }, { "epoch": 0.622570351029881, "grad_norm": 0.44094017148017883, "learning_rate": 1.8873554643789633e-06, "loss": 0.6211, "step": 15022 }, { "epoch": 0.6226117949355547, "grad_norm": 0.41009101271629333, "learning_rate": 1.887148244850595e-06, "loss": 0.6707, "step": 15023 }, { "epoch": 0.6226532388412284, "grad_norm": 0.41505664587020874, "learning_rate": 1.8869410253222265e-06, "loss": 0.6647, "step": 15024 }, { "epoch": 0.6226946827469021, "grad_norm": 0.42578113079071045, "learning_rate": 1.8867338057938581e-06, "loss": 0.6624, "step": 15025 }, { "epoch": 0.6227361266525757, "grad_norm": 0.4175681173801422, "learning_rate": 1.8865265862654897e-06, "loss": 0.6897, "step": 15026 }, { "epoch": 0.6227775705582494, "grad_norm": 0.402174174785614, "learning_rate": 1.8863193667371215e-06, "loss": 0.6423, "step": 15027 }, { "epoch": 0.6228190144639231, "grad_norm": 0.40228110551834106, "learning_rate": 1.8861121472087531e-06, "loss": 0.6448, "step": 15028 }, { "epoch": 0.6228604583695968, "grad_norm": 0.3848807215690613, "learning_rate": 1.8859049276803847e-06, "loss": 0.6399, "step": 15029 }, { "epoch": 0.6229019022752704, "grad_norm": 0.4526704251766205, "learning_rate": 1.8856977081520163e-06, "loss": 0.7163, "step": 15030 }, { "epoch": 0.622943346180944, "grad_norm": 0.39039579033851624, "learning_rate": 1.8854904886236481e-06, "loss": 0.6676, "step": 15031 }, { "epoch": 0.6229847900866178, 
"grad_norm": 0.41271987557411194, "learning_rate": 1.8852832690952797e-06, "loss": 0.6526, "step": 15032 }, { "epoch": 0.6230262339922914, "grad_norm": 0.447249174118042, "learning_rate": 1.8850760495669113e-06, "loss": 0.6683, "step": 15033 }, { "epoch": 0.6230676778979651, "grad_norm": 0.4017137885093689, "learning_rate": 1.884868830038543e-06, "loss": 0.6472, "step": 15034 }, { "epoch": 0.6231091218036388, "grad_norm": 0.3883378803730011, "learning_rate": 1.8846616105101747e-06, "loss": 0.6613, "step": 15035 }, { "epoch": 0.6231505657093125, "grad_norm": 0.4125553369522095, "learning_rate": 1.8844543909818063e-06, "loss": 0.7063, "step": 15036 }, { "epoch": 0.6231920096149861, "grad_norm": 0.39273759722709656, "learning_rate": 1.884247171453438e-06, "loss": 0.6382, "step": 15037 }, { "epoch": 0.6232334535206598, "grad_norm": 0.4009930491447449, "learning_rate": 1.8840399519250695e-06, "loss": 0.7129, "step": 15038 }, { "epoch": 0.6232748974263335, "grad_norm": 0.4156873822212219, "learning_rate": 1.8838327323967013e-06, "loss": 0.6938, "step": 15039 }, { "epoch": 0.6233163413320071, "grad_norm": 0.38735291361808777, "learning_rate": 1.883625512868333e-06, "loss": 0.6812, "step": 15040 }, { "epoch": 0.6233577852376808, "grad_norm": 0.398480087518692, "learning_rate": 1.8834182933399645e-06, "loss": 0.6405, "step": 15041 }, { "epoch": 0.6233992291433544, "grad_norm": 0.4149356186389923, "learning_rate": 1.8832110738115961e-06, "loss": 0.671, "step": 15042 }, { "epoch": 0.6234406730490282, "grad_norm": 0.44876888394355774, "learning_rate": 1.8830038542832277e-06, "loss": 0.7008, "step": 15043 }, { "epoch": 0.6234821169547018, "grad_norm": 0.3913521468639374, "learning_rate": 1.8827966347548595e-06, "loss": 0.6301, "step": 15044 }, { "epoch": 0.6235235608603755, "grad_norm": 0.4093199670314789, "learning_rate": 1.8825894152264911e-06, "loss": 0.6268, "step": 15045 }, { "epoch": 0.6235650047660491, "grad_norm": 0.42487987875938416, "learning_rate": 
1.8823821956981227e-06, "loss": 0.671, "step": 15046 }, { "epoch": 0.6236064486717228, "grad_norm": 0.4048291742801666, "learning_rate": 1.8821749761697543e-06, "loss": 0.6765, "step": 15047 }, { "epoch": 0.6236478925773965, "grad_norm": 0.41267016530036926, "learning_rate": 1.8819677566413861e-06, "loss": 0.6121, "step": 15048 }, { "epoch": 0.6236893364830701, "grad_norm": 0.4013306796550751, "learning_rate": 1.8817605371130177e-06, "loss": 0.6578, "step": 15049 }, { "epoch": 0.6237307803887439, "grad_norm": 0.4232058525085449, "learning_rate": 1.8815533175846493e-06, "loss": 0.649, "step": 15050 }, { "epoch": 0.6237722242944175, "grad_norm": 0.4371868371963501, "learning_rate": 1.881346098056281e-06, "loss": 0.6807, "step": 15051 }, { "epoch": 0.6238136682000912, "grad_norm": 0.4070182740688324, "learning_rate": 1.8811388785279125e-06, "loss": 0.71, "step": 15052 }, { "epoch": 0.6238551121057648, "grad_norm": 0.4002971351146698, "learning_rate": 1.8809316589995443e-06, "loss": 0.7266, "step": 15053 }, { "epoch": 0.6238965560114386, "grad_norm": 0.44876009225845337, "learning_rate": 1.880724439471176e-06, "loss": 0.6953, "step": 15054 }, { "epoch": 0.6239379999171122, "grad_norm": 0.4260658919811249, "learning_rate": 1.8805172199428075e-06, "loss": 0.729, "step": 15055 }, { "epoch": 0.6239794438227858, "grad_norm": 0.4081728756427765, "learning_rate": 1.8803100004144393e-06, "loss": 0.658, "step": 15056 }, { "epoch": 0.6240208877284595, "grad_norm": 0.44687339663505554, "learning_rate": 1.880102780886071e-06, "loss": 0.6918, "step": 15057 }, { "epoch": 0.6240623316341332, "grad_norm": 0.45699000358581543, "learning_rate": 1.8798955613577025e-06, "loss": 0.7054, "step": 15058 }, { "epoch": 0.6241037755398069, "grad_norm": 0.39785438776016235, "learning_rate": 1.8796883418293341e-06, "loss": 0.6437, "step": 15059 }, { "epoch": 0.6241452194454805, "grad_norm": 0.41500505805015564, "learning_rate": 1.8794811223009657e-06, "loss": 0.7405, "step": 15060 }, { "epoch": 
0.6241866633511542, "grad_norm": 0.3978816270828247, "learning_rate": 1.8792739027725975e-06, "loss": 0.7246, "step": 15061 }, { "epoch": 0.6242281072568279, "grad_norm": 0.41105613112449646, "learning_rate": 1.8790666832442291e-06, "loss": 0.6635, "step": 15062 }, { "epoch": 0.6242695511625016, "grad_norm": 0.3859884440898895, "learning_rate": 1.8788594637158607e-06, "loss": 0.7081, "step": 15063 }, { "epoch": 0.6243109950681752, "grad_norm": 0.4172648787498474, "learning_rate": 1.8786522441874923e-06, "loss": 0.7197, "step": 15064 }, { "epoch": 0.6243524389738488, "grad_norm": 0.452157199382782, "learning_rate": 1.8784450246591241e-06, "loss": 0.6871, "step": 15065 }, { "epoch": 0.6243938828795226, "grad_norm": 0.4066287875175476, "learning_rate": 1.8782378051307557e-06, "loss": 0.7166, "step": 15066 }, { "epoch": 0.6244353267851962, "grad_norm": 0.3994321823120117, "learning_rate": 1.8780305856023873e-06, "loss": 0.6371, "step": 15067 }, { "epoch": 0.6244767706908699, "grad_norm": 0.39441168308258057, "learning_rate": 1.877823366074019e-06, "loss": 0.6793, "step": 15068 }, { "epoch": 0.6245182145965436, "grad_norm": 0.3962359130382538, "learning_rate": 1.8776161465456505e-06, "loss": 0.6476, "step": 15069 }, { "epoch": 0.6245596585022173, "grad_norm": 0.4303707778453827, "learning_rate": 1.8774089270172823e-06, "loss": 0.6835, "step": 15070 }, { "epoch": 0.6246011024078909, "grad_norm": 0.40143388509750366, "learning_rate": 1.877201707488914e-06, "loss": 0.6473, "step": 15071 }, { "epoch": 0.6246425463135646, "grad_norm": 0.36913296580314636, "learning_rate": 1.8769944879605455e-06, "loss": 0.6344, "step": 15072 }, { "epoch": 0.6246839902192383, "grad_norm": 0.43045252561569214, "learning_rate": 1.8767872684321773e-06, "loss": 0.6953, "step": 15073 }, { "epoch": 0.6247254341249119, "grad_norm": 0.4206538796424866, "learning_rate": 1.876580048903809e-06, "loss": 0.6646, "step": 15074 }, { "epoch": 0.6247668780305856, "grad_norm": 0.43381690979003906, 
"learning_rate": 1.8763728293754405e-06, "loss": 0.731, "step": 15075 }, { "epoch": 0.6248083219362592, "grad_norm": 0.4109366536140442, "learning_rate": 1.8761656098470721e-06, "loss": 0.6802, "step": 15076 }, { "epoch": 0.624849765841933, "grad_norm": 0.39540112018585205, "learning_rate": 1.8759583903187037e-06, "loss": 0.6395, "step": 15077 }, { "epoch": 0.6248912097476066, "grad_norm": 0.4225304126739502, "learning_rate": 1.8757511707903353e-06, "loss": 0.6775, "step": 15078 }, { "epoch": 0.6249326536532803, "grad_norm": 0.4204528331756592, "learning_rate": 1.8755439512619671e-06, "loss": 0.7078, "step": 15079 }, { "epoch": 0.624974097558954, "grad_norm": 0.3918272852897644, "learning_rate": 1.8753367317335987e-06, "loss": 0.6548, "step": 15080 }, { "epoch": 0.6250155414646277, "grad_norm": 0.40808728337287903, "learning_rate": 1.8751295122052303e-06, "loss": 0.6567, "step": 15081 }, { "epoch": 0.6250569853703013, "grad_norm": 0.40610620379447937, "learning_rate": 1.8749222926768621e-06, "loss": 0.6636, "step": 15082 }, { "epoch": 0.6250984292759749, "grad_norm": 0.4115828275680542, "learning_rate": 1.8747150731484937e-06, "loss": 0.6501, "step": 15083 }, { "epoch": 0.6251398731816487, "grad_norm": 0.3989485800266266, "learning_rate": 1.8745078536201253e-06, "loss": 0.6448, "step": 15084 }, { "epoch": 0.6251813170873223, "grad_norm": 0.4250113070011139, "learning_rate": 1.874300634091757e-06, "loss": 0.7057, "step": 15085 }, { "epoch": 0.625222760992996, "grad_norm": 0.4170987606048584, "learning_rate": 1.8740934145633885e-06, "loss": 0.636, "step": 15086 }, { "epoch": 0.6252642048986696, "grad_norm": 0.4141305387020111, "learning_rate": 1.8738861950350201e-06, "loss": 0.639, "step": 15087 }, { "epoch": 0.6253056488043434, "grad_norm": 0.4290854334831238, "learning_rate": 1.873678975506652e-06, "loss": 0.6741, "step": 15088 }, { "epoch": 0.625347092710017, "grad_norm": 0.42094510793685913, "learning_rate": 1.8734717559782835e-06, "loss": 0.6975, "step": 15089 
}, { "epoch": 0.6253885366156907, "grad_norm": 0.39019322395324707, "learning_rate": 1.8732645364499153e-06, "loss": 0.635, "step": 15090 }, { "epoch": 0.6254299805213643, "grad_norm": 0.386406809091568, "learning_rate": 1.873057316921547e-06, "loss": 0.7102, "step": 15091 }, { "epoch": 0.625471424427038, "grad_norm": 0.4018535315990448, "learning_rate": 1.8728500973931785e-06, "loss": 0.6487, "step": 15092 }, { "epoch": 0.6255128683327117, "grad_norm": 0.3832484185695648, "learning_rate": 1.8726428778648101e-06, "loss": 0.6288, "step": 15093 }, { "epoch": 0.6255543122383853, "grad_norm": 0.4227793216705322, "learning_rate": 1.8724356583364417e-06, "loss": 0.6934, "step": 15094 }, { "epoch": 0.625595756144059, "grad_norm": 0.417961448431015, "learning_rate": 1.8722284388080733e-06, "loss": 0.6837, "step": 15095 }, { "epoch": 0.6256372000497327, "grad_norm": 0.3924984931945801, "learning_rate": 1.8720212192797051e-06, "loss": 0.687, "step": 15096 }, { "epoch": 0.6256786439554064, "grad_norm": 0.40902218222618103, "learning_rate": 1.8718139997513367e-06, "loss": 0.6909, "step": 15097 }, { "epoch": 0.62572008786108, "grad_norm": 0.4203513562679291, "learning_rate": 1.8716067802229683e-06, "loss": 0.6975, "step": 15098 }, { "epoch": 0.6257615317667538, "grad_norm": 0.4369301199913025, "learning_rate": 1.8713995606946001e-06, "loss": 0.6747, "step": 15099 }, { "epoch": 0.6258029756724274, "grad_norm": 0.44389697909355164, "learning_rate": 1.8711923411662317e-06, "loss": 0.7258, "step": 15100 }, { "epoch": 0.625844419578101, "grad_norm": 0.3983655869960785, "learning_rate": 1.8709851216378633e-06, "loss": 0.7212, "step": 15101 }, { "epoch": 0.6258858634837747, "grad_norm": 0.4208645820617676, "learning_rate": 1.870777902109495e-06, "loss": 0.678, "step": 15102 }, { "epoch": 0.6259273073894484, "grad_norm": 0.40043142437934875, "learning_rate": 1.8705706825811265e-06, "loss": 0.6244, "step": 15103 }, { "epoch": 0.6259687512951221, "grad_norm": 0.3910313844680786, 
"learning_rate": 1.8703634630527581e-06, "loss": 0.7004, "step": 15104 }, { "epoch": 0.6260101952007957, "grad_norm": 0.3992200493812561, "learning_rate": 1.87015624352439e-06, "loss": 0.6951, "step": 15105 }, { "epoch": 0.6260516391064694, "grad_norm": 0.40548646450042725, "learning_rate": 1.8699490239960215e-06, "loss": 0.6779, "step": 15106 }, { "epoch": 0.6260930830121431, "grad_norm": 0.4485556185245514, "learning_rate": 1.8697418044676533e-06, "loss": 0.6847, "step": 15107 }, { "epoch": 0.6261345269178167, "grad_norm": 0.4766916334629059, "learning_rate": 1.869534584939285e-06, "loss": 0.7166, "step": 15108 }, { "epoch": 0.6261759708234904, "grad_norm": 0.3784847855567932, "learning_rate": 1.8693273654109165e-06, "loss": 0.6455, "step": 15109 }, { "epoch": 0.626217414729164, "grad_norm": 0.40612301230430603, "learning_rate": 1.8691201458825481e-06, "loss": 0.6523, "step": 15110 }, { "epoch": 0.6262588586348378, "grad_norm": 0.3945150077342987, "learning_rate": 1.8689129263541797e-06, "loss": 0.6362, "step": 15111 }, { "epoch": 0.6263003025405114, "grad_norm": 0.40678516030311584, "learning_rate": 1.8687057068258113e-06, "loss": 0.6758, "step": 15112 }, { "epoch": 0.6263417464461851, "grad_norm": 0.40930014848709106, "learning_rate": 1.868498487297443e-06, "loss": 0.6381, "step": 15113 }, { "epoch": 0.6263831903518587, "grad_norm": 0.41044357419013977, "learning_rate": 1.8682912677690747e-06, "loss": 0.7061, "step": 15114 }, { "epoch": 0.6264246342575325, "grad_norm": 0.4284885823726654, "learning_rate": 1.8680840482407065e-06, "loss": 0.7136, "step": 15115 }, { "epoch": 0.6264660781632061, "grad_norm": 0.42167797684669495, "learning_rate": 1.8678768287123381e-06, "loss": 0.7148, "step": 15116 }, { "epoch": 0.6265075220688797, "grad_norm": 0.4144901931285858, "learning_rate": 1.8676696091839697e-06, "loss": 0.6299, "step": 15117 }, { "epoch": 0.6265489659745535, "grad_norm": 0.45195692777633667, "learning_rate": 1.8674623896556013e-06, "loss": 0.7139, "step": 
15118 }, { "epoch": 0.6265904098802271, "grad_norm": 0.4080878496170044, "learning_rate": 1.867255170127233e-06, "loss": 0.7197, "step": 15119 }, { "epoch": 0.6266318537859008, "grad_norm": 0.4332001209259033, "learning_rate": 1.8670479505988645e-06, "loss": 0.6882, "step": 15120 }, { "epoch": 0.6266732976915744, "grad_norm": 0.4235941767692566, "learning_rate": 1.8668407310704961e-06, "loss": 0.6592, "step": 15121 }, { "epoch": 0.6267147415972482, "grad_norm": 0.43270188570022583, "learning_rate": 1.866633511542128e-06, "loss": 0.6587, "step": 15122 }, { "epoch": 0.6267561855029218, "grad_norm": 0.43156272172927856, "learning_rate": 1.8664262920137595e-06, "loss": 0.697, "step": 15123 }, { "epoch": 0.6267976294085955, "grad_norm": 0.43355482816696167, "learning_rate": 1.8662190724853913e-06, "loss": 0.6638, "step": 15124 }, { "epoch": 0.6268390733142691, "grad_norm": 0.42373040318489075, "learning_rate": 1.866011852957023e-06, "loss": 0.6896, "step": 15125 }, { "epoch": 0.6268805172199428, "grad_norm": 0.40422338247299194, "learning_rate": 1.8658046334286545e-06, "loss": 0.6807, "step": 15126 }, { "epoch": 0.6269219611256165, "grad_norm": 0.4284586012363434, "learning_rate": 1.8655974139002861e-06, "loss": 0.7472, "step": 15127 }, { "epoch": 0.6269634050312901, "grad_norm": 0.38522276282310486, "learning_rate": 1.8653901943719177e-06, "loss": 0.6427, "step": 15128 }, { "epoch": 0.6270048489369638, "grad_norm": 0.4155053496360779, "learning_rate": 1.8651829748435493e-06, "loss": 0.7136, "step": 15129 }, { "epoch": 0.6270462928426375, "grad_norm": 0.41273677349090576, "learning_rate": 1.864975755315181e-06, "loss": 0.6053, "step": 15130 }, { "epoch": 0.6270877367483112, "grad_norm": 0.3879847228527069, "learning_rate": 1.8647685357868127e-06, "loss": 0.6958, "step": 15131 }, { "epoch": 0.6271291806539848, "grad_norm": 0.417024165391922, "learning_rate": 1.8645613162584445e-06, "loss": 0.6804, "step": 15132 }, { "epoch": 0.6271706245596586, "grad_norm": 
0.41300007700920105, "learning_rate": 1.8643540967300761e-06, "loss": 0.6676, "step": 15133 }, { "epoch": 0.6272120684653322, "grad_norm": 0.39110273122787476, "learning_rate": 1.8641468772017077e-06, "loss": 0.676, "step": 15134 }, { "epoch": 0.6272535123710058, "grad_norm": 0.43686729669570923, "learning_rate": 1.8639396576733393e-06, "loss": 0.66, "step": 15135 }, { "epoch": 0.6272949562766795, "grad_norm": 0.4129782021045685, "learning_rate": 1.863732438144971e-06, "loss": 0.6675, "step": 15136 }, { "epoch": 0.6273364001823531, "grad_norm": 0.39705002307891846, "learning_rate": 1.8635252186166025e-06, "loss": 0.655, "step": 15137 }, { "epoch": 0.6273778440880269, "grad_norm": 0.3971523642539978, "learning_rate": 1.8633179990882341e-06, "loss": 0.6273, "step": 15138 }, { "epoch": 0.6274192879937005, "grad_norm": 0.39130452275276184, "learning_rate": 1.8631107795598657e-06, "loss": 0.6935, "step": 15139 }, { "epoch": 0.6274607318993742, "grad_norm": 0.3793759047985077, "learning_rate": 1.8629035600314975e-06, "loss": 0.6208, "step": 15140 }, { "epoch": 0.6275021758050479, "grad_norm": 0.4395447075366974, "learning_rate": 1.8626963405031293e-06, "loss": 0.6508, "step": 15141 }, { "epoch": 0.6275436197107216, "grad_norm": 0.37019193172454834, "learning_rate": 1.862489120974761e-06, "loss": 0.64, "step": 15142 }, { "epoch": 0.6275850636163952, "grad_norm": 0.41842857003211975, "learning_rate": 1.8622819014463925e-06, "loss": 0.6714, "step": 15143 }, { "epoch": 0.6276265075220688, "grad_norm": 0.4311401844024658, "learning_rate": 1.8620746819180241e-06, "loss": 0.7139, "step": 15144 }, { "epoch": 0.6276679514277426, "grad_norm": 0.45155060291290283, "learning_rate": 1.8618674623896557e-06, "loss": 0.6622, "step": 15145 }, { "epoch": 0.6277093953334162, "grad_norm": 0.41654863953590393, "learning_rate": 1.8616602428612873e-06, "loss": 0.6814, "step": 15146 }, { "epoch": 0.6277508392390899, "grad_norm": 0.3885108530521393, "learning_rate": 1.861453023332919e-06, 
"loss": 0.6542, "step": 15147 }, { "epoch": 0.6277922831447635, "grad_norm": 0.4091849625110626, "learning_rate": 1.8612458038045505e-06, "loss": 0.7192, "step": 15148 }, { "epoch": 0.6278337270504373, "grad_norm": 0.4416104555130005, "learning_rate": 1.8610385842761825e-06, "loss": 0.6902, "step": 15149 }, { "epoch": 0.6278751709561109, "grad_norm": 0.4733984172344208, "learning_rate": 1.8608313647478141e-06, "loss": 0.7188, "step": 15150 }, { "epoch": 0.6279166148617846, "grad_norm": 0.4048673212528229, "learning_rate": 1.8606241452194457e-06, "loss": 0.7031, "step": 15151 }, { "epoch": 0.6279580587674582, "grad_norm": 0.4496244490146637, "learning_rate": 1.8604169256910773e-06, "loss": 0.6943, "step": 15152 }, { "epoch": 0.6279995026731319, "grad_norm": 0.4038473665714264, "learning_rate": 1.860209706162709e-06, "loss": 0.7095, "step": 15153 }, { "epoch": 0.6280409465788056, "grad_norm": 0.396587997674942, "learning_rate": 1.8600024866343405e-06, "loss": 0.6833, "step": 15154 }, { "epoch": 0.6280823904844792, "grad_norm": 0.42150723934173584, "learning_rate": 1.8597952671059721e-06, "loss": 0.6842, "step": 15155 }, { "epoch": 0.628123834390153, "grad_norm": 0.44472354650497437, "learning_rate": 1.8595880475776037e-06, "loss": 0.6594, "step": 15156 }, { "epoch": 0.6281652782958266, "grad_norm": 0.4122158885002136, "learning_rate": 1.8593808280492355e-06, "loss": 0.6926, "step": 15157 }, { "epoch": 0.6282067222015003, "grad_norm": 0.42942675948143005, "learning_rate": 1.8591736085208673e-06, "loss": 0.6375, "step": 15158 }, { "epoch": 0.6282481661071739, "grad_norm": 0.3868406414985657, "learning_rate": 1.858966388992499e-06, "loss": 0.7087, "step": 15159 }, { "epoch": 0.6282896100128477, "grad_norm": 0.4299485385417938, "learning_rate": 1.8587591694641305e-06, "loss": 0.6677, "step": 15160 }, { "epoch": 0.6283310539185213, "grad_norm": 0.37471336126327515, "learning_rate": 1.8585519499357621e-06, "loss": 0.6267, "step": 15161 }, { "epoch": 0.6283724978241949, 
"grad_norm": 0.40147238969802856, "learning_rate": 1.8583447304073937e-06, "loss": 0.6475, "step": 15162 }, { "epoch": 0.6284139417298686, "grad_norm": 0.4224829375743866, "learning_rate": 1.8581375108790253e-06, "loss": 0.7129, "step": 15163 }, { "epoch": 0.6284553856355423, "grad_norm": 0.45749062299728394, "learning_rate": 1.857930291350657e-06, "loss": 0.7151, "step": 15164 }, { "epoch": 0.628496829541216, "grad_norm": 0.425036758184433, "learning_rate": 1.8577230718222885e-06, "loss": 0.6497, "step": 15165 }, { "epoch": 0.6285382734468896, "grad_norm": 0.4190179109573364, "learning_rate": 1.8575158522939206e-06, "loss": 0.6812, "step": 15166 }, { "epoch": 0.6285797173525633, "grad_norm": 0.3665629029273987, "learning_rate": 1.8573086327655521e-06, "loss": 0.6416, "step": 15167 }, { "epoch": 0.628621161258237, "grad_norm": 0.40463849902153015, "learning_rate": 1.8571014132371837e-06, "loss": 0.626, "step": 15168 }, { "epoch": 0.6286626051639106, "grad_norm": 0.395557165145874, "learning_rate": 1.8568941937088153e-06, "loss": 0.7063, "step": 15169 }, { "epoch": 0.6287040490695843, "grad_norm": 0.42815476655960083, "learning_rate": 1.856686974180447e-06, "loss": 0.6788, "step": 15170 }, { "epoch": 0.628745492975258, "grad_norm": 0.4760686755180359, "learning_rate": 1.8564797546520785e-06, "loss": 0.7314, "step": 15171 }, { "epoch": 0.6287869368809317, "grad_norm": 0.3949381411075592, "learning_rate": 1.8562725351237101e-06, "loss": 0.6464, "step": 15172 }, { "epoch": 0.6288283807866053, "grad_norm": 0.43162328004837036, "learning_rate": 1.8560653155953417e-06, "loss": 0.6986, "step": 15173 }, { "epoch": 0.628869824692279, "grad_norm": 0.42263323068618774, "learning_rate": 1.8558580960669733e-06, "loss": 0.7025, "step": 15174 }, { "epoch": 0.6289112685979527, "grad_norm": 0.40511828660964966, "learning_rate": 1.8556508765386054e-06, "loss": 0.6407, "step": 15175 }, { "epoch": 0.6289527125036264, "grad_norm": 0.3812329173088074, "learning_rate": 
1.855443657010237e-06, "loss": 0.6875, "step": 15176 }, { "epoch": 0.6289941564093, "grad_norm": 0.3953416347503662, "learning_rate": 1.8552364374818685e-06, "loss": 0.7083, "step": 15177 }, { "epoch": 0.6290356003149736, "grad_norm": 0.398917555809021, "learning_rate": 1.8550292179535001e-06, "loss": 0.7025, "step": 15178 }, { "epoch": 0.6290770442206474, "grad_norm": 0.44467058777809143, "learning_rate": 1.8548219984251317e-06, "loss": 0.6532, "step": 15179 }, { "epoch": 0.629118488126321, "grad_norm": 0.4180728793144226, "learning_rate": 1.8546147788967633e-06, "loss": 0.6653, "step": 15180 }, { "epoch": 0.6291599320319947, "grad_norm": 0.4180881381034851, "learning_rate": 1.854407559368395e-06, "loss": 0.6609, "step": 15181 }, { "epoch": 0.6292013759376683, "grad_norm": 0.39680027961730957, "learning_rate": 1.8542003398400265e-06, "loss": 0.6814, "step": 15182 }, { "epoch": 0.6292428198433421, "grad_norm": 0.3958665430545807, "learning_rate": 1.8539931203116586e-06, "loss": 0.6393, "step": 15183 }, { "epoch": 0.6292842637490157, "grad_norm": 0.42513954639434814, "learning_rate": 1.8537859007832902e-06, "loss": 0.6758, "step": 15184 }, { "epoch": 0.6293257076546894, "grad_norm": 0.4442698061466217, "learning_rate": 1.8535786812549218e-06, "loss": 0.676, "step": 15185 }, { "epoch": 0.629367151560363, "grad_norm": 0.4365846514701843, "learning_rate": 1.8533714617265533e-06, "loss": 0.7424, "step": 15186 }, { "epoch": 0.6294085954660367, "grad_norm": 0.4201393723487854, "learning_rate": 1.853164242198185e-06, "loss": 0.6702, "step": 15187 }, { "epoch": 0.6294500393717104, "grad_norm": 0.4058719575405121, "learning_rate": 1.8529570226698165e-06, "loss": 0.6915, "step": 15188 }, { "epoch": 0.629491483277384, "grad_norm": 0.4285821318626404, "learning_rate": 1.8527498031414481e-06, "loss": 0.6866, "step": 15189 }, { "epoch": 0.6295329271830578, "grad_norm": 0.41526636481285095, "learning_rate": 1.8525425836130797e-06, "loss": 0.6919, "step": 15190 }, { "epoch": 
0.6295743710887314, "grad_norm": 0.4664246737957001, "learning_rate": 1.8523353640847113e-06, "loss": 0.7064, "step": 15191 }, { "epoch": 0.6296158149944051, "grad_norm": 0.43230971693992615, "learning_rate": 1.8521281445563434e-06, "loss": 0.6716, "step": 15192 }, { "epoch": 0.6296572589000787, "grad_norm": 0.4061552584171295, "learning_rate": 1.851920925027975e-06, "loss": 0.6531, "step": 15193 }, { "epoch": 0.6296987028057525, "grad_norm": 0.3810943365097046, "learning_rate": 1.8517137054996066e-06, "loss": 0.6641, "step": 15194 }, { "epoch": 0.6297401467114261, "grad_norm": 0.48444515466690063, "learning_rate": 1.8515064859712381e-06, "loss": 0.6594, "step": 15195 }, { "epoch": 0.6297815906170997, "grad_norm": 0.41792500019073486, "learning_rate": 1.8512992664428697e-06, "loss": 0.6818, "step": 15196 }, { "epoch": 0.6298230345227734, "grad_norm": 0.421718955039978, "learning_rate": 1.8510920469145013e-06, "loss": 0.6433, "step": 15197 }, { "epoch": 0.6298644784284471, "grad_norm": 0.4035363793373108, "learning_rate": 1.850884827386133e-06, "loss": 0.7075, "step": 15198 }, { "epoch": 0.6299059223341208, "grad_norm": 0.40614035725593567, "learning_rate": 1.8506776078577645e-06, "loss": 0.6941, "step": 15199 }, { "epoch": 0.6299473662397944, "grad_norm": 0.4248254597187042, "learning_rate": 1.8504703883293961e-06, "loss": 0.62, "step": 15200 }, { "epoch": 0.6299888101454681, "grad_norm": 0.4242332875728607, "learning_rate": 1.8502631688010282e-06, "loss": 0.7292, "step": 15201 }, { "epoch": 0.6300302540511418, "grad_norm": 0.4281512200832367, "learning_rate": 1.8500559492726598e-06, "loss": 0.6765, "step": 15202 }, { "epoch": 0.6300716979568155, "grad_norm": 0.407836377620697, "learning_rate": 1.8498487297442914e-06, "loss": 0.6685, "step": 15203 }, { "epoch": 0.6301131418624891, "grad_norm": 0.43116500973701477, "learning_rate": 1.849641510215923e-06, "loss": 0.7086, "step": 15204 }, { "epoch": 0.6301545857681627, "grad_norm": 0.4226404130458832, "learning_rate": 
1.8494342906875545e-06, "loss": 0.7434, "step": 15205 }, { "epoch": 0.6301960296738365, "grad_norm": 0.4231027364730835, "learning_rate": 1.8492270711591861e-06, "loss": 0.7186, "step": 15206 }, { "epoch": 0.6302374735795101, "grad_norm": 0.42640358209609985, "learning_rate": 1.8490198516308177e-06, "loss": 0.6899, "step": 15207 }, { "epoch": 0.6302789174851838, "grad_norm": 0.44813916087150574, "learning_rate": 1.8488126321024493e-06, "loss": 0.6973, "step": 15208 }, { "epoch": 0.6303203613908575, "grad_norm": 0.38663163781166077, "learning_rate": 1.8486054125740814e-06, "loss": 0.7173, "step": 15209 }, { "epoch": 0.6303618052965312, "grad_norm": 0.41024643182754517, "learning_rate": 1.848398193045713e-06, "loss": 0.65, "step": 15210 }, { "epoch": 0.6304032492022048, "grad_norm": 0.41117218136787415, "learning_rate": 1.8481909735173446e-06, "loss": 0.6982, "step": 15211 }, { "epoch": 0.6304446931078785, "grad_norm": 0.41058972477912903, "learning_rate": 1.8479837539889762e-06, "loss": 0.6865, "step": 15212 }, { "epoch": 0.6304861370135522, "grad_norm": 0.4117130637168884, "learning_rate": 1.8477765344606077e-06, "loss": 0.6892, "step": 15213 }, { "epoch": 0.6305275809192258, "grad_norm": 0.41765522956848145, "learning_rate": 1.8475693149322393e-06, "loss": 0.6382, "step": 15214 }, { "epoch": 0.6305690248248995, "grad_norm": 0.40810832381248474, "learning_rate": 1.847362095403871e-06, "loss": 0.6602, "step": 15215 }, { "epoch": 0.6306104687305731, "grad_norm": 0.40406930446624756, "learning_rate": 1.8471548758755025e-06, "loss": 0.6541, "step": 15216 }, { "epoch": 0.6306519126362469, "grad_norm": 0.39908838272094727, "learning_rate": 1.8469476563471341e-06, "loss": 0.6044, "step": 15217 }, { "epoch": 0.6306933565419205, "grad_norm": 0.4255857765674591, "learning_rate": 1.8467404368187662e-06, "loss": 0.7188, "step": 15218 }, { "epoch": 0.6307348004475942, "grad_norm": 0.43373048305511475, "learning_rate": 1.8465332172903978e-06, "loss": 0.6893, "step": 15219 }, { 
"epoch": 0.6307762443532678, "grad_norm": 0.42076992988586426, "learning_rate": 1.8463259977620294e-06, "loss": 0.7458, "step": 15220 }, { "epoch": 0.6308176882589416, "grad_norm": 0.37135007977485657, "learning_rate": 1.846118778233661e-06, "loss": 0.6047, "step": 15221 }, { "epoch": 0.6308591321646152, "grad_norm": 0.40021198987960815, "learning_rate": 1.8459115587052925e-06, "loss": 0.637, "step": 15222 }, { "epoch": 0.6309005760702888, "grad_norm": 0.41561463475227356, "learning_rate": 1.8457043391769241e-06, "loss": 0.6511, "step": 15223 }, { "epoch": 0.6309420199759626, "grad_norm": 0.41267129778862, "learning_rate": 1.8454971196485557e-06, "loss": 0.7021, "step": 15224 }, { "epoch": 0.6309834638816362, "grad_norm": 0.40497538447380066, "learning_rate": 1.8452899001201873e-06, "loss": 0.6499, "step": 15225 }, { "epoch": 0.6310249077873099, "grad_norm": 0.40299052000045776, "learning_rate": 1.845082680591819e-06, "loss": 0.6724, "step": 15226 }, { "epoch": 0.6310663516929835, "grad_norm": 0.42921754717826843, "learning_rate": 1.844875461063451e-06, "loss": 0.6539, "step": 15227 }, { "epoch": 0.6311077955986573, "grad_norm": 0.41832417249679565, "learning_rate": 1.8446682415350826e-06, "loss": 0.6912, "step": 15228 }, { "epoch": 0.6311492395043309, "grad_norm": 0.4399922788143158, "learning_rate": 1.8444610220067142e-06, "loss": 0.697, "step": 15229 }, { "epoch": 0.6311906834100045, "grad_norm": 0.4036673903465271, "learning_rate": 1.8442538024783458e-06, "loss": 0.7183, "step": 15230 }, { "epoch": 0.6312321273156782, "grad_norm": 0.38633206486701965, "learning_rate": 1.8440465829499773e-06, "loss": 0.7314, "step": 15231 }, { "epoch": 0.6312735712213519, "grad_norm": 0.4601365923881531, "learning_rate": 1.843839363421609e-06, "loss": 0.7257, "step": 15232 }, { "epoch": 0.6313150151270256, "grad_norm": 0.42420706152915955, "learning_rate": 1.8436321438932405e-06, "loss": 0.6193, "step": 15233 }, { "epoch": 0.6313564590326992, "grad_norm": 0.4008655846118927, 
"learning_rate": 1.8434249243648721e-06, "loss": 0.6926, "step": 15234 }, { "epoch": 0.631397902938373, "grad_norm": 0.4010036587715149, "learning_rate": 1.8432177048365037e-06, "loss": 0.6395, "step": 15235 }, { "epoch": 0.6314393468440466, "grad_norm": 0.44225746393203735, "learning_rate": 1.8430104853081358e-06, "loss": 0.7395, "step": 15236 }, { "epoch": 0.6314807907497203, "grad_norm": 0.39972952008247375, "learning_rate": 1.8428032657797674e-06, "loss": 0.6681, "step": 15237 }, { "epoch": 0.6315222346553939, "grad_norm": 0.4018649160861969, "learning_rate": 1.842596046251399e-06, "loss": 0.6624, "step": 15238 }, { "epoch": 0.6315636785610675, "grad_norm": 0.41557028889656067, "learning_rate": 1.8423888267230306e-06, "loss": 0.6838, "step": 15239 }, { "epoch": 0.6316051224667413, "grad_norm": 0.443386435508728, "learning_rate": 1.8421816071946621e-06, "loss": 0.7234, "step": 15240 }, { "epoch": 0.6316465663724149, "grad_norm": 0.3995409309864044, "learning_rate": 1.8419743876662937e-06, "loss": 0.6422, "step": 15241 }, { "epoch": 0.6316880102780886, "grad_norm": 0.41352614760398865, "learning_rate": 1.8417671681379253e-06, "loss": 0.6329, "step": 15242 }, { "epoch": 0.6317294541837623, "grad_norm": 0.4218639135360718, "learning_rate": 1.841559948609557e-06, "loss": 0.7217, "step": 15243 }, { "epoch": 0.631770898089436, "grad_norm": 0.4205247461795807, "learning_rate": 1.841352729081189e-06, "loss": 0.7363, "step": 15244 }, { "epoch": 0.6318123419951096, "grad_norm": 0.427081435918808, "learning_rate": 1.8411455095528206e-06, "loss": 0.6747, "step": 15245 }, { "epoch": 0.6318537859007833, "grad_norm": 0.40872371196746826, "learning_rate": 1.8409382900244522e-06, "loss": 0.6393, "step": 15246 }, { "epoch": 0.631895229806457, "grad_norm": 0.4331202507019043, "learning_rate": 1.8407310704960838e-06, "loss": 0.6864, "step": 15247 }, { "epoch": 0.6319366737121306, "grad_norm": 0.4182370603084564, "learning_rate": 1.8405238509677154e-06, "loss": 0.7489, "step": 15248 
}, { "epoch": 0.6319781176178043, "grad_norm": 0.3782501220703125, "learning_rate": 1.840316631439347e-06, "loss": 0.662, "step": 15249 }, { "epoch": 0.6320195615234779, "grad_norm": 0.4139811098575592, "learning_rate": 1.8401094119109785e-06, "loss": 0.6851, "step": 15250 }, { "epoch": 0.6320610054291517, "grad_norm": 0.3864934742450714, "learning_rate": 1.8399021923826101e-06, "loss": 0.6639, "step": 15251 }, { "epoch": 0.6321024493348253, "grad_norm": 0.4106742739677429, "learning_rate": 1.8396949728542417e-06, "loss": 0.667, "step": 15252 }, { "epoch": 0.632143893240499, "grad_norm": 0.3970029652118683, "learning_rate": 1.8394877533258738e-06, "loss": 0.6279, "step": 15253 }, { "epoch": 0.6321853371461726, "grad_norm": 0.40461331605911255, "learning_rate": 1.8392805337975054e-06, "loss": 0.6738, "step": 15254 }, { "epoch": 0.6322267810518464, "grad_norm": 0.42556217312812805, "learning_rate": 1.839073314269137e-06, "loss": 0.71, "step": 15255 }, { "epoch": 0.63226822495752, "grad_norm": 0.4029914140701294, "learning_rate": 1.8388660947407686e-06, "loss": 0.6836, "step": 15256 }, { "epoch": 0.6323096688631936, "grad_norm": 0.42458978295326233, "learning_rate": 1.8386588752124002e-06, "loss": 0.7009, "step": 15257 }, { "epoch": 0.6323511127688674, "grad_norm": 0.4208468794822693, "learning_rate": 1.8384516556840317e-06, "loss": 0.6475, "step": 15258 }, { "epoch": 0.632392556674541, "grad_norm": 0.448294997215271, "learning_rate": 1.8382444361556633e-06, "loss": 0.6774, "step": 15259 }, { "epoch": 0.6324340005802147, "grad_norm": 0.43901365995407104, "learning_rate": 1.838037216627295e-06, "loss": 0.6459, "step": 15260 }, { "epoch": 0.6324754444858883, "grad_norm": 0.4139244854450226, "learning_rate": 1.8378299970989265e-06, "loss": 0.6479, "step": 15261 }, { "epoch": 0.6325168883915621, "grad_norm": 0.4156998097896576, "learning_rate": 1.8376227775705586e-06, "loss": 0.6459, "step": 15262 }, { "epoch": 0.6325583322972357, "grad_norm": 0.4145723879337311, 
"learning_rate": 1.8374155580421902e-06, "loss": 0.6858, "step": 15263 }, { "epoch": 0.6325997762029094, "grad_norm": 0.39287474751472473, "learning_rate": 1.8372083385138218e-06, "loss": 0.6941, "step": 15264 }, { "epoch": 0.632641220108583, "grad_norm": 0.4358798861503601, "learning_rate": 1.8370011189854534e-06, "loss": 0.676, "step": 15265 }, { "epoch": 0.6326826640142567, "grad_norm": 0.47609537839889526, "learning_rate": 1.836793899457085e-06, "loss": 0.6711, "step": 15266 }, { "epoch": 0.6327241079199304, "grad_norm": 0.41265836358070374, "learning_rate": 1.8365866799287165e-06, "loss": 0.6995, "step": 15267 }, { "epoch": 0.632765551825604, "grad_norm": 0.42632484436035156, "learning_rate": 1.8363794604003481e-06, "loss": 0.7131, "step": 15268 }, { "epoch": 0.6328069957312777, "grad_norm": 0.4007568359375, "learning_rate": 1.8361722408719797e-06, "loss": 0.6609, "step": 15269 }, { "epoch": 0.6328484396369514, "grad_norm": 0.42806461453437805, "learning_rate": 1.8359650213436118e-06, "loss": 0.6315, "step": 15270 }, { "epoch": 0.6328898835426251, "grad_norm": 0.4497212767601013, "learning_rate": 1.8357578018152434e-06, "loss": 0.7407, "step": 15271 }, { "epoch": 0.6329313274482987, "grad_norm": 0.435983270406723, "learning_rate": 1.835550582286875e-06, "loss": 0.6835, "step": 15272 }, { "epoch": 0.6329727713539725, "grad_norm": 0.39427804946899414, "learning_rate": 1.8353433627585066e-06, "loss": 0.7013, "step": 15273 }, { "epoch": 0.6330142152596461, "grad_norm": 0.41245415806770325, "learning_rate": 1.8351361432301382e-06, "loss": 0.6345, "step": 15274 }, { "epoch": 0.6330556591653197, "grad_norm": 0.3692905008792877, "learning_rate": 1.8349289237017698e-06, "loss": 0.6273, "step": 15275 }, { "epoch": 0.6330971030709934, "grad_norm": 0.40609487891197205, "learning_rate": 1.8347217041734014e-06, "loss": 0.6486, "step": 15276 }, { "epoch": 0.633138546976667, "grad_norm": 0.4210486114025116, "learning_rate": 1.834514484645033e-06, "loss": 0.6552, "step": 15277 
}, { "epoch": 0.6331799908823408, "grad_norm": 0.4253741204738617, "learning_rate": 1.8343072651166645e-06, "loss": 0.6888, "step": 15278 }, { "epoch": 0.6332214347880144, "grad_norm": 0.4268137514591217, "learning_rate": 1.8341000455882966e-06, "loss": 0.6389, "step": 15279 }, { "epoch": 0.6332628786936881, "grad_norm": 0.40833765268325806, "learning_rate": 1.8338928260599282e-06, "loss": 0.714, "step": 15280 }, { "epoch": 0.6333043225993618, "grad_norm": 0.3846840262413025, "learning_rate": 1.8336856065315598e-06, "loss": 0.6646, "step": 15281 }, { "epoch": 0.6333457665050354, "grad_norm": 0.44544732570648193, "learning_rate": 1.8334783870031914e-06, "loss": 0.7074, "step": 15282 }, { "epoch": 0.6333872104107091, "grad_norm": 0.45123475790023804, "learning_rate": 1.833271167474823e-06, "loss": 0.7048, "step": 15283 }, { "epoch": 0.6334286543163827, "grad_norm": 0.4004644453525543, "learning_rate": 1.8330639479464546e-06, "loss": 0.7075, "step": 15284 }, { "epoch": 0.6334700982220565, "grad_norm": 0.3853166401386261, "learning_rate": 1.8328567284180862e-06, "loss": 0.6354, "step": 15285 }, { "epoch": 0.6335115421277301, "grad_norm": 0.41783902049064636, "learning_rate": 1.8326495088897177e-06, "loss": 0.6665, "step": 15286 }, { "epoch": 0.6335529860334038, "grad_norm": 0.4098933935165405, "learning_rate": 1.8324422893613493e-06, "loss": 0.7092, "step": 15287 }, { "epoch": 0.6335944299390774, "grad_norm": 0.4107879102230072, "learning_rate": 1.8322350698329814e-06, "loss": 0.6567, "step": 15288 }, { "epoch": 0.6336358738447512, "grad_norm": 0.4225985109806061, "learning_rate": 1.832027850304613e-06, "loss": 0.6829, "step": 15289 }, { "epoch": 0.6336773177504248, "grad_norm": 0.40843454003334045, "learning_rate": 1.8318206307762446e-06, "loss": 0.6473, "step": 15290 }, { "epoch": 0.6337187616560984, "grad_norm": 0.37326687574386597, "learning_rate": 1.8316134112478762e-06, "loss": 0.6145, "step": 15291 }, { "epoch": 0.6337602055617721, "grad_norm": 
0.4108583629131317, "learning_rate": 1.8314061917195078e-06, "loss": 0.6711, "step": 15292 }, { "epoch": 0.6338016494674458, "grad_norm": 0.39427095651626587, "learning_rate": 1.8311989721911394e-06, "loss": 0.6506, "step": 15293 }, { "epoch": 0.6338430933731195, "grad_norm": 0.4485965371131897, "learning_rate": 1.830991752662771e-06, "loss": 0.6656, "step": 15294 }, { "epoch": 0.6338845372787931, "grad_norm": 0.4352828562259674, "learning_rate": 1.8307845331344025e-06, "loss": 0.6816, "step": 15295 }, { "epoch": 0.6339259811844669, "grad_norm": 0.4076905846595764, "learning_rate": 1.8305773136060341e-06, "loss": 0.6594, "step": 15296 }, { "epoch": 0.6339674250901405, "grad_norm": 0.3939104974269867, "learning_rate": 1.8303700940776662e-06, "loss": 0.6646, "step": 15297 }, { "epoch": 0.6340088689958142, "grad_norm": 0.4331432580947876, "learning_rate": 1.8301628745492978e-06, "loss": 0.6991, "step": 15298 }, { "epoch": 0.6340503129014878, "grad_norm": 0.423004150390625, "learning_rate": 1.8299556550209294e-06, "loss": 0.6346, "step": 15299 }, { "epoch": 0.6340917568071615, "grad_norm": 0.48215097188949585, "learning_rate": 1.829748435492561e-06, "loss": 0.6926, "step": 15300 }, { "epoch": 0.6341332007128352, "grad_norm": 0.4022946357727051, "learning_rate": 1.8295412159641926e-06, "loss": 0.6652, "step": 15301 }, { "epoch": 0.6341746446185088, "grad_norm": 0.4577620029449463, "learning_rate": 1.8293339964358242e-06, "loss": 0.6606, "step": 15302 }, { "epoch": 0.6342160885241825, "grad_norm": 0.4438524842262268, "learning_rate": 1.8291267769074558e-06, "loss": 0.7141, "step": 15303 }, { "epoch": 0.6342575324298562, "grad_norm": 0.41563931107521057, "learning_rate": 1.8289195573790873e-06, "loss": 0.7114, "step": 15304 }, { "epoch": 0.6342989763355299, "grad_norm": 0.4010871350765228, "learning_rate": 1.8287123378507194e-06, "loss": 0.6757, "step": 15305 }, { "epoch": 0.6343404202412035, "grad_norm": 0.4367063045501709, "learning_rate": 1.828505118322351e-06, "loss": 
0.6665, "step": 15306 }, { "epoch": 0.6343818641468773, "grad_norm": 0.40178465843200684, "learning_rate": 1.8282978987939826e-06, "loss": 0.6816, "step": 15307 }, { "epoch": 0.6344233080525509, "grad_norm": 0.4690416753292084, "learning_rate": 1.8280906792656142e-06, "loss": 0.7463, "step": 15308 }, { "epoch": 0.6344647519582245, "grad_norm": 0.40547385811805725, "learning_rate": 1.8278834597372458e-06, "loss": 0.7329, "step": 15309 }, { "epoch": 0.6345061958638982, "grad_norm": 0.4524579644203186, "learning_rate": 1.8276762402088774e-06, "loss": 0.7212, "step": 15310 }, { "epoch": 0.6345476397695718, "grad_norm": 0.40013644099235535, "learning_rate": 1.827469020680509e-06, "loss": 0.6991, "step": 15311 }, { "epoch": 0.6345890836752456, "grad_norm": 0.4508562386035919, "learning_rate": 1.8272618011521406e-06, "loss": 0.7095, "step": 15312 }, { "epoch": 0.6346305275809192, "grad_norm": 0.39066797494888306, "learning_rate": 1.8270545816237721e-06, "loss": 0.6245, "step": 15313 }, { "epoch": 0.6346719714865929, "grad_norm": 0.409310519695282, "learning_rate": 1.8268473620954042e-06, "loss": 0.6384, "step": 15314 }, { "epoch": 0.6347134153922666, "grad_norm": 0.38556766510009766, "learning_rate": 1.8266401425670358e-06, "loss": 0.6814, "step": 15315 }, { "epoch": 0.6347548592979403, "grad_norm": 0.3967755436897278, "learning_rate": 1.8264329230386674e-06, "loss": 0.6226, "step": 15316 }, { "epoch": 0.6347963032036139, "grad_norm": 0.42205455899238586, "learning_rate": 1.826225703510299e-06, "loss": 0.6769, "step": 15317 }, { "epoch": 0.6348377471092875, "grad_norm": 0.43861979246139526, "learning_rate": 1.8260184839819306e-06, "loss": 0.6919, "step": 15318 }, { "epoch": 0.6348791910149613, "grad_norm": 0.4533439874649048, "learning_rate": 1.8258112644535622e-06, "loss": 0.756, "step": 15319 }, { "epoch": 0.6349206349206349, "grad_norm": 0.46745434403419495, "learning_rate": 1.8256040449251938e-06, "loss": 0.793, "step": 15320 }, { "epoch": 0.6349620788263086, 
"grad_norm": 0.39336535334587097, "learning_rate": 1.8253968253968254e-06, "loss": 0.7023, "step": 15321 }, { "epoch": 0.6350035227319822, "grad_norm": 0.39952343702316284, "learning_rate": 1.8251896058684572e-06, "loss": 0.6448, "step": 15322 }, { "epoch": 0.635044966637656, "grad_norm": 0.43429267406463623, "learning_rate": 1.824982386340089e-06, "loss": 0.7242, "step": 15323 }, { "epoch": 0.6350864105433296, "grad_norm": 0.4304617643356323, "learning_rate": 1.8247751668117206e-06, "loss": 0.7037, "step": 15324 }, { "epoch": 0.6351278544490033, "grad_norm": 0.39618027210235596, "learning_rate": 1.8245679472833522e-06, "loss": 0.7019, "step": 15325 }, { "epoch": 0.635169298354677, "grad_norm": 0.4075004756450653, "learning_rate": 1.8243607277549838e-06, "loss": 0.678, "step": 15326 }, { "epoch": 0.6352107422603506, "grad_norm": 0.3968529999256134, "learning_rate": 1.8241535082266154e-06, "loss": 0.6562, "step": 15327 }, { "epoch": 0.6352521861660243, "grad_norm": 0.39338448643684387, "learning_rate": 1.823946288698247e-06, "loss": 0.6641, "step": 15328 }, { "epoch": 0.6352936300716979, "grad_norm": 0.4805199205875397, "learning_rate": 1.8237390691698786e-06, "loss": 0.7144, "step": 15329 }, { "epoch": 0.6353350739773717, "grad_norm": 0.40466493368148804, "learning_rate": 1.8235318496415102e-06, "loss": 0.7489, "step": 15330 }, { "epoch": 0.6353765178830453, "grad_norm": 0.4038875102996826, "learning_rate": 1.8233246301131422e-06, "loss": 0.6357, "step": 15331 }, { "epoch": 0.635417961788719, "grad_norm": 0.3885483741760254, "learning_rate": 1.8231174105847738e-06, "loss": 0.6814, "step": 15332 }, { "epoch": 0.6354594056943926, "grad_norm": 0.389848917722702, "learning_rate": 1.8229101910564054e-06, "loss": 0.6665, "step": 15333 }, { "epoch": 0.6355008496000664, "grad_norm": 0.41061657667160034, "learning_rate": 1.822702971528037e-06, "loss": 0.7104, "step": 15334 }, { "epoch": 0.63554229350574, "grad_norm": 0.40850281715393066, "learning_rate": 
1.8224957519996686e-06, "loss": 0.7009, "step": 15335 }, { "epoch": 0.6355837374114136, "grad_norm": 0.46247005462646484, "learning_rate": 1.8222885324713002e-06, "loss": 0.6838, "step": 15336 }, { "epoch": 0.6356251813170873, "grad_norm": 0.4104243814945221, "learning_rate": 1.8220813129429318e-06, "loss": 0.6658, "step": 15337 }, { "epoch": 0.635666625222761, "grad_norm": 0.44252797961235046, "learning_rate": 1.8218740934145634e-06, "loss": 0.7246, "step": 15338 }, { "epoch": 0.6357080691284347, "grad_norm": 0.3962167501449585, "learning_rate": 1.8216668738861952e-06, "loss": 0.7439, "step": 15339 }, { "epoch": 0.6357495130341083, "grad_norm": 0.41960608959198, "learning_rate": 1.821459654357827e-06, "loss": 0.7339, "step": 15340 }, { "epoch": 0.635790956939782, "grad_norm": 0.39101892709732056, "learning_rate": 1.8212524348294586e-06, "loss": 0.6436, "step": 15341 }, { "epoch": 0.6358324008454557, "grad_norm": 0.43188178539276123, "learning_rate": 1.8210452153010902e-06, "loss": 0.6812, "step": 15342 }, { "epoch": 0.6358738447511293, "grad_norm": 0.3930559754371643, "learning_rate": 1.8208379957727218e-06, "loss": 0.6914, "step": 15343 }, { "epoch": 0.635915288656803, "grad_norm": 0.3822891116142273, "learning_rate": 1.8206307762443534e-06, "loss": 0.67, "step": 15344 }, { "epoch": 0.6359567325624766, "grad_norm": 0.43734651803970337, "learning_rate": 1.820423556715985e-06, "loss": 0.6444, "step": 15345 }, { "epoch": 0.6359981764681504, "grad_norm": 0.40735191106796265, "learning_rate": 1.8202163371876166e-06, "loss": 0.6306, "step": 15346 }, { "epoch": 0.636039620373824, "grad_norm": 0.4107307493686676, "learning_rate": 1.8200091176592482e-06, "loss": 0.7025, "step": 15347 }, { "epoch": 0.6360810642794977, "grad_norm": 0.41425636410713196, "learning_rate": 1.81980189813088e-06, "loss": 0.6489, "step": 15348 }, { "epoch": 0.6361225081851714, "grad_norm": 0.4198528528213501, "learning_rate": 1.8195946786025118e-06, "loss": 0.6743, "step": 15349 }, { "epoch": 
0.6361639520908451, "grad_norm": 0.39457428455352783, "learning_rate": 1.8193874590741434e-06, "loss": 0.6542, "step": 15350 }, { "epoch": 0.6362053959965187, "grad_norm": 0.41537606716156006, "learning_rate": 1.819180239545775e-06, "loss": 0.6692, "step": 15351 }, { "epoch": 0.6362468399021923, "grad_norm": 0.4152434468269348, "learning_rate": 1.8189730200174066e-06, "loss": 0.6741, "step": 15352 }, { "epoch": 0.6362882838078661, "grad_norm": 0.4022214114665985, "learning_rate": 1.8187658004890382e-06, "loss": 0.6478, "step": 15353 }, { "epoch": 0.6363297277135397, "grad_norm": 0.4001022279262543, "learning_rate": 1.8185585809606698e-06, "loss": 0.6461, "step": 15354 }, { "epoch": 0.6363711716192134, "grad_norm": 0.4324760138988495, "learning_rate": 1.8183513614323014e-06, "loss": 0.7422, "step": 15355 }, { "epoch": 0.636412615524887, "grad_norm": 0.4440074861049652, "learning_rate": 1.8181441419039332e-06, "loss": 0.7327, "step": 15356 }, { "epoch": 0.6364540594305608, "grad_norm": 0.3978829085826874, "learning_rate": 1.8179369223755648e-06, "loss": 0.6724, "step": 15357 }, { "epoch": 0.6364955033362344, "grad_norm": 0.41842445731163025, "learning_rate": 1.8177297028471966e-06, "loss": 0.6865, "step": 15358 }, { "epoch": 0.6365369472419081, "grad_norm": 0.4144447445869446, "learning_rate": 1.8175224833188282e-06, "loss": 0.7278, "step": 15359 }, { "epoch": 0.6365783911475817, "grad_norm": 0.47591978311538696, "learning_rate": 1.8173152637904598e-06, "loss": 0.6877, "step": 15360 }, { "epoch": 0.6366198350532554, "grad_norm": 0.41524603962898254, "learning_rate": 1.8171080442620914e-06, "loss": 0.712, "step": 15361 }, { "epoch": 0.6366612789589291, "grad_norm": 0.4098089635372162, "learning_rate": 1.816900824733723e-06, "loss": 0.7083, "step": 15362 }, { "epoch": 0.6367027228646027, "grad_norm": 0.43261098861694336, "learning_rate": 1.8166936052053546e-06, "loss": 0.6821, "step": 15363 }, { "epoch": 0.6367441667702765, "grad_norm": 0.407537579536438, 
"learning_rate": 1.8164863856769864e-06, "loss": 0.6654, "step": 15364 }, { "epoch": 0.6367856106759501, "grad_norm": 0.42200544476509094, "learning_rate": 1.816279166148618e-06, "loss": 0.661, "step": 15365 }, { "epoch": 0.6368270545816238, "grad_norm": 0.42285263538360596, "learning_rate": 1.8160719466202498e-06, "loss": 0.698, "step": 15366 }, { "epoch": 0.6368684984872974, "grad_norm": 0.42784354090690613, "learning_rate": 1.8158647270918814e-06, "loss": 0.642, "step": 15367 }, { "epoch": 0.6369099423929712, "grad_norm": 0.3939831852912903, "learning_rate": 1.815657507563513e-06, "loss": 0.6266, "step": 15368 }, { "epoch": 0.6369513862986448, "grad_norm": 0.43385496735572815, "learning_rate": 1.8154502880351446e-06, "loss": 0.725, "step": 15369 }, { "epoch": 0.6369928302043184, "grad_norm": 0.4425962269306183, "learning_rate": 1.8152430685067762e-06, "loss": 0.7201, "step": 15370 }, { "epoch": 0.6370342741099921, "grad_norm": 0.43750327825546265, "learning_rate": 1.8150358489784078e-06, "loss": 0.6599, "step": 15371 }, { "epoch": 0.6370757180156658, "grad_norm": 0.4344983696937561, "learning_rate": 1.8148286294500394e-06, "loss": 0.6779, "step": 15372 }, { "epoch": 0.6371171619213395, "grad_norm": 0.38473379611968994, "learning_rate": 1.8146214099216712e-06, "loss": 0.6439, "step": 15373 }, { "epoch": 0.6371586058270131, "grad_norm": 0.39684540033340454, "learning_rate": 1.8144141903933028e-06, "loss": 0.6893, "step": 15374 }, { "epoch": 0.6372000497326868, "grad_norm": 0.4194018542766571, "learning_rate": 1.8142069708649346e-06, "loss": 0.6987, "step": 15375 }, { "epoch": 0.6372414936383605, "grad_norm": 0.4072990119457245, "learning_rate": 1.8139997513365662e-06, "loss": 0.6483, "step": 15376 }, { "epoch": 0.6372829375440342, "grad_norm": 0.41205328702926636, "learning_rate": 1.8137925318081978e-06, "loss": 0.6992, "step": 15377 }, { "epoch": 0.6373243814497078, "grad_norm": 0.5244513154029846, "learning_rate": 1.8135853122798294e-06, "loss": 0.7211, "step": 
15378 }, { "epoch": 0.6373658253553814, "grad_norm": 0.38881808519363403, "learning_rate": 1.813378092751461e-06, "loss": 0.6494, "step": 15379 }, { "epoch": 0.6374072692610552, "grad_norm": 0.42637771368026733, "learning_rate": 1.8131708732230926e-06, "loss": 0.6914, "step": 15380 }, { "epoch": 0.6374487131667288, "grad_norm": 0.4013627767562866, "learning_rate": 1.8129636536947244e-06, "loss": 0.7068, "step": 15381 }, { "epoch": 0.6374901570724025, "grad_norm": 0.41826945543289185, "learning_rate": 1.812756434166356e-06, "loss": 0.6996, "step": 15382 }, { "epoch": 0.6375316009780762, "grad_norm": 0.40788719058036804, "learning_rate": 1.8125492146379876e-06, "loss": 0.6304, "step": 15383 }, { "epoch": 0.6375730448837499, "grad_norm": 0.3821995258331299, "learning_rate": 1.8123419951096194e-06, "loss": 0.6422, "step": 15384 }, { "epoch": 0.6376144887894235, "grad_norm": 0.40059471130371094, "learning_rate": 1.812134775581251e-06, "loss": 0.6824, "step": 15385 }, { "epoch": 0.6376559326950972, "grad_norm": 0.37301573157310486, "learning_rate": 1.8119275560528826e-06, "loss": 0.6437, "step": 15386 }, { "epoch": 0.6376973766007709, "grad_norm": 0.4050006866455078, "learning_rate": 1.8117203365245142e-06, "loss": 0.7366, "step": 15387 }, { "epoch": 0.6377388205064445, "grad_norm": 0.40898439288139343, "learning_rate": 1.8115131169961458e-06, "loss": 0.691, "step": 15388 }, { "epoch": 0.6377802644121182, "grad_norm": 0.4083981513977051, "learning_rate": 1.8113058974677774e-06, "loss": 0.6412, "step": 15389 }, { "epoch": 0.6378217083177918, "grad_norm": 0.3893134891986847, "learning_rate": 1.8110986779394092e-06, "loss": 0.609, "step": 15390 }, { "epoch": 0.6378631522234656, "grad_norm": 0.5936991572380066, "learning_rate": 1.8108914584110408e-06, "loss": 0.7109, "step": 15391 }, { "epoch": 0.6379045961291392, "grad_norm": 0.42896243929862976, "learning_rate": 1.8106842388826726e-06, "loss": 0.71, "step": 15392 }, { "epoch": 0.6379460400348129, "grad_norm": 
0.407651424407959, "learning_rate": 1.8104770193543042e-06, "loss": 0.6548, "step": 15393 }, { "epoch": 0.6379874839404865, "grad_norm": 0.4001621603965759, "learning_rate": 1.8102697998259358e-06, "loss": 0.6809, "step": 15394 }, { "epoch": 0.6380289278461603, "grad_norm": 0.4125313460826874, "learning_rate": 1.8100625802975674e-06, "loss": 0.6721, "step": 15395 }, { "epoch": 0.6380703717518339, "grad_norm": 0.3769037425518036, "learning_rate": 1.809855360769199e-06, "loss": 0.6353, "step": 15396 }, { "epoch": 0.6381118156575075, "grad_norm": 0.43375149369239807, "learning_rate": 1.8096481412408306e-06, "loss": 0.7324, "step": 15397 }, { "epoch": 0.6381532595631813, "grad_norm": 0.4265619218349457, "learning_rate": 1.8094409217124624e-06, "loss": 0.7422, "step": 15398 }, { "epoch": 0.6381947034688549, "grad_norm": 0.3876926898956299, "learning_rate": 1.809233702184094e-06, "loss": 0.6541, "step": 15399 }, { "epoch": 0.6382361473745286, "grad_norm": 0.43742167949676514, "learning_rate": 1.8090264826557256e-06, "loss": 0.7141, "step": 15400 }, { "epoch": 0.6382775912802022, "grad_norm": 0.46639204025268555, "learning_rate": 1.8088192631273574e-06, "loss": 0.6757, "step": 15401 }, { "epoch": 0.638319035185876, "grad_norm": 0.42468637228012085, "learning_rate": 1.808612043598989e-06, "loss": 0.641, "step": 15402 }, { "epoch": 0.6383604790915496, "grad_norm": 0.3939315974712372, "learning_rate": 1.8084048240706206e-06, "loss": 0.6604, "step": 15403 }, { "epoch": 0.6384019229972232, "grad_norm": 0.40822020173072815, "learning_rate": 1.8081976045422522e-06, "loss": 0.6709, "step": 15404 }, { "epoch": 0.6384433669028969, "grad_norm": 0.3999190032482147, "learning_rate": 1.8079903850138838e-06, "loss": 0.6987, "step": 15405 }, { "epoch": 0.6384848108085706, "grad_norm": 0.38102737069129944, "learning_rate": 1.8077831654855154e-06, "loss": 0.6504, "step": 15406 }, { "epoch": 0.6385262547142443, "grad_norm": 0.3887810707092285, "learning_rate": 1.8075759459571472e-06, 
"loss": 0.6445, "step": 15407 }, { "epoch": 0.6385676986199179, "grad_norm": 0.42529791593551636, "learning_rate": 1.8073687264287788e-06, "loss": 0.6335, "step": 15408 }, { "epoch": 0.6386091425255916, "grad_norm": 0.4241181015968323, "learning_rate": 1.8071615069004104e-06, "loss": 0.6785, "step": 15409 }, { "epoch": 0.6386505864312653, "grad_norm": 0.4130811095237732, "learning_rate": 1.8069542873720422e-06, "loss": 0.6819, "step": 15410 }, { "epoch": 0.638692030336939, "grad_norm": 0.4061991274356842, "learning_rate": 1.8067470678436738e-06, "loss": 0.6498, "step": 15411 }, { "epoch": 0.6387334742426126, "grad_norm": 0.42730075120925903, "learning_rate": 1.8065398483153054e-06, "loss": 0.6409, "step": 15412 }, { "epoch": 0.6387749181482862, "grad_norm": 0.403927743434906, "learning_rate": 1.806332628786937e-06, "loss": 0.65, "step": 15413 }, { "epoch": 0.63881636205396, "grad_norm": 0.40547341108322144, "learning_rate": 1.8061254092585686e-06, "loss": 0.6718, "step": 15414 }, { "epoch": 0.6388578059596336, "grad_norm": 0.4219434857368469, "learning_rate": 1.8059181897302004e-06, "loss": 0.6656, "step": 15415 }, { "epoch": 0.6388992498653073, "grad_norm": 0.4398444890975952, "learning_rate": 1.805710970201832e-06, "loss": 0.6421, "step": 15416 }, { "epoch": 0.638940693770981, "grad_norm": 0.38409423828125, "learning_rate": 1.8055037506734636e-06, "loss": 0.6638, "step": 15417 }, { "epoch": 0.6389821376766547, "grad_norm": 0.3883430063724518, "learning_rate": 1.8052965311450954e-06, "loss": 0.6196, "step": 15418 }, { "epoch": 0.6390235815823283, "grad_norm": 0.3925822675228119, "learning_rate": 1.805089311616727e-06, "loss": 0.6156, "step": 15419 }, { "epoch": 0.639065025488002, "grad_norm": 0.4166000783443451, "learning_rate": 1.8048820920883586e-06, "loss": 0.6487, "step": 15420 }, { "epoch": 0.6391064693936757, "grad_norm": 0.4378117024898529, "learning_rate": 1.8046748725599902e-06, "loss": 0.696, "step": 15421 }, { "epoch": 0.6391479132993493, "grad_norm": 
0.43523991107940674, "learning_rate": 1.8044676530316218e-06, "loss": 0.6813, "step": 15422 }, { "epoch": 0.639189357205023, "grad_norm": 0.4110928773880005, "learning_rate": 1.8042604335032534e-06, "loss": 0.7312, "step": 15423 }, { "epoch": 0.6392308011106966, "grad_norm": 0.4211960732936859, "learning_rate": 1.8040532139748852e-06, "loss": 0.6719, "step": 15424 }, { "epoch": 0.6392722450163704, "grad_norm": 0.42485764622688293, "learning_rate": 1.8038459944465168e-06, "loss": 0.7656, "step": 15425 }, { "epoch": 0.639313688922044, "grad_norm": 0.4234153628349304, "learning_rate": 1.8036387749181484e-06, "loss": 0.6285, "step": 15426 }, { "epoch": 0.6393551328277177, "grad_norm": 0.40785476565361023, "learning_rate": 1.8034315553897802e-06, "loss": 0.6797, "step": 15427 }, { "epoch": 0.6393965767333913, "grad_norm": 0.42951107025146484, "learning_rate": 1.8032243358614118e-06, "loss": 0.7104, "step": 15428 }, { "epoch": 0.6394380206390651, "grad_norm": 0.4540995955467224, "learning_rate": 1.8030171163330434e-06, "loss": 0.6962, "step": 15429 }, { "epoch": 0.6394794645447387, "grad_norm": 0.4313414990901947, "learning_rate": 1.802809896804675e-06, "loss": 0.6952, "step": 15430 }, { "epoch": 0.6395209084504123, "grad_norm": 0.4329187572002411, "learning_rate": 1.8026026772763066e-06, "loss": 0.6963, "step": 15431 }, { "epoch": 0.639562352356086, "grad_norm": 0.4532982409000397, "learning_rate": 1.8023954577479384e-06, "loss": 0.652, "step": 15432 }, { "epoch": 0.6396037962617597, "grad_norm": 0.4197540581226349, "learning_rate": 1.80218823821957e-06, "loss": 0.7086, "step": 15433 }, { "epoch": 0.6396452401674334, "grad_norm": 0.4483310282230377, "learning_rate": 1.8019810186912016e-06, "loss": 0.7339, "step": 15434 }, { "epoch": 0.639686684073107, "grad_norm": 0.45394623279571533, "learning_rate": 1.8017737991628332e-06, "loss": 0.6919, "step": 15435 }, { "epoch": 0.6397281279787808, "grad_norm": 0.5343771576881409, "learning_rate": 1.801566579634465e-06, "loss": 
0.6495, "step": 15436 }, { "epoch": 0.6397695718844544, "grad_norm": 0.3990635275840759, "learning_rate": 1.8013593601060966e-06, "loss": 0.6919, "step": 15437 }, { "epoch": 0.6398110157901281, "grad_norm": 0.43411561846733093, "learning_rate": 1.8011521405777282e-06, "loss": 0.6812, "step": 15438 }, { "epoch": 0.6398524596958017, "grad_norm": 0.3796842694282532, "learning_rate": 1.8009449210493598e-06, "loss": 0.6494, "step": 15439 }, { "epoch": 0.6398939036014754, "grad_norm": 0.42167574167251587, "learning_rate": 1.8007377015209914e-06, "loss": 0.7529, "step": 15440 }, { "epoch": 0.6399353475071491, "grad_norm": 0.43358561396598816, "learning_rate": 1.8005304819926232e-06, "loss": 0.7549, "step": 15441 }, { "epoch": 0.6399767914128227, "grad_norm": 0.38262295722961426, "learning_rate": 1.8003232624642548e-06, "loss": 0.6375, "step": 15442 }, { "epoch": 0.6400182353184964, "grad_norm": 0.4293767511844635, "learning_rate": 1.8001160429358864e-06, "loss": 0.6791, "step": 15443 }, { "epoch": 0.6400596792241701, "grad_norm": 0.3715209364891052, "learning_rate": 1.799908823407518e-06, "loss": 0.6565, "step": 15444 }, { "epoch": 0.6401011231298438, "grad_norm": 0.41066911816596985, "learning_rate": 1.7997016038791498e-06, "loss": 0.7035, "step": 15445 }, { "epoch": 0.6401425670355174, "grad_norm": 0.43244168162345886, "learning_rate": 1.7994943843507814e-06, "loss": 0.6906, "step": 15446 }, { "epoch": 0.6401840109411912, "grad_norm": 0.42778491973876953, "learning_rate": 1.799287164822413e-06, "loss": 0.6898, "step": 15447 }, { "epoch": 0.6402254548468648, "grad_norm": 0.3971628248691559, "learning_rate": 1.7990799452940446e-06, "loss": 0.6458, "step": 15448 }, { "epoch": 0.6402668987525384, "grad_norm": 0.4122277796268463, "learning_rate": 1.7988727257656764e-06, "loss": 0.6842, "step": 15449 }, { "epoch": 0.6403083426582121, "grad_norm": 0.4397014379501343, "learning_rate": 1.798665506237308e-06, "loss": 0.7244, "step": 15450 }, { "epoch": 0.6403497865638857, 
"grad_norm": 0.3966304659843445, "learning_rate": 1.7984582867089396e-06, "loss": 0.655, "step": 15451 }, { "epoch": 0.6403912304695595, "grad_norm": 0.4156041145324707, "learning_rate": 1.7982510671805712e-06, "loss": 0.6803, "step": 15452 }, { "epoch": 0.6404326743752331, "grad_norm": 0.40549612045288086, "learning_rate": 1.798043847652203e-06, "loss": 0.6841, "step": 15453 }, { "epoch": 0.6404741182809068, "grad_norm": 0.38895317912101746, "learning_rate": 1.7978366281238346e-06, "loss": 0.6455, "step": 15454 }, { "epoch": 0.6405155621865805, "grad_norm": 0.47054755687713623, "learning_rate": 1.7976294085954662e-06, "loss": 0.7312, "step": 15455 }, { "epoch": 0.6405570060922542, "grad_norm": 0.4385436773300171, "learning_rate": 1.7974221890670978e-06, "loss": 0.7212, "step": 15456 }, { "epoch": 0.6405984499979278, "grad_norm": 0.4112132489681244, "learning_rate": 1.7972149695387296e-06, "loss": 0.6431, "step": 15457 }, { "epoch": 0.6406398939036014, "grad_norm": 0.4475666582584381, "learning_rate": 1.7970077500103612e-06, "loss": 0.6406, "step": 15458 }, { "epoch": 0.6406813378092752, "grad_norm": 0.3766789734363556, "learning_rate": 1.7968005304819928e-06, "loss": 0.6399, "step": 15459 }, { "epoch": 0.6407227817149488, "grad_norm": 0.396084189414978, "learning_rate": 1.7965933109536244e-06, "loss": 0.6602, "step": 15460 }, { "epoch": 0.6407642256206225, "grad_norm": 0.40319427847862244, "learning_rate": 1.796386091425256e-06, "loss": 0.6887, "step": 15461 }, { "epoch": 0.6408056695262961, "grad_norm": 0.4469258189201355, "learning_rate": 1.7961788718968878e-06, "loss": 0.6804, "step": 15462 }, { "epoch": 0.6408471134319699, "grad_norm": 0.41355183720588684, "learning_rate": 1.7959716523685194e-06, "loss": 0.636, "step": 15463 }, { "epoch": 0.6408885573376435, "grad_norm": 0.38520270586013794, "learning_rate": 1.795764432840151e-06, "loss": 0.6719, "step": 15464 }, { "epoch": 0.6409300012433171, "grad_norm": 0.42115798592567444, "learning_rate": 
1.7955572133117826e-06, "loss": 0.7009, "step": 15465 }, { "epoch": 0.6409714451489908, "grad_norm": 0.39818131923675537, "learning_rate": 1.7953499937834144e-06, "loss": 0.6853, "step": 15466 }, { "epoch": 0.6410128890546645, "grad_norm": 0.41175755858421326, "learning_rate": 1.795142774255046e-06, "loss": 0.7642, "step": 15467 }, { "epoch": 0.6410543329603382, "grad_norm": 0.41653695702552795, "learning_rate": 1.7949355547266776e-06, "loss": 0.7021, "step": 15468 }, { "epoch": 0.6410957768660118, "grad_norm": 0.41070571541786194, "learning_rate": 1.7947283351983092e-06, "loss": 0.6785, "step": 15469 }, { "epoch": 0.6411372207716856, "grad_norm": 0.41294312477111816, "learning_rate": 1.7945211156699408e-06, "loss": 0.6848, "step": 15470 }, { "epoch": 0.6411786646773592, "grad_norm": 0.4428417384624481, "learning_rate": 1.7943138961415726e-06, "loss": 0.6898, "step": 15471 }, { "epoch": 0.6412201085830329, "grad_norm": 0.3724454939365387, "learning_rate": 1.7941066766132042e-06, "loss": 0.6388, "step": 15472 }, { "epoch": 0.6412615524887065, "grad_norm": 0.40590357780456543, "learning_rate": 1.7938994570848358e-06, "loss": 0.687, "step": 15473 }, { "epoch": 0.6413029963943802, "grad_norm": 0.4373098909854889, "learning_rate": 1.7936922375564676e-06, "loss": 0.6765, "step": 15474 }, { "epoch": 0.6413444403000539, "grad_norm": 0.41632959246635437, "learning_rate": 1.7934850180280992e-06, "loss": 0.6895, "step": 15475 }, { "epoch": 0.6413858842057275, "grad_norm": 0.4230007827281952, "learning_rate": 1.7932777984997308e-06, "loss": 0.6812, "step": 15476 }, { "epoch": 0.6414273281114012, "grad_norm": 0.4226545989513397, "learning_rate": 1.7930705789713624e-06, "loss": 0.6305, "step": 15477 }, { "epoch": 0.6414687720170749, "grad_norm": 0.4460834264755249, "learning_rate": 1.792863359442994e-06, "loss": 0.7042, "step": 15478 }, { "epoch": 0.6415102159227486, "grad_norm": 0.43822285532951355, "learning_rate": 1.7926561399146258e-06, "loss": 0.7664, "step": 15479 }, { 
"epoch": 0.6415516598284222, "grad_norm": 0.402290016412735, "learning_rate": 1.7924489203862574e-06, "loss": 0.6897, "step": 15480 }, { "epoch": 0.641593103734096, "grad_norm": 0.4117305874824524, "learning_rate": 1.792241700857889e-06, "loss": 0.6429, "step": 15481 }, { "epoch": 0.6416345476397696, "grad_norm": 0.39917096495628357, "learning_rate": 1.7920344813295206e-06, "loss": 0.6766, "step": 15482 }, { "epoch": 0.6416759915454432, "grad_norm": 0.429167777299881, "learning_rate": 1.7918272618011524e-06, "loss": 0.6743, "step": 15483 }, { "epoch": 0.6417174354511169, "grad_norm": 0.3926008343696594, "learning_rate": 1.791620042272784e-06, "loss": 0.6526, "step": 15484 }, { "epoch": 0.6417588793567905, "grad_norm": 0.45266926288604736, "learning_rate": 1.7914128227444156e-06, "loss": 0.7312, "step": 15485 }, { "epoch": 0.6418003232624643, "grad_norm": 0.4257446825504303, "learning_rate": 1.7912056032160472e-06, "loss": 0.7351, "step": 15486 }, { "epoch": 0.6418417671681379, "grad_norm": 0.39170026779174805, "learning_rate": 1.7909983836876788e-06, "loss": 0.6102, "step": 15487 }, { "epoch": 0.6418832110738116, "grad_norm": 0.4572887420654297, "learning_rate": 1.7907911641593106e-06, "loss": 0.6777, "step": 15488 }, { "epoch": 0.6419246549794853, "grad_norm": 0.40008044242858887, "learning_rate": 1.7905839446309422e-06, "loss": 0.6733, "step": 15489 }, { "epoch": 0.641966098885159, "grad_norm": 0.40949276089668274, "learning_rate": 1.7903767251025738e-06, "loss": 0.675, "step": 15490 }, { "epoch": 0.6420075427908326, "grad_norm": 0.3931456208229065, "learning_rate": 1.7901695055742056e-06, "loss": 0.6401, "step": 15491 }, { "epoch": 0.6420489866965062, "grad_norm": 0.42496979236602783, "learning_rate": 1.7899622860458372e-06, "loss": 0.6729, "step": 15492 }, { "epoch": 0.64209043060218, "grad_norm": 0.4187045097351074, "learning_rate": 1.7897550665174688e-06, "loss": 0.6855, "step": 15493 }, { "epoch": 0.6421318745078536, "grad_norm": 0.3931962549686432, 
"learning_rate": 1.7895478469891004e-06, "loss": 0.7234, "step": 15494 }, { "epoch": 0.6421733184135273, "grad_norm": 0.49752750992774963, "learning_rate": 1.789340627460732e-06, "loss": 0.7021, "step": 15495 }, { "epoch": 0.6422147623192009, "grad_norm": 0.39239534735679626, "learning_rate": 1.7891334079323636e-06, "loss": 0.6768, "step": 15496 }, { "epoch": 0.6422562062248747, "grad_norm": 0.43353843688964844, "learning_rate": 1.7889261884039954e-06, "loss": 0.6987, "step": 15497 }, { "epoch": 0.6422976501305483, "grad_norm": 0.39212489128112793, "learning_rate": 1.788718968875627e-06, "loss": 0.6252, "step": 15498 }, { "epoch": 0.642339094036222, "grad_norm": 0.4245157837867737, "learning_rate": 1.7885117493472586e-06, "loss": 0.7163, "step": 15499 }, { "epoch": 0.6423805379418956, "grad_norm": 0.38970744609832764, "learning_rate": 1.7883045298188904e-06, "loss": 0.6636, "step": 15500 }, { "epoch": 0.6424219818475693, "grad_norm": 0.4229969084262848, "learning_rate": 1.788097310290522e-06, "loss": 0.7322, "step": 15501 }, { "epoch": 0.642463425753243, "grad_norm": 0.39066338539123535, "learning_rate": 1.7878900907621536e-06, "loss": 0.6418, "step": 15502 }, { "epoch": 0.6425048696589166, "grad_norm": 0.44052889943122864, "learning_rate": 1.7876828712337852e-06, "loss": 0.679, "step": 15503 }, { "epoch": 0.6425463135645904, "grad_norm": 0.39470165967941284, "learning_rate": 1.7874756517054168e-06, "loss": 0.6566, "step": 15504 }, { "epoch": 0.642587757470264, "grad_norm": 0.4400657117366791, "learning_rate": 1.7872684321770484e-06, "loss": 0.6807, "step": 15505 }, { "epoch": 0.6426292013759377, "grad_norm": 0.4967343211174011, "learning_rate": 1.7870612126486802e-06, "loss": 0.6709, "step": 15506 }, { "epoch": 0.6426706452816113, "grad_norm": 0.40817248821258545, "learning_rate": 1.7868539931203118e-06, "loss": 0.7083, "step": 15507 }, { "epoch": 0.6427120891872851, "grad_norm": 0.43578851222991943, "learning_rate": 1.7866467735919436e-06, "loss": 0.6965, "step": 
15508 }, { "epoch": 0.6427535330929587, "grad_norm": 0.40248626470565796, "learning_rate": 1.7864395540635752e-06, "loss": 0.6531, "step": 15509 }, { "epoch": 0.6427949769986323, "grad_norm": 0.4236171245574951, "learning_rate": 1.7862323345352068e-06, "loss": 0.6663, "step": 15510 }, { "epoch": 0.642836420904306, "grad_norm": 0.42424914240837097, "learning_rate": 1.7860251150068384e-06, "loss": 0.6782, "step": 15511 }, { "epoch": 0.6428778648099797, "grad_norm": 0.4155040383338928, "learning_rate": 1.78581789547847e-06, "loss": 0.6709, "step": 15512 }, { "epoch": 0.6429193087156534, "grad_norm": 0.4125572144985199, "learning_rate": 1.7856106759501016e-06, "loss": 0.6631, "step": 15513 }, { "epoch": 0.642960752621327, "grad_norm": 0.40927591919898987, "learning_rate": 1.7854034564217334e-06, "loss": 0.6448, "step": 15514 }, { "epoch": 0.6430021965270007, "grad_norm": 0.39371320605278015, "learning_rate": 1.785196236893365e-06, "loss": 0.6768, "step": 15515 }, { "epoch": 0.6430436404326744, "grad_norm": 0.4258314073085785, "learning_rate": 1.7849890173649966e-06, "loss": 0.6561, "step": 15516 }, { "epoch": 0.6430850843383481, "grad_norm": 0.408025324344635, "learning_rate": 1.7847817978366284e-06, "loss": 0.6665, "step": 15517 }, { "epoch": 0.6431265282440217, "grad_norm": 0.4315021336078644, "learning_rate": 1.78457457830826e-06, "loss": 0.7023, "step": 15518 }, { "epoch": 0.6431679721496953, "grad_norm": 0.4089753329753876, "learning_rate": 1.7843673587798916e-06, "loss": 0.6844, "step": 15519 }, { "epoch": 0.6432094160553691, "grad_norm": 0.41673681139945984, "learning_rate": 1.7841601392515232e-06, "loss": 0.7124, "step": 15520 }, { "epoch": 0.6432508599610427, "grad_norm": 0.4039592444896698, "learning_rate": 1.7839529197231548e-06, "loss": 0.6387, "step": 15521 }, { "epoch": 0.6432923038667164, "grad_norm": 0.4000694453716278, "learning_rate": 1.7837457001947864e-06, "loss": 0.6621, "step": 15522 }, { "epoch": 0.64333374777239, "grad_norm": 
0.44936466217041016, "learning_rate": 1.7835384806664182e-06, "loss": 0.7476, "step": 15523 }, { "epoch": 0.6433751916780638, "grad_norm": 0.41147127747535706, "learning_rate": 1.7833312611380498e-06, "loss": 0.6982, "step": 15524 }, { "epoch": 0.6434166355837374, "grad_norm": 0.41451770067214966, "learning_rate": 1.7831240416096816e-06, "loss": 0.6868, "step": 15525 }, { "epoch": 0.643458079489411, "grad_norm": 0.4154144823551178, "learning_rate": 1.7829168220813132e-06, "loss": 0.6482, "step": 15526 }, { "epoch": 0.6434995233950848, "grad_norm": 0.42121171951293945, "learning_rate": 1.7827096025529448e-06, "loss": 0.6877, "step": 15527 }, { "epoch": 0.6435409673007584, "grad_norm": 0.4155755639076233, "learning_rate": 1.7825023830245764e-06, "loss": 0.6541, "step": 15528 }, { "epoch": 0.6435824112064321, "grad_norm": 0.3846248388290405, "learning_rate": 1.782295163496208e-06, "loss": 0.6345, "step": 15529 }, { "epoch": 0.6436238551121057, "grad_norm": 0.4115012288093567, "learning_rate": 1.7820879439678396e-06, "loss": 0.7029, "step": 15530 }, { "epoch": 0.6436652990177795, "grad_norm": 0.40665191411972046, "learning_rate": 1.7818807244394712e-06, "loss": 0.6768, "step": 15531 }, { "epoch": 0.6437067429234531, "grad_norm": 0.3928937315940857, "learning_rate": 1.781673504911103e-06, "loss": 0.6427, "step": 15532 }, { "epoch": 0.6437481868291268, "grad_norm": 0.4138561487197876, "learning_rate": 1.7814662853827346e-06, "loss": 0.6768, "step": 15533 }, { "epoch": 0.6437896307348004, "grad_norm": 0.43224290013313293, "learning_rate": 1.7812590658543664e-06, "loss": 0.6418, "step": 15534 }, { "epoch": 0.6438310746404741, "grad_norm": 0.3962171673774719, "learning_rate": 1.781051846325998e-06, "loss": 0.6545, "step": 15535 }, { "epoch": 0.6438725185461478, "grad_norm": 0.41967320442199707, "learning_rate": 1.7808446267976296e-06, "loss": 0.6729, "step": 15536 }, { "epoch": 0.6439139624518214, "grad_norm": 0.39832425117492676, "learning_rate": 1.7806374072692612e-06, 
"loss": 0.6377, "step": 15537 }, { "epoch": 0.6439554063574952, "grad_norm": 0.37731462717056274, "learning_rate": 1.7804301877408928e-06, "loss": 0.6256, "step": 15538 }, { "epoch": 0.6439968502631688, "grad_norm": 0.4135761559009552, "learning_rate": 1.7802229682125244e-06, "loss": 0.6572, "step": 15539 }, { "epoch": 0.6440382941688425, "grad_norm": 0.3860820531845093, "learning_rate": 1.7800157486841562e-06, "loss": 0.6365, "step": 15540 }, { "epoch": 0.6440797380745161, "grad_norm": 0.44271138310432434, "learning_rate": 1.7798085291557878e-06, "loss": 0.7278, "step": 15541 }, { "epoch": 0.6441211819801899, "grad_norm": 0.4696919322013855, "learning_rate": 1.7796013096274196e-06, "loss": 0.6932, "step": 15542 }, { "epoch": 0.6441626258858635, "grad_norm": 0.384356826543808, "learning_rate": 1.7793940900990512e-06, "loss": 0.6958, "step": 15543 }, { "epoch": 0.6442040697915371, "grad_norm": 0.3896111845970154, "learning_rate": 1.7791868705706828e-06, "loss": 0.6805, "step": 15544 }, { "epoch": 0.6442455136972108, "grad_norm": 0.4447305500507355, "learning_rate": 1.7789796510423144e-06, "loss": 0.7117, "step": 15545 }, { "epoch": 0.6442869576028845, "grad_norm": 0.4154475927352905, "learning_rate": 1.778772431513946e-06, "loss": 0.6862, "step": 15546 }, { "epoch": 0.6443284015085582, "grad_norm": 0.4060879349708557, "learning_rate": 1.7785652119855776e-06, "loss": 0.679, "step": 15547 }, { "epoch": 0.6443698454142318, "grad_norm": 0.45053189992904663, "learning_rate": 1.7783579924572092e-06, "loss": 0.7358, "step": 15548 }, { "epoch": 0.6444112893199055, "grad_norm": 0.4175051748752594, "learning_rate": 1.778150772928841e-06, "loss": 0.6824, "step": 15549 }, { "epoch": 0.6444527332255792, "grad_norm": 0.3870816230773926, "learning_rate": 1.7779435534004726e-06, "loss": 0.6592, "step": 15550 }, { "epoch": 0.6444941771312529, "grad_norm": 0.41827765107154846, "learning_rate": 1.7777363338721044e-06, "loss": 0.6664, "step": 15551 }, { "epoch": 0.6445356210369265, 
"grad_norm": 0.39840561151504517, "learning_rate": 1.777529114343736e-06, "loss": 0.6875, "step": 15552 }, { "epoch": 0.6445770649426001, "grad_norm": 0.4228484332561493, "learning_rate": 1.7773218948153676e-06, "loss": 0.705, "step": 15553 }, { "epoch": 0.6446185088482739, "grad_norm": 0.38958340883255005, "learning_rate": 1.7771146752869992e-06, "loss": 0.5992, "step": 15554 }, { "epoch": 0.6446599527539475, "grad_norm": 0.45174264907836914, "learning_rate": 1.7769074557586308e-06, "loss": 0.7788, "step": 15555 }, { "epoch": 0.6447013966596212, "grad_norm": 0.40604180097579956, "learning_rate": 1.7767002362302624e-06, "loss": 0.6975, "step": 15556 }, { "epoch": 0.6447428405652949, "grad_norm": 0.44124436378479004, "learning_rate": 1.776493016701894e-06, "loss": 0.7383, "step": 15557 }, { "epoch": 0.6447842844709686, "grad_norm": 0.40224844217300415, "learning_rate": 1.7762857971735258e-06, "loss": 0.6538, "step": 15558 }, { "epoch": 0.6448257283766422, "grad_norm": 0.4187207818031311, "learning_rate": 1.7760785776451576e-06, "loss": 0.701, "step": 15559 }, { "epoch": 0.6448671722823159, "grad_norm": 0.4450726807117462, "learning_rate": 1.7758713581167892e-06, "loss": 0.7512, "step": 15560 }, { "epoch": 0.6449086161879896, "grad_norm": 0.43539997935295105, "learning_rate": 1.7756641385884208e-06, "loss": 0.7194, "step": 15561 }, { "epoch": 0.6449500600936632, "grad_norm": 0.41573211550712585, "learning_rate": 1.7754569190600524e-06, "loss": 0.6765, "step": 15562 }, { "epoch": 0.6449915039993369, "grad_norm": 0.38446757197380066, "learning_rate": 1.775249699531684e-06, "loss": 0.6702, "step": 15563 }, { "epoch": 0.6450329479050105, "grad_norm": 0.423725962638855, "learning_rate": 1.7750424800033156e-06, "loss": 0.6636, "step": 15564 }, { "epoch": 0.6450743918106843, "grad_norm": 0.40703558921813965, "learning_rate": 1.7748352604749472e-06, "loss": 0.7012, "step": 15565 }, { "epoch": 0.6451158357163579, "grad_norm": 0.40120285749435425, "learning_rate": 
1.774628040946579e-06, "loss": 0.6537, "step": 15566 }, { "epoch": 0.6451572796220316, "grad_norm": 0.3993944525718689, "learning_rate": 1.7744208214182108e-06, "loss": 0.6748, "step": 15567 }, { "epoch": 0.6451987235277052, "grad_norm": 0.4148927628993988, "learning_rate": 1.7742136018898424e-06, "loss": 0.6919, "step": 15568 }, { "epoch": 0.645240167433379, "grad_norm": 0.42864009737968445, "learning_rate": 1.774006382361474e-06, "loss": 0.6876, "step": 15569 }, { "epoch": 0.6452816113390526, "grad_norm": 0.4156241714954376, "learning_rate": 1.7737991628331056e-06, "loss": 0.6442, "step": 15570 }, { "epoch": 0.6453230552447262, "grad_norm": 0.4176127314567566, "learning_rate": 1.7735919433047372e-06, "loss": 0.705, "step": 15571 }, { "epoch": 0.6453644991504, "grad_norm": 0.4039718210697174, "learning_rate": 1.7733847237763688e-06, "loss": 0.6605, "step": 15572 }, { "epoch": 0.6454059430560736, "grad_norm": 0.40566107630729675, "learning_rate": 1.7731775042480004e-06, "loss": 0.6827, "step": 15573 }, { "epoch": 0.6454473869617473, "grad_norm": 0.39986708760261536, "learning_rate": 1.772970284719632e-06, "loss": 0.6602, "step": 15574 }, { "epoch": 0.6454888308674209, "grad_norm": 0.4003998637199402, "learning_rate": 1.7727630651912638e-06, "loss": 0.6602, "step": 15575 }, { "epoch": 0.6455302747730947, "grad_norm": 0.4174047112464905, "learning_rate": 1.7725558456628956e-06, "loss": 0.6719, "step": 15576 }, { "epoch": 0.6455717186787683, "grad_norm": 0.4078565239906311, "learning_rate": 1.7723486261345272e-06, "loss": 0.6418, "step": 15577 }, { "epoch": 0.645613162584442, "grad_norm": 0.4062003195285797, "learning_rate": 1.7721414066061588e-06, "loss": 0.7168, "step": 15578 }, { "epoch": 0.6456546064901156, "grad_norm": 0.4140835106372833, "learning_rate": 1.7719341870777904e-06, "loss": 0.6842, "step": 15579 }, { "epoch": 0.6456960503957893, "grad_norm": 0.4167834520339966, "learning_rate": 1.771726967549422e-06, "loss": 0.646, "step": 15580 }, { "epoch": 
0.645737494301463, "grad_norm": 0.4254632294178009, "learning_rate": 1.7715197480210536e-06, "loss": 0.6473, "step": 15581 }, { "epoch": 0.6457789382071366, "grad_norm": 0.4167327284812927, "learning_rate": 1.7713125284926852e-06, "loss": 0.6802, "step": 15582 }, { "epoch": 0.6458203821128103, "grad_norm": 0.4255084991455078, "learning_rate": 1.7711053089643168e-06, "loss": 0.7144, "step": 15583 }, { "epoch": 0.645861826018484, "grad_norm": 0.3984299600124359, "learning_rate": 1.7708980894359488e-06, "loss": 0.6471, "step": 15584 }, { "epoch": 0.6459032699241577, "grad_norm": 0.40233105421066284, "learning_rate": 1.7706908699075804e-06, "loss": 0.6642, "step": 15585 }, { "epoch": 0.6459447138298313, "grad_norm": 0.42302951216697693, "learning_rate": 1.770483650379212e-06, "loss": 0.7412, "step": 15586 }, { "epoch": 0.6459861577355049, "grad_norm": 0.3434576094150543, "learning_rate": 1.7702764308508436e-06, "loss": 0.609, "step": 15587 }, { "epoch": 0.6460276016411787, "grad_norm": 0.4061698317527771, "learning_rate": 1.7700692113224752e-06, "loss": 0.6542, "step": 15588 }, { "epoch": 0.6460690455468523, "grad_norm": 0.4212305247783661, "learning_rate": 1.7698619917941068e-06, "loss": 0.639, "step": 15589 }, { "epoch": 0.646110489452526, "grad_norm": 0.42416831851005554, "learning_rate": 1.7696547722657384e-06, "loss": 0.6503, "step": 15590 }, { "epoch": 0.6461519333581996, "grad_norm": 0.41662371158599854, "learning_rate": 1.76944755273737e-06, "loss": 0.6392, "step": 15591 }, { "epoch": 0.6461933772638734, "grad_norm": 0.395205557346344, "learning_rate": 1.7692403332090016e-06, "loss": 0.601, "step": 15592 }, { "epoch": 0.646234821169547, "grad_norm": 0.433176726102829, "learning_rate": 1.7690331136806336e-06, "loss": 0.7008, "step": 15593 }, { "epoch": 0.6462762650752207, "grad_norm": 0.4153425693511963, "learning_rate": 1.7688258941522652e-06, "loss": 0.707, "step": 15594 }, { "epoch": 0.6463177089808944, "grad_norm": 0.42204827070236206, "learning_rate": 
1.7686186746238968e-06, "loss": 0.7263, "step": 15595 }, { "epoch": 0.646359152886568, "grad_norm": 0.39451178908348083, "learning_rate": 1.7684114550955284e-06, "loss": 0.6919, "step": 15596 }, { "epoch": 0.6464005967922417, "grad_norm": 0.41760891675949097, "learning_rate": 1.76820423556716e-06, "loss": 0.6826, "step": 15597 }, { "epoch": 0.6464420406979153, "grad_norm": 0.4631892740726471, "learning_rate": 1.7679970160387916e-06, "loss": 0.6758, "step": 15598 }, { "epoch": 0.6464834846035891, "grad_norm": 0.44400617480278015, "learning_rate": 1.7677897965104232e-06, "loss": 0.7285, "step": 15599 }, { "epoch": 0.6465249285092627, "grad_norm": 0.4115111827850342, "learning_rate": 1.7675825769820548e-06, "loss": 0.6974, "step": 15600 }, { "epoch": 0.6465663724149364, "grad_norm": 0.4206889867782593, "learning_rate": 1.7673753574536868e-06, "loss": 0.7229, "step": 15601 }, { "epoch": 0.64660781632061, "grad_norm": 0.4195168912410736, "learning_rate": 1.7671681379253184e-06, "loss": 0.6982, "step": 15602 }, { "epoch": 0.6466492602262838, "grad_norm": 0.4469691216945648, "learning_rate": 1.76696091839695e-06, "loss": 0.6855, "step": 15603 }, { "epoch": 0.6466907041319574, "grad_norm": 0.42844244837760925, "learning_rate": 1.7667536988685816e-06, "loss": 0.6981, "step": 15604 }, { "epoch": 0.646732148037631, "grad_norm": 0.40317049622535706, "learning_rate": 1.7665464793402132e-06, "loss": 0.6484, "step": 15605 }, { "epoch": 0.6467735919433047, "grad_norm": 0.3886716067790985, "learning_rate": 1.7663392598118448e-06, "loss": 0.6538, "step": 15606 }, { "epoch": 0.6468150358489784, "grad_norm": 0.41700631380081177, "learning_rate": 1.7661320402834764e-06, "loss": 0.7236, "step": 15607 }, { "epoch": 0.6468564797546521, "grad_norm": 0.4060257077217102, "learning_rate": 1.765924820755108e-06, "loss": 0.7065, "step": 15608 }, { "epoch": 0.6468979236603257, "grad_norm": 0.45118248462677, "learning_rate": 1.7657176012267396e-06, "loss": 0.6687, "step": 15609 }, { "epoch": 
0.6469393675659995, "grad_norm": 0.39940473437309265, "learning_rate": 1.7655103816983716e-06, "loss": 0.6024, "step": 15610 }, { "epoch": 0.6469808114716731, "grad_norm": 0.4820694327354431, "learning_rate": 1.7653031621700032e-06, "loss": 0.7668, "step": 15611 }, { "epoch": 0.6470222553773468, "grad_norm": 0.41874057054519653, "learning_rate": 1.7650959426416348e-06, "loss": 0.7288, "step": 15612 }, { "epoch": 0.6470636992830204, "grad_norm": 0.42237457633018494, "learning_rate": 1.7648887231132664e-06, "loss": 0.6687, "step": 15613 }, { "epoch": 0.647105143188694, "grad_norm": 0.38758614659309387, "learning_rate": 1.764681503584898e-06, "loss": 0.6298, "step": 15614 }, { "epoch": 0.6471465870943678, "grad_norm": 0.4139144718647003, "learning_rate": 1.7644742840565296e-06, "loss": 0.675, "step": 15615 }, { "epoch": 0.6471880310000414, "grad_norm": 0.44925493001937866, "learning_rate": 1.7642670645281612e-06, "loss": 0.7003, "step": 15616 }, { "epoch": 0.6472294749057151, "grad_norm": 0.4387260377407074, "learning_rate": 1.7640598449997928e-06, "loss": 0.679, "step": 15617 }, { "epoch": 0.6472709188113888, "grad_norm": 0.418400377035141, "learning_rate": 1.7638526254714244e-06, "loss": 0.6614, "step": 15618 }, { "epoch": 0.6473123627170625, "grad_norm": 0.411334753036499, "learning_rate": 1.7636454059430564e-06, "loss": 0.6787, "step": 15619 }, { "epoch": 0.6473538066227361, "grad_norm": 0.4391150176525116, "learning_rate": 1.763438186414688e-06, "loss": 0.7002, "step": 15620 }, { "epoch": 0.6473952505284098, "grad_norm": 0.42732009291648865, "learning_rate": 1.7632309668863196e-06, "loss": 0.6635, "step": 15621 }, { "epoch": 0.6474366944340835, "grad_norm": 0.4103679060935974, "learning_rate": 1.7630237473579512e-06, "loss": 0.668, "step": 15622 }, { "epoch": 0.6474781383397571, "grad_norm": 0.4332858622074127, "learning_rate": 1.7628165278295828e-06, "loss": 0.7334, "step": 15623 }, { "epoch": 0.6475195822454308, "grad_norm": 0.3918190896511078, "learning_rate": 
1.7626093083012144e-06, "loss": 0.7061, "step": 15624 }, { "epoch": 0.6475610261511044, "grad_norm": 0.4244137108325958, "learning_rate": 1.762402088772846e-06, "loss": 0.6875, "step": 15625 }, { "epoch": 0.6476024700567782, "grad_norm": 0.4220355153083801, "learning_rate": 1.7621948692444776e-06, "loss": 0.7102, "step": 15626 }, { "epoch": 0.6476439139624518, "grad_norm": 0.4369968771934509, "learning_rate": 1.7619876497161096e-06, "loss": 0.6819, "step": 15627 }, { "epoch": 0.6476853578681255, "grad_norm": 0.4172671139240265, "learning_rate": 1.7617804301877412e-06, "loss": 0.7235, "step": 15628 }, { "epoch": 0.6477268017737992, "grad_norm": 0.42183375358581543, "learning_rate": 1.7615732106593728e-06, "loss": 0.6409, "step": 15629 }, { "epoch": 0.6477682456794729, "grad_norm": 0.4293377697467804, "learning_rate": 1.7613659911310044e-06, "loss": 0.6443, "step": 15630 }, { "epoch": 0.6478096895851465, "grad_norm": 0.4015118479728699, "learning_rate": 1.761158771602636e-06, "loss": 0.6604, "step": 15631 }, { "epoch": 0.6478511334908201, "grad_norm": 0.40238243341445923, "learning_rate": 1.7609515520742676e-06, "loss": 0.64, "step": 15632 }, { "epoch": 0.6478925773964939, "grad_norm": 0.4070974290370941, "learning_rate": 1.7607443325458992e-06, "loss": 0.6384, "step": 15633 }, { "epoch": 0.6479340213021675, "grad_norm": 0.4365856945514679, "learning_rate": 1.7605371130175308e-06, "loss": 0.7476, "step": 15634 }, { "epoch": 0.6479754652078412, "grad_norm": 0.44453659653663635, "learning_rate": 1.7603298934891624e-06, "loss": 0.6794, "step": 15635 }, { "epoch": 0.6480169091135148, "grad_norm": 0.4177300035953522, "learning_rate": 1.7601226739607944e-06, "loss": 0.7341, "step": 15636 }, { "epoch": 0.6480583530191886, "grad_norm": 0.4283050000667572, "learning_rate": 1.759915454432426e-06, "loss": 0.7031, "step": 15637 }, { "epoch": 0.6480997969248622, "grad_norm": 0.4999232590198517, "learning_rate": 1.7597082349040576e-06, "loss": 0.6847, "step": 15638 }, { "epoch": 
0.6481412408305359, "grad_norm": 0.4562135636806488, "learning_rate": 1.7595010153756892e-06, "loss": 0.7196, "step": 15639 }, { "epoch": 0.6481826847362095, "grad_norm": 0.4090512692928314, "learning_rate": 1.7592937958473208e-06, "loss": 0.6272, "step": 15640 }, { "epoch": 0.6482241286418832, "grad_norm": 0.405238538980484, "learning_rate": 1.7590865763189524e-06, "loss": 0.7229, "step": 15641 }, { "epoch": 0.6482655725475569, "grad_norm": 0.3943670988082886, "learning_rate": 1.758879356790584e-06, "loss": 0.6609, "step": 15642 }, { "epoch": 0.6483070164532305, "grad_norm": 0.4016822278499603, "learning_rate": 1.7586721372622156e-06, "loss": 0.6753, "step": 15643 }, { "epoch": 0.6483484603589043, "grad_norm": 0.39707261323928833, "learning_rate": 1.7584649177338472e-06, "loss": 0.7124, "step": 15644 }, { "epoch": 0.6483899042645779, "grad_norm": 0.4021337926387787, "learning_rate": 1.7582576982054792e-06, "loss": 0.6509, "step": 15645 }, { "epoch": 0.6484313481702516, "grad_norm": 0.4227968156337738, "learning_rate": 1.7580504786771108e-06, "loss": 0.6411, "step": 15646 }, { "epoch": 0.6484727920759252, "grad_norm": 0.42698341608047485, "learning_rate": 1.7578432591487424e-06, "loss": 0.6746, "step": 15647 }, { "epoch": 0.6485142359815989, "grad_norm": 0.42486900091171265, "learning_rate": 1.757636039620374e-06, "loss": 0.6552, "step": 15648 }, { "epoch": 0.6485556798872726, "grad_norm": 0.4552864730358124, "learning_rate": 1.7574288200920056e-06, "loss": 0.691, "step": 15649 }, { "epoch": 0.6485971237929462, "grad_norm": 0.4610636532306671, "learning_rate": 1.7572216005636372e-06, "loss": 0.731, "step": 15650 }, { "epoch": 0.6486385676986199, "grad_norm": 0.4052690863609314, "learning_rate": 1.7570143810352688e-06, "loss": 0.6772, "step": 15651 }, { "epoch": 0.6486800116042936, "grad_norm": 0.4564581513404846, "learning_rate": 1.7568071615069004e-06, "loss": 0.6815, "step": 15652 }, { "epoch": 0.6487214555099673, "grad_norm": 0.41304343938827515, 
"learning_rate": 1.756599941978532e-06, "loss": 0.7079, "step": 15653 }, { "epoch": 0.6487628994156409, "grad_norm": 0.42861250042915344, "learning_rate": 1.756392722450164e-06, "loss": 0.6873, "step": 15654 }, { "epoch": 0.6488043433213146, "grad_norm": 0.41023650765419006, "learning_rate": 1.7561855029217956e-06, "loss": 0.6597, "step": 15655 }, { "epoch": 0.6488457872269883, "grad_norm": 0.38327041268348694, "learning_rate": 1.7559782833934272e-06, "loss": 0.6863, "step": 15656 }, { "epoch": 0.6488872311326619, "grad_norm": 0.3939375579357147, "learning_rate": 1.7557710638650588e-06, "loss": 0.6462, "step": 15657 }, { "epoch": 0.6489286750383356, "grad_norm": 0.41441771388053894, "learning_rate": 1.7555638443366904e-06, "loss": 0.67, "step": 15658 }, { "epoch": 0.6489701189440092, "grad_norm": 0.5000901818275452, "learning_rate": 1.755356624808322e-06, "loss": 0.7087, "step": 15659 }, { "epoch": 0.649011562849683, "grad_norm": 0.4338549077510834, "learning_rate": 1.7551494052799536e-06, "loss": 0.665, "step": 15660 }, { "epoch": 0.6490530067553566, "grad_norm": 0.4083094000816345, "learning_rate": 1.7549421857515852e-06, "loss": 0.7249, "step": 15661 }, { "epoch": 0.6490944506610303, "grad_norm": 0.442543625831604, "learning_rate": 1.7547349662232172e-06, "loss": 0.6471, "step": 15662 }, { "epoch": 0.649135894566704, "grad_norm": 0.39648306369781494, "learning_rate": 1.7545277466948488e-06, "loss": 0.7051, "step": 15663 }, { "epoch": 0.6491773384723777, "grad_norm": 0.41213417053222656, "learning_rate": 1.7543205271664804e-06, "loss": 0.6473, "step": 15664 }, { "epoch": 0.6492187823780513, "grad_norm": 0.3966955840587616, "learning_rate": 1.754113307638112e-06, "loss": 0.666, "step": 15665 }, { "epoch": 0.6492602262837249, "grad_norm": 0.41007545590400696, "learning_rate": 1.7539060881097436e-06, "loss": 0.6792, "step": 15666 }, { "epoch": 0.6493016701893987, "grad_norm": 0.39393675327301025, "learning_rate": 1.7536988685813752e-06, "loss": 0.6428, "step": 15667 
}, { "epoch": 0.6493431140950723, "grad_norm": 0.3899173140525818, "learning_rate": 1.7534916490530068e-06, "loss": 0.6326, "step": 15668 }, { "epoch": 0.649384558000746, "grad_norm": 0.4753111004829407, "learning_rate": 1.7532844295246384e-06, "loss": 0.7156, "step": 15669 }, { "epoch": 0.6494260019064196, "grad_norm": 0.4707523286342621, "learning_rate": 1.75307720999627e-06, "loss": 0.7354, "step": 15670 }, { "epoch": 0.6494674458120934, "grad_norm": 0.38695940375328064, "learning_rate": 1.752869990467902e-06, "loss": 0.7065, "step": 15671 }, { "epoch": 0.649508889717767, "grad_norm": 0.42865437269210815, "learning_rate": 1.7526627709395336e-06, "loss": 0.6553, "step": 15672 }, { "epoch": 0.6495503336234407, "grad_norm": 0.41782253980636597, "learning_rate": 1.7524555514111652e-06, "loss": 0.6182, "step": 15673 }, { "epoch": 0.6495917775291143, "grad_norm": 0.40565982460975647, "learning_rate": 1.7522483318827968e-06, "loss": 0.6554, "step": 15674 }, { "epoch": 0.649633221434788, "grad_norm": 0.4111839234828949, "learning_rate": 1.7520411123544284e-06, "loss": 0.6344, "step": 15675 }, { "epoch": 0.6496746653404617, "grad_norm": 0.4392800033092499, "learning_rate": 1.75183389282606e-06, "loss": 0.729, "step": 15676 }, { "epoch": 0.6497161092461353, "grad_norm": 0.3998664319515228, "learning_rate": 1.7516266732976916e-06, "loss": 0.7295, "step": 15677 }, { "epoch": 0.649757553151809, "grad_norm": 0.40909072756767273, "learning_rate": 1.7514194537693232e-06, "loss": 0.6852, "step": 15678 }, { "epoch": 0.6497989970574827, "grad_norm": 0.38243603706359863, "learning_rate": 1.7512122342409548e-06, "loss": 0.6797, "step": 15679 }, { "epoch": 0.6498404409631564, "grad_norm": 0.5085466504096985, "learning_rate": 1.7510050147125868e-06, "loss": 0.6902, "step": 15680 }, { "epoch": 0.64988188486883, "grad_norm": 0.45818427205085754, "learning_rate": 1.7507977951842184e-06, "loss": 0.6636, "step": 15681 }, { "epoch": 0.6499233287745038, "grad_norm": 0.4107896685600281, 
"learning_rate": 1.75059057565585e-06, "loss": 0.6848, "step": 15682 }, { "epoch": 0.6499647726801774, "grad_norm": 0.42201653122901917, "learning_rate": 1.7503833561274816e-06, "loss": 0.7029, "step": 15683 }, { "epoch": 0.650006216585851, "grad_norm": 0.38527920842170715, "learning_rate": 1.7501761365991132e-06, "loss": 0.6305, "step": 15684 }, { "epoch": 0.6500476604915247, "grad_norm": 0.39661917090415955, "learning_rate": 1.7499689170707448e-06, "loss": 0.6362, "step": 15685 }, { "epoch": 0.6500891043971984, "grad_norm": 0.42612549662590027, "learning_rate": 1.7497616975423764e-06, "loss": 0.6174, "step": 15686 }, { "epoch": 0.6501305483028721, "grad_norm": 0.4097004234790802, "learning_rate": 1.749554478014008e-06, "loss": 0.6559, "step": 15687 }, { "epoch": 0.6501719922085457, "grad_norm": 0.44937238097190857, "learning_rate": 1.74934725848564e-06, "loss": 0.7092, "step": 15688 }, { "epoch": 0.6502134361142194, "grad_norm": 0.42180415987968445, "learning_rate": 1.7491400389572716e-06, "loss": 0.6865, "step": 15689 }, { "epoch": 0.6502548800198931, "grad_norm": 0.4132656157016754, "learning_rate": 1.7489328194289032e-06, "loss": 0.6841, "step": 15690 }, { "epoch": 0.6502963239255668, "grad_norm": 0.3920235335826874, "learning_rate": 1.7487255999005348e-06, "loss": 0.6725, "step": 15691 }, { "epoch": 0.6503377678312404, "grad_norm": 0.40079450607299805, "learning_rate": 1.7485183803721664e-06, "loss": 0.6591, "step": 15692 }, { "epoch": 0.650379211736914, "grad_norm": 0.40417376160621643, "learning_rate": 1.748311160843798e-06, "loss": 0.7151, "step": 15693 }, { "epoch": 0.6504206556425878, "grad_norm": 0.3965234160423279, "learning_rate": 1.7481039413154296e-06, "loss": 0.6196, "step": 15694 }, { "epoch": 0.6504620995482614, "grad_norm": 0.4023759067058563, "learning_rate": 1.7478967217870612e-06, "loss": 0.678, "step": 15695 }, { "epoch": 0.6505035434539351, "grad_norm": 0.4178605079650879, "learning_rate": 1.7476895022586928e-06, "loss": 0.6666, "step": 
15696 }, { "epoch": 0.6505449873596088, "grad_norm": 0.47339579463005066, "learning_rate": 1.7474822827303248e-06, "loss": 0.701, "step": 15697 }, { "epoch": 0.6505864312652825, "grad_norm": 0.4555041790008545, "learning_rate": 1.7472750632019564e-06, "loss": 0.6528, "step": 15698 }, { "epoch": 0.6506278751709561, "grad_norm": 0.4053938388824463, "learning_rate": 1.747067843673588e-06, "loss": 0.6787, "step": 15699 }, { "epoch": 0.6506693190766297, "grad_norm": 0.43695059418678284, "learning_rate": 1.7468606241452196e-06, "loss": 0.6567, "step": 15700 }, { "epoch": 0.6507107629823035, "grad_norm": 0.39927127957344055, "learning_rate": 1.7466534046168512e-06, "loss": 0.6936, "step": 15701 }, { "epoch": 0.6507522068879771, "grad_norm": 0.40479668974876404, "learning_rate": 1.7464461850884828e-06, "loss": 0.703, "step": 15702 }, { "epoch": 0.6507936507936508, "grad_norm": 0.42943885922431946, "learning_rate": 1.7462389655601144e-06, "loss": 0.6963, "step": 15703 }, { "epoch": 0.6508350946993244, "grad_norm": 0.3916890621185303, "learning_rate": 1.746031746031746e-06, "loss": 0.6968, "step": 15704 }, { "epoch": 0.6508765386049982, "grad_norm": 0.4107820391654968, "learning_rate": 1.7458245265033776e-06, "loss": 0.6675, "step": 15705 }, { "epoch": 0.6509179825106718, "grad_norm": 0.3747807443141937, "learning_rate": 1.7456173069750096e-06, "loss": 0.6281, "step": 15706 }, { "epoch": 0.6509594264163455, "grad_norm": 0.40859073400497437, "learning_rate": 1.7454100874466412e-06, "loss": 0.6841, "step": 15707 }, { "epoch": 0.6510008703220191, "grad_norm": 0.39724966883659363, "learning_rate": 1.7452028679182728e-06, "loss": 0.6636, "step": 15708 }, { "epoch": 0.6510423142276928, "grad_norm": 0.4036860764026642, "learning_rate": 1.7449956483899044e-06, "loss": 0.6777, "step": 15709 }, { "epoch": 0.6510837581333665, "grad_norm": 0.4448586106300354, "learning_rate": 1.744788428861536e-06, "loss": 0.7024, "step": 15710 }, { "epoch": 0.6511252020390401, "grad_norm": 
0.41343891620635986, "learning_rate": 1.7445812093331676e-06, "loss": 0.6592, "step": 15711 }, { "epoch": 0.6511666459447139, "grad_norm": 0.387207955121994, "learning_rate": 1.7443739898047992e-06, "loss": 0.6584, "step": 15712 }, { "epoch": 0.6512080898503875, "grad_norm": 0.4037681221961975, "learning_rate": 1.7441667702764308e-06, "loss": 0.6748, "step": 15713 }, { "epoch": 0.6512495337560612, "grad_norm": 0.5111191272735596, "learning_rate": 1.7439595507480624e-06, "loss": 0.6509, "step": 15714 }, { "epoch": 0.6512909776617348, "grad_norm": 0.4426088035106659, "learning_rate": 1.7437523312196944e-06, "loss": 0.6965, "step": 15715 }, { "epoch": 0.6513324215674086, "grad_norm": 0.4261202812194824, "learning_rate": 1.743545111691326e-06, "loss": 0.7212, "step": 15716 }, { "epoch": 0.6513738654730822, "grad_norm": 0.43412360548973083, "learning_rate": 1.7433378921629576e-06, "loss": 0.6752, "step": 15717 }, { "epoch": 0.6514153093787558, "grad_norm": 0.39340782165527344, "learning_rate": 1.7431306726345892e-06, "loss": 0.6741, "step": 15718 }, { "epoch": 0.6514567532844295, "grad_norm": 0.40273159742355347, "learning_rate": 1.7429234531062208e-06, "loss": 0.7104, "step": 15719 }, { "epoch": 0.6514981971901032, "grad_norm": 0.43781721591949463, "learning_rate": 1.7427162335778524e-06, "loss": 0.72, "step": 15720 }, { "epoch": 0.6515396410957769, "grad_norm": 0.4172431528568268, "learning_rate": 1.742509014049484e-06, "loss": 0.7109, "step": 15721 }, { "epoch": 0.6515810850014505, "grad_norm": 0.39169105887413025, "learning_rate": 1.7423017945211156e-06, "loss": 0.6504, "step": 15722 }, { "epoch": 0.6516225289071242, "grad_norm": 0.44551441073417664, "learning_rate": 1.7420945749927476e-06, "loss": 0.7129, "step": 15723 }, { "epoch": 0.6516639728127979, "grad_norm": 0.4175686538219452, "learning_rate": 1.7418873554643792e-06, "loss": 0.6648, "step": 15724 }, { "epoch": 0.6517054167184716, "grad_norm": 0.423713356256485, "learning_rate": 1.7416801359360108e-06, 
"loss": 0.6682, "step": 15725 }, { "epoch": 0.6517468606241452, "grad_norm": 0.39901238679885864, "learning_rate": 1.7414729164076424e-06, "loss": 0.7092, "step": 15726 }, { "epoch": 0.6517883045298188, "grad_norm": 0.43976593017578125, "learning_rate": 1.741265696879274e-06, "loss": 0.7295, "step": 15727 }, { "epoch": 0.6518297484354926, "grad_norm": 0.4272323548793793, "learning_rate": 1.7410584773509056e-06, "loss": 0.6616, "step": 15728 }, { "epoch": 0.6518711923411662, "grad_norm": 0.42126983404159546, "learning_rate": 1.7408512578225372e-06, "loss": 0.6682, "step": 15729 }, { "epoch": 0.6519126362468399, "grad_norm": 0.413254052400589, "learning_rate": 1.7406440382941688e-06, "loss": 0.6818, "step": 15730 }, { "epoch": 0.6519540801525135, "grad_norm": 0.40079787373542786, "learning_rate": 1.7404368187658004e-06, "loss": 0.668, "step": 15731 }, { "epoch": 0.6519955240581873, "grad_norm": 0.44727012515068054, "learning_rate": 1.7402295992374324e-06, "loss": 0.6926, "step": 15732 }, { "epoch": 0.6520369679638609, "grad_norm": 0.43767988681793213, "learning_rate": 1.740022379709064e-06, "loss": 0.6671, "step": 15733 }, { "epoch": 0.6520784118695346, "grad_norm": 0.42886170744895935, "learning_rate": 1.7398151601806956e-06, "loss": 0.7085, "step": 15734 }, { "epoch": 0.6521198557752083, "grad_norm": 0.4242027997970581, "learning_rate": 1.7396079406523272e-06, "loss": 0.6851, "step": 15735 }, { "epoch": 0.6521612996808819, "grad_norm": 0.4369575083255768, "learning_rate": 1.7394007211239588e-06, "loss": 0.687, "step": 15736 }, { "epoch": 0.6522027435865556, "grad_norm": 0.44731587171554565, "learning_rate": 1.7391935015955904e-06, "loss": 0.7048, "step": 15737 }, { "epoch": 0.6522441874922292, "grad_norm": 0.4220747947692871, "learning_rate": 1.738986282067222e-06, "loss": 0.6747, "step": 15738 }, { "epoch": 0.652285631397903, "grad_norm": 0.45188236236572266, "learning_rate": 1.7387790625388536e-06, "loss": 0.7251, "step": 15739 }, { "epoch": 0.6523270753035766, 
"grad_norm": 0.4787285029888153, "learning_rate": 1.7385718430104854e-06, "loss": 0.708, "step": 15740 }, { "epoch": 0.6523685192092503, "grad_norm": 0.40460318326950073, "learning_rate": 1.7383646234821172e-06, "loss": 0.6807, "step": 15741 }, { "epoch": 0.6524099631149239, "grad_norm": 0.42794525623321533, "learning_rate": 1.7381574039537488e-06, "loss": 0.6592, "step": 15742 }, { "epoch": 0.6524514070205977, "grad_norm": 0.4368503987789154, "learning_rate": 1.7379501844253804e-06, "loss": 0.7113, "step": 15743 }, { "epoch": 0.6524928509262713, "grad_norm": 0.39578866958618164, "learning_rate": 1.737742964897012e-06, "loss": 0.6418, "step": 15744 }, { "epoch": 0.6525342948319449, "grad_norm": 0.394930899143219, "learning_rate": 1.7375357453686436e-06, "loss": 0.679, "step": 15745 }, { "epoch": 0.6525757387376186, "grad_norm": 0.39836016297340393, "learning_rate": 1.7373285258402752e-06, "loss": 0.6707, "step": 15746 }, { "epoch": 0.6526171826432923, "grad_norm": 0.4076630771160126, "learning_rate": 1.7371213063119068e-06, "loss": 0.6771, "step": 15747 }, { "epoch": 0.652658626548966, "grad_norm": 0.4014841914176941, "learning_rate": 1.7369140867835384e-06, "loss": 0.6897, "step": 15748 }, { "epoch": 0.6527000704546396, "grad_norm": 0.4401458501815796, "learning_rate": 1.7367068672551704e-06, "loss": 0.6854, "step": 15749 }, { "epoch": 0.6527415143603134, "grad_norm": 0.41009724140167236, "learning_rate": 1.736499647726802e-06, "loss": 0.6951, "step": 15750 }, { "epoch": 0.652782958265987, "grad_norm": 0.42843538522720337, "learning_rate": 1.7362924281984336e-06, "loss": 0.7234, "step": 15751 }, { "epoch": 0.6528244021716607, "grad_norm": 0.41356161236763, "learning_rate": 1.7360852086700652e-06, "loss": 0.6855, "step": 15752 }, { "epoch": 0.6528658460773343, "grad_norm": 0.395937979221344, "learning_rate": 1.7358779891416968e-06, "loss": 0.6322, "step": 15753 }, { "epoch": 0.652907289983008, "grad_norm": 0.4456738829612732, "learning_rate": 
1.7356707696133284e-06, "loss": 0.667, "step": 15754 }, { "epoch": 0.6529487338886817, "grad_norm": 0.4107266068458557, "learning_rate": 1.73546355008496e-06, "loss": 0.6565, "step": 15755 }, { "epoch": 0.6529901777943553, "grad_norm": 0.3851933479309082, "learning_rate": 1.7352563305565916e-06, "loss": 0.6565, "step": 15756 }, { "epoch": 0.653031621700029, "grad_norm": 0.426571249961853, "learning_rate": 1.7350491110282234e-06, "loss": 0.7231, "step": 15757 }, { "epoch": 0.6530730656057027, "grad_norm": 0.4126175045967102, "learning_rate": 1.7348418914998552e-06, "loss": 0.6329, "step": 15758 }, { "epoch": 0.6531145095113764, "grad_norm": 0.4429250955581665, "learning_rate": 1.7346346719714868e-06, "loss": 0.6556, "step": 15759 }, { "epoch": 0.65315595341705, "grad_norm": 0.4172758460044861, "learning_rate": 1.7344274524431184e-06, "loss": 0.6943, "step": 15760 }, { "epoch": 0.6531973973227236, "grad_norm": 0.414285272359848, "learning_rate": 1.73422023291475e-06, "loss": 0.6609, "step": 15761 }, { "epoch": 0.6532388412283974, "grad_norm": 0.39814522862434387, "learning_rate": 1.7340130133863816e-06, "loss": 0.6471, "step": 15762 }, { "epoch": 0.653280285134071, "grad_norm": 0.4019055664539337, "learning_rate": 1.7338057938580132e-06, "loss": 0.697, "step": 15763 }, { "epoch": 0.6533217290397447, "grad_norm": 0.4140705168247223, "learning_rate": 1.7335985743296448e-06, "loss": 0.6805, "step": 15764 }, { "epoch": 0.6533631729454183, "grad_norm": 0.3930293321609497, "learning_rate": 1.7333913548012764e-06, "loss": 0.6785, "step": 15765 }, { "epoch": 0.6534046168510921, "grad_norm": 0.41667789220809937, "learning_rate": 1.7331841352729082e-06, "loss": 0.7036, "step": 15766 }, { "epoch": 0.6534460607567657, "grad_norm": 0.404541939496994, "learning_rate": 1.73297691574454e-06, "loss": 0.6879, "step": 15767 }, { "epoch": 0.6534875046624394, "grad_norm": 0.38597381114959717, "learning_rate": 1.7327696962161716e-06, "loss": 0.6548, "step": 15768 }, { "epoch": 
0.653528948568113, "grad_norm": 0.5062927007675171, "learning_rate": 1.7325624766878032e-06, "loss": 0.6995, "step": 15769 }, { "epoch": 0.6535703924737867, "grad_norm": 0.44508200883865356, "learning_rate": 1.7323552571594348e-06, "loss": 0.6768, "step": 15770 }, { "epoch": 0.6536118363794604, "grad_norm": 0.3669632077217102, "learning_rate": 1.7321480376310664e-06, "loss": 0.6306, "step": 15771 }, { "epoch": 0.653653280285134, "grad_norm": 0.3990861177444458, "learning_rate": 1.731940818102698e-06, "loss": 0.626, "step": 15772 }, { "epoch": 0.6536947241908078, "grad_norm": 0.41137459874153137, "learning_rate": 1.7317335985743296e-06, "loss": 0.7163, "step": 15773 }, { "epoch": 0.6537361680964814, "grad_norm": 0.409091979265213, "learning_rate": 1.7315263790459614e-06, "loss": 0.6924, "step": 15774 }, { "epoch": 0.6537776120021551, "grad_norm": 0.4474322199821472, "learning_rate": 1.7313191595175932e-06, "loss": 0.6869, "step": 15775 }, { "epoch": 0.6538190559078287, "grad_norm": 0.4054987132549286, "learning_rate": 1.7311119399892248e-06, "loss": 0.6887, "step": 15776 }, { "epoch": 0.6538604998135025, "grad_norm": 0.4308711290359497, "learning_rate": 1.7309047204608564e-06, "loss": 0.6984, "step": 15777 }, { "epoch": 0.6539019437191761, "grad_norm": 0.3708193004131317, "learning_rate": 1.730697500932488e-06, "loss": 0.6377, "step": 15778 }, { "epoch": 0.6539433876248497, "grad_norm": 0.39931997656822205, "learning_rate": 1.7304902814041196e-06, "loss": 0.6846, "step": 15779 }, { "epoch": 0.6539848315305234, "grad_norm": 0.4054282307624817, "learning_rate": 1.7302830618757512e-06, "loss": 0.6185, "step": 15780 }, { "epoch": 0.6540262754361971, "grad_norm": 0.3906192481517792, "learning_rate": 1.7300758423473828e-06, "loss": 0.5784, "step": 15781 }, { "epoch": 0.6540677193418708, "grad_norm": 0.40949708223342896, "learning_rate": 1.7298686228190144e-06, "loss": 0.645, "step": 15782 }, { "epoch": 0.6541091632475444, "grad_norm": 0.4361708164215088, "learning_rate": 
1.7296614032906462e-06, "loss": 0.7278, "step": 15783 }, { "epoch": 0.6541506071532182, "grad_norm": 0.4104963541030884, "learning_rate": 1.729454183762278e-06, "loss": 0.7242, "step": 15784 }, { "epoch": 0.6541920510588918, "grad_norm": 0.39452454447746277, "learning_rate": 1.7292469642339096e-06, "loss": 0.6606, "step": 15785 }, { "epoch": 0.6542334949645655, "grad_norm": 0.3919631242752075, "learning_rate": 1.7290397447055412e-06, "loss": 0.6147, "step": 15786 }, { "epoch": 0.6542749388702391, "grad_norm": 0.5125225782394409, "learning_rate": 1.7288325251771728e-06, "loss": 0.7137, "step": 15787 }, { "epoch": 0.6543163827759128, "grad_norm": 0.4215662181377411, "learning_rate": 1.7286253056488044e-06, "loss": 0.6575, "step": 15788 }, { "epoch": 0.6543578266815865, "grad_norm": 0.36606648564338684, "learning_rate": 1.728418086120436e-06, "loss": 0.635, "step": 15789 }, { "epoch": 0.6543992705872601, "grad_norm": 0.3957790732383728, "learning_rate": 1.7282108665920676e-06, "loss": 0.6935, "step": 15790 }, { "epoch": 0.6544407144929338, "grad_norm": 0.4405267834663391, "learning_rate": 1.7280036470636994e-06, "loss": 0.6677, "step": 15791 }, { "epoch": 0.6544821583986075, "grad_norm": 0.41332128643989563, "learning_rate": 1.727796427535331e-06, "loss": 0.6958, "step": 15792 }, { "epoch": 0.6545236023042812, "grad_norm": 0.4082602560520172, "learning_rate": 1.7275892080069628e-06, "loss": 0.5955, "step": 15793 }, { "epoch": 0.6545650462099548, "grad_norm": 0.4165429472923279, "learning_rate": 1.7273819884785944e-06, "loss": 0.6996, "step": 15794 }, { "epoch": 0.6546064901156285, "grad_norm": 0.40681174397468567, "learning_rate": 1.727174768950226e-06, "loss": 0.6165, "step": 15795 }, { "epoch": 0.6546479340213022, "grad_norm": 0.40062281489372253, "learning_rate": 1.7269675494218576e-06, "loss": 0.6785, "step": 15796 }, { "epoch": 0.6546893779269758, "grad_norm": 0.4208965301513672, "learning_rate": 1.7267603298934892e-06, "loss": 0.7029, "step": 15797 }, { "epoch": 
0.6547308218326495, "grad_norm": 0.42980527877807617, "learning_rate": 1.7265531103651208e-06, "loss": 0.6875, "step": 15798 }, { "epoch": 0.6547722657383231, "grad_norm": 0.40014299750328064, "learning_rate": 1.7263458908367524e-06, "loss": 0.6665, "step": 15799 }, { "epoch": 0.6548137096439969, "grad_norm": 0.40552690625190735, "learning_rate": 1.7261386713083842e-06, "loss": 0.6929, "step": 15800 }, { "epoch": 0.6548551535496705, "grad_norm": 0.4124605059623718, "learning_rate": 1.7259314517800158e-06, "loss": 0.6472, "step": 15801 }, { "epoch": 0.6548965974553442, "grad_norm": 0.40545380115509033, "learning_rate": 1.7257242322516476e-06, "loss": 0.6793, "step": 15802 }, { "epoch": 0.6549380413610179, "grad_norm": 0.40259498357772827, "learning_rate": 1.7255170127232792e-06, "loss": 0.6975, "step": 15803 }, { "epoch": 0.6549794852666916, "grad_norm": 0.40491700172424316, "learning_rate": 1.7253097931949108e-06, "loss": 0.7051, "step": 15804 }, { "epoch": 0.6550209291723652, "grad_norm": 0.40791767835617065, "learning_rate": 1.7251025736665424e-06, "loss": 0.7026, "step": 15805 }, { "epoch": 0.6550623730780388, "grad_norm": 0.44549524784088135, "learning_rate": 1.724895354138174e-06, "loss": 0.71, "step": 15806 }, { "epoch": 0.6551038169837126, "grad_norm": 0.4575514793395996, "learning_rate": 1.7246881346098056e-06, "loss": 0.709, "step": 15807 }, { "epoch": 0.6551452608893862, "grad_norm": 0.4460285007953644, "learning_rate": 1.7244809150814374e-06, "loss": 0.6729, "step": 15808 }, { "epoch": 0.6551867047950599, "grad_norm": 0.43750715255737305, "learning_rate": 1.724273695553069e-06, "loss": 0.7068, "step": 15809 }, { "epoch": 0.6552281487007335, "grad_norm": 0.4101375341415405, "learning_rate": 1.7240664760247008e-06, "loss": 0.6461, "step": 15810 }, { "epoch": 0.6552695926064073, "grad_norm": 0.4052460193634033, "learning_rate": 1.7238592564963324e-06, "loss": 0.7023, "step": 15811 }, { "epoch": 0.6553110365120809, "grad_norm": 0.408568799495697, 
"learning_rate": 1.723652036967964e-06, "loss": 0.6372, "step": 15812 }, { "epoch": 0.6553524804177546, "grad_norm": 0.3925889730453491, "learning_rate": 1.7234448174395956e-06, "loss": 0.6581, "step": 15813 }, { "epoch": 0.6553939243234282, "grad_norm": 0.4190484583377838, "learning_rate": 1.7232375979112272e-06, "loss": 0.6832, "step": 15814 }, { "epoch": 0.6554353682291019, "grad_norm": 0.4159119725227356, "learning_rate": 1.7230303783828588e-06, "loss": 0.6613, "step": 15815 }, { "epoch": 0.6554768121347756, "grad_norm": 0.40684086084365845, "learning_rate": 1.7228231588544906e-06, "loss": 0.6302, "step": 15816 }, { "epoch": 0.6555182560404492, "grad_norm": 0.4096267819404602, "learning_rate": 1.7226159393261222e-06, "loss": 0.717, "step": 15817 }, { "epoch": 0.655559699946123, "grad_norm": 0.43406757712364197, "learning_rate": 1.7224087197977538e-06, "loss": 0.7068, "step": 15818 }, { "epoch": 0.6556011438517966, "grad_norm": 0.385793536901474, "learning_rate": 1.7222015002693856e-06, "loss": 0.7109, "step": 15819 }, { "epoch": 0.6556425877574703, "grad_norm": 0.424255907535553, "learning_rate": 1.7219942807410172e-06, "loss": 0.7064, "step": 15820 }, { "epoch": 0.6556840316631439, "grad_norm": 0.4171079397201538, "learning_rate": 1.7217870612126488e-06, "loss": 0.7078, "step": 15821 }, { "epoch": 0.6557254755688176, "grad_norm": 0.40043315291404724, "learning_rate": 1.7215798416842804e-06, "loss": 0.6997, "step": 15822 }, { "epoch": 0.6557669194744913, "grad_norm": 0.4386458992958069, "learning_rate": 1.721372622155912e-06, "loss": 0.7202, "step": 15823 }, { "epoch": 0.6558083633801649, "grad_norm": 0.39670661091804504, "learning_rate": 1.7211654026275436e-06, "loss": 0.6692, "step": 15824 }, { "epoch": 0.6558498072858386, "grad_norm": 0.42229771614074707, "learning_rate": 1.7209581830991754e-06, "loss": 0.6913, "step": 15825 }, { "epoch": 0.6558912511915123, "grad_norm": 0.3951636850833893, "learning_rate": 1.720750963570807e-06, "loss": 0.6675, "step": 
15826 }, { "epoch": 0.655932695097186, "grad_norm": 0.4452580511569977, "learning_rate": 1.7205437440424386e-06, "loss": 0.707, "step": 15827 }, { "epoch": 0.6559741390028596, "grad_norm": 0.4179422855377197, "learning_rate": 1.7203365245140704e-06, "loss": 0.6278, "step": 15828 }, { "epoch": 0.6560155829085333, "grad_norm": 0.4243967533111572, "learning_rate": 1.720129304985702e-06, "loss": 0.7197, "step": 15829 }, { "epoch": 0.656057026814207, "grad_norm": 0.4189925193786621, "learning_rate": 1.7199220854573336e-06, "loss": 0.6897, "step": 15830 }, { "epoch": 0.6560984707198806, "grad_norm": 0.4216445982456207, "learning_rate": 1.7197148659289652e-06, "loss": 0.75, "step": 15831 }, { "epoch": 0.6561399146255543, "grad_norm": 0.41182741522789, "learning_rate": 1.7195076464005968e-06, "loss": 0.6885, "step": 15832 }, { "epoch": 0.6561813585312279, "grad_norm": 0.4273031949996948, "learning_rate": 1.7193004268722286e-06, "loss": 0.7373, "step": 15833 }, { "epoch": 0.6562228024369017, "grad_norm": 0.4141566753387451, "learning_rate": 1.7190932073438602e-06, "loss": 0.7141, "step": 15834 }, { "epoch": 0.6562642463425753, "grad_norm": 0.3880942165851593, "learning_rate": 1.7188859878154918e-06, "loss": 0.6788, "step": 15835 }, { "epoch": 0.656305690248249, "grad_norm": 0.4042184054851532, "learning_rate": 1.7186787682871236e-06, "loss": 0.6592, "step": 15836 }, { "epoch": 0.6563471341539227, "grad_norm": 0.46124470233917236, "learning_rate": 1.7184715487587552e-06, "loss": 0.7415, "step": 15837 }, { "epoch": 0.6563885780595964, "grad_norm": 0.40873071551322937, "learning_rate": 1.7182643292303868e-06, "loss": 0.6603, "step": 15838 }, { "epoch": 0.65643002196527, "grad_norm": 0.420430988073349, "learning_rate": 1.7180571097020184e-06, "loss": 0.6494, "step": 15839 }, { "epoch": 0.6564714658709436, "grad_norm": 0.4463101923465729, "learning_rate": 1.71784989017365e-06, "loss": 0.6919, "step": 15840 }, { "epoch": 0.6565129097766174, "grad_norm": 0.3935699760913849, 
"learning_rate": 1.7176426706452816e-06, "loss": 0.5975, "step": 15841 }, { "epoch": 0.656554353682291, "grad_norm": 0.4163416028022766, "learning_rate": 1.7174354511169134e-06, "loss": 0.6686, "step": 15842 }, { "epoch": 0.6565957975879647, "grad_norm": 0.4248247742652893, "learning_rate": 1.717228231588545e-06, "loss": 0.6904, "step": 15843 }, { "epoch": 0.6566372414936383, "grad_norm": 0.3977970778942108, "learning_rate": 1.7170210120601766e-06, "loss": 0.6377, "step": 15844 }, { "epoch": 0.6566786853993121, "grad_norm": 0.3912927210330963, "learning_rate": 1.7168137925318084e-06, "loss": 0.6549, "step": 15845 }, { "epoch": 0.6567201293049857, "grad_norm": 0.4295571446418762, "learning_rate": 1.71660657300344e-06, "loss": 0.6997, "step": 15846 }, { "epoch": 0.6567615732106594, "grad_norm": 0.415941447019577, "learning_rate": 1.7163993534750716e-06, "loss": 0.6807, "step": 15847 }, { "epoch": 0.656803017116333, "grad_norm": 0.5575341582298279, "learning_rate": 1.7161921339467032e-06, "loss": 0.6826, "step": 15848 }, { "epoch": 0.6568444610220067, "grad_norm": 0.49588531255722046, "learning_rate": 1.7159849144183348e-06, "loss": 0.6343, "step": 15849 }, { "epoch": 0.6568859049276804, "grad_norm": 0.4204784631729126, "learning_rate": 1.7157776948899666e-06, "loss": 0.7034, "step": 15850 }, { "epoch": 0.656927348833354, "grad_norm": 0.38081130385398865, "learning_rate": 1.7155704753615982e-06, "loss": 0.7002, "step": 15851 }, { "epoch": 0.6569687927390278, "grad_norm": 0.40828636288642883, "learning_rate": 1.7153632558332298e-06, "loss": 0.6985, "step": 15852 }, { "epoch": 0.6570102366447014, "grad_norm": 0.37971195578575134, "learning_rate": 1.7151560363048614e-06, "loss": 0.6223, "step": 15853 }, { "epoch": 0.6570516805503751, "grad_norm": 0.43649357557296753, "learning_rate": 1.7149488167764932e-06, "loss": 0.697, "step": 15854 }, { "epoch": 0.6570931244560487, "grad_norm": 0.5322659015655518, "learning_rate": 1.7147415972481248e-06, "loss": 0.6671, "step": 15855 
}, { "epoch": 0.6571345683617225, "grad_norm": 0.3835306167602539, "learning_rate": 1.7145343777197564e-06, "loss": 0.6781, "step": 15856 }, { "epoch": 0.6571760122673961, "grad_norm": 0.42388784885406494, "learning_rate": 1.714327158191388e-06, "loss": 0.6567, "step": 15857 }, { "epoch": 0.6572174561730697, "grad_norm": 0.4194665551185608, "learning_rate": 1.7141199386630196e-06, "loss": 0.6995, "step": 15858 }, { "epoch": 0.6572589000787434, "grad_norm": 0.39259710907936096, "learning_rate": 1.7139127191346514e-06, "loss": 0.6517, "step": 15859 }, { "epoch": 0.6573003439844171, "grad_norm": 0.3966829180717468, "learning_rate": 1.713705499606283e-06, "loss": 0.6904, "step": 15860 }, { "epoch": 0.6573417878900908, "grad_norm": 0.40633827447891235, "learning_rate": 1.7134982800779146e-06, "loss": 0.6731, "step": 15861 }, { "epoch": 0.6573832317957644, "grad_norm": 0.4272167980670929, "learning_rate": 1.7132910605495462e-06, "loss": 0.6674, "step": 15862 }, { "epoch": 0.6574246757014381, "grad_norm": 0.4242865741252899, "learning_rate": 1.713083841021178e-06, "loss": 0.6796, "step": 15863 }, { "epoch": 0.6574661196071118, "grad_norm": 0.3944559395313263, "learning_rate": 1.7128766214928096e-06, "loss": 0.6628, "step": 15864 }, { "epoch": 0.6575075635127855, "grad_norm": 0.39071494340896606, "learning_rate": 1.7126694019644412e-06, "loss": 0.6533, "step": 15865 }, { "epoch": 0.6575490074184591, "grad_norm": 0.40022996068000793, "learning_rate": 1.7124621824360728e-06, "loss": 0.6685, "step": 15866 }, { "epoch": 0.6575904513241327, "grad_norm": 0.38033562898635864, "learning_rate": 1.7122549629077046e-06, "loss": 0.6238, "step": 15867 }, { "epoch": 0.6576318952298065, "grad_norm": 0.4154983162879944, "learning_rate": 1.7120477433793362e-06, "loss": 0.6594, "step": 15868 }, { "epoch": 0.6576733391354801, "grad_norm": 0.3927316963672638, "learning_rate": 1.7118405238509678e-06, "loss": 0.6697, "step": 15869 }, { "epoch": 0.6577147830411538, "grad_norm": 
0.40432554483413696, "learning_rate": 1.7116333043225994e-06, "loss": 0.7039, "step": 15870 }, { "epoch": 0.6577562269468274, "grad_norm": 0.38262805342674255, "learning_rate": 1.7114260847942312e-06, "loss": 0.6771, "step": 15871 }, { "epoch": 0.6577976708525012, "grad_norm": 0.43259990215301514, "learning_rate": 1.7112188652658628e-06, "loss": 0.6448, "step": 15872 }, { "epoch": 0.6578391147581748, "grad_norm": 0.4208540618419647, "learning_rate": 1.7110116457374944e-06, "loss": 0.6934, "step": 15873 }, { "epoch": 0.6578805586638485, "grad_norm": 0.4065565764904022, "learning_rate": 1.710804426209126e-06, "loss": 0.6309, "step": 15874 }, { "epoch": 0.6579220025695222, "grad_norm": 0.4515267610549927, "learning_rate": 1.7105972066807576e-06, "loss": 0.6863, "step": 15875 }, { "epoch": 0.6579634464751958, "grad_norm": 0.39399778842926025, "learning_rate": 1.7103899871523894e-06, "loss": 0.6019, "step": 15876 }, { "epoch": 0.6580048903808695, "grad_norm": 0.38645172119140625, "learning_rate": 1.710182767624021e-06, "loss": 0.6571, "step": 15877 }, { "epoch": 0.6580463342865431, "grad_norm": 0.40489277243614197, "learning_rate": 1.7099755480956526e-06, "loss": 0.6517, "step": 15878 }, { "epoch": 0.6580877781922169, "grad_norm": 0.40738117694854736, "learning_rate": 1.7097683285672842e-06, "loss": 0.7021, "step": 15879 }, { "epoch": 0.6581292220978905, "grad_norm": 0.40787044167518616, "learning_rate": 1.709561109038916e-06, "loss": 0.6906, "step": 15880 }, { "epoch": 0.6581706660035642, "grad_norm": 0.38955286145210266, "learning_rate": 1.7093538895105476e-06, "loss": 0.7096, "step": 15881 }, { "epoch": 0.6582121099092378, "grad_norm": 0.4115619957447052, "learning_rate": 1.7091466699821792e-06, "loss": 0.6593, "step": 15882 }, { "epoch": 0.6582535538149115, "grad_norm": 0.39343875646591187, "learning_rate": 1.7089394504538108e-06, "loss": 0.6759, "step": 15883 }, { "epoch": 0.6582949977205852, "grad_norm": 0.40498554706573486, "learning_rate": 
1.7087322309254426e-06, "loss": 0.668, "step": 15884 }, { "epoch": 0.6583364416262588, "grad_norm": 0.3864211142063141, "learning_rate": 1.7085250113970742e-06, "loss": 0.6594, "step": 15885 }, { "epoch": 0.6583778855319325, "grad_norm": 0.4145214259624481, "learning_rate": 1.7083177918687058e-06, "loss": 0.7396, "step": 15886 }, { "epoch": 0.6584193294376062, "grad_norm": 0.3966812789440155, "learning_rate": 1.7081105723403374e-06, "loss": 0.645, "step": 15887 }, { "epoch": 0.6584607733432799, "grad_norm": 0.45376506447792053, "learning_rate": 1.707903352811969e-06, "loss": 0.7477, "step": 15888 }, { "epoch": 0.6585022172489535, "grad_norm": 0.42083802819252014, "learning_rate": 1.7076961332836008e-06, "loss": 0.6702, "step": 15889 }, { "epoch": 0.6585436611546273, "grad_norm": 0.3996073305606842, "learning_rate": 1.7074889137552324e-06, "loss": 0.6117, "step": 15890 }, { "epoch": 0.6585851050603009, "grad_norm": 0.4056505560874939, "learning_rate": 1.707281694226864e-06, "loss": 0.6304, "step": 15891 }, { "epoch": 0.6586265489659745, "grad_norm": 0.4683209955692291, "learning_rate": 1.7070744746984956e-06, "loss": 0.6975, "step": 15892 }, { "epoch": 0.6586679928716482, "grad_norm": 0.3804751932621002, "learning_rate": 1.7068672551701274e-06, "loss": 0.6525, "step": 15893 }, { "epoch": 0.6587094367773219, "grad_norm": 0.3976973295211792, "learning_rate": 1.706660035641759e-06, "loss": 0.6733, "step": 15894 }, { "epoch": 0.6587508806829956, "grad_norm": 0.4610307216644287, "learning_rate": 1.7064528161133906e-06, "loss": 0.7383, "step": 15895 }, { "epoch": 0.6587923245886692, "grad_norm": 0.3912046551704407, "learning_rate": 1.7062455965850222e-06, "loss": 0.6665, "step": 15896 }, { "epoch": 0.6588337684943429, "grad_norm": 0.3910112977027893, "learning_rate": 1.706038377056654e-06, "loss": 0.6439, "step": 15897 }, { "epoch": 0.6588752124000166, "grad_norm": 0.43328994512557983, "learning_rate": 1.7058311575282856e-06, "loss": 0.6835, "step": 15898 }, { "epoch": 
0.6589166563056903, "grad_norm": 0.43128570914268494, "learning_rate": 1.7056239379999172e-06, "loss": 0.6743, "step": 15899 }, { "epoch": 0.6589581002113639, "grad_norm": 0.4320971369743347, "learning_rate": 1.7054167184715488e-06, "loss": 0.6869, "step": 15900 }, { "epoch": 0.6589995441170375, "grad_norm": 0.41274017095565796, "learning_rate": 1.7052094989431806e-06, "loss": 0.66, "step": 15901 }, { "epoch": 0.6590409880227113, "grad_norm": 0.44481131434440613, "learning_rate": 1.7050022794148122e-06, "loss": 0.6871, "step": 15902 }, { "epoch": 0.6590824319283849, "grad_norm": 0.3837624192237854, "learning_rate": 1.7047950598864438e-06, "loss": 0.6777, "step": 15903 }, { "epoch": 0.6591238758340586, "grad_norm": 0.4376017153263092, "learning_rate": 1.7045878403580754e-06, "loss": 0.7148, "step": 15904 }, { "epoch": 0.6591653197397322, "grad_norm": 0.4535057246685028, "learning_rate": 1.704380620829707e-06, "loss": 0.7454, "step": 15905 }, { "epoch": 0.659206763645406, "grad_norm": 0.4428234398365021, "learning_rate": 1.7041734013013388e-06, "loss": 0.6389, "step": 15906 }, { "epoch": 0.6592482075510796, "grad_norm": 0.43806129693984985, "learning_rate": 1.7039661817729704e-06, "loss": 0.7351, "step": 15907 }, { "epoch": 0.6592896514567533, "grad_norm": 0.43979784846305847, "learning_rate": 1.703758962244602e-06, "loss": 0.6935, "step": 15908 }, { "epoch": 0.659331095362427, "grad_norm": 0.416564404964447, "learning_rate": 1.7035517427162339e-06, "loss": 0.6406, "step": 15909 }, { "epoch": 0.6593725392681006, "grad_norm": 0.3931574523448944, "learning_rate": 1.7033445231878654e-06, "loss": 0.647, "step": 15910 }, { "epoch": 0.6594139831737743, "grad_norm": 0.38810956478118896, "learning_rate": 1.703137303659497e-06, "loss": 0.6935, "step": 15911 }, { "epoch": 0.6594554270794479, "grad_norm": 0.38679274916648865, "learning_rate": 1.7029300841311286e-06, "loss": 0.6147, "step": 15912 }, { "epoch": 0.6594968709851217, "grad_norm": 0.41585877537727356, 
"learning_rate": 1.7027228646027602e-06, "loss": 0.7, "step": 15913 }, { "epoch": 0.6595383148907953, "grad_norm": 0.40813443064689636, "learning_rate": 1.7025156450743918e-06, "loss": 0.6643, "step": 15914 }, { "epoch": 0.659579758796469, "grad_norm": 0.44207730889320374, "learning_rate": 1.7023084255460236e-06, "loss": 0.6857, "step": 15915 }, { "epoch": 0.6596212027021426, "grad_norm": 0.4151112139225006, "learning_rate": 1.7021012060176552e-06, "loss": 0.6924, "step": 15916 }, { "epoch": 0.6596626466078164, "grad_norm": 0.4181790351867676, "learning_rate": 1.7018939864892868e-06, "loss": 0.6512, "step": 15917 }, { "epoch": 0.65970409051349, "grad_norm": 0.3908064663410187, "learning_rate": 1.7016867669609187e-06, "loss": 0.6754, "step": 15918 }, { "epoch": 0.6597455344191636, "grad_norm": 0.4371959865093231, "learning_rate": 1.7014795474325502e-06, "loss": 0.7026, "step": 15919 }, { "epoch": 0.6597869783248373, "grad_norm": 0.4230654239654541, "learning_rate": 1.7012723279041818e-06, "loss": 0.6748, "step": 15920 }, { "epoch": 0.659828422230511, "grad_norm": 0.3909659683704376, "learning_rate": 1.7010651083758134e-06, "loss": 0.629, "step": 15921 }, { "epoch": 0.6598698661361847, "grad_norm": 0.39861851930618286, "learning_rate": 1.700857888847445e-06, "loss": 0.6663, "step": 15922 }, { "epoch": 0.6599113100418583, "grad_norm": 0.4235279858112335, "learning_rate": 1.7006506693190766e-06, "loss": 0.6687, "step": 15923 }, { "epoch": 0.6599527539475321, "grad_norm": 0.43056735396385193, "learning_rate": 1.7004434497907084e-06, "loss": 0.6729, "step": 15924 }, { "epoch": 0.6599941978532057, "grad_norm": 0.4405744969844818, "learning_rate": 1.70023623026234e-06, "loss": 0.6794, "step": 15925 }, { "epoch": 0.6600356417588794, "grad_norm": 0.41619405150413513, "learning_rate": 1.7000290107339719e-06, "loss": 0.6654, "step": 15926 }, { "epoch": 0.660077085664553, "grad_norm": 0.38647568225860596, "learning_rate": 1.6998217912056035e-06, "loss": 0.6709, "step": 15927 }, 
{ "epoch": 0.6601185295702267, "grad_norm": 0.4220942556858063, "learning_rate": 1.699614571677235e-06, "loss": 0.6788, "step": 15928 }, { "epoch": 0.6601599734759004, "grad_norm": 0.44393622875213623, "learning_rate": 1.6994073521488666e-06, "loss": 0.7236, "step": 15929 }, { "epoch": 0.660201417381574, "grad_norm": 0.3824441432952881, "learning_rate": 1.6992001326204982e-06, "loss": 0.6794, "step": 15930 }, { "epoch": 0.6602428612872477, "grad_norm": 0.40702781081199646, "learning_rate": 1.6989929130921298e-06, "loss": 0.6897, "step": 15931 }, { "epoch": 0.6602843051929214, "grad_norm": 0.40720781683921814, "learning_rate": 1.6987856935637617e-06, "loss": 0.6589, "step": 15932 }, { "epoch": 0.6603257490985951, "grad_norm": 0.4167475998401642, "learning_rate": 1.6985784740353932e-06, "loss": 0.665, "step": 15933 }, { "epoch": 0.6603671930042687, "grad_norm": 0.39371156692504883, "learning_rate": 1.6983712545070248e-06, "loss": 0.6323, "step": 15934 }, { "epoch": 0.6604086369099424, "grad_norm": 0.3834646940231323, "learning_rate": 1.6981640349786567e-06, "loss": 0.6857, "step": 15935 }, { "epoch": 0.6604500808156161, "grad_norm": 0.3804472088813782, "learning_rate": 1.6979568154502883e-06, "loss": 0.62, "step": 15936 }, { "epoch": 0.6604915247212897, "grad_norm": 0.4005916714668274, "learning_rate": 1.6977495959219199e-06, "loss": 0.6201, "step": 15937 }, { "epoch": 0.6605329686269634, "grad_norm": 0.4113697409629822, "learning_rate": 1.6975423763935514e-06, "loss": 0.6401, "step": 15938 }, { "epoch": 0.660574412532637, "grad_norm": 0.39684680104255676, "learning_rate": 1.697335156865183e-06, "loss": 0.6804, "step": 15939 }, { "epoch": 0.6606158564383108, "grad_norm": 0.42072048783302307, "learning_rate": 1.6971279373368146e-06, "loss": 0.6742, "step": 15940 }, { "epoch": 0.6606573003439844, "grad_norm": 0.4549179673194885, "learning_rate": 1.6969207178084465e-06, "loss": 0.6667, "step": 15941 }, { "epoch": 0.6606987442496581, "grad_norm": 0.4198736846446991, 
"learning_rate": 1.696713498280078e-06, "loss": 0.6803, "step": 15942 }, { "epoch": 0.6607401881553318, "grad_norm": 0.4072663486003876, "learning_rate": 1.6965062787517099e-06, "loss": 0.6143, "step": 15943 }, { "epoch": 0.6607816320610054, "grad_norm": 0.42493024468421936, "learning_rate": 1.6962990592233415e-06, "loss": 0.6804, "step": 15944 }, { "epoch": 0.6608230759666791, "grad_norm": 0.46426454186439514, "learning_rate": 1.696091839694973e-06, "loss": 0.6821, "step": 15945 }, { "epoch": 0.6608645198723527, "grad_norm": 0.40335360169410706, "learning_rate": 1.6958846201666047e-06, "loss": 0.699, "step": 15946 }, { "epoch": 0.6609059637780265, "grad_norm": 0.39876291155815125, "learning_rate": 1.6956774006382362e-06, "loss": 0.667, "step": 15947 }, { "epoch": 0.6609474076837001, "grad_norm": 0.40360960364341736, "learning_rate": 1.6954701811098678e-06, "loss": 0.6815, "step": 15948 }, { "epoch": 0.6609888515893738, "grad_norm": 0.4324796497821808, "learning_rate": 1.6952629615814994e-06, "loss": 0.6776, "step": 15949 }, { "epoch": 0.6610302954950474, "grad_norm": 0.3844256103038788, "learning_rate": 1.6950557420531313e-06, "loss": 0.6344, "step": 15950 }, { "epoch": 0.6610717394007212, "grad_norm": 0.3923284709453583, "learning_rate": 1.6948485225247628e-06, "loss": 0.6514, "step": 15951 }, { "epoch": 0.6611131833063948, "grad_norm": 0.4134000837802887, "learning_rate": 1.6946413029963947e-06, "loss": 0.6976, "step": 15952 }, { "epoch": 0.6611546272120684, "grad_norm": 0.391956627368927, "learning_rate": 1.6944340834680263e-06, "loss": 0.7101, "step": 15953 }, { "epoch": 0.6611960711177421, "grad_norm": 0.43002066016197205, "learning_rate": 1.6942268639396579e-06, "loss": 0.6836, "step": 15954 }, { "epoch": 0.6612375150234158, "grad_norm": 0.3834863007068634, "learning_rate": 1.6940196444112895e-06, "loss": 0.663, "step": 15955 }, { "epoch": 0.6612789589290895, "grad_norm": 0.42112401127815247, "learning_rate": 1.693812424882921e-06, "loss": 0.6636, "step": 
15956 }, { "epoch": 0.6613204028347631, "grad_norm": 0.42690062522888184, "learning_rate": 1.6936052053545526e-06, "loss": 0.6875, "step": 15957 }, { "epoch": 0.6613618467404369, "grad_norm": 0.4197480082511902, "learning_rate": 1.6933979858261845e-06, "loss": 0.6628, "step": 15958 }, { "epoch": 0.6614032906461105, "grad_norm": 0.40382689237594604, "learning_rate": 1.693190766297816e-06, "loss": 0.6641, "step": 15959 }, { "epoch": 0.6614447345517842, "grad_norm": 0.408812016248703, "learning_rate": 1.6929835467694479e-06, "loss": 0.6428, "step": 15960 }, { "epoch": 0.6614861784574578, "grad_norm": 0.42023780941963196, "learning_rate": 1.6927763272410795e-06, "loss": 0.6415, "step": 15961 }, { "epoch": 0.6615276223631315, "grad_norm": 0.42537063360214233, "learning_rate": 1.692569107712711e-06, "loss": 0.6483, "step": 15962 }, { "epoch": 0.6615690662688052, "grad_norm": 0.37976691126823425, "learning_rate": 1.6923618881843427e-06, "loss": 0.6464, "step": 15963 }, { "epoch": 0.6616105101744788, "grad_norm": 0.3804509937763214, "learning_rate": 1.6921546686559743e-06, "loss": 0.6699, "step": 15964 }, { "epoch": 0.6616519540801525, "grad_norm": 0.4056214690208435, "learning_rate": 1.6919474491276058e-06, "loss": 0.6644, "step": 15965 }, { "epoch": 0.6616933979858262, "grad_norm": 0.44680482149124146, "learning_rate": 1.6917402295992374e-06, "loss": 0.684, "step": 15966 }, { "epoch": 0.6617348418914999, "grad_norm": 0.44334855675697327, "learning_rate": 1.6915330100708693e-06, "loss": 0.7544, "step": 15967 }, { "epoch": 0.6617762857971735, "grad_norm": 0.4248839318752289, "learning_rate": 1.6913257905425009e-06, "loss": 0.7054, "step": 15968 }, { "epoch": 0.6618177297028472, "grad_norm": 0.40264639258384705, "learning_rate": 1.6911185710141327e-06, "loss": 0.6372, "step": 15969 }, { "epoch": 0.6618591736085209, "grad_norm": 0.44528117775917053, "learning_rate": 1.6909113514857643e-06, "loss": 0.6794, "step": 15970 }, { "epoch": 0.6619006175141945, "grad_norm": 
0.4198397099971771, "learning_rate": 1.6907041319573959e-06, "loss": 0.6733, "step": 15971 }, { "epoch": 0.6619420614198682, "grad_norm": 0.4407023787498474, "learning_rate": 1.6904969124290275e-06, "loss": 0.7764, "step": 15972 }, { "epoch": 0.6619835053255418, "grad_norm": 0.4075586199760437, "learning_rate": 1.690289692900659e-06, "loss": 0.6359, "step": 15973 }, { "epoch": 0.6620249492312156, "grad_norm": 0.427508682012558, "learning_rate": 1.6900824733722906e-06, "loss": 0.7397, "step": 15974 }, { "epoch": 0.6620663931368892, "grad_norm": 0.38765883445739746, "learning_rate": 1.6898752538439222e-06, "loss": 0.6344, "step": 15975 }, { "epoch": 0.6621078370425629, "grad_norm": 0.38913047313690186, "learning_rate": 1.689668034315554e-06, "loss": 0.6936, "step": 15976 }, { "epoch": 0.6621492809482366, "grad_norm": 0.3855162560939789, "learning_rate": 1.6894608147871859e-06, "loss": 0.7083, "step": 15977 }, { "epoch": 0.6621907248539103, "grad_norm": 0.42753905057907104, "learning_rate": 1.6892535952588175e-06, "loss": 0.7003, "step": 15978 }, { "epoch": 0.6622321687595839, "grad_norm": 0.35751551389694214, "learning_rate": 1.689046375730449e-06, "loss": 0.652, "step": 15979 }, { "epoch": 0.6622736126652575, "grad_norm": 0.4125056266784668, "learning_rate": 1.6888391562020807e-06, "loss": 0.679, "step": 15980 }, { "epoch": 0.6623150565709313, "grad_norm": 0.4588261544704437, "learning_rate": 1.6886319366737123e-06, "loss": 0.6451, "step": 15981 }, { "epoch": 0.6623565004766049, "grad_norm": 0.4142706096172333, "learning_rate": 1.6884247171453439e-06, "loss": 0.6566, "step": 15982 }, { "epoch": 0.6623979443822786, "grad_norm": 0.4337237477302551, "learning_rate": 1.6882174976169754e-06, "loss": 0.6987, "step": 15983 }, { "epoch": 0.6624393882879522, "grad_norm": 0.37395796179771423, "learning_rate": 1.6880102780886073e-06, "loss": 0.6373, "step": 15984 }, { "epoch": 0.662480832193626, "grad_norm": 0.4059516489505768, "learning_rate": 1.6878030585602389e-06, "loss": 
0.6509, "step": 15985 }, { "epoch": 0.6625222760992996, "grad_norm": 0.4157456159591675, "learning_rate": 1.6875958390318707e-06, "loss": 0.6637, "step": 15986 }, { "epoch": 0.6625637200049733, "grad_norm": 0.43141451478004456, "learning_rate": 1.6873886195035023e-06, "loss": 0.678, "step": 15987 }, { "epoch": 0.6626051639106469, "grad_norm": 0.46040433645248413, "learning_rate": 1.6871813999751339e-06, "loss": 0.7114, "step": 15988 }, { "epoch": 0.6626466078163206, "grad_norm": 0.3955116271972656, "learning_rate": 1.6869741804467655e-06, "loss": 0.6833, "step": 15989 }, { "epoch": 0.6626880517219943, "grad_norm": 0.4257911443710327, "learning_rate": 1.686766960918397e-06, "loss": 0.6278, "step": 15990 }, { "epoch": 0.6627294956276679, "grad_norm": 0.4683906137943268, "learning_rate": 1.6865597413900287e-06, "loss": 0.6965, "step": 15991 }, { "epoch": 0.6627709395333417, "grad_norm": 0.3924520015716553, "learning_rate": 1.6863525218616602e-06, "loss": 0.6294, "step": 15992 }, { "epoch": 0.6628123834390153, "grad_norm": 0.4638044238090515, "learning_rate": 1.686145302333292e-06, "loss": 0.6873, "step": 15993 }, { "epoch": 0.662853827344689, "grad_norm": 0.4742959439754486, "learning_rate": 1.6859380828049239e-06, "loss": 0.7397, "step": 15994 }, { "epoch": 0.6628952712503626, "grad_norm": 0.3841947019100189, "learning_rate": 1.6857308632765555e-06, "loss": 0.6431, "step": 15995 }, { "epoch": 0.6629367151560364, "grad_norm": 0.4122883081436157, "learning_rate": 1.685523643748187e-06, "loss": 0.6649, "step": 15996 }, { "epoch": 0.66297815906171, "grad_norm": 0.41568756103515625, "learning_rate": 1.6853164242198187e-06, "loss": 0.6763, "step": 15997 }, { "epoch": 0.6630196029673836, "grad_norm": 0.3982507884502411, "learning_rate": 1.6851092046914503e-06, "loss": 0.6459, "step": 15998 }, { "epoch": 0.6630610468730573, "grad_norm": 0.4323634207248688, "learning_rate": 1.6849019851630819e-06, "loss": 0.7063, "step": 15999 }, { "epoch": 0.663102490778731, "grad_norm": 
0.4259624183177948, "learning_rate": 1.6846947656347135e-06, "loss": 0.687, "step": 16000 }, { "epoch": 0.6631439346844047, "grad_norm": 0.4316518306732178, "learning_rate": 1.684487546106345e-06, "loss": 0.6763, "step": 16001 }, { "epoch": 0.6631853785900783, "grad_norm": 0.403502494096756, "learning_rate": 1.684280326577977e-06, "loss": 0.6892, "step": 16002 }, { "epoch": 0.663226822495752, "grad_norm": 0.42343929409980774, "learning_rate": 1.6840731070496087e-06, "loss": 0.656, "step": 16003 }, { "epoch": 0.6632682664014257, "grad_norm": 0.42163679003715515, "learning_rate": 1.6838658875212403e-06, "loss": 0.731, "step": 16004 }, { "epoch": 0.6633097103070993, "grad_norm": 0.381375253200531, "learning_rate": 1.6836586679928719e-06, "loss": 0.6681, "step": 16005 }, { "epoch": 0.663351154212773, "grad_norm": 0.4233826696872711, "learning_rate": 1.6834514484645035e-06, "loss": 0.6768, "step": 16006 }, { "epoch": 0.6633925981184466, "grad_norm": 0.42052093148231506, "learning_rate": 1.683244228936135e-06, "loss": 0.7349, "step": 16007 }, { "epoch": 0.6634340420241204, "grad_norm": 0.43697795271873474, "learning_rate": 1.6830370094077667e-06, "loss": 0.7327, "step": 16008 }, { "epoch": 0.663475485929794, "grad_norm": 0.43963536620140076, "learning_rate": 1.6828297898793983e-06, "loss": 0.6729, "step": 16009 }, { "epoch": 0.6635169298354677, "grad_norm": 0.41651421785354614, "learning_rate": 1.6826225703510298e-06, "loss": 0.6672, "step": 16010 }, { "epoch": 0.6635583737411413, "grad_norm": 0.4033149480819702, "learning_rate": 1.6824153508226619e-06, "loss": 0.6641, "step": 16011 }, { "epoch": 0.6635998176468151, "grad_norm": 0.40479135513305664, "learning_rate": 1.6822081312942935e-06, "loss": 0.6951, "step": 16012 }, { "epoch": 0.6636412615524887, "grad_norm": 0.395394504070282, "learning_rate": 1.682000911765925e-06, "loss": 0.6714, "step": 16013 }, { "epoch": 0.6636827054581623, "grad_norm": 0.41213956475257874, "learning_rate": 1.6817936922375567e-06, "loss": 
0.6383, "step": 16014 }, { "epoch": 0.6637241493638361, "grad_norm": 0.42876607179641724, "learning_rate": 1.6815864727091883e-06, "loss": 0.6636, "step": 16015 }, { "epoch": 0.6637655932695097, "grad_norm": 0.40472525358200073, "learning_rate": 1.6813792531808199e-06, "loss": 0.6509, "step": 16016 }, { "epoch": 0.6638070371751834, "grad_norm": 0.4018842875957489, "learning_rate": 1.6811720336524515e-06, "loss": 0.656, "step": 16017 }, { "epoch": 0.663848481080857, "grad_norm": 0.45379096269607544, "learning_rate": 1.680964814124083e-06, "loss": 0.6395, "step": 16018 }, { "epoch": 0.6638899249865308, "grad_norm": 0.4383485019207001, "learning_rate": 1.680757594595715e-06, "loss": 0.7437, "step": 16019 }, { "epoch": 0.6639313688922044, "grad_norm": 0.43282946944236755, "learning_rate": 1.6805503750673467e-06, "loss": 0.6921, "step": 16020 }, { "epoch": 0.6639728127978781, "grad_norm": 0.4040639102458954, "learning_rate": 1.6803431555389783e-06, "loss": 0.6621, "step": 16021 }, { "epoch": 0.6640142567035517, "grad_norm": 0.411870539188385, "learning_rate": 1.6801359360106099e-06, "loss": 0.6697, "step": 16022 }, { "epoch": 0.6640557006092254, "grad_norm": 0.42130282521247864, "learning_rate": 1.6799287164822415e-06, "loss": 0.6752, "step": 16023 }, { "epoch": 0.6640971445148991, "grad_norm": 0.3882516622543335, "learning_rate": 1.679721496953873e-06, "loss": 0.6571, "step": 16024 }, { "epoch": 0.6641385884205727, "grad_norm": 0.40047913789749146, "learning_rate": 1.6795142774255047e-06, "loss": 0.6327, "step": 16025 }, { "epoch": 0.6641800323262465, "grad_norm": 0.40959978103637695, "learning_rate": 1.6793070578971363e-06, "loss": 0.6689, "step": 16026 }, { "epoch": 0.6642214762319201, "grad_norm": 0.4312475025653839, "learning_rate": 1.6790998383687679e-06, "loss": 0.6646, "step": 16027 }, { "epoch": 0.6642629201375938, "grad_norm": 0.38784950971603394, "learning_rate": 1.6788926188403999e-06, "loss": 0.6848, "step": 16028 }, { "epoch": 0.6643043640432674, 
"grad_norm": 0.3920278251171112, "learning_rate": 1.6786853993120315e-06, "loss": 0.6897, "step": 16029 }, { "epoch": 0.6643458079489412, "grad_norm": 0.4259209930896759, "learning_rate": 1.678478179783663e-06, "loss": 0.7073, "step": 16030 }, { "epoch": 0.6643872518546148, "grad_norm": 0.42099717259407043, "learning_rate": 1.6782709602552947e-06, "loss": 0.7007, "step": 16031 }, { "epoch": 0.6644286957602884, "grad_norm": 0.4049660265445709, "learning_rate": 1.6780637407269263e-06, "loss": 0.7053, "step": 16032 }, { "epoch": 0.6644701396659621, "grad_norm": 0.3923211991786957, "learning_rate": 1.6778565211985579e-06, "loss": 0.6736, "step": 16033 }, { "epoch": 0.6645115835716358, "grad_norm": 0.4169822931289673, "learning_rate": 1.6776493016701895e-06, "loss": 0.6494, "step": 16034 }, { "epoch": 0.6645530274773095, "grad_norm": 0.42023107409477234, "learning_rate": 1.677442082141821e-06, "loss": 0.7021, "step": 16035 }, { "epoch": 0.6645944713829831, "grad_norm": 0.4183354377746582, "learning_rate": 1.6772348626134527e-06, "loss": 0.6786, "step": 16036 }, { "epoch": 0.6646359152886568, "grad_norm": 0.3896489441394806, "learning_rate": 1.6770276430850847e-06, "loss": 0.6848, "step": 16037 }, { "epoch": 0.6646773591943305, "grad_norm": 0.4182365834712982, "learning_rate": 1.6768204235567163e-06, "loss": 0.665, "step": 16038 }, { "epoch": 0.6647188031000042, "grad_norm": 0.4500863254070282, "learning_rate": 1.6766132040283479e-06, "loss": 0.7019, "step": 16039 }, { "epoch": 0.6647602470056778, "grad_norm": 0.40082263946533203, "learning_rate": 1.6764059844999795e-06, "loss": 0.7444, "step": 16040 }, { "epoch": 0.6648016909113514, "grad_norm": 0.4252960681915283, "learning_rate": 1.676198764971611e-06, "loss": 0.6992, "step": 16041 }, { "epoch": 0.6648431348170252, "grad_norm": 0.45064249634742737, "learning_rate": 1.6759915454432427e-06, "loss": 0.6973, "step": 16042 }, { "epoch": 0.6648845787226988, "grad_norm": 0.3930206000804901, "learning_rate": 
1.6757843259148743e-06, "loss": 0.679, "step": 16043 }, { "epoch": 0.6649260226283725, "grad_norm": 0.3674457371234894, "learning_rate": 1.6755771063865059e-06, "loss": 0.6426, "step": 16044 }, { "epoch": 0.6649674665340461, "grad_norm": 0.4044419527053833, "learning_rate": 1.6753698868581379e-06, "loss": 0.6428, "step": 16045 }, { "epoch": 0.6650089104397199, "grad_norm": 0.43544474244117737, "learning_rate": 1.6751626673297695e-06, "loss": 0.7529, "step": 16046 }, { "epoch": 0.6650503543453935, "grad_norm": 0.40776705741882324, "learning_rate": 1.674955447801401e-06, "loss": 0.7068, "step": 16047 }, { "epoch": 0.6650917982510672, "grad_norm": 0.42460039258003235, "learning_rate": 1.6747482282730327e-06, "loss": 0.7185, "step": 16048 }, { "epoch": 0.6651332421567409, "grad_norm": 0.43609604239463806, "learning_rate": 1.6745410087446643e-06, "loss": 0.7278, "step": 16049 }, { "epoch": 0.6651746860624145, "grad_norm": 0.41036221385002136, "learning_rate": 1.6743337892162959e-06, "loss": 0.6145, "step": 16050 }, { "epoch": 0.6652161299680882, "grad_norm": 0.4103671610355377, "learning_rate": 1.6741265696879275e-06, "loss": 0.6915, "step": 16051 }, { "epoch": 0.6652575738737618, "grad_norm": 0.45543572306632996, "learning_rate": 1.673919350159559e-06, "loss": 0.7053, "step": 16052 }, { "epoch": 0.6652990177794356, "grad_norm": 0.41510987281799316, "learning_rate": 1.6737121306311907e-06, "loss": 0.6608, "step": 16053 }, { "epoch": 0.6653404616851092, "grad_norm": 0.4028263986110687, "learning_rate": 1.6735049111028227e-06, "loss": 0.6244, "step": 16054 }, { "epoch": 0.6653819055907829, "grad_norm": 0.43574294447898865, "learning_rate": 1.6732976915744543e-06, "loss": 0.7166, "step": 16055 }, { "epoch": 0.6654233494964565, "grad_norm": 0.40508994460105896, "learning_rate": 1.6730904720460859e-06, "loss": 0.6774, "step": 16056 }, { "epoch": 0.6654647934021302, "grad_norm": 0.4149644374847412, "learning_rate": 1.6728832525177175e-06, "loss": 0.6564, "step": 16057 }, { 
"epoch": 0.6655062373078039, "grad_norm": 0.4047861099243164, "learning_rate": 1.672676032989349e-06, "loss": 0.6609, "step": 16058 }, { "epoch": 0.6655476812134775, "grad_norm": 0.39124172925949097, "learning_rate": 1.6724688134609807e-06, "loss": 0.6243, "step": 16059 }, { "epoch": 0.6655891251191512, "grad_norm": 0.4387703537940979, "learning_rate": 1.6722615939326123e-06, "loss": 0.729, "step": 16060 }, { "epoch": 0.6656305690248249, "grad_norm": 0.39764660596847534, "learning_rate": 1.6720543744042439e-06, "loss": 0.6512, "step": 16061 }, { "epoch": 0.6656720129304986, "grad_norm": 0.4250905215740204, "learning_rate": 1.6718471548758755e-06, "loss": 0.7319, "step": 16062 }, { "epoch": 0.6657134568361722, "grad_norm": 0.42997339367866516, "learning_rate": 1.6716399353475075e-06, "loss": 0.6418, "step": 16063 }, { "epoch": 0.665754900741846, "grad_norm": 0.38097551465034485, "learning_rate": 1.671432715819139e-06, "loss": 0.6078, "step": 16064 }, { "epoch": 0.6657963446475196, "grad_norm": 0.42145785689353943, "learning_rate": 1.6712254962907707e-06, "loss": 0.7346, "step": 16065 }, { "epoch": 0.6658377885531932, "grad_norm": 0.39531630277633667, "learning_rate": 1.6710182767624023e-06, "loss": 0.6514, "step": 16066 }, { "epoch": 0.6658792324588669, "grad_norm": 0.4041268229484558, "learning_rate": 1.6708110572340339e-06, "loss": 0.6733, "step": 16067 }, { "epoch": 0.6659206763645406, "grad_norm": 0.4416980445384979, "learning_rate": 1.6706038377056655e-06, "loss": 0.6494, "step": 16068 }, { "epoch": 0.6659621202702143, "grad_norm": 0.4193469285964966, "learning_rate": 1.670396618177297e-06, "loss": 0.6912, "step": 16069 }, { "epoch": 0.6660035641758879, "grad_norm": 0.4195697605609894, "learning_rate": 1.6701893986489287e-06, "loss": 0.7168, "step": 16070 }, { "epoch": 0.6660450080815616, "grad_norm": 0.41283613443374634, "learning_rate": 1.6699821791205603e-06, "loss": 0.7352, "step": 16071 }, { "epoch": 0.6660864519872353, "grad_norm": 0.4048596918582916, 
"learning_rate": 1.6697749595921923e-06, "loss": 0.6902, "step": 16072 }, { "epoch": 0.666127895892909, "grad_norm": 0.39398664236068726, "learning_rate": 1.6695677400638239e-06, "loss": 0.7106, "step": 16073 }, { "epoch": 0.6661693397985826, "grad_norm": 0.3825371265411377, "learning_rate": 1.6693605205354555e-06, "loss": 0.6458, "step": 16074 }, { "epoch": 0.6662107837042562, "grad_norm": 0.45259997248649597, "learning_rate": 1.669153301007087e-06, "loss": 0.7255, "step": 16075 }, { "epoch": 0.66625222760993, "grad_norm": 0.41928672790527344, "learning_rate": 1.6689460814787187e-06, "loss": 0.6669, "step": 16076 }, { "epoch": 0.6662936715156036, "grad_norm": 0.408802330493927, "learning_rate": 1.6687388619503503e-06, "loss": 0.6746, "step": 16077 }, { "epoch": 0.6663351154212773, "grad_norm": 0.4325581192970276, "learning_rate": 1.6685316424219819e-06, "loss": 0.724, "step": 16078 }, { "epoch": 0.666376559326951, "grad_norm": 0.3957660496234894, "learning_rate": 1.6683244228936135e-06, "loss": 0.7019, "step": 16079 }, { "epoch": 0.6664180032326247, "grad_norm": 0.4133421778678894, "learning_rate": 1.6681172033652455e-06, "loss": 0.6909, "step": 16080 }, { "epoch": 0.6664594471382983, "grad_norm": 0.4407144784927368, "learning_rate": 1.667909983836877e-06, "loss": 0.7502, "step": 16081 }, { "epoch": 0.666500891043972, "grad_norm": 0.4287929832935333, "learning_rate": 1.6677027643085087e-06, "loss": 0.752, "step": 16082 }, { "epoch": 0.6665423349496457, "grad_norm": 0.3846110999584198, "learning_rate": 1.6674955447801403e-06, "loss": 0.6394, "step": 16083 }, { "epoch": 0.6665837788553193, "grad_norm": 0.4137878715991974, "learning_rate": 1.6672883252517719e-06, "loss": 0.6752, "step": 16084 }, { "epoch": 0.666625222760993, "grad_norm": 0.4137948751449585, "learning_rate": 1.6670811057234035e-06, "loss": 0.6696, "step": 16085 }, { "epoch": 0.6666666666666666, "grad_norm": 0.41198962926864624, "learning_rate": 1.666873886195035e-06, "loss": 0.72, "step": 16086 }, { 
"checkpoint_runtime": 206.5332 }, { "epoch": 0.6667081105723404, "grad_norm": 0.3782370984554291, "learning_rate": 1.6666666666666667e-06, "loss": 0.6387, "step": 16087 }, { "epoch": 0.666749554478014, "grad_norm": 0.42298710346221924, "learning_rate": 1.6664594471382983e-06, "loss": 0.6467, "step": 16088 }, { "epoch": 0.6667909983836877, "grad_norm": 0.41521352529525757, "learning_rate": 1.6662522276099303e-06, "loss": 0.6682, "step": 16089 }, { "epoch": 0.6668324422893613, "grad_norm": 0.4355306029319763, "learning_rate": 1.6660450080815619e-06, "loss": 0.7256, "step": 16090 }, { "epoch": 0.6668738861950351, "grad_norm": 0.45535486936569214, "learning_rate": 1.6658377885531935e-06, "loss": 0.6934, "step": 16091 }, { "epoch": 0.6669153301007087, "grad_norm": 0.3881808817386627, "learning_rate": 1.665630569024825e-06, "loss": 0.6741, "step": 16092 }, { "epoch": 0.6669567740063823, "grad_norm": 0.41817671060562134, "learning_rate": 1.6654233494964567e-06, "loss": 0.7003, "step": 16093 }, { "epoch": 0.666998217912056, "grad_norm": 0.4169234335422516, "learning_rate": 1.6652161299680883e-06, "loss": 0.6702, "step": 16094 }, { "epoch": 0.6670396618177297, "grad_norm": 0.3980676531791687, "learning_rate": 1.6650089104397199e-06, "loss": 0.662, "step": 16095 }, { "epoch": 0.6670811057234034, "grad_norm": 0.45285820960998535, "learning_rate": 1.6648016909113515e-06, "loss": 0.6627, "step": 16096 }, { "epoch": 0.667122549629077, "grad_norm": 0.4042116701602936, "learning_rate": 1.664594471382983e-06, "loss": 0.705, "step": 16097 }, { "epoch": 0.6671639935347508, "grad_norm": 0.4135550856590271, "learning_rate": 1.664387251854615e-06, "loss": 0.6527, "step": 16098 }, { "epoch": 0.6672054374404244, "grad_norm": 0.3868297338485718, "learning_rate": 1.6641800323262467e-06, "loss": 0.6555, "step": 16099 }, { "epoch": 0.6672468813460981, "grad_norm": 0.3735204041004181, "learning_rate": 1.6639728127978783e-06, "loss": 0.6294, "step": 16100 }, { "epoch": 0.6672883252517717, 
"grad_norm": 0.42280200123786926, "learning_rate": 1.6637655932695099e-06, "loss": 0.6832, "step": 16101 }, { "epoch": 0.6673297691574454, "grad_norm": 0.46520352363586426, "learning_rate": 1.6635583737411415e-06, "loss": 0.72, "step": 16102 }, { "epoch": 0.6673712130631191, "grad_norm": 0.4183734357357025, "learning_rate": 1.663351154212773e-06, "loss": 0.6389, "step": 16103 }, { "epoch": 0.6674126569687927, "grad_norm": 0.42776432633399963, "learning_rate": 1.6631439346844047e-06, "loss": 0.6499, "step": 16104 }, { "epoch": 0.6674541008744664, "grad_norm": 0.3861633837223053, "learning_rate": 1.6629367151560363e-06, "loss": 0.6812, "step": 16105 }, { "epoch": 0.6674955447801401, "grad_norm": 0.40811535716056824, "learning_rate": 1.6627294956276683e-06, "loss": 0.6699, "step": 16106 }, { "epoch": 0.6675369886858138, "grad_norm": 0.42259132862091064, "learning_rate": 1.6625222760992999e-06, "loss": 0.6531, "step": 16107 }, { "epoch": 0.6675784325914874, "grad_norm": 0.4174489974975586, "learning_rate": 1.6623150565709315e-06, "loss": 0.6829, "step": 16108 }, { "epoch": 0.6676198764971611, "grad_norm": 0.4301969110965729, "learning_rate": 1.662107837042563e-06, "loss": 0.6689, "step": 16109 }, { "epoch": 0.6676613204028348, "grad_norm": 0.4433270990848541, "learning_rate": 1.6619006175141947e-06, "loss": 0.7388, "step": 16110 }, { "epoch": 0.6677027643085084, "grad_norm": 0.4464190900325775, "learning_rate": 1.6616933979858263e-06, "loss": 0.6688, "step": 16111 }, { "epoch": 0.6677442082141821, "grad_norm": 0.44650769233703613, "learning_rate": 1.6614861784574579e-06, "loss": 0.7051, "step": 16112 }, { "epoch": 0.6677856521198557, "grad_norm": 0.41957297921180725, "learning_rate": 1.6612789589290895e-06, "loss": 0.6997, "step": 16113 }, { "epoch": 0.6678270960255295, "grad_norm": 0.43037837743759155, "learning_rate": 1.661071739400721e-06, "loss": 0.6857, "step": 16114 }, { "epoch": 0.6678685399312031, "grad_norm": 0.435438334941864, "learning_rate": 
1.660864519872353e-06, "loss": 0.7158, "step": 16115 }, { "epoch": 0.6679099838368768, "grad_norm": 0.4308931529521942, "learning_rate": 1.6606573003439847e-06, "loss": 0.6692, "step": 16116 }, { "epoch": 0.6679514277425505, "grad_norm": 0.42331191897392273, "learning_rate": 1.6604500808156163e-06, "loss": 0.6602, "step": 16117 }, { "epoch": 0.6679928716482241, "grad_norm": 0.4257618188858032, "learning_rate": 1.6602428612872479e-06, "loss": 0.7251, "step": 16118 }, { "epoch": 0.6680343155538978, "grad_norm": 0.41592153906822205, "learning_rate": 1.6600356417588795e-06, "loss": 0.6929, "step": 16119 }, { "epoch": 0.6680757594595714, "grad_norm": 0.45938268303871155, "learning_rate": 1.659828422230511e-06, "loss": 0.7739, "step": 16120 }, { "epoch": 0.6681172033652452, "grad_norm": 0.44558459520339966, "learning_rate": 1.6596212027021427e-06, "loss": 0.6475, "step": 16121 }, { "epoch": 0.6681586472709188, "grad_norm": 0.39787960052490234, "learning_rate": 1.6594139831737743e-06, "loss": 0.6838, "step": 16122 }, { "epoch": 0.6682000911765925, "grad_norm": 0.4259253740310669, "learning_rate": 1.6592067636454059e-06, "loss": 0.7268, "step": 16123 }, { "epoch": 0.6682415350822661, "grad_norm": 0.40844789147377014, "learning_rate": 1.6589995441170379e-06, "loss": 0.6992, "step": 16124 }, { "epoch": 0.6682829789879399, "grad_norm": 0.4130089581012726, "learning_rate": 1.6587923245886695e-06, "loss": 0.6613, "step": 16125 }, { "epoch": 0.6683244228936135, "grad_norm": 0.40782657265663147, "learning_rate": 1.658585105060301e-06, "loss": 0.7076, "step": 16126 }, { "epoch": 0.6683658667992871, "grad_norm": 0.3920920491218567, "learning_rate": 1.6583778855319327e-06, "loss": 0.6813, "step": 16127 }, { "epoch": 0.6684073107049608, "grad_norm": 0.40434354543685913, "learning_rate": 1.6581706660035643e-06, "loss": 0.6547, "step": 16128 }, { "epoch": 0.6684487546106345, "grad_norm": 0.38019701838493347, "learning_rate": 1.6579634464751959e-06, "loss": 0.587, "step": 16129 }, { 
"epoch": 0.6684901985163082, "grad_norm": 0.4412388801574707, "learning_rate": 1.6577562269468275e-06, "loss": 0.6936, "step": 16130 }, { "epoch": 0.6685316424219818, "grad_norm": 0.3654560148715973, "learning_rate": 1.657549007418459e-06, "loss": 0.6685, "step": 16131 }, { "epoch": 0.6685730863276556, "grad_norm": 0.4146740734577179, "learning_rate": 1.657341787890091e-06, "loss": 0.6553, "step": 16132 }, { "epoch": 0.6686145302333292, "grad_norm": 0.4086534082889557, "learning_rate": 1.6571345683617227e-06, "loss": 0.6763, "step": 16133 }, { "epoch": 0.6686559741390029, "grad_norm": 0.43128788471221924, "learning_rate": 1.6569273488333543e-06, "loss": 0.6589, "step": 16134 }, { "epoch": 0.6686974180446765, "grad_norm": 0.4087723195552826, "learning_rate": 1.6567201293049859e-06, "loss": 0.6735, "step": 16135 }, { "epoch": 0.6687388619503501, "grad_norm": 0.43764543533325195, "learning_rate": 1.6565129097766175e-06, "loss": 0.7008, "step": 16136 }, { "epoch": 0.6687803058560239, "grad_norm": 0.4277664124965668, "learning_rate": 1.656305690248249e-06, "loss": 0.6943, "step": 16137 }, { "epoch": 0.6688217497616975, "grad_norm": 0.38755127787590027, "learning_rate": 1.6560984707198807e-06, "loss": 0.6885, "step": 16138 }, { "epoch": 0.6688631936673712, "grad_norm": 0.4323432445526123, "learning_rate": 1.6558912511915123e-06, "loss": 0.708, "step": 16139 }, { "epoch": 0.6689046375730449, "grad_norm": 0.390971302986145, "learning_rate": 1.6556840316631439e-06, "loss": 0.6669, "step": 16140 }, { "epoch": 0.6689460814787186, "grad_norm": 0.39258337020874023, "learning_rate": 1.6554768121347759e-06, "loss": 0.6962, "step": 16141 }, { "epoch": 0.6689875253843922, "grad_norm": 0.39720359444618225, "learning_rate": 1.6552695926064075e-06, "loss": 0.677, "step": 16142 }, { "epoch": 0.669028969290066, "grad_norm": 0.42048683762550354, "learning_rate": 1.655062373078039e-06, "loss": 0.6591, "step": 16143 }, { "epoch": 0.6690704131957396, "grad_norm": 0.3739628493785858, 
"learning_rate": 1.6548551535496707e-06, "loss": 0.6371, "step": 16144 }, { "epoch": 0.6691118571014132, "grad_norm": 0.4006481468677521, "learning_rate": 1.6546479340213023e-06, "loss": 0.6851, "step": 16145 }, { "epoch": 0.6691533010070869, "grad_norm": 0.3969648778438568, "learning_rate": 1.6544407144929339e-06, "loss": 0.624, "step": 16146 }, { "epoch": 0.6691947449127605, "grad_norm": 0.399095743894577, "learning_rate": 1.6542334949645655e-06, "loss": 0.6572, "step": 16147 }, { "epoch": 0.6692361888184343, "grad_norm": 0.4084377884864807, "learning_rate": 1.654026275436197e-06, "loss": 0.6627, "step": 16148 }, { "epoch": 0.6692776327241079, "grad_norm": 0.408769428730011, "learning_rate": 1.6538190559078287e-06, "loss": 0.6638, "step": 16149 }, { "epoch": 0.6693190766297816, "grad_norm": 0.3968919515609741, "learning_rate": 1.6536118363794607e-06, "loss": 0.6172, "step": 16150 }, { "epoch": 0.6693605205354553, "grad_norm": 0.40839144587516785, "learning_rate": 1.6534046168510923e-06, "loss": 0.6583, "step": 16151 }, { "epoch": 0.669401964441129, "grad_norm": 0.42067572474479675, "learning_rate": 1.6531973973227239e-06, "loss": 0.6727, "step": 16152 }, { "epoch": 0.6694434083468026, "grad_norm": 0.5413890480995178, "learning_rate": 1.6529901777943555e-06, "loss": 0.7273, "step": 16153 }, { "epoch": 0.6694848522524762, "grad_norm": 0.40024593472480774, "learning_rate": 1.652782958265987e-06, "loss": 0.661, "step": 16154 }, { "epoch": 0.66952629615815, "grad_norm": 0.4183686673641205, "learning_rate": 1.6525757387376187e-06, "loss": 0.6366, "step": 16155 }, { "epoch": 0.6695677400638236, "grad_norm": 0.37812820076942444, "learning_rate": 1.6523685192092503e-06, "loss": 0.6239, "step": 16156 }, { "epoch": 0.6696091839694973, "grad_norm": 0.4074612557888031, "learning_rate": 1.6521612996808819e-06, "loss": 0.6366, "step": 16157 }, { "epoch": 0.6696506278751709, "grad_norm": 0.4239770770072937, "learning_rate": 1.6519540801525137e-06, "loss": 0.6835, "step": 16158 
}, { "epoch": 0.6696920717808447, "grad_norm": 0.4043435752391815, "learning_rate": 1.6517468606241455e-06, "loss": 0.6848, "step": 16159 }, { "epoch": 0.6697335156865183, "grad_norm": 0.433847039937973, "learning_rate": 1.651539641095777e-06, "loss": 0.626, "step": 16160 }, { "epoch": 0.669774959592192, "grad_norm": 0.4534812569618225, "learning_rate": 1.6513324215674087e-06, "loss": 0.7311, "step": 16161 }, { "epoch": 0.6698164034978656, "grad_norm": 0.4000806510448456, "learning_rate": 1.6511252020390403e-06, "loss": 0.7021, "step": 16162 }, { "epoch": 0.6698578474035393, "grad_norm": 0.4079470634460449, "learning_rate": 1.6509179825106719e-06, "loss": 0.6663, "step": 16163 }, { "epoch": 0.669899291309213, "grad_norm": 0.44350677728652954, "learning_rate": 1.6507107629823035e-06, "loss": 0.6385, "step": 16164 }, { "epoch": 0.6699407352148866, "grad_norm": 0.3970186710357666, "learning_rate": 1.650503543453935e-06, "loss": 0.6418, "step": 16165 }, { "epoch": 0.6699821791205604, "grad_norm": 0.44802772998809814, "learning_rate": 1.6502963239255667e-06, "loss": 0.7186, "step": 16166 }, { "epoch": 0.670023623026234, "grad_norm": 0.37707698345184326, "learning_rate": 1.6500891043971987e-06, "loss": 0.6653, "step": 16167 }, { "epoch": 0.6700650669319077, "grad_norm": 0.4406111240386963, "learning_rate": 1.6498818848688303e-06, "loss": 0.7322, "step": 16168 }, { "epoch": 0.6701065108375813, "grad_norm": 0.3813234865665436, "learning_rate": 1.6496746653404619e-06, "loss": 0.7019, "step": 16169 }, { "epoch": 0.6701479547432551, "grad_norm": 0.4211023151874542, "learning_rate": 1.6494674458120935e-06, "loss": 0.6459, "step": 16170 }, { "epoch": 0.6701893986489287, "grad_norm": 0.43281981348991394, "learning_rate": 1.649260226283725e-06, "loss": 0.6621, "step": 16171 }, { "epoch": 0.6702308425546023, "grad_norm": 0.41704243421554565, "learning_rate": 1.6490530067553567e-06, "loss": 0.7269, "step": 16172 }, { "epoch": 0.670272286460276, "grad_norm": 0.40744224190711975, 
"learning_rate": 1.6488457872269883e-06, "loss": 0.6689, "step": 16173 }, { "epoch": 0.6703137303659497, "grad_norm": 0.4072561264038086, "learning_rate": 1.6486385676986199e-06, "loss": 0.6731, "step": 16174 }, { "epoch": 0.6703551742716234, "grad_norm": 0.3993113338947296, "learning_rate": 1.6484313481702517e-06, "loss": 0.6678, "step": 16175 }, { "epoch": 0.670396618177297, "grad_norm": 0.40756484866142273, "learning_rate": 1.6482241286418835e-06, "loss": 0.7014, "step": 16176 }, { "epoch": 0.6704380620829707, "grad_norm": 0.3986349403858185, "learning_rate": 1.648016909113515e-06, "loss": 0.6455, "step": 16177 }, { "epoch": 0.6704795059886444, "grad_norm": 0.4195970594882965, "learning_rate": 1.6478096895851467e-06, "loss": 0.7222, "step": 16178 }, { "epoch": 0.670520949894318, "grad_norm": 0.4104878008365631, "learning_rate": 1.6476024700567783e-06, "loss": 0.6975, "step": 16179 }, { "epoch": 0.6705623937999917, "grad_norm": 0.4368089735507965, "learning_rate": 1.6473952505284099e-06, "loss": 0.6934, "step": 16180 }, { "epoch": 0.6706038377056653, "grad_norm": 0.36301952600479126, "learning_rate": 1.6471880310000415e-06, "loss": 0.6632, "step": 16181 }, { "epoch": 0.6706452816113391, "grad_norm": 0.4034351110458374, "learning_rate": 1.646980811471673e-06, "loss": 0.5869, "step": 16182 }, { "epoch": 0.6706867255170127, "grad_norm": 0.4272559583187103, "learning_rate": 1.6467735919433047e-06, "loss": 0.7006, "step": 16183 }, { "epoch": 0.6707281694226864, "grad_norm": 0.38148510456085205, "learning_rate": 1.6465663724149365e-06, "loss": 0.6191, "step": 16184 }, { "epoch": 0.67076961332836, "grad_norm": 0.43508726358413696, "learning_rate": 1.6463591528865683e-06, "loss": 0.6609, "step": 16185 }, { "epoch": 0.6708110572340338, "grad_norm": 0.4198850691318512, "learning_rate": 1.6461519333581999e-06, "loss": 0.6925, "step": 16186 }, { "epoch": 0.6708525011397074, "grad_norm": 0.38852623105049133, "learning_rate": 1.6459447138298315e-06, "loss": 0.6342, "step": 
16187 }, { "epoch": 0.670893945045381, "grad_norm": 0.4669853746891022, "learning_rate": 1.645737494301463e-06, "loss": 0.7495, "step": 16188 }, { "epoch": 0.6709353889510548, "grad_norm": 0.3950718343257904, "learning_rate": 1.6455302747730947e-06, "loss": 0.6306, "step": 16189 }, { "epoch": 0.6709768328567284, "grad_norm": 0.4189806282520294, "learning_rate": 1.6453230552447263e-06, "loss": 0.6941, "step": 16190 }, { "epoch": 0.6710182767624021, "grad_norm": 0.4150543510913849, "learning_rate": 1.6451158357163579e-06, "loss": 0.6221, "step": 16191 }, { "epoch": 0.6710597206680757, "grad_norm": 0.40631791949272156, "learning_rate": 1.6449086161879897e-06, "loss": 0.6909, "step": 16192 }, { "epoch": 0.6711011645737495, "grad_norm": 0.4215424656867981, "learning_rate": 1.6447013966596215e-06, "loss": 0.7144, "step": 16193 }, { "epoch": 0.6711426084794231, "grad_norm": 0.39717501401901245, "learning_rate": 1.644494177131253e-06, "loss": 0.6669, "step": 16194 }, { "epoch": 0.6711840523850968, "grad_norm": 0.3882301449775696, "learning_rate": 1.6442869576028847e-06, "loss": 0.6549, "step": 16195 }, { "epoch": 0.6712254962907704, "grad_norm": 0.40179410576820374, "learning_rate": 1.6440797380745163e-06, "loss": 0.6177, "step": 16196 }, { "epoch": 0.6712669401964441, "grad_norm": 0.4116314947605133, "learning_rate": 1.6438725185461479e-06, "loss": 0.6688, "step": 16197 }, { "epoch": 0.6713083841021178, "grad_norm": 0.40976348519325256, "learning_rate": 1.6436652990177795e-06, "loss": 0.6497, "step": 16198 }, { "epoch": 0.6713498280077914, "grad_norm": 0.4308205246925354, "learning_rate": 1.643458079489411e-06, "loss": 0.6731, "step": 16199 }, { "epoch": 0.6713912719134651, "grad_norm": 0.42541491985321045, "learning_rate": 1.6432508599610427e-06, "loss": 0.688, "step": 16200 }, { "epoch": 0.6714327158191388, "grad_norm": 0.405973881483078, "learning_rate": 1.6430436404326745e-06, "loss": 0.6946, "step": 16201 }, { "epoch": 0.6714741597248125, "grad_norm": 
0.41808444261550903, "learning_rate": 1.6428364209043063e-06, "loss": 0.6643, "step": 16202 }, { "epoch": 0.6715156036304861, "grad_norm": 0.42289915680885315, "learning_rate": 1.6426292013759379e-06, "loss": 0.6948, "step": 16203 }, { "epoch": 0.6715570475361599, "grad_norm": 0.38629665970802307, "learning_rate": 1.6424219818475695e-06, "loss": 0.6179, "step": 16204 }, { "epoch": 0.6715984914418335, "grad_norm": 0.4205452799797058, "learning_rate": 1.642214762319201e-06, "loss": 0.6248, "step": 16205 }, { "epoch": 0.6716399353475071, "grad_norm": 0.3938872218132019, "learning_rate": 1.6420075427908327e-06, "loss": 0.6367, "step": 16206 }, { "epoch": 0.6716813792531808, "grad_norm": 0.43419158458709717, "learning_rate": 1.6418003232624643e-06, "loss": 0.6558, "step": 16207 }, { "epoch": 0.6717228231588545, "grad_norm": 0.4543806314468384, "learning_rate": 1.6415931037340959e-06, "loss": 0.7085, "step": 16208 }, { "epoch": 0.6717642670645282, "grad_norm": 0.4216350317001343, "learning_rate": 1.6413858842057277e-06, "loss": 0.6613, "step": 16209 }, { "epoch": 0.6718057109702018, "grad_norm": 0.44536542892456055, "learning_rate": 1.6411786646773593e-06, "loss": 0.6895, "step": 16210 }, { "epoch": 0.6718471548758755, "grad_norm": 0.4421224296092987, "learning_rate": 1.640971445148991e-06, "loss": 0.678, "step": 16211 }, { "epoch": 0.6718885987815492, "grad_norm": 0.41273733973503113, "learning_rate": 1.6407642256206227e-06, "loss": 0.6544, "step": 16212 }, { "epoch": 0.6719300426872229, "grad_norm": 0.40923386812210083, "learning_rate": 1.6405570060922543e-06, "loss": 0.6619, "step": 16213 }, { "epoch": 0.6719714865928965, "grad_norm": 0.4133892059326172, "learning_rate": 1.6403497865638859e-06, "loss": 0.6826, "step": 16214 }, { "epoch": 0.6720129304985701, "grad_norm": 0.3920084536075592, "learning_rate": 1.6401425670355175e-06, "loss": 0.6458, "step": 16215 }, { "epoch": 0.6720543744042439, "grad_norm": 0.40323612093925476, "learning_rate": 1.639935347507149e-06, 
"loss": 0.635, "step": 16216 }, { "epoch": 0.6720958183099175, "grad_norm": 0.4171929657459259, "learning_rate": 1.6397281279787807e-06, "loss": 0.6707, "step": 16217 }, { "epoch": 0.6721372622155912, "grad_norm": 0.44549378752708435, "learning_rate": 1.6395209084504125e-06, "loss": 0.6748, "step": 16218 }, { "epoch": 0.6721787061212648, "grad_norm": 0.4428058862686157, "learning_rate": 1.639313688922044e-06, "loss": 0.683, "step": 16219 }, { "epoch": 0.6722201500269386, "grad_norm": 0.4401097297668457, "learning_rate": 1.6391064693936759e-06, "loss": 0.7292, "step": 16220 }, { "epoch": 0.6722615939326122, "grad_norm": 0.4201399087905884, "learning_rate": 1.6388992498653075e-06, "loss": 0.7075, "step": 16221 }, { "epoch": 0.6723030378382859, "grad_norm": 0.3983350396156311, "learning_rate": 1.638692030336939e-06, "loss": 0.6359, "step": 16222 }, { "epoch": 0.6723444817439596, "grad_norm": 0.4110371470451355, "learning_rate": 1.6384848108085707e-06, "loss": 0.6111, "step": 16223 }, { "epoch": 0.6723859256496332, "grad_norm": 0.4090495705604553, "learning_rate": 1.6382775912802023e-06, "loss": 0.6917, "step": 16224 }, { "epoch": 0.6724273695553069, "grad_norm": 0.4375980794429779, "learning_rate": 1.6380703717518339e-06, "loss": 0.656, "step": 16225 }, { "epoch": 0.6724688134609805, "grad_norm": 0.3786022663116455, "learning_rate": 1.6378631522234657e-06, "loss": 0.5862, "step": 16226 }, { "epoch": 0.6725102573666543, "grad_norm": 0.40136271715164185, "learning_rate": 1.6376559326950973e-06, "loss": 0.6282, "step": 16227 }, { "epoch": 0.6725517012723279, "grad_norm": 0.3963063061237335, "learning_rate": 1.637448713166729e-06, "loss": 0.6832, "step": 16228 }, { "epoch": 0.6725931451780016, "grad_norm": 0.44079920649528503, "learning_rate": 1.6372414936383607e-06, "loss": 0.6821, "step": 16229 }, { "epoch": 0.6726345890836752, "grad_norm": 0.38565924763679504, "learning_rate": 1.6370342741099923e-06, "loss": 0.6611, "step": 16230 }, { "epoch": 0.672676032989349, 
"grad_norm": 0.43100571632385254, "learning_rate": 1.6368270545816239e-06, "loss": 0.6582, "step": 16231 }, { "epoch": 0.6727174768950226, "grad_norm": 0.4247682988643646, "learning_rate": 1.6366198350532555e-06, "loss": 0.6597, "step": 16232 }, { "epoch": 0.6727589208006962, "grad_norm": 0.42204317450523376, "learning_rate": 1.636412615524887e-06, "loss": 0.6775, "step": 16233 }, { "epoch": 0.67280036470637, "grad_norm": 0.45549553632736206, "learning_rate": 1.6362053959965187e-06, "loss": 0.707, "step": 16234 }, { "epoch": 0.6728418086120436, "grad_norm": 0.41855502128601074, "learning_rate": 1.6359981764681505e-06, "loss": 0.6909, "step": 16235 }, { "epoch": 0.6728832525177173, "grad_norm": 0.4504358768463135, "learning_rate": 1.635790956939782e-06, "loss": 0.6772, "step": 16236 }, { "epoch": 0.6729246964233909, "grad_norm": 0.4598044157028198, "learning_rate": 1.635583737411414e-06, "loss": 0.6921, "step": 16237 }, { "epoch": 0.6729661403290647, "grad_norm": 0.4012659788131714, "learning_rate": 1.6353765178830455e-06, "loss": 0.6499, "step": 16238 }, { "epoch": 0.6730075842347383, "grad_norm": 0.42355167865753174, "learning_rate": 1.635169298354677e-06, "loss": 0.6709, "step": 16239 }, { "epoch": 0.6730490281404119, "grad_norm": 0.4301091730594635, "learning_rate": 1.6349620788263087e-06, "loss": 0.6774, "step": 16240 }, { "epoch": 0.6730904720460856, "grad_norm": 0.4485968351364136, "learning_rate": 1.6347548592979403e-06, "loss": 0.7341, "step": 16241 }, { "epoch": 0.6731319159517593, "grad_norm": 0.4498499631881714, "learning_rate": 1.6345476397695719e-06, "loss": 0.7295, "step": 16242 }, { "epoch": 0.673173359857433, "grad_norm": 0.4515078067779541, "learning_rate": 1.6343404202412037e-06, "loss": 0.6929, "step": 16243 }, { "epoch": 0.6732148037631066, "grad_norm": 0.40700146555900574, "learning_rate": 1.6341332007128353e-06, "loss": 0.6604, "step": 16244 }, { "epoch": 0.6732562476687803, "grad_norm": 0.44087740778923035, "learning_rate": 
1.6339259811844669e-06, "loss": 0.6985, "step": 16245 }, { "epoch": 0.673297691574454, "grad_norm": 0.42361631989479065, "learning_rate": 1.6337187616560987e-06, "loss": 0.6396, "step": 16246 }, { "epoch": 0.6733391354801277, "grad_norm": 0.4367403984069824, "learning_rate": 1.6335115421277303e-06, "loss": 0.6477, "step": 16247 }, { "epoch": 0.6733805793858013, "grad_norm": 0.38674861192703247, "learning_rate": 1.6333043225993619e-06, "loss": 0.6917, "step": 16248 }, { "epoch": 0.6734220232914749, "grad_norm": 0.3819606304168701, "learning_rate": 1.6330971030709935e-06, "loss": 0.6569, "step": 16249 }, { "epoch": 0.6734634671971487, "grad_norm": 0.413939505815506, "learning_rate": 1.632889883542625e-06, "loss": 0.666, "step": 16250 }, { "epoch": 0.6735049111028223, "grad_norm": 0.4057898223400116, "learning_rate": 1.632682664014257e-06, "loss": 0.6681, "step": 16251 }, { "epoch": 0.673546355008496, "grad_norm": 0.4033510982990265, "learning_rate": 1.6324754444858885e-06, "loss": 0.6508, "step": 16252 }, { "epoch": 0.6735877989141696, "grad_norm": 0.3945682942867279, "learning_rate": 1.63226822495752e-06, "loss": 0.6672, "step": 16253 }, { "epoch": 0.6736292428198434, "grad_norm": 0.44846194982528687, "learning_rate": 1.632061005429152e-06, "loss": 0.7268, "step": 16254 }, { "epoch": 0.673670686725517, "grad_norm": 0.41785702109336853, "learning_rate": 1.6318537859007835e-06, "loss": 0.6791, "step": 16255 }, { "epoch": 0.6737121306311907, "grad_norm": 0.4068128764629364, "learning_rate": 1.631646566372415e-06, "loss": 0.7002, "step": 16256 }, { "epoch": 0.6737535745368644, "grad_norm": 0.39389556646347046, "learning_rate": 1.6314393468440467e-06, "loss": 0.6519, "step": 16257 }, { "epoch": 0.673795018442538, "grad_norm": 0.3985666334629059, "learning_rate": 1.6312321273156783e-06, "loss": 0.6454, "step": 16258 }, { "epoch": 0.6738364623482117, "grad_norm": 0.4097382724285126, "learning_rate": 1.6310249077873099e-06, "loss": 0.6548, "step": 16259 }, { "epoch": 
0.6738779062538853, "grad_norm": 0.3841261565685272, "learning_rate": 1.6308176882589417e-06, "loss": 0.6792, "step": 16260 }, { "epoch": 0.6739193501595591, "grad_norm": 0.4110111892223358, "learning_rate": 1.6306104687305733e-06, "loss": 0.6952, "step": 16261 }, { "epoch": 0.6739607940652327, "grad_norm": 0.3988364338874817, "learning_rate": 1.6304032492022049e-06, "loss": 0.6238, "step": 16262 }, { "epoch": 0.6740022379709064, "grad_norm": 0.4527319669723511, "learning_rate": 1.6301960296738367e-06, "loss": 0.7438, "step": 16263 }, { "epoch": 0.67404368187658, "grad_norm": 0.44866862893104553, "learning_rate": 1.6299888101454683e-06, "loss": 0.6898, "step": 16264 }, { "epoch": 0.6740851257822538, "grad_norm": 0.4020460844039917, "learning_rate": 1.6297815906171e-06, "loss": 0.6614, "step": 16265 }, { "epoch": 0.6741265696879274, "grad_norm": 0.40324339270591736, "learning_rate": 1.6295743710887315e-06, "loss": 0.6399, "step": 16266 }, { "epoch": 0.674168013593601, "grad_norm": 0.3819420039653778, "learning_rate": 1.629367151560363e-06, "loss": 0.6808, "step": 16267 }, { "epoch": 0.6742094574992747, "grad_norm": 0.4149378836154938, "learning_rate": 1.629159932031995e-06, "loss": 0.7164, "step": 16268 }, { "epoch": 0.6742509014049484, "grad_norm": 0.40158915519714355, "learning_rate": 1.6289527125036265e-06, "loss": 0.7136, "step": 16269 }, { "epoch": 0.6742923453106221, "grad_norm": 0.3849627673625946, "learning_rate": 1.628745492975258e-06, "loss": 0.6185, "step": 16270 }, { "epoch": 0.6743337892162957, "grad_norm": 0.45467665791511536, "learning_rate": 1.6285382734468897e-06, "loss": 0.7211, "step": 16271 }, { "epoch": 0.6743752331219695, "grad_norm": 0.4521908760070801, "learning_rate": 1.6283310539185215e-06, "loss": 0.7085, "step": 16272 }, { "epoch": 0.6744166770276431, "grad_norm": 0.4169001281261444, "learning_rate": 1.628123834390153e-06, "loss": 0.6376, "step": 16273 }, { "epoch": 0.6744581209333168, "grad_norm": 0.44982749223709106, "learning_rate": 
1.6279166148617847e-06, "loss": 0.7058, "step": 16274 }, { "epoch": 0.6744995648389904, "grad_norm": 0.4299849569797516, "learning_rate": 1.6277093953334163e-06, "loss": 0.7378, "step": 16275 }, { "epoch": 0.674541008744664, "grad_norm": 0.418659508228302, "learning_rate": 1.6275021758050479e-06, "loss": 0.7139, "step": 16276 }, { "epoch": 0.6745824526503378, "grad_norm": 0.4244844615459442, "learning_rate": 1.6272949562766797e-06, "loss": 0.7012, "step": 16277 }, { "epoch": 0.6746238965560114, "grad_norm": 0.4675801694393158, "learning_rate": 1.6270877367483113e-06, "loss": 0.7019, "step": 16278 }, { "epoch": 0.6746653404616851, "grad_norm": 0.39383813738822937, "learning_rate": 1.6268805172199429e-06, "loss": 0.5985, "step": 16279 }, { "epoch": 0.6747067843673588, "grad_norm": 0.42497989535331726, "learning_rate": 1.6266732976915745e-06, "loss": 0.7156, "step": 16280 }, { "epoch": 0.6747482282730325, "grad_norm": 0.4233720600605011, "learning_rate": 1.6264660781632063e-06, "loss": 0.6801, "step": 16281 }, { "epoch": 0.6747896721787061, "grad_norm": 0.4023624658584595, "learning_rate": 1.626258858634838e-06, "loss": 0.637, "step": 16282 }, { "epoch": 0.6748311160843798, "grad_norm": 0.3951246738433838, "learning_rate": 1.6260516391064695e-06, "loss": 0.6704, "step": 16283 }, { "epoch": 0.6748725599900535, "grad_norm": 0.3980317711830139, "learning_rate": 1.625844419578101e-06, "loss": 0.6638, "step": 16284 }, { "epoch": 0.6749140038957271, "grad_norm": 0.43908804655075073, "learning_rate": 1.625637200049733e-06, "loss": 0.6793, "step": 16285 }, { "epoch": 0.6749554478014008, "grad_norm": 0.4163043797016144, "learning_rate": 1.6254299805213645e-06, "loss": 0.7032, "step": 16286 }, { "epoch": 0.6749968917070744, "grad_norm": 0.4263400733470917, "learning_rate": 1.625222760992996e-06, "loss": 0.6843, "step": 16287 }, { "epoch": 0.6750383356127482, "grad_norm": 0.37725648283958435, "learning_rate": 1.6250155414646277e-06, "loss": 0.6519, "step": 16288 }, { "epoch": 
0.6750797795184218, "grad_norm": 0.39993736147880554, "learning_rate": 1.6248083219362595e-06, "loss": 0.686, "step": 16289 }, { "epoch": 0.6751212234240955, "grad_norm": 0.39305904507637024, "learning_rate": 1.624601102407891e-06, "loss": 0.6774, "step": 16290 }, { "epoch": 0.6751626673297692, "grad_norm": 0.4104795753955841, "learning_rate": 1.6243938828795227e-06, "loss": 0.7332, "step": 16291 }, { "epoch": 0.6752041112354429, "grad_norm": 0.4128848612308502, "learning_rate": 1.6241866633511543e-06, "loss": 0.625, "step": 16292 }, { "epoch": 0.6752455551411165, "grad_norm": 0.4621288478374481, "learning_rate": 1.6239794438227859e-06, "loss": 0.6459, "step": 16293 }, { "epoch": 0.6752869990467901, "grad_norm": 0.38027289509773254, "learning_rate": 1.6237722242944177e-06, "loss": 0.6458, "step": 16294 }, { "epoch": 0.6753284429524639, "grad_norm": 0.4220151901245117, "learning_rate": 1.6235650047660493e-06, "loss": 0.6895, "step": 16295 }, { "epoch": 0.6753698868581375, "grad_norm": 0.417141318321228, "learning_rate": 1.623357785237681e-06, "loss": 0.7129, "step": 16296 }, { "epoch": 0.6754113307638112, "grad_norm": 0.4627132713794708, "learning_rate": 1.6231505657093125e-06, "loss": 0.7146, "step": 16297 }, { "epoch": 0.6754527746694848, "grad_norm": 0.45222043991088867, "learning_rate": 1.6229433461809443e-06, "loss": 0.7344, "step": 16298 }, { "epoch": 0.6754942185751586, "grad_norm": 0.4135313332080841, "learning_rate": 1.622736126652576e-06, "loss": 0.6919, "step": 16299 }, { "epoch": 0.6755356624808322, "grad_norm": 0.40488457679748535, "learning_rate": 1.6225289071242075e-06, "loss": 0.6931, "step": 16300 }, { "epoch": 0.6755771063865058, "grad_norm": 0.4216037690639496, "learning_rate": 1.622321687595839e-06, "loss": 0.7334, "step": 16301 }, { "epoch": 0.6756185502921795, "grad_norm": 0.41567814350128174, "learning_rate": 1.622114468067471e-06, "loss": 0.691, "step": 16302 }, { "epoch": 0.6756599941978532, "grad_norm": 0.41751399636268616, "learning_rate": 
1.6219072485391025e-06, "loss": 0.6677, "step": 16303 }, { "epoch": 0.6757014381035269, "grad_norm": 0.44869473576545715, "learning_rate": 1.621700029010734e-06, "loss": 0.6925, "step": 16304 }, { "epoch": 0.6757428820092005, "grad_norm": 0.4266965687274933, "learning_rate": 1.6214928094823657e-06, "loss": 0.7043, "step": 16305 }, { "epoch": 0.6757843259148743, "grad_norm": 0.40986594557762146, "learning_rate": 1.6212855899539973e-06, "loss": 0.6956, "step": 16306 }, { "epoch": 0.6758257698205479, "grad_norm": 0.47800666093826294, "learning_rate": 1.621078370425629e-06, "loss": 0.6982, "step": 16307 }, { "epoch": 0.6758672137262216, "grad_norm": 0.4086482524871826, "learning_rate": 1.6208711508972607e-06, "loss": 0.7305, "step": 16308 }, { "epoch": 0.6759086576318952, "grad_norm": 0.44619372487068176, "learning_rate": 1.6206639313688923e-06, "loss": 0.6656, "step": 16309 }, { "epoch": 0.6759501015375688, "grad_norm": 0.4284505248069763, "learning_rate": 1.620456711840524e-06, "loss": 0.6781, "step": 16310 }, { "epoch": 0.6759915454432426, "grad_norm": 0.4361781179904938, "learning_rate": 1.6202494923121557e-06, "loss": 0.6724, "step": 16311 }, { "epoch": 0.6760329893489162, "grad_norm": 0.44303861260414124, "learning_rate": 1.6200422727837873e-06, "loss": 0.6614, "step": 16312 }, { "epoch": 0.6760744332545899, "grad_norm": 0.4088652431964874, "learning_rate": 1.619835053255419e-06, "loss": 0.6334, "step": 16313 }, { "epoch": 0.6761158771602636, "grad_norm": 0.4180946946144104, "learning_rate": 1.6196278337270505e-06, "loss": 0.6797, "step": 16314 }, { "epoch": 0.6761573210659373, "grad_norm": 0.3916303217411041, "learning_rate": 1.6194206141986823e-06, "loss": 0.6414, "step": 16315 }, { "epoch": 0.6761987649716109, "grad_norm": 0.4205370545387268, "learning_rate": 1.619213394670314e-06, "loss": 0.6371, "step": 16316 }, { "epoch": 0.6762402088772846, "grad_norm": 0.4024277627468109, "learning_rate": 1.6190061751419455e-06, "loss": 0.6995, "step": 16317 }, { "epoch": 
0.6762816527829583, "grad_norm": 0.4250434339046478, "learning_rate": 1.618798955613577e-06, "loss": 0.6921, "step": 16318 }, { "epoch": 0.6763230966886319, "grad_norm": 0.41153907775878906, "learning_rate": 1.618591736085209e-06, "loss": 0.6835, "step": 16319 }, { "epoch": 0.6763645405943056, "grad_norm": 0.4041583240032196, "learning_rate": 1.6183845165568405e-06, "loss": 0.6863, "step": 16320 }, { "epoch": 0.6764059844999792, "grad_norm": 0.4305073618888855, "learning_rate": 1.618177297028472e-06, "loss": 0.6351, "step": 16321 }, { "epoch": 0.676447428405653, "grad_norm": 0.3942486047744751, "learning_rate": 1.6179700775001037e-06, "loss": 0.6493, "step": 16322 }, { "epoch": 0.6764888723113266, "grad_norm": 0.4173102080821991, "learning_rate": 1.6177628579717353e-06, "loss": 0.7092, "step": 16323 }, { "epoch": 0.6765303162170003, "grad_norm": 0.4446958303451538, "learning_rate": 1.6175556384433671e-06, "loss": 0.6725, "step": 16324 }, { "epoch": 0.676571760122674, "grad_norm": 0.4260506331920624, "learning_rate": 1.6173484189149987e-06, "loss": 0.6759, "step": 16325 }, { "epoch": 0.6766132040283477, "grad_norm": 0.42306116223335266, "learning_rate": 1.6171411993866303e-06, "loss": 0.6675, "step": 16326 }, { "epoch": 0.6766546479340213, "grad_norm": 0.42986342310905457, "learning_rate": 1.616933979858262e-06, "loss": 0.6997, "step": 16327 }, { "epoch": 0.6766960918396949, "grad_norm": 0.4706627428531647, "learning_rate": 1.6167267603298937e-06, "loss": 0.7153, "step": 16328 }, { "epoch": 0.6767375357453687, "grad_norm": 0.4231176972389221, "learning_rate": 1.6165195408015253e-06, "loss": 0.7085, "step": 16329 }, { "epoch": 0.6767789796510423, "grad_norm": 0.45094263553619385, "learning_rate": 1.616312321273157e-06, "loss": 0.7087, "step": 16330 }, { "epoch": 0.676820423556716, "grad_norm": 0.40936052799224854, "learning_rate": 1.6161051017447885e-06, "loss": 0.6331, "step": 16331 }, { "epoch": 0.6768618674623896, "grad_norm": 0.3899785280227661, "learning_rate": 
1.61589788221642e-06, "loss": 0.6772, "step": 16332 }, { "epoch": 0.6769033113680634, "grad_norm": 0.4100188612937927, "learning_rate": 1.615690662688052e-06, "loss": 0.6163, "step": 16333 }, { "epoch": 0.676944755273737, "grad_norm": 0.3940403461456299, "learning_rate": 1.6154834431596835e-06, "loss": 0.6554, "step": 16334 }, { "epoch": 0.6769861991794107, "grad_norm": 0.38139110803604126, "learning_rate": 1.615276223631315e-06, "loss": 0.642, "step": 16335 }, { "epoch": 0.6770276430850843, "grad_norm": 0.4233373701572418, "learning_rate": 1.615069004102947e-06, "loss": 0.7378, "step": 16336 }, { "epoch": 0.677069086990758, "grad_norm": 0.40375572443008423, "learning_rate": 1.6148617845745785e-06, "loss": 0.7084, "step": 16337 }, { "epoch": 0.6771105308964317, "grad_norm": 0.42493799328804016, "learning_rate": 1.6146545650462101e-06, "loss": 0.6952, "step": 16338 }, { "epoch": 0.6771519748021053, "grad_norm": 0.42446815967559814, "learning_rate": 1.6144473455178417e-06, "loss": 0.6812, "step": 16339 }, { "epoch": 0.677193418707779, "grad_norm": 0.43217477202415466, "learning_rate": 1.6142401259894733e-06, "loss": 0.7056, "step": 16340 }, { "epoch": 0.6772348626134527, "grad_norm": 0.39489418268203735, "learning_rate": 1.6140329064611051e-06, "loss": 0.6627, "step": 16341 }, { "epoch": 0.6772763065191264, "grad_norm": 0.45600074529647827, "learning_rate": 1.6138256869327367e-06, "loss": 0.7402, "step": 16342 }, { "epoch": 0.6773177504248, "grad_norm": 0.4149990677833557, "learning_rate": 1.6136184674043683e-06, "loss": 0.6376, "step": 16343 }, { "epoch": 0.6773591943304738, "grad_norm": 0.43980997800827026, "learning_rate": 1.6134112478760001e-06, "loss": 0.6875, "step": 16344 }, { "epoch": 0.6774006382361474, "grad_norm": 0.42125609517097473, "learning_rate": 1.6132040283476317e-06, "loss": 0.6913, "step": 16345 }, { "epoch": 0.677442082141821, "grad_norm": 0.44827336072921753, "learning_rate": 1.6129968088192633e-06, "loss": 0.7043, "step": 16346 }, { "epoch": 
0.6774835260474947, "grad_norm": 0.41174080967903137, "learning_rate": 1.612789589290895e-06, "loss": 0.6453, "step": 16347 }, { "epoch": 0.6775249699531684, "grad_norm": 0.4401751160621643, "learning_rate": 1.6125823697625265e-06, "loss": 0.7715, "step": 16348 }, { "epoch": 0.6775664138588421, "grad_norm": 0.39284059405326843, "learning_rate": 1.612375150234158e-06, "loss": 0.668, "step": 16349 }, { "epoch": 0.6776078577645157, "grad_norm": 0.392184853553772, "learning_rate": 1.61216793070579e-06, "loss": 0.6143, "step": 16350 }, { "epoch": 0.6776493016701894, "grad_norm": 0.4336661398410797, "learning_rate": 1.6119607111774215e-06, "loss": 0.7201, "step": 16351 }, { "epoch": 0.6776907455758631, "grad_norm": 0.41449442505836487, "learning_rate": 1.611753491649053e-06, "loss": 0.6814, "step": 16352 }, { "epoch": 0.6777321894815368, "grad_norm": 0.405932754278183, "learning_rate": 1.611546272120685e-06, "loss": 0.6533, "step": 16353 }, { "epoch": 0.6777736333872104, "grad_norm": 0.43539363145828247, "learning_rate": 1.6113390525923165e-06, "loss": 0.6742, "step": 16354 }, { "epoch": 0.677815077292884, "grad_norm": 0.4115121364593506, "learning_rate": 1.6111318330639481e-06, "loss": 0.6416, "step": 16355 }, { "epoch": 0.6778565211985578, "grad_norm": 0.4653797149658203, "learning_rate": 1.6109246135355797e-06, "loss": 0.6671, "step": 16356 }, { "epoch": 0.6778979651042314, "grad_norm": 0.3799360692501068, "learning_rate": 1.6107173940072113e-06, "loss": 0.6876, "step": 16357 }, { "epoch": 0.6779394090099051, "grad_norm": 0.4085388481616974, "learning_rate": 1.610510174478843e-06, "loss": 0.6417, "step": 16358 }, { "epoch": 0.6779808529155787, "grad_norm": 0.4052393138408661, "learning_rate": 1.6103029549504747e-06, "loss": 0.6354, "step": 16359 }, { "epoch": 0.6780222968212525, "grad_norm": 0.3867664933204651, "learning_rate": 1.6100957354221063e-06, "loss": 0.6368, "step": 16360 }, { "epoch": 0.6780637407269261, "grad_norm": 0.419985830783844, "learning_rate": 
1.6098885158937381e-06, "loss": 0.6982, "step": 16361 }, { "epoch": 0.6781051846325997, "grad_norm": 0.4375365674495697, "learning_rate": 1.6096812963653697e-06, "loss": 0.6902, "step": 16362 }, { "epoch": 0.6781466285382735, "grad_norm": 0.42397716641426086, "learning_rate": 1.6094740768370013e-06, "loss": 0.663, "step": 16363 }, { "epoch": 0.6781880724439471, "grad_norm": 0.4689071476459503, "learning_rate": 1.609266857308633e-06, "loss": 0.728, "step": 16364 }, { "epoch": 0.6782295163496208, "grad_norm": 0.4131699502468109, "learning_rate": 1.6090596377802645e-06, "loss": 0.6672, "step": 16365 }, { "epoch": 0.6782709602552944, "grad_norm": 0.4080883860588074, "learning_rate": 1.608852418251896e-06, "loss": 0.6611, "step": 16366 }, { "epoch": 0.6783124041609682, "grad_norm": 0.42712077498435974, "learning_rate": 1.6086451987235277e-06, "loss": 0.7373, "step": 16367 }, { "epoch": 0.6783538480666418, "grad_norm": 0.43096664547920227, "learning_rate": 1.6084379791951595e-06, "loss": 0.7115, "step": 16368 }, { "epoch": 0.6783952919723155, "grad_norm": 0.4470101594924927, "learning_rate": 1.6082307596667911e-06, "loss": 0.6733, "step": 16369 }, { "epoch": 0.6784367358779891, "grad_norm": 0.40488073229789734, "learning_rate": 1.608023540138423e-06, "loss": 0.6436, "step": 16370 }, { "epoch": 0.6784781797836628, "grad_norm": 0.37891021370887756, "learning_rate": 1.6078163206100545e-06, "loss": 0.6249, "step": 16371 }, { "epoch": 0.6785196236893365, "grad_norm": 0.43865838646888733, "learning_rate": 1.6076091010816861e-06, "loss": 0.7073, "step": 16372 }, { "epoch": 0.6785610675950101, "grad_norm": 0.43869486451148987, "learning_rate": 1.6074018815533177e-06, "loss": 0.6827, "step": 16373 }, { "epoch": 0.6786025115006838, "grad_norm": 0.3984203338623047, "learning_rate": 1.6071946620249493e-06, "loss": 0.658, "step": 16374 }, { "epoch": 0.6786439554063575, "grad_norm": 0.45380374789237976, "learning_rate": 1.606987442496581e-06, "loss": 0.7141, "step": 16375 }, { 
"epoch": 0.6786853993120312, "grad_norm": 0.4323147237300873, "learning_rate": 1.6067802229682127e-06, "loss": 0.6031, "step": 16376 }, { "epoch": 0.6787268432177048, "grad_norm": 0.41525501012802124, "learning_rate": 1.6065730034398443e-06, "loss": 0.7329, "step": 16377 }, { "epoch": 0.6787682871233786, "grad_norm": 0.40813004970550537, "learning_rate": 1.6063657839114761e-06, "loss": 0.6813, "step": 16378 }, { "epoch": 0.6788097310290522, "grad_norm": 0.4367498457431793, "learning_rate": 1.6061585643831077e-06, "loss": 0.7474, "step": 16379 }, { "epoch": 0.6788511749347258, "grad_norm": 0.4303681254386902, "learning_rate": 1.6059513448547393e-06, "loss": 0.6897, "step": 16380 }, { "epoch": 0.6788926188403995, "grad_norm": 0.39185333251953125, "learning_rate": 1.605744125326371e-06, "loss": 0.6603, "step": 16381 }, { "epoch": 0.6789340627460732, "grad_norm": 0.4145040214061737, "learning_rate": 1.6055369057980025e-06, "loss": 0.6353, "step": 16382 }, { "epoch": 0.6789755066517469, "grad_norm": 0.4737611711025238, "learning_rate": 1.6053296862696341e-06, "loss": 0.6804, "step": 16383 }, { "epoch": 0.6790169505574205, "grad_norm": 0.39113378524780273, "learning_rate": 1.6051224667412657e-06, "loss": 0.7124, "step": 16384 }, { "epoch": 0.6790583944630942, "grad_norm": 0.4277935326099396, "learning_rate": 1.6049152472128975e-06, "loss": 0.6947, "step": 16385 }, { "epoch": 0.6790998383687679, "grad_norm": 0.39183101058006287, "learning_rate": 1.6047080276845291e-06, "loss": 0.7061, "step": 16386 }, { "epoch": 0.6791412822744416, "grad_norm": 0.45061150193214417, "learning_rate": 1.604500808156161e-06, "loss": 0.6682, "step": 16387 }, { "epoch": 0.6791827261801152, "grad_norm": 0.4232819080352783, "learning_rate": 1.6042935886277925e-06, "loss": 0.6459, "step": 16388 }, { "epoch": 0.6792241700857888, "grad_norm": 0.42031776905059814, "learning_rate": 1.6040863690994241e-06, "loss": 0.6907, "step": 16389 }, { "epoch": 0.6792656139914626, "grad_norm": 0.4279818832874298, 
"learning_rate": 1.6038791495710557e-06, "loss": 0.6531, "step": 16390 }, { "epoch": 0.6793070578971362, "grad_norm": 0.3922806680202484, "learning_rate": 1.6036719300426873e-06, "loss": 0.6721, "step": 16391 }, { "epoch": 0.6793485018028099, "grad_norm": 0.44862061738967896, "learning_rate": 1.603464710514319e-06, "loss": 0.6506, "step": 16392 }, { "epoch": 0.6793899457084835, "grad_norm": 0.4305098354816437, "learning_rate": 1.6032574909859505e-06, "loss": 0.6833, "step": 16393 }, { "epoch": 0.6794313896141573, "grad_norm": 0.40128201246261597, "learning_rate": 1.6030502714575823e-06, "loss": 0.6747, "step": 16394 }, { "epoch": 0.6794728335198309, "grad_norm": 0.45804712176322937, "learning_rate": 1.6028430519292141e-06, "loss": 0.7432, "step": 16395 }, { "epoch": 0.6795142774255046, "grad_norm": 0.43453139066696167, "learning_rate": 1.6026358324008457e-06, "loss": 0.6606, "step": 16396 }, { "epoch": 0.6795557213311783, "grad_norm": 0.4176333546638489, "learning_rate": 1.6024286128724773e-06, "loss": 0.6892, "step": 16397 }, { "epoch": 0.6795971652368519, "grad_norm": 0.38999608159065247, "learning_rate": 1.602221393344109e-06, "loss": 0.6334, "step": 16398 }, { "epoch": 0.6796386091425256, "grad_norm": 0.3954404294490814, "learning_rate": 1.6020141738157405e-06, "loss": 0.6724, "step": 16399 }, { "epoch": 0.6796800530481992, "grad_norm": 0.39918017387390137, "learning_rate": 1.6018069542873721e-06, "loss": 0.6584, "step": 16400 }, { "epoch": 0.679721496953873, "grad_norm": 0.3800272047519684, "learning_rate": 1.6015997347590037e-06, "loss": 0.5919, "step": 16401 }, { "epoch": 0.6797629408595466, "grad_norm": 0.4102345407009125, "learning_rate": 1.6013925152306355e-06, "loss": 0.6677, "step": 16402 }, { "epoch": 0.6798043847652203, "grad_norm": 0.470389723777771, "learning_rate": 1.6011852957022671e-06, "loss": 0.6833, "step": 16403 }, { "epoch": 0.6798458286708939, "grad_norm": 0.4658568799495697, "learning_rate": 1.600978076173899e-06, "loss": 0.6716, "step": 
16404 }, { "epoch": 0.6798872725765677, "grad_norm": 0.4668389558792114, "learning_rate": 1.6007708566455305e-06, "loss": 0.6666, "step": 16405 }, { "epoch": 0.6799287164822413, "grad_norm": 0.41219428181648254, "learning_rate": 1.6005636371171621e-06, "loss": 0.6957, "step": 16406 }, { "epoch": 0.6799701603879149, "grad_norm": 0.41921865940093994, "learning_rate": 1.6003564175887937e-06, "loss": 0.6343, "step": 16407 }, { "epoch": 0.6800116042935886, "grad_norm": 0.45205870270729065, "learning_rate": 1.6001491980604253e-06, "loss": 0.7302, "step": 16408 }, { "epoch": 0.6800530481992623, "grad_norm": 0.40544790029525757, "learning_rate": 1.599941978532057e-06, "loss": 0.6827, "step": 16409 }, { "epoch": 0.680094492104936, "grad_norm": 0.4359225332736969, "learning_rate": 1.5997347590036885e-06, "loss": 0.6851, "step": 16410 }, { "epoch": 0.6801359360106096, "grad_norm": 0.3854822814464569, "learning_rate": 1.5995275394753203e-06, "loss": 0.6687, "step": 16411 }, { "epoch": 0.6801773799162834, "grad_norm": 0.4356563091278076, "learning_rate": 1.5993203199469521e-06, "loss": 0.6915, "step": 16412 }, { "epoch": 0.680218823821957, "grad_norm": 0.4016231596469879, "learning_rate": 1.5991131004185837e-06, "loss": 0.6888, "step": 16413 }, { "epoch": 0.6802602677276306, "grad_norm": 0.45952433347702026, "learning_rate": 1.5989058808902153e-06, "loss": 0.6317, "step": 16414 }, { "epoch": 0.6803017116333043, "grad_norm": 0.40454673767089844, "learning_rate": 1.598698661361847e-06, "loss": 0.6283, "step": 16415 }, { "epoch": 0.680343155538978, "grad_norm": 0.4314827620983124, "learning_rate": 1.5984914418334785e-06, "loss": 0.668, "step": 16416 }, { "epoch": 0.6803845994446517, "grad_norm": 0.38920295238494873, "learning_rate": 1.5982842223051101e-06, "loss": 0.6718, "step": 16417 }, { "epoch": 0.6804260433503253, "grad_norm": 0.3890742361545563, "learning_rate": 1.5980770027767417e-06, "loss": 0.6465, "step": 16418 }, { "epoch": 0.680467487255999, "grad_norm": 
0.40719839930534363, "learning_rate": 1.5978697832483733e-06, "loss": 0.718, "step": 16419 }, { "epoch": 0.6805089311616727, "grad_norm": 0.4480153024196625, "learning_rate": 1.5976625637200051e-06, "loss": 0.6699, "step": 16420 }, { "epoch": 0.6805503750673464, "grad_norm": 0.4204968214035034, "learning_rate": 1.597455344191637e-06, "loss": 0.6669, "step": 16421 }, { "epoch": 0.68059181897302, "grad_norm": 0.39290159940719604, "learning_rate": 1.5972481246632685e-06, "loss": 0.6714, "step": 16422 }, { "epoch": 0.6806332628786936, "grad_norm": 0.4116114377975464, "learning_rate": 1.5970409051349001e-06, "loss": 0.6796, "step": 16423 }, { "epoch": 0.6806747067843674, "grad_norm": 0.4009172320365906, "learning_rate": 1.5968336856065317e-06, "loss": 0.7012, "step": 16424 }, { "epoch": 0.680716150690041, "grad_norm": 0.423123836517334, "learning_rate": 1.5966264660781633e-06, "loss": 0.6826, "step": 16425 }, { "epoch": 0.6807575945957147, "grad_norm": 0.387929767370224, "learning_rate": 1.596419246549795e-06, "loss": 0.6619, "step": 16426 }, { "epoch": 0.6807990385013883, "grad_norm": 0.4412389099597931, "learning_rate": 1.5962120270214265e-06, "loss": 0.6962, "step": 16427 }, { "epoch": 0.6808404824070621, "grad_norm": 0.440782368183136, "learning_rate": 1.5960048074930581e-06, "loss": 0.7002, "step": 16428 }, { "epoch": 0.6808819263127357, "grad_norm": 0.41289159655570984, "learning_rate": 1.5957975879646901e-06, "loss": 0.7097, "step": 16429 }, { "epoch": 0.6809233702184094, "grad_norm": 0.38834673166275024, "learning_rate": 1.5955903684363217e-06, "loss": 0.6785, "step": 16430 }, { "epoch": 0.680964814124083, "grad_norm": 0.42564669251441956, "learning_rate": 1.5953831489079533e-06, "loss": 0.6555, "step": 16431 }, { "epoch": 0.6810062580297567, "grad_norm": 0.4332869350910187, "learning_rate": 1.595175929379585e-06, "loss": 0.657, "step": 16432 }, { "epoch": 0.6810477019354304, "grad_norm": 0.4403079152107239, "learning_rate": 1.5949687098512165e-06, "loss": 
0.6875, "step": 16433 }, { "epoch": 0.681089145841104, "grad_norm": 0.4539627730846405, "learning_rate": 1.5947614903228481e-06, "loss": 0.759, "step": 16434 }, { "epoch": 0.6811305897467778, "grad_norm": 0.41863948106765747, "learning_rate": 1.5945542707944797e-06, "loss": 0.6464, "step": 16435 }, { "epoch": 0.6811720336524514, "grad_norm": 0.43567246198654175, "learning_rate": 1.5943470512661113e-06, "loss": 0.7043, "step": 16436 }, { "epoch": 0.6812134775581251, "grad_norm": 0.4339817762374878, "learning_rate": 1.5941398317377433e-06, "loss": 0.6624, "step": 16437 }, { "epoch": 0.6812549214637987, "grad_norm": 0.41677218675613403, "learning_rate": 1.593932612209375e-06, "loss": 0.6487, "step": 16438 }, { "epoch": 0.6812963653694725, "grad_norm": 0.4437923729419708, "learning_rate": 1.5937253926810065e-06, "loss": 0.6965, "step": 16439 }, { "epoch": 0.6813378092751461, "grad_norm": 0.3804916739463806, "learning_rate": 1.5935181731526381e-06, "loss": 0.6079, "step": 16440 }, { "epoch": 0.6813792531808197, "grad_norm": 0.3881668448448181, "learning_rate": 1.5933109536242697e-06, "loss": 0.637, "step": 16441 }, { "epoch": 0.6814206970864934, "grad_norm": 0.4043217599391937, "learning_rate": 1.5931037340959013e-06, "loss": 0.6866, "step": 16442 }, { "epoch": 0.6814621409921671, "grad_norm": 0.3965768814086914, "learning_rate": 1.592896514567533e-06, "loss": 0.6495, "step": 16443 }, { "epoch": 0.6815035848978408, "grad_norm": 0.3855952024459839, "learning_rate": 1.5926892950391645e-06, "loss": 0.6659, "step": 16444 }, { "epoch": 0.6815450288035144, "grad_norm": 0.445781946182251, "learning_rate": 1.5924820755107961e-06, "loss": 0.6599, "step": 16445 }, { "epoch": 0.6815864727091882, "grad_norm": 0.454216331243515, "learning_rate": 1.5922748559824281e-06, "loss": 0.7072, "step": 16446 }, { "epoch": 0.6816279166148618, "grad_norm": 0.4069503843784332, "learning_rate": 1.5920676364540597e-06, "loss": 0.6682, "step": 16447 }, { "epoch": 0.6816693605205355, "grad_norm": 
0.4295148253440857, "learning_rate": 1.5918604169256913e-06, "loss": 0.6738, "step": 16448 }, { "epoch": 0.6817108044262091, "grad_norm": 0.39556998014450073, "learning_rate": 1.591653197397323e-06, "loss": 0.6241, "step": 16449 }, { "epoch": 0.6817522483318827, "grad_norm": 0.4372722804546356, "learning_rate": 1.5914459778689545e-06, "loss": 0.6685, "step": 16450 }, { "epoch": 0.6817936922375565, "grad_norm": 0.40175312757492065, "learning_rate": 1.5912387583405861e-06, "loss": 0.6339, "step": 16451 }, { "epoch": 0.6818351361432301, "grad_norm": 0.4111756682395935, "learning_rate": 1.5910315388122177e-06, "loss": 0.6394, "step": 16452 }, { "epoch": 0.6818765800489038, "grad_norm": 0.3894212245941162, "learning_rate": 1.5908243192838493e-06, "loss": 0.6433, "step": 16453 }, { "epoch": 0.6819180239545775, "grad_norm": 0.39975839853286743, "learning_rate": 1.590617099755481e-06, "loss": 0.6362, "step": 16454 }, { "epoch": 0.6819594678602512, "grad_norm": 0.4323562979698181, "learning_rate": 1.590409880227113e-06, "loss": 0.7339, "step": 16455 }, { "epoch": 0.6820009117659248, "grad_norm": 0.3968159556388855, "learning_rate": 1.5902026606987445e-06, "loss": 0.6748, "step": 16456 }, { "epoch": 0.6820423556715985, "grad_norm": 0.4343950152397156, "learning_rate": 1.5899954411703761e-06, "loss": 0.6993, "step": 16457 }, { "epoch": 0.6820837995772722, "grad_norm": 0.3977610468864441, "learning_rate": 1.5897882216420077e-06, "loss": 0.6163, "step": 16458 }, { "epoch": 0.6821252434829458, "grad_norm": 0.44916510581970215, "learning_rate": 1.5895810021136393e-06, "loss": 0.6542, "step": 16459 }, { "epoch": 0.6821666873886195, "grad_norm": 0.43771079182624817, "learning_rate": 1.589373782585271e-06, "loss": 0.704, "step": 16460 }, { "epoch": 0.6822081312942931, "grad_norm": 0.3977387547492981, "learning_rate": 1.5891665630569025e-06, "loss": 0.6414, "step": 16461 }, { "epoch": 0.6822495751999669, "grad_norm": 0.42292988300323486, "learning_rate": 1.5889593435285341e-06, 
"loss": 0.6833, "step": 16462 }, { "epoch": 0.6822910191056405, "grad_norm": 0.3783145546913147, "learning_rate": 1.5887521240001661e-06, "loss": 0.6665, "step": 16463 }, { "epoch": 0.6823324630113142, "grad_norm": 0.42651253938674927, "learning_rate": 1.5885449044717977e-06, "loss": 0.6747, "step": 16464 }, { "epoch": 0.6823739069169878, "grad_norm": 0.4274154305458069, "learning_rate": 1.5883376849434293e-06, "loss": 0.6541, "step": 16465 }, { "epoch": 0.6824153508226616, "grad_norm": 0.4113521873950958, "learning_rate": 1.588130465415061e-06, "loss": 0.6912, "step": 16466 }, { "epoch": 0.6824567947283352, "grad_norm": 0.3884619474411011, "learning_rate": 1.5879232458866925e-06, "loss": 0.7103, "step": 16467 }, { "epoch": 0.6824982386340088, "grad_norm": 0.40528884530067444, "learning_rate": 1.5877160263583241e-06, "loss": 0.67, "step": 16468 }, { "epoch": 0.6825396825396826, "grad_norm": 0.43685516715049744, "learning_rate": 1.5875088068299557e-06, "loss": 0.6743, "step": 16469 }, { "epoch": 0.6825811264453562, "grad_norm": 0.43480271100997925, "learning_rate": 1.5873015873015873e-06, "loss": 0.651, "step": 16470 }, { "epoch": 0.6826225703510299, "grad_norm": 0.44695931673049927, "learning_rate": 1.587094367773219e-06, "loss": 0.6974, "step": 16471 }, { "epoch": 0.6826640142567035, "grad_norm": 0.38569626212120056, "learning_rate": 1.586887148244851e-06, "loss": 0.6494, "step": 16472 }, { "epoch": 0.6827054581623773, "grad_norm": 0.4279003441333771, "learning_rate": 1.5866799287164825e-06, "loss": 0.6913, "step": 16473 }, { "epoch": 0.6827469020680509, "grad_norm": 0.42841625213623047, "learning_rate": 1.5864727091881141e-06, "loss": 0.6838, "step": 16474 }, { "epoch": 0.6827883459737245, "grad_norm": 0.39657533168792725, "learning_rate": 1.5862654896597457e-06, "loss": 0.6587, "step": 16475 }, { "epoch": 0.6828297898793982, "grad_norm": 0.46557366847991943, "learning_rate": 1.5860582701313773e-06, "loss": 0.688, "step": 16476 }, { "epoch": 0.6828712337850719, 
"grad_norm": 0.4088340699672699, "learning_rate": 1.585851050603009e-06, "loss": 0.6521, "step": 16477 }, { "epoch": 0.6829126776907456, "grad_norm": 0.39520496129989624, "learning_rate": 1.5856438310746405e-06, "loss": 0.65, "step": 16478 }, { "epoch": 0.6829541215964192, "grad_norm": 0.43614083528518677, "learning_rate": 1.5854366115462721e-06, "loss": 0.7324, "step": 16479 }, { "epoch": 0.682995565502093, "grad_norm": 0.4328020215034485, "learning_rate": 1.5852293920179037e-06, "loss": 0.7106, "step": 16480 }, { "epoch": 0.6830370094077666, "grad_norm": 0.4099116325378418, "learning_rate": 1.5850221724895357e-06, "loss": 0.6519, "step": 16481 }, { "epoch": 0.6830784533134403, "grad_norm": 0.43008145689964294, "learning_rate": 1.5848149529611673e-06, "loss": 0.6464, "step": 16482 }, { "epoch": 0.6831198972191139, "grad_norm": 0.42204973101615906, "learning_rate": 1.584607733432799e-06, "loss": 0.6729, "step": 16483 }, { "epoch": 0.6831613411247875, "grad_norm": 0.40035882592201233, "learning_rate": 1.5844005139044305e-06, "loss": 0.6619, "step": 16484 }, { "epoch": 0.6832027850304613, "grad_norm": 0.4388119876384735, "learning_rate": 1.5841932943760621e-06, "loss": 0.6975, "step": 16485 }, { "epoch": 0.6832442289361349, "grad_norm": 0.46647024154663086, "learning_rate": 1.5839860748476937e-06, "loss": 0.7454, "step": 16486 }, { "epoch": 0.6832856728418086, "grad_norm": 0.37079963088035583, "learning_rate": 1.5837788553193253e-06, "loss": 0.6313, "step": 16487 }, { "epoch": 0.6833271167474823, "grad_norm": 0.45674821734428406, "learning_rate": 1.583571635790957e-06, "loss": 0.6571, "step": 16488 }, { "epoch": 0.683368560653156, "grad_norm": 0.4435444474220276, "learning_rate": 1.5833644162625885e-06, "loss": 0.6857, "step": 16489 }, { "epoch": 0.6834100045588296, "grad_norm": 0.4087064862251282, "learning_rate": 1.5831571967342205e-06, "loss": 0.6564, "step": 16490 }, { "epoch": 0.6834514484645033, "grad_norm": 0.42712050676345825, "learning_rate": 
1.5829499772058521e-06, "loss": 0.647, "step": 16491 }, { "epoch": 0.683492892370177, "grad_norm": 0.3875502645969391, "learning_rate": 1.5827427576774837e-06, "loss": 0.7181, "step": 16492 }, { "epoch": 0.6835343362758506, "grad_norm": 0.43457236886024475, "learning_rate": 1.5825355381491153e-06, "loss": 0.6504, "step": 16493 }, { "epoch": 0.6835757801815243, "grad_norm": 0.4282146394252777, "learning_rate": 1.582328318620747e-06, "loss": 0.6628, "step": 16494 }, { "epoch": 0.6836172240871979, "grad_norm": 0.42648109793663025, "learning_rate": 1.5821210990923785e-06, "loss": 0.6797, "step": 16495 }, { "epoch": 0.6836586679928717, "grad_norm": 0.432401567697525, "learning_rate": 1.5819138795640101e-06, "loss": 0.6802, "step": 16496 }, { "epoch": 0.6837001118985453, "grad_norm": 0.41156721115112305, "learning_rate": 1.5817066600356417e-06, "loss": 0.7217, "step": 16497 }, { "epoch": 0.683741555804219, "grad_norm": 0.4197638928890228, "learning_rate": 1.5814994405072737e-06, "loss": 0.6382, "step": 16498 }, { "epoch": 0.6837829997098926, "grad_norm": 0.4296789765357971, "learning_rate": 1.5812922209789053e-06, "loss": 0.6733, "step": 16499 }, { "epoch": 0.6838244436155664, "grad_norm": 0.39848610758781433, "learning_rate": 1.581085001450537e-06, "loss": 0.6176, "step": 16500 }, { "epoch": 0.68386588752124, "grad_norm": 0.3919709026813507, "learning_rate": 1.5808777819221685e-06, "loss": 0.6495, "step": 16501 }, { "epoch": 0.6839073314269136, "grad_norm": 0.4141601324081421, "learning_rate": 1.5806705623938001e-06, "loss": 0.7084, "step": 16502 }, { "epoch": 0.6839487753325874, "grad_norm": 0.40558919310569763, "learning_rate": 1.5804633428654317e-06, "loss": 0.6359, "step": 16503 }, { "epoch": 0.683990219238261, "grad_norm": 0.39816582202911377, "learning_rate": 1.5802561233370633e-06, "loss": 0.6287, "step": 16504 }, { "epoch": 0.6840316631439347, "grad_norm": 0.41478803753852844, "learning_rate": 1.580048903808695e-06, "loss": 0.6727, "step": 16505 }, { "epoch": 
0.6840731070496083, "grad_norm": 0.3917585611343384, "learning_rate": 1.5798416842803265e-06, "loss": 0.64, "step": 16506 }, { "epoch": 0.6841145509552821, "grad_norm": 0.38947901129722595, "learning_rate": 1.5796344647519585e-06, "loss": 0.6396, "step": 16507 }, { "epoch": 0.6841559948609557, "grad_norm": 0.39654383063316345, "learning_rate": 1.5794272452235901e-06, "loss": 0.7181, "step": 16508 }, { "epoch": 0.6841974387666294, "grad_norm": 0.39379623532295227, "learning_rate": 1.5792200256952217e-06, "loss": 0.6619, "step": 16509 }, { "epoch": 0.684238882672303, "grad_norm": 0.42536720633506775, "learning_rate": 1.5790128061668533e-06, "loss": 0.7007, "step": 16510 }, { "epoch": 0.6842803265779767, "grad_norm": 0.41144341230392456, "learning_rate": 1.578805586638485e-06, "loss": 0.6689, "step": 16511 }, { "epoch": 0.6843217704836504, "grad_norm": 0.4173933267593384, "learning_rate": 1.5785983671101165e-06, "loss": 0.7174, "step": 16512 }, { "epoch": 0.684363214389324, "grad_norm": 0.3956053853034973, "learning_rate": 1.5783911475817481e-06, "loss": 0.6897, "step": 16513 }, { "epoch": 0.6844046582949977, "grad_norm": 0.4237261414527893, "learning_rate": 1.5781839280533797e-06, "loss": 0.686, "step": 16514 }, { "epoch": 0.6844461022006714, "grad_norm": 0.37724512815475464, "learning_rate": 1.5779767085250113e-06, "loss": 0.6365, "step": 16515 }, { "epoch": 0.6844875461063451, "grad_norm": 0.41833508014678955, "learning_rate": 1.5777694889966433e-06, "loss": 0.6486, "step": 16516 }, { "epoch": 0.6845289900120187, "grad_norm": 0.3877980709075928, "learning_rate": 1.577562269468275e-06, "loss": 0.6691, "step": 16517 }, { "epoch": 0.6845704339176925, "grad_norm": 0.39981138706207275, "learning_rate": 1.5773550499399065e-06, "loss": 0.6868, "step": 16518 }, { "epoch": 0.6846118778233661, "grad_norm": 0.405407577753067, "learning_rate": 1.5771478304115381e-06, "loss": 0.6838, "step": 16519 }, { "epoch": 0.6846533217290397, "grad_norm": 0.3803228735923767, 
"learning_rate": 1.5769406108831697e-06, "loss": 0.6328, "step": 16520 }, { "epoch": 0.6846947656347134, "grad_norm": 0.39798641204833984, "learning_rate": 1.5767333913548013e-06, "loss": 0.7107, "step": 16521 }, { "epoch": 0.684736209540387, "grad_norm": 0.390122652053833, "learning_rate": 1.576526171826433e-06, "loss": 0.629, "step": 16522 }, { "epoch": 0.6847776534460608, "grad_norm": 0.3843939006328583, "learning_rate": 1.5763189522980645e-06, "loss": 0.6329, "step": 16523 }, { "epoch": 0.6848190973517344, "grad_norm": 0.42882293462753296, "learning_rate": 1.5761117327696965e-06, "loss": 0.6968, "step": 16524 }, { "epoch": 0.6848605412574081, "grad_norm": 0.38911494612693787, "learning_rate": 1.5759045132413281e-06, "loss": 0.6613, "step": 16525 }, { "epoch": 0.6849019851630818, "grad_norm": 0.3814469277858734, "learning_rate": 1.5756972937129597e-06, "loss": 0.6724, "step": 16526 }, { "epoch": 0.6849434290687555, "grad_norm": 0.46253183484077454, "learning_rate": 1.5754900741845913e-06, "loss": 0.6598, "step": 16527 }, { "epoch": 0.6849848729744291, "grad_norm": 0.4089282155036926, "learning_rate": 1.575282854656223e-06, "loss": 0.6528, "step": 16528 }, { "epoch": 0.6850263168801027, "grad_norm": 0.43547695875167847, "learning_rate": 1.5750756351278545e-06, "loss": 0.6802, "step": 16529 }, { "epoch": 0.6850677607857765, "grad_norm": 0.46075403690338135, "learning_rate": 1.5748684155994861e-06, "loss": 0.644, "step": 16530 }, { "epoch": 0.6851092046914501, "grad_norm": 0.3681713938713074, "learning_rate": 1.5746611960711177e-06, "loss": 0.6615, "step": 16531 }, { "epoch": 0.6851506485971238, "grad_norm": 0.41443437337875366, "learning_rate": 1.5744539765427493e-06, "loss": 0.6586, "step": 16532 }, { "epoch": 0.6851920925027974, "grad_norm": 0.4202670156955719, "learning_rate": 1.5742467570143813e-06, "loss": 0.6835, "step": 16533 }, { "epoch": 0.6852335364084712, "grad_norm": 0.41073766350746155, "learning_rate": 1.574039537486013e-06, "loss": 0.6445, "step": 
16534 }, { "epoch": 0.6852749803141448, "grad_norm": 0.4130845069885254, "learning_rate": 1.5738323179576445e-06, "loss": 0.6738, "step": 16535 }, { "epoch": 0.6853164242198184, "grad_norm": 0.4134502708911896, "learning_rate": 1.5736250984292761e-06, "loss": 0.6697, "step": 16536 }, { "epoch": 0.6853578681254922, "grad_norm": 0.3807763457298279, "learning_rate": 1.5734178789009077e-06, "loss": 0.6543, "step": 16537 }, { "epoch": 0.6853993120311658, "grad_norm": 0.4259345829486847, "learning_rate": 1.5732106593725393e-06, "loss": 0.6823, "step": 16538 }, { "epoch": 0.6854407559368395, "grad_norm": 0.4215068221092224, "learning_rate": 1.573003439844171e-06, "loss": 0.6819, "step": 16539 }, { "epoch": 0.6854821998425131, "grad_norm": 0.45479243993759155, "learning_rate": 1.5727962203158025e-06, "loss": 0.6824, "step": 16540 }, { "epoch": 0.6855236437481869, "grad_norm": 0.42038097977638245, "learning_rate": 1.5725890007874341e-06, "loss": 0.7203, "step": 16541 }, { "epoch": 0.6855650876538605, "grad_norm": 0.4235568642616272, "learning_rate": 1.5723817812590661e-06, "loss": 0.701, "step": 16542 }, { "epoch": 0.6856065315595342, "grad_norm": 0.4088001251220703, "learning_rate": 1.5721745617306977e-06, "loss": 0.7339, "step": 16543 }, { "epoch": 0.6856479754652078, "grad_norm": 0.37654632329940796, "learning_rate": 1.5719673422023293e-06, "loss": 0.6416, "step": 16544 }, { "epoch": 0.6856894193708815, "grad_norm": 0.3778918981552124, "learning_rate": 1.571760122673961e-06, "loss": 0.6454, "step": 16545 }, { "epoch": 0.6857308632765552, "grad_norm": 0.4051094055175781, "learning_rate": 1.5715529031455925e-06, "loss": 0.6429, "step": 16546 }, { "epoch": 0.6857723071822288, "grad_norm": 0.41533297300338745, "learning_rate": 1.5713456836172241e-06, "loss": 0.6442, "step": 16547 }, { "epoch": 0.6858137510879025, "grad_norm": 0.41721484065055847, "learning_rate": 1.5711384640888557e-06, "loss": 0.6323, "step": 16548 }, { "epoch": 0.6858551949935762, "grad_norm": 
0.38716697692871094, "learning_rate": 1.5709312445604873e-06, "loss": 0.6571, "step": 16549 }, { "epoch": 0.6858966388992499, "grad_norm": 0.43903231620788574, "learning_rate": 1.5707240250321193e-06, "loss": 0.7168, "step": 16550 }, { "epoch": 0.6859380828049235, "grad_norm": 0.39671075344085693, "learning_rate": 1.570516805503751e-06, "loss": 0.644, "step": 16551 }, { "epoch": 0.6859795267105973, "grad_norm": 0.4761524796485901, "learning_rate": 1.5703095859753825e-06, "loss": 0.7354, "step": 16552 }, { "epoch": 0.6860209706162709, "grad_norm": 0.4064832031726837, "learning_rate": 1.5701023664470141e-06, "loss": 0.6594, "step": 16553 }, { "epoch": 0.6860624145219445, "grad_norm": 0.4024033844470978, "learning_rate": 1.5698951469186457e-06, "loss": 0.7124, "step": 16554 }, { "epoch": 0.6861038584276182, "grad_norm": 0.40506869554519653, "learning_rate": 1.5696879273902773e-06, "loss": 0.6566, "step": 16555 }, { "epoch": 0.6861453023332919, "grad_norm": 0.4111260771751404, "learning_rate": 1.569480707861909e-06, "loss": 0.6458, "step": 16556 }, { "epoch": 0.6861867462389656, "grad_norm": 0.42981472611427307, "learning_rate": 1.5692734883335405e-06, "loss": 0.6594, "step": 16557 }, { "epoch": 0.6862281901446392, "grad_norm": 0.3832322657108307, "learning_rate": 1.5690662688051721e-06, "loss": 0.6486, "step": 16558 }, { "epoch": 0.6862696340503129, "grad_norm": 0.4087298512458801, "learning_rate": 1.5688590492768041e-06, "loss": 0.665, "step": 16559 }, { "epoch": 0.6863110779559866, "grad_norm": 0.39591184258461, "learning_rate": 1.5686518297484357e-06, "loss": 0.7036, "step": 16560 }, { "epoch": 0.6863525218616603, "grad_norm": 0.4204745590686798, "learning_rate": 1.5684446102200673e-06, "loss": 0.6853, "step": 16561 }, { "epoch": 0.6863939657673339, "grad_norm": 0.3913266360759735, "learning_rate": 1.568237390691699e-06, "loss": 0.6178, "step": 16562 }, { "epoch": 0.6864354096730075, "grad_norm": 0.4209268391132355, "learning_rate": 1.5680301711633305e-06, "loss": 
0.6661, "step": 16563 }, { "epoch": 0.6864768535786813, "grad_norm": 0.44486355781555176, "learning_rate": 1.5678229516349621e-06, "loss": 0.6576, "step": 16564 }, { "epoch": 0.6865182974843549, "grad_norm": 0.4286119043827057, "learning_rate": 1.5676157321065937e-06, "loss": 0.7102, "step": 16565 }, { "epoch": 0.6865597413900286, "grad_norm": 0.4500056207180023, "learning_rate": 1.5674085125782253e-06, "loss": 0.6621, "step": 16566 }, { "epoch": 0.6866011852957022, "grad_norm": 0.39222705364227295, "learning_rate": 1.567201293049857e-06, "loss": 0.689, "step": 16567 }, { "epoch": 0.686642629201376, "grad_norm": 0.4452320635318756, "learning_rate": 1.566994073521489e-06, "loss": 0.7114, "step": 16568 }, { "epoch": 0.6866840731070496, "grad_norm": 0.3967004716396332, "learning_rate": 1.5667868539931205e-06, "loss": 0.6643, "step": 16569 }, { "epoch": 0.6867255170127233, "grad_norm": 0.4173629879951477, "learning_rate": 1.5665796344647521e-06, "loss": 0.6283, "step": 16570 }, { "epoch": 0.686766960918397, "grad_norm": 0.4006367623806, "learning_rate": 1.5663724149363837e-06, "loss": 0.6176, "step": 16571 }, { "epoch": 0.6868084048240706, "grad_norm": 0.37428367137908936, "learning_rate": 1.5661651954080153e-06, "loss": 0.6014, "step": 16572 }, { "epoch": 0.6868498487297443, "grad_norm": 0.4641467034816742, "learning_rate": 1.565957975879647e-06, "loss": 0.7308, "step": 16573 }, { "epoch": 0.6868912926354179, "grad_norm": 0.4359675943851471, "learning_rate": 1.5657507563512785e-06, "loss": 0.6833, "step": 16574 }, { "epoch": 0.6869327365410917, "grad_norm": 0.4111281633377075, "learning_rate": 1.5655435368229101e-06, "loss": 0.6729, "step": 16575 }, { "epoch": 0.6869741804467653, "grad_norm": 0.398801326751709, "learning_rate": 1.5653363172945417e-06, "loss": 0.6554, "step": 16576 }, { "epoch": 0.687015624352439, "grad_norm": 0.4150601327419281, "learning_rate": 1.5651290977661737e-06, "loss": 0.6782, "step": 16577 }, { "epoch": 0.6870570682581126, "grad_norm": 
0.40952086448669434, "learning_rate": 1.5649218782378053e-06, "loss": 0.7075, "step": 16578 }, { "epoch": 0.6870985121637864, "grad_norm": 0.41787293553352356, "learning_rate": 1.564714658709437e-06, "loss": 0.6836, "step": 16579 }, { "epoch": 0.68713995606946, "grad_norm": 0.4947410821914673, "learning_rate": 1.5645074391810685e-06, "loss": 0.7079, "step": 16580 }, { "epoch": 0.6871813999751336, "grad_norm": 0.47498029470443726, "learning_rate": 1.5643002196527001e-06, "loss": 0.7661, "step": 16581 }, { "epoch": 0.6872228438808073, "grad_norm": 0.4191833436489105, "learning_rate": 1.5640930001243317e-06, "loss": 0.6766, "step": 16582 }, { "epoch": 0.687264287786481, "grad_norm": 0.38336294889450073, "learning_rate": 1.5638857805959633e-06, "loss": 0.6667, "step": 16583 }, { "epoch": 0.6873057316921547, "grad_norm": 0.41743946075439453, "learning_rate": 1.563678561067595e-06, "loss": 0.6149, "step": 16584 }, { "epoch": 0.6873471755978283, "grad_norm": 0.4376504123210907, "learning_rate": 1.563471341539227e-06, "loss": 0.6603, "step": 16585 }, { "epoch": 0.687388619503502, "grad_norm": 0.4002896249294281, "learning_rate": 1.5632641220108585e-06, "loss": 0.698, "step": 16586 }, { "epoch": 0.6874300634091757, "grad_norm": 0.4106179177761078, "learning_rate": 1.5630569024824901e-06, "loss": 0.6782, "step": 16587 }, { "epoch": 0.6874715073148494, "grad_norm": 0.39783352613449097, "learning_rate": 1.5628496829541217e-06, "loss": 0.6683, "step": 16588 }, { "epoch": 0.687512951220523, "grad_norm": 0.40603554248809814, "learning_rate": 1.5626424634257533e-06, "loss": 0.6742, "step": 16589 }, { "epoch": 0.6875543951261966, "grad_norm": 0.39988526701927185, "learning_rate": 1.562435243897385e-06, "loss": 0.678, "step": 16590 }, { "epoch": 0.6875958390318704, "grad_norm": 0.4415309727191925, "learning_rate": 1.5622280243690165e-06, "loss": 0.7517, "step": 16591 }, { "epoch": 0.687637282937544, "grad_norm": 0.4360368251800537, "learning_rate": 1.5620208048406481e-06, "loss": 
0.6565, "step": 16592 }, { "epoch": 0.6876787268432177, "grad_norm": 0.4377506673336029, "learning_rate": 1.56181358531228e-06, "loss": 0.7119, "step": 16593 }, { "epoch": 0.6877201707488914, "grad_norm": 0.4718312621116638, "learning_rate": 1.5616063657839117e-06, "loss": 0.7297, "step": 16594 }, { "epoch": 0.6877616146545651, "grad_norm": 0.41062983870506287, "learning_rate": 1.5613991462555433e-06, "loss": 0.6667, "step": 16595 }, { "epoch": 0.6878030585602387, "grad_norm": 0.3887721300125122, "learning_rate": 1.561191926727175e-06, "loss": 0.7039, "step": 16596 }, { "epoch": 0.6878445024659123, "grad_norm": 0.37392768263816833, "learning_rate": 1.5609847071988065e-06, "loss": 0.6484, "step": 16597 }, { "epoch": 0.6878859463715861, "grad_norm": 0.4421646296977997, "learning_rate": 1.5607774876704381e-06, "loss": 0.6299, "step": 16598 }, { "epoch": 0.6879273902772597, "grad_norm": 0.43053150177001953, "learning_rate": 1.5605702681420697e-06, "loss": 0.6981, "step": 16599 }, { "epoch": 0.6879688341829334, "grad_norm": 0.4001030921936035, "learning_rate": 1.5603630486137013e-06, "loss": 0.6431, "step": 16600 }, { "epoch": 0.688010278088607, "grad_norm": 0.4896659553050995, "learning_rate": 1.560155829085333e-06, "loss": 0.6539, "step": 16601 }, { "epoch": 0.6880517219942808, "grad_norm": 0.46579355001449585, "learning_rate": 1.5599486095569647e-06, "loss": 0.7017, "step": 16602 }, { "epoch": 0.6880931658999544, "grad_norm": 0.5946474075317383, "learning_rate": 1.5597413900285965e-06, "loss": 0.6731, "step": 16603 }, { "epoch": 0.6881346098056281, "grad_norm": 0.41856658458709717, "learning_rate": 1.5595341705002281e-06, "loss": 0.6332, "step": 16604 }, { "epoch": 0.6881760537113017, "grad_norm": 0.4070376455783844, "learning_rate": 1.5593269509718597e-06, "loss": 0.7124, "step": 16605 }, { "epoch": 0.6882174976169754, "grad_norm": 0.4237927794456482, "learning_rate": 1.5591197314434913e-06, "loss": 0.6603, "step": 16606 }, { "epoch": 0.6882589415226491, 
"grad_norm": 0.4384535551071167, "learning_rate": 1.558912511915123e-06, "loss": 0.6399, "step": 16607 }, { "epoch": 0.6883003854283227, "grad_norm": 0.41120848059654236, "learning_rate": 1.5587052923867545e-06, "loss": 0.7314, "step": 16608 }, { "epoch": 0.6883418293339965, "grad_norm": 0.47084200382232666, "learning_rate": 1.5584980728583861e-06, "loss": 0.7021, "step": 16609 }, { "epoch": 0.6883832732396701, "grad_norm": 0.42838045954704285, "learning_rate": 1.558290853330018e-06, "loss": 0.6853, "step": 16610 }, { "epoch": 0.6884247171453438, "grad_norm": 0.42581799626350403, "learning_rate": 1.5580836338016498e-06, "loss": 0.6422, "step": 16611 }, { "epoch": 0.6884661610510174, "grad_norm": 0.4468276798725128, "learning_rate": 1.5578764142732813e-06, "loss": 0.7737, "step": 16612 }, { "epoch": 0.6885076049566912, "grad_norm": 0.42627325654029846, "learning_rate": 1.557669194744913e-06, "loss": 0.6744, "step": 16613 }, { "epoch": 0.6885490488623648, "grad_norm": 0.3897863030433655, "learning_rate": 1.5574619752165445e-06, "loss": 0.6317, "step": 16614 }, { "epoch": 0.6885904927680384, "grad_norm": 0.38529959321022034, "learning_rate": 1.5572547556881761e-06, "loss": 0.6831, "step": 16615 }, { "epoch": 0.6886319366737121, "grad_norm": 0.41123276948928833, "learning_rate": 1.5570475361598077e-06, "loss": 0.696, "step": 16616 }, { "epoch": 0.6886733805793858, "grad_norm": 0.44052428007125854, "learning_rate": 1.5568403166314393e-06, "loss": 0.7007, "step": 16617 }, { "epoch": 0.6887148244850595, "grad_norm": 0.433239221572876, "learning_rate": 1.556633097103071e-06, "loss": 0.6866, "step": 16618 }, { "epoch": 0.6887562683907331, "grad_norm": 0.3594861626625061, "learning_rate": 1.5564258775747027e-06, "loss": 0.6045, "step": 16619 }, { "epoch": 0.6887977122964068, "grad_norm": 0.40516790747642517, "learning_rate": 1.5562186580463346e-06, "loss": 0.6643, "step": 16620 }, { "epoch": 0.6888391562020805, "grad_norm": 0.40015071630477905, "learning_rate": 
1.5560114385179661e-06, "loss": 0.6758, "step": 16621 }, { "epoch": 0.6888806001077542, "grad_norm": 0.40229904651641846, "learning_rate": 1.5558042189895977e-06, "loss": 0.6394, "step": 16622 }, { "epoch": 0.6889220440134278, "grad_norm": 0.4143989682197571, "learning_rate": 1.5555969994612293e-06, "loss": 0.6694, "step": 16623 }, { "epoch": 0.6889634879191014, "grad_norm": 0.45119255781173706, "learning_rate": 1.555389779932861e-06, "loss": 0.6772, "step": 16624 }, { "epoch": 0.6890049318247752, "grad_norm": 0.3928352892398834, "learning_rate": 1.5551825604044925e-06, "loss": 0.6389, "step": 16625 }, { "epoch": 0.6890463757304488, "grad_norm": 0.4000597894191742, "learning_rate": 1.5549753408761241e-06, "loss": 0.7, "step": 16626 }, { "epoch": 0.6890878196361225, "grad_norm": 0.3982619643211365, "learning_rate": 1.554768121347756e-06, "loss": 0.639, "step": 16627 }, { "epoch": 0.6891292635417962, "grad_norm": 0.44259390234947205, "learning_rate": 1.5545609018193875e-06, "loss": 0.686, "step": 16628 }, { "epoch": 0.6891707074474699, "grad_norm": 0.4073920249938965, "learning_rate": 1.5543536822910194e-06, "loss": 0.7192, "step": 16629 }, { "epoch": 0.6892121513531435, "grad_norm": 0.4161735475063324, "learning_rate": 1.554146462762651e-06, "loss": 0.6526, "step": 16630 }, { "epoch": 0.6892535952588172, "grad_norm": 0.40179237723350525, "learning_rate": 1.5539392432342825e-06, "loss": 0.6736, "step": 16631 }, { "epoch": 0.6892950391644909, "grad_norm": 0.3989693224430084, "learning_rate": 1.5537320237059141e-06, "loss": 0.649, "step": 16632 }, { "epoch": 0.6893364830701645, "grad_norm": 0.41954126954078674, "learning_rate": 1.5535248041775457e-06, "loss": 0.7083, "step": 16633 }, { "epoch": 0.6893779269758382, "grad_norm": 0.39503049850463867, "learning_rate": 1.5533175846491773e-06, "loss": 0.6772, "step": 16634 }, { "epoch": 0.6894193708815118, "grad_norm": 0.39476102590560913, "learning_rate": 1.553110365120809e-06, "loss": 0.6627, "step": 16635 }, { "epoch": 
0.6894608147871856, "grad_norm": 0.43434375524520874, "learning_rate": 1.5529031455924407e-06, "loss": 0.675, "step": 16636 }, { "epoch": 0.6895022586928592, "grad_norm": 0.41737160086631775, "learning_rate": 1.5526959260640723e-06, "loss": 0.6492, "step": 16637 }, { "epoch": 0.6895437025985329, "grad_norm": 0.3905733525753021, "learning_rate": 1.5524887065357042e-06, "loss": 0.6401, "step": 16638 }, { "epoch": 0.6895851465042065, "grad_norm": 0.4114612638950348, "learning_rate": 1.5522814870073358e-06, "loss": 0.6372, "step": 16639 }, { "epoch": 0.6896265904098803, "grad_norm": 0.4302862882614136, "learning_rate": 1.5520742674789673e-06, "loss": 0.6765, "step": 16640 }, { "epoch": 0.6896680343155539, "grad_norm": 0.4326808452606201, "learning_rate": 1.551867047950599e-06, "loss": 0.6886, "step": 16641 }, { "epoch": 0.6897094782212275, "grad_norm": 0.4120123088359833, "learning_rate": 1.5516598284222305e-06, "loss": 0.6305, "step": 16642 }, { "epoch": 0.6897509221269013, "grad_norm": 0.3964245319366455, "learning_rate": 1.5514526088938621e-06, "loss": 0.6846, "step": 16643 }, { "epoch": 0.6897923660325749, "grad_norm": 0.42278286814689636, "learning_rate": 1.551245389365494e-06, "loss": 0.7108, "step": 16644 }, { "epoch": 0.6898338099382486, "grad_norm": 0.4320739805698395, "learning_rate": 1.5510381698371255e-06, "loss": 0.656, "step": 16645 }, { "epoch": 0.6898752538439222, "grad_norm": 0.42302489280700684, "learning_rate": 1.5508309503087574e-06, "loss": 0.6881, "step": 16646 }, { "epoch": 0.689916697749596, "grad_norm": 0.42559999227523804, "learning_rate": 1.550623730780389e-06, "loss": 0.6848, "step": 16647 }, { "epoch": 0.6899581416552696, "grad_norm": 0.42445310950279236, "learning_rate": 1.5504165112520206e-06, "loss": 0.665, "step": 16648 }, { "epoch": 0.6899995855609433, "grad_norm": 0.4359683692455292, "learning_rate": 1.5502092917236521e-06, "loss": 0.6562, "step": 16649 }, { "epoch": 0.6900410294666169, "grad_norm": 0.4109833836555481, 
"learning_rate": 1.5500020721952837e-06, "loss": 0.6897, "step": 16650 }, { "epoch": 0.6900824733722906, "grad_norm": 0.4030595123767853, "learning_rate": 1.5497948526669153e-06, "loss": 0.6656, "step": 16651 }, { "epoch": 0.6901239172779643, "grad_norm": 0.4421241581439972, "learning_rate": 1.549587633138547e-06, "loss": 0.653, "step": 16652 }, { "epoch": 0.6901653611836379, "grad_norm": 0.4107217788696289, "learning_rate": 1.5493804136101787e-06, "loss": 0.6704, "step": 16653 }, { "epoch": 0.6902068050893116, "grad_norm": 0.4132261276245117, "learning_rate": 1.5491731940818103e-06, "loss": 0.6769, "step": 16654 }, { "epoch": 0.6902482489949853, "grad_norm": 0.40674081444740295, "learning_rate": 1.5489659745534422e-06, "loss": 0.6672, "step": 16655 }, { "epoch": 0.690289692900659, "grad_norm": 0.41795578598976135, "learning_rate": 1.5487587550250738e-06, "loss": 0.7114, "step": 16656 }, { "epoch": 0.6903311368063326, "grad_norm": 0.4428230822086334, "learning_rate": 1.5485515354967054e-06, "loss": 0.7246, "step": 16657 }, { "epoch": 0.6903725807120062, "grad_norm": 0.3923572301864624, "learning_rate": 1.548344315968337e-06, "loss": 0.6683, "step": 16658 }, { "epoch": 0.69041402461768, "grad_norm": 0.43894433975219727, "learning_rate": 1.5481370964399685e-06, "loss": 0.6902, "step": 16659 }, { "epoch": 0.6904554685233536, "grad_norm": 0.4109112024307251, "learning_rate": 1.5479298769116001e-06, "loss": 0.6685, "step": 16660 }, { "epoch": 0.6904969124290273, "grad_norm": 0.41837331652641296, "learning_rate": 1.547722657383232e-06, "loss": 0.7336, "step": 16661 }, { "epoch": 0.690538356334701, "grad_norm": 0.40523672103881836, "learning_rate": 1.5475154378548635e-06, "loss": 0.684, "step": 16662 }, { "epoch": 0.6905798002403747, "grad_norm": 0.4326830506324768, "learning_rate": 1.5473082183264951e-06, "loss": 0.7327, "step": 16663 }, { "epoch": 0.6906212441460483, "grad_norm": 0.4027855396270752, "learning_rate": 1.547100998798127e-06, "loss": 0.6208, "step": 16664 
}, { "epoch": 0.690662688051722, "grad_norm": 0.3977644443511963, "learning_rate": 1.5468937792697586e-06, "loss": 0.6669, "step": 16665 }, { "epoch": 0.6907041319573957, "grad_norm": 0.45860642194747925, "learning_rate": 1.5466865597413902e-06, "loss": 0.7107, "step": 16666 }, { "epoch": 0.6907455758630693, "grad_norm": 0.39222654700279236, "learning_rate": 1.5464793402130217e-06, "loss": 0.6348, "step": 16667 }, { "epoch": 0.690787019768743, "grad_norm": 0.45074620842933655, "learning_rate": 1.5462721206846533e-06, "loss": 0.7476, "step": 16668 }, { "epoch": 0.6908284636744166, "grad_norm": 0.4063180685043335, "learning_rate": 1.546064901156285e-06, "loss": 0.6587, "step": 16669 }, { "epoch": 0.6908699075800904, "grad_norm": 0.40964141488075256, "learning_rate": 1.5458576816279168e-06, "loss": 0.6631, "step": 16670 }, { "epoch": 0.690911351485764, "grad_norm": 0.42978546023368835, "learning_rate": 1.5456504620995483e-06, "loss": 0.6721, "step": 16671 }, { "epoch": 0.6909527953914377, "grad_norm": 0.4148223400115967, "learning_rate": 1.5454432425711802e-06, "loss": 0.7114, "step": 16672 }, { "epoch": 0.6909942392971113, "grad_norm": 0.40420371294021606, "learning_rate": 1.5452360230428118e-06, "loss": 0.6989, "step": 16673 }, { "epoch": 0.6910356832027851, "grad_norm": 0.39026448130607605, "learning_rate": 1.5450288035144434e-06, "loss": 0.6561, "step": 16674 }, { "epoch": 0.6910771271084587, "grad_norm": 0.37360256910324097, "learning_rate": 1.544821583986075e-06, "loss": 0.6057, "step": 16675 }, { "epoch": 0.6911185710141323, "grad_norm": 0.3920608460903168, "learning_rate": 1.5446143644577065e-06, "loss": 0.678, "step": 16676 }, { "epoch": 0.691160014919806, "grad_norm": 0.4223755896091461, "learning_rate": 1.5444071449293381e-06, "loss": 0.7416, "step": 16677 }, { "epoch": 0.6912014588254797, "grad_norm": 0.4065549075603485, "learning_rate": 1.54419992540097e-06, "loss": 0.7261, "step": 16678 }, { "epoch": 0.6912429027311534, "grad_norm": 0.39833980798721313, 
"learning_rate": 1.5439927058726016e-06, "loss": 0.6622, "step": 16679 }, { "epoch": 0.691284346636827, "grad_norm": 0.4140157997608185, "learning_rate": 1.5437854863442331e-06, "loss": 0.649, "step": 16680 }, { "epoch": 0.6913257905425008, "grad_norm": 0.42184978723526, "learning_rate": 1.543578266815865e-06, "loss": 0.688, "step": 16681 }, { "epoch": 0.6913672344481744, "grad_norm": 0.39209532737731934, "learning_rate": 1.5433710472874966e-06, "loss": 0.6313, "step": 16682 }, { "epoch": 0.6914086783538481, "grad_norm": 0.4041922688484192, "learning_rate": 1.5431638277591282e-06, "loss": 0.6193, "step": 16683 }, { "epoch": 0.6914501222595217, "grad_norm": 0.40010058879852295, "learning_rate": 1.5429566082307598e-06, "loss": 0.6494, "step": 16684 }, { "epoch": 0.6914915661651954, "grad_norm": 0.42123520374298096, "learning_rate": 1.5427493887023913e-06, "loss": 0.662, "step": 16685 }, { "epoch": 0.6915330100708691, "grad_norm": 0.4396835267543793, "learning_rate": 1.542542169174023e-06, "loss": 0.6631, "step": 16686 }, { "epoch": 0.6915744539765427, "grad_norm": 0.4054194688796997, "learning_rate": 1.5423349496456548e-06, "loss": 0.6172, "step": 16687 }, { "epoch": 0.6916158978822164, "grad_norm": 0.4175710380077362, "learning_rate": 1.5421277301172864e-06, "loss": 0.6493, "step": 16688 }, { "epoch": 0.6916573417878901, "grad_norm": 0.43800342082977295, "learning_rate": 1.541920510588918e-06, "loss": 0.7166, "step": 16689 }, { "epoch": 0.6916987856935638, "grad_norm": 0.47093433141708374, "learning_rate": 1.5417132910605498e-06, "loss": 0.667, "step": 16690 }, { "epoch": 0.6917402295992374, "grad_norm": 0.43820780515670776, "learning_rate": 1.5415060715321814e-06, "loss": 0.7324, "step": 16691 }, { "epoch": 0.6917816735049112, "grad_norm": 0.4080968499183655, "learning_rate": 1.541298852003813e-06, "loss": 0.6655, "step": 16692 }, { "epoch": 0.6918231174105848, "grad_norm": 0.40456321835517883, "learning_rate": 1.5410916324754446e-06, "loss": 0.6959, "step": 16693 
}, { "epoch": 0.6918645613162584, "grad_norm": 0.410861074924469, "learning_rate": 1.5408844129470761e-06, "loss": 0.6814, "step": 16694 }, { "epoch": 0.6919060052219321, "grad_norm": 0.44444844126701355, "learning_rate": 1.540677193418708e-06, "loss": 0.6663, "step": 16695 }, { "epoch": 0.6919474491276058, "grad_norm": 0.40606433153152466, "learning_rate": 1.5404699738903396e-06, "loss": 0.6707, "step": 16696 }, { "epoch": 0.6919888930332795, "grad_norm": 0.4321768283843994, "learning_rate": 1.5402627543619712e-06, "loss": 0.6799, "step": 16697 }, { "epoch": 0.6920303369389531, "grad_norm": 0.44445449113845825, "learning_rate": 1.540055534833603e-06, "loss": 0.6613, "step": 16698 }, { "epoch": 0.6920717808446268, "grad_norm": 0.4180211126804352, "learning_rate": 1.5398483153052346e-06, "loss": 0.642, "step": 16699 }, { "epoch": 0.6921132247503005, "grad_norm": 0.4246850907802582, "learning_rate": 1.5396410957768662e-06, "loss": 0.7034, "step": 16700 }, { "epoch": 0.6921546686559742, "grad_norm": 0.42065727710723877, "learning_rate": 1.5394338762484978e-06, "loss": 0.682, "step": 16701 }, { "epoch": 0.6921961125616478, "grad_norm": 0.4103626012802124, "learning_rate": 1.5392266567201294e-06, "loss": 0.6591, "step": 16702 }, { "epoch": 0.6922375564673214, "grad_norm": 0.4429527521133423, "learning_rate": 1.5390194371917612e-06, "loss": 0.6428, "step": 16703 }, { "epoch": 0.6922790003729952, "grad_norm": 0.4589152932167053, "learning_rate": 1.5388122176633928e-06, "loss": 0.6707, "step": 16704 }, { "epoch": 0.6923204442786688, "grad_norm": 0.4238857328891754, "learning_rate": 1.5386049981350244e-06, "loss": 0.699, "step": 16705 }, { "epoch": 0.6923618881843425, "grad_norm": 0.41641494631767273, "learning_rate": 1.538397778606656e-06, "loss": 0.6094, "step": 16706 }, { "epoch": 0.6924033320900161, "grad_norm": 0.42477741837501526, "learning_rate": 1.5381905590782878e-06, "loss": 0.6698, "step": 16707 }, { "epoch": 0.6924447759956899, "grad_norm": 0.40682101249694824, 
"learning_rate": 1.5379833395499194e-06, "loss": 0.6687, "step": 16708 }, { "epoch": 0.6924862199013635, "grad_norm": 0.43637245893478394, "learning_rate": 1.537776120021551e-06, "loss": 0.6957, "step": 16709 }, { "epoch": 0.6925276638070372, "grad_norm": 0.413145512342453, "learning_rate": 1.5375689004931826e-06, "loss": 0.7073, "step": 16710 }, { "epoch": 0.6925691077127109, "grad_norm": 0.40275755524635315, "learning_rate": 1.5373616809648142e-06, "loss": 0.6587, "step": 16711 }, { "epoch": 0.6926105516183845, "grad_norm": 0.4630493223667145, "learning_rate": 1.537154461436446e-06, "loss": 0.6951, "step": 16712 }, { "epoch": 0.6926519955240582, "grad_norm": 0.3768662214279175, "learning_rate": 1.5369472419080776e-06, "loss": 0.708, "step": 16713 }, { "epoch": 0.6926934394297318, "grad_norm": 0.4116923213005066, "learning_rate": 1.5367400223797092e-06, "loss": 0.6056, "step": 16714 }, { "epoch": 0.6927348833354056, "grad_norm": 0.42894136905670166, "learning_rate": 1.5365328028513408e-06, "loss": 0.6782, "step": 16715 }, { "epoch": 0.6927763272410792, "grad_norm": 0.4153902530670166, "learning_rate": 1.5363255833229726e-06, "loss": 0.6897, "step": 16716 }, { "epoch": 0.6928177711467529, "grad_norm": 0.43107399344444275, "learning_rate": 1.5361183637946042e-06, "loss": 0.7162, "step": 16717 }, { "epoch": 0.6928592150524265, "grad_norm": 0.39449232816696167, "learning_rate": 1.5359111442662358e-06, "loss": 0.6477, "step": 16718 }, { "epoch": 0.6929006589581002, "grad_norm": 0.45432373881340027, "learning_rate": 1.5357039247378674e-06, "loss": 0.6836, "step": 16719 }, { "epoch": 0.6929421028637739, "grad_norm": 0.42863452434539795, "learning_rate": 1.5354967052094992e-06, "loss": 0.7517, "step": 16720 }, { "epoch": 0.6929835467694475, "grad_norm": 0.4364698827266693, "learning_rate": 1.5352894856811308e-06, "loss": 0.6488, "step": 16721 }, { "epoch": 0.6930249906751212, "grad_norm": 0.37991440296173096, "learning_rate": 1.5350822661527624e-06, "loss": 0.6276, 
"step": 16722 }, { "epoch": 0.6930664345807949, "grad_norm": 0.4021202623844147, "learning_rate": 1.534875046624394e-06, "loss": 0.7272, "step": 16723 }, { "epoch": 0.6931078784864686, "grad_norm": 0.4254559874534607, "learning_rate": 1.5346678270960256e-06, "loss": 0.7212, "step": 16724 }, { "epoch": 0.6931493223921422, "grad_norm": 0.41879189014434814, "learning_rate": 1.5344606075676574e-06, "loss": 0.6898, "step": 16725 }, { "epoch": 0.693190766297816, "grad_norm": 0.4046343266963959, "learning_rate": 1.534253388039289e-06, "loss": 0.6903, "step": 16726 }, { "epoch": 0.6932322102034896, "grad_norm": 0.4003014862537384, "learning_rate": 1.5340461685109206e-06, "loss": 0.6875, "step": 16727 }, { "epoch": 0.6932736541091632, "grad_norm": 0.40442928671836853, "learning_rate": 1.5338389489825522e-06, "loss": 0.6763, "step": 16728 }, { "epoch": 0.6933150980148369, "grad_norm": 0.4136636555194855, "learning_rate": 1.533631729454184e-06, "loss": 0.6602, "step": 16729 }, { "epoch": 0.6933565419205105, "grad_norm": 0.39174598455429077, "learning_rate": 1.5334245099258156e-06, "loss": 0.6753, "step": 16730 }, { "epoch": 0.6933979858261843, "grad_norm": 0.4188128411769867, "learning_rate": 1.5332172903974472e-06, "loss": 0.6621, "step": 16731 }, { "epoch": 0.6934394297318579, "grad_norm": 0.42641279101371765, "learning_rate": 1.5330100708690788e-06, "loss": 0.6665, "step": 16732 }, { "epoch": 0.6934808736375316, "grad_norm": 0.4473981261253357, "learning_rate": 1.5328028513407106e-06, "loss": 0.7188, "step": 16733 }, { "epoch": 0.6935223175432053, "grad_norm": 0.45557376742362976, "learning_rate": 1.5325956318123422e-06, "loss": 0.7129, "step": 16734 }, { "epoch": 0.693563761448879, "grad_norm": 0.4230588674545288, "learning_rate": 1.5323884122839738e-06, "loss": 0.6581, "step": 16735 }, { "epoch": 0.6936052053545526, "grad_norm": 0.38401761651039124, "learning_rate": 1.5321811927556054e-06, "loss": 0.6869, "step": 16736 }, { "epoch": 0.6936466492602262, "grad_norm": 
0.4082692563533783, "learning_rate": 1.5319739732272372e-06, "loss": 0.6768, "step": 16737 }, { "epoch": 0.6936880931659, "grad_norm": 0.39676159620285034, "learning_rate": 1.5317667536988688e-06, "loss": 0.677, "step": 16738 }, { "epoch": 0.6937295370715736, "grad_norm": 0.4020856022834778, "learning_rate": 1.5315595341705004e-06, "loss": 0.7216, "step": 16739 }, { "epoch": 0.6937709809772473, "grad_norm": 0.4189518094062805, "learning_rate": 1.531352314642132e-06, "loss": 0.6577, "step": 16740 }, { "epoch": 0.6938124248829209, "grad_norm": 0.39654698967933655, "learning_rate": 1.5311450951137636e-06, "loss": 0.6797, "step": 16741 }, { "epoch": 0.6938538687885947, "grad_norm": 0.4032168984413147, "learning_rate": 1.5309378755853954e-06, "loss": 0.6045, "step": 16742 }, { "epoch": 0.6938953126942683, "grad_norm": 0.389242947101593, "learning_rate": 1.530730656057027e-06, "loss": 0.6562, "step": 16743 }, { "epoch": 0.693936756599942, "grad_norm": 0.4361919164657593, "learning_rate": 1.5305234365286586e-06, "loss": 0.6545, "step": 16744 }, { "epoch": 0.6939782005056156, "grad_norm": 0.3848486840724945, "learning_rate": 1.5303162170002902e-06, "loss": 0.6483, "step": 16745 }, { "epoch": 0.6940196444112893, "grad_norm": 0.39408209919929504, "learning_rate": 1.530108997471922e-06, "loss": 0.6943, "step": 16746 }, { "epoch": 0.694061088316963, "grad_norm": 0.418505996465683, "learning_rate": 1.5299017779435536e-06, "loss": 0.6986, "step": 16747 }, { "epoch": 0.6941025322226366, "grad_norm": 0.40993648767471313, "learning_rate": 1.5296945584151852e-06, "loss": 0.6577, "step": 16748 }, { "epoch": 0.6941439761283104, "grad_norm": 0.4200143814086914, "learning_rate": 1.5294873388868168e-06, "loss": 0.7361, "step": 16749 }, { "epoch": 0.694185420033984, "grad_norm": 0.41309866309165955, "learning_rate": 1.5292801193584484e-06, "loss": 0.7214, "step": 16750 }, { "epoch": 0.6942268639396577, "grad_norm": 0.41637322306632996, "learning_rate": 1.5290728998300802e-06, "loss": 
0.6411, "step": 16751 }, { "epoch": 0.6942683078453313, "grad_norm": 0.4240413308143616, "learning_rate": 1.5288656803017118e-06, "loss": 0.6815, "step": 16752 }, { "epoch": 0.6943097517510051, "grad_norm": 0.41010725498199463, "learning_rate": 1.5286584607733434e-06, "loss": 0.6743, "step": 16753 }, { "epoch": 0.6943511956566787, "grad_norm": 0.4233991205692291, "learning_rate": 1.5284512412449752e-06, "loss": 0.7251, "step": 16754 }, { "epoch": 0.6943926395623523, "grad_norm": 0.4166107773780823, "learning_rate": 1.5282440217166068e-06, "loss": 0.6938, "step": 16755 }, { "epoch": 0.694434083468026, "grad_norm": 0.4158385097980499, "learning_rate": 1.5280368021882384e-06, "loss": 0.6731, "step": 16756 }, { "epoch": 0.6944755273736997, "grad_norm": 0.4258190393447876, "learning_rate": 1.52782958265987e-06, "loss": 0.6572, "step": 16757 }, { "epoch": 0.6945169712793734, "grad_norm": 0.4180450737476349, "learning_rate": 1.5276223631315016e-06, "loss": 0.6733, "step": 16758 }, { "epoch": 0.694558415185047, "grad_norm": 0.41732221841812134, "learning_rate": 1.5274151436031334e-06, "loss": 0.6731, "step": 16759 }, { "epoch": 0.6945998590907208, "grad_norm": 0.41948410868644714, "learning_rate": 1.527207924074765e-06, "loss": 0.6478, "step": 16760 }, { "epoch": 0.6946413029963944, "grad_norm": 0.39481037855148315, "learning_rate": 1.5270007045463966e-06, "loss": 0.7046, "step": 16761 }, { "epoch": 0.6946827469020681, "grad_norm": 0.41728588938713074, "learning_rate": 1.5267934850180282e-06, "loss": 0.6885, "step": 16762 }, { "epoch": 0.6947241908077417, "grad_norm": 0.41394034028053284, "learning_rate": 1.52658626548966e-06, "loss": 0.6515, "step": 16763 }, { "epoch": 0.6947656347134153, "grad_norm": 0.4248751401901245, "learning_rate": 1.5263790459612916e-06, "loss": 0.709, "step": 16764 }, { "epoch": 0.6948070786190891, "grad_norm": 0.3850202262401581, "learning_rate": 1.5261718264329232e-06, "loss": 0.6534, "step": 16765 }, { "epoch": 0.6948485225247627, "grad_norm": 
0.44443872570991516, "learning_rate": 1.5259646069045548e-06, "loss": 0.7002, "step": 16766 }, { "epoch": 0.6948899664304364, "grad_norm": 0.4599267840385437, "learning_rate": 1.5257573873761864e-06, "loss": 0.6987, "step": 16767 }, { "epoch": 0.6949314103361101, "grad_norm": 0.4172816276550293, "learning_rate": 1.5255501678478182e-06, "loss": 0.6709, "step": 16768 }, { "epoch": 0.6949728542417838, "grad_norm": 0.4471736252307892, "learning_rate": 1.5253429483194498e-06, "loss": 0.696, "step": 16769 }, { "epoch": 0.6950142981474574, "grad_norm": 0.4285956919193268, "learning_rate": 1.5251357287910814e-06, "loss": 0.6322, "step": 16770 }, { "epoch": 0.6950557420531311, "grad_norm": 0.39196693897247314, "learning_rate": 1.5249285092627132e-06, "loss": 0.6858, "step": 16771 }, { "epoch": 0.6950971859588048, "grad_norm": 0.39466962218284607, "learning_rate": 1.5247212897343448e-06, "loss": 0.6902, "step": 16772 }, { "epoch": 0.6951386298644784, "grad_norm": 0.43944862484931946, "learning_rate": 1.5245140702059764e-06, "loss": 0.6694, "step": 16773 }, { "epoch": 0.6951800737701521, "grad_norm": 0.4425968825817108, "learning_rate": 1.524306850677608e-06, "loss": 0.6812, "step": 16774 }, { "epoch": 0.6952215176758257, "grad_norm": 0.4463377296924591, "learning_rate": 1.5240996311492396e-06, "loss": 0.6783, "step": 16775 }, { "epoch": 0.6952629615814995, "grad_norm": 0.37946656346321106, "learning_rate": 1.5238924116208712e-06, "loss": 0.6787, "step": 16776 }, { "epoch": 0.6953044054871731, "grad_norm": 0.4161308705806732, "learning_rate": 1.523685192092503e-06, "loss": 0.6813, "step": 16777 }, { "epoch": 0.6953458493928468, "grad_norm": 0.4392637610435486, "learning_rate": 1.5234779725641346e-06, "loss": 0.6874, "step": 16778 }, { "epoch": 0.6953872932985204, "grad_norm": 0.4265557825565338, "learning_rate": 1.5232707530357662e-06, "loss": 0.6833, "step": 16779 }, { "epoch": 0.6954287372041941, "grad_norm": 0.4445050060749054, "learning_rate": 1.523063533507398e-06, 
"loss": 0.6892, "step": 16780 }, { "epoch": 0.6954701811098678, "grad_norm": 0.42713114619255066, "learning_rate": 1.5228563139790296e-06, "loss": 0.6965, "step": 16781 }, { "epoch": 0.6955116250155414, "grad_norm": 0.3750733733177185, "learning_rate": 1.5226490944506612e-06, "loss": 0.6562, "step": 16782 }, { "epoch": 0.6955530689212152, "grad_norm": 0.42506036162376404, "learning_rate": 1.5224418749222928e-06, "loss": 0.6718, "step": 16783 }, { "epoch": 0.6955945128268888, "grad_norm": 0.42093586921691895, "learning_rate": 1.5222346553939244e-06, "loss": 0.7211, "step": 16784 }, { "epoch": 0.6956359567325625, "grad_norm": 0.3898966312408447, "learning_rate": 1.522027435865556e-06, "loss": 0.6814, "step": 16785 }, { "epoch": 0.6956774006382361, "grad_norm": 0.4490966796875, "learning_rate": 1.5218202163371878e-06, "loss": 0.6709, "step": 16786 }, { "epoch": 0.6957188445439099, "grad_norm": 0.4603123664855957, "learning_rate": 1.5216129968088194e-06, "loss": 0.7029, "step": 16787 }, { "epoch": 0.6957602884495835, "grad_norm": 0.4312245547771454, "learning_rate": 1.5214057772804512e-06, "loss": 0.7288, "step": 16788 }, { "epoch": 0.6958017323552571, "grad_norm": 0.4495743215084076, "learning_rate": 1.5211985577520828e-06, "loss": 0.7168, "step": 16789 }, { "epoch": 0.6958431762609308, "grad_norm": 0.42483657598495483, "learning_rate": 1.5209913382237144e-06, "loss": 0.6938, "step": 16790 }, { "epoch": 0.6958846201666045, "grad_norm": 0.3786216080188751, "learning_rate": 1.520784118695346e-06, "loss": 0.6505, "step": 16791 }, { "epoch": 0.6959260640722782, "grad_norm": 0.4108757972717285, "learning_rate": 1.5205768991669776e-06, "loss": 0.6202, "step": 16792 }, { "epoch": 0.6959675079779518, "grad_norm": 0.42579811811447144, "learning_rate": 1.5203696796386092e-06, "loss": 0.6873, "step": 16793 }, { "epoch": 0.6960089518836255, "grad_norm": 0.4422059655189514, "learning_rate": 1.520162460110241e-06, "loss": 0.6714, "step": 16794 }, { "epoch": 0.6960503957892992, 
"grad_norm": 0.3904128074645996, "learning_rate": 1.5199552405818726e-06, "loss": 0.657, "step": 16795 }, { "epoch": 0.6960918396949729, "grad_norm": 0.42671409249305725, "learning_rate": 1.5197480210535044e-06, "loss": 0.7234, "step": 16796 }, { "epoch": 0.6961332836006465, "grad_norm": 0.450268417596817, "learning_rate": 1.519540801525136e-06, "loss": 0.7026, "step": 16797 }, { "epoch": 0.6961747275063201, "grad_norm": 0.43762993812561035, "learning_rate": 1.5193335819967676e-06, "loss": 0.6362, "step": 16798 }, { "epoch": 0.6962161714119939, "grad_norm": 0.40904274582862854, "learning_rate": 1.5191263624683992e-06, "loss": 0.661, "step": 16799 }, { "epoch": 0.6962576153176675, "grad_norm": 0.4229663610458374, "learning_rate": 1.5189191429400308e-06, "loss": 0.7126, "step": 16800 }, { "epoch": 0.6962990592233412, "grad_norm": 0.4693845808506012, "learning_rate": 1.5187119234116624e-06, "loss": 0.7423, "step": 16801 }, { "epoch": 0.6963405031290149, "grad_norm": 0.38161811232566833, "learning_rate": 1.518504703883294e-06, "loss": 0.6063, "step": 16802 }, { "epoch": 0.6963819470346886, "grad_norm": 0.4070481061935425, "learning_rate": 1.5182974843549258e-06, "loss": 0.6562, "step": 16803 }, { "epoch": 0.6964233909403622, "grad_norm": 0.42256149649620056, "learning_rate": 1.5180902648265574e-06, "loss": 0.6943, "step": 16804 }, { "epoch": 0.6964648348460359, "grad_norm": 0.3968719244003296, "learning_rate": 1.5178830452981892e-06, "loss": 0.6415, "step": 16805 }, { "epoch": 0.6965062787517096, "grad_norm": 0.4523000717163086, "learning_rate": 1.5176758257698208e-06, "loss": 0.7175, "step": 16806 }, { "epoch": 0.6965477226573832, "grad_norm": 0.40705400705337524, "learning_rate": 1.5174686062414524e-06, "loss": 0.6466, "step": 16807 }, { "epoch": 0.6965891665630569, "grad_norm": 0.39460861682891846, "learning_rate": 1.517261386713084e-06, "loss": 0.6168, "step": 16808 }, { "epoch": 0.6966306104687305, "grad_norm": 0.3971591293811798, "learning_rate": 
1.5170541671847156e-06, "loss": 0.6415, "step": 16809 }, { "epoch": 0.6966720543744043, "grad_norm": 0.42209601402282715, "learning_rate": 1.5168469476563472e-06, "loss": 0.6923, "step": 16810 }, { "epoch": 0.6967134982800779, "grad_norm": 0.4125151038169861, "learning_rate": 1.5166397281279788e-06, "loss": 0.6766, "step": 16811 }, { "epoch": 0.6967549421857516, "grad_norm": 0.39692404866218567, "learning_rate": 1.5164325085996106e-06, "loss": 0.6671, "step": 16812 }, { "epoch": 0.6967963860914252, "grad_norm": 0.4617835581302643, "learning_rate": 1.5162252890712424e-06, "loss": 0.6876, "step": 16813 }, { "epoch": 0.696837829997099, "grad_norm": 0.4173271656036377, "learning_rate": 1.516018069542874e-06, "loss": 0.6738, "step": 16814 }, { "epoch": 0.6968792739027726, "grad_norm": 0.4262498915195465, "learning_rate": 1.5158108500145056e-06, "loss": 0.7068, "step": 16815 }, { "epoch": 0.6969207178084462, "grad_norm": 0.48951685428619385, "learning_rate": 1.5156036304861372e-06, "loss": 0.7024, "step": 16816 }, { "epoch": 0.69696216171412, "grad_norm": 0.4242796301841736, "learning_rate": 1.5153964109577688e-06, "loss": 0.6973, "step": 16817 }, { "epoch": 0.6970036056197936, "grad_norm": 0.3965175449848175, "learning_rate": 1.5151891914294004e-06, "loss": 0.6591, "step": 16818 }, { "epoch": 0.6970450495254673, "grad_norm": 0.42197319865226746, "learning_rate": 1.514981971901032e-06, "loss": 0.6859, "step": 16819 }, { "epoch": 0.6970864934311409, "grad_norm": 0.3900800347328186, "learning_rate": 1.5147747523726638e-06, "loss": 0.6915, "step": 16820 }, { "epoch": 0.6971279373368147, "grad_norm": 0.40391185879707336, "learning_rate": 1.5145675328442954e-06, "loss": 0.6527, "step": 16821 }, { "epoch": 0.6971693812424883, "grad_norm": 0.39946305751800537, "learning_rate": 1.5143603133159272e-06, "loss": 0.6371, "step": 16822 }, { "epoch": 0.697210825148162, "grad_norm": 0.4062614142894745, "learning_rate": 1.5141530937875588e-06, "loss": 0.6676, "step": 16823 }, { "epoch": 
0.6972522690538356, "grad_norm": 0.42598265409469604, "learning_rate": 1.5139458742591904e-06, "loss": 0.708, "step": 16824 }, { "epoch": 0.6972937129595093, "grad_norm": 0.415072500705719, "learning_rate": 1.513738654730822e-06, "loss": 0.6641, "step": 16825 }, { "epoch": 0.697335156865183, "grad_norm": 0.44375693798065186, "learning_rate": 1.5135314352024536e-06, "loss": 0.6763, "step": 16826 }, { "epoch": 0.6973766007708566, "grad_norm": 0.4355472922325134, "learning_rate": 1.5133242156740852e-06, "loss": 0.7122, "step": 16827 }, { "epoch": 0.6974180446765303, "grad_norm": 0.42468228936195374, "learning_rate": 1.5131169961457168e-06, "loss": 0.6796, "step": 16828 }, { "epoch": 0.697459488582204, "grad_norm": 0.41289567947387695, "learning_rate": 1.5129097766173486e-06, "loss": 0.6519, "step": 16829 }, { "epoch": 0.6975009324878777, "grad_norm": 0.38233935832977295, "learning_rate": 1.5127025570889804e-06, "loss": 0.6549, "step": 16830 }, { "epoch": 0.6975423763935513, "grad_norm": 0.41228243708610535, "learning_rate": 1.512495337560612e-06, "loss": 0.6597, "step": 16831 }, { "epoch": 0.6975838202992249, "grad_norm": 0.42211341857910156, "learning_rate": 1.5122881180322436e-06, "loss": 0.7007, "step": 16832 }, { "epoch": 0.6976252642048987, "grad_norm": 0.41498398780822754, "learning_rate": 1.5120808985038752e-06, "loss": 0.6923, "step": 16833 }, { "epoch": 0.6976667081105723, "grad_norm": 0.4597736895084381, "learning_rate": 1.5118736789755068e-06, "loss": 0.7036, "step": 16834 }, { "epoch": 0.697708152016246, "grad_norm": 0.3906548023223877, "learning_rate": 1.5116664594471384e-06, "loss": 0.6792, "step": 16835 }, { "epoch": 0.6977495959219197, "grad_norm": 0.4018573462963104, "learning_rate": 1.51145923991877e-06, "loss": 0.6367, "step": 16836 }, { "epoch": 0.6977910398275934, "grad_norm": 0.41260090470314026, "learning_rate": 1.5112520203904016e-06, "loss": 0.7161, "step": 16837 }, { "epoch": 0.697832483733267, "grad_norm": 0.40674540400505066, 
"learning_rate": 1.5110448008620334e-06, "loss": 0.7073, "step": 16838 }, { "epoch": 0.6978739276389407, "grad_norm": 0.41956791281700134, "learning_rate": 1.5108375813336652e-06, "loss": 0.6954, "step": 16839 }, { "epoch": 0.6979153715446144, "grad_norm": 0.40299227833747864, "learning_rate": 1.5106303618052968e-06, "loss": 0.6693, "step": 16840 }, { "epoch": 0.697956815450288, "grad_norm": 0.46463340520858765, "learning_rate": 1.5104231422769284e-06, "loss": 0.77, "step": 16841 }, { "epoch": 0.6979982593559617, "grad_norm": 0.3964928686618805, "learning_rate": 1.51021592274856e-06, "loss": 0.6885, "step": 16842 }, { "epoch": 0.6980397032616353, "grad_norm": 0.41980281472206116, "learning_rate": 1.5100087032201916e-06, "loss": 0.6509, "step": 16843 }, { "epoch": 0.6980811471673091, "grad_norm": 0.4317905902862549, "learning_rate": 1.5098014836918232e-06, "loss": 0.6395, "step": 16844 }, { "epoch": 0.6981225910729827, "grad_norm": 0.40261349081993103, "learning_rate": 1.5095942641634548e-06, "loss": 0.6709, "step": 16845 }, { "epoch": 0.6981640349786564, "grad_norm": 0.43510785698890686, "learning_rate": 1.5093870446350864e-06, "loss": 0.6847, "step": 16846 }, { "epoch": 0.69820547888433, "grad_norm": 0.42978084087371826, "learning_rate": 1.5091798251067184e-06, "loss": 0.6941, "step": 16847 }, { "epoch": 0.6982469227900038, "grad_norm": 0.3732667565345764, "learning_rate": 1.50897260557835e-06, "loss": 0.635, "step": 16848 }, { "epoch": 0.6982883666956774, "grad_norm": 0.4033155143260956, "learning_rate": 1.5087653860499816e-06, "loss": 0.6013, "step": 16849 }, { "epoch": 0.698329810601351, "grad_norm": 0.3825395107269287, "learning_rate": 1.5085581665216132e-06, "loss": 0.652, "step": 16850 }, { "epoch": 0.6983712545070248, "grad_norm": 0.38410305976867676, "learning_rate": 1.5083509469932448e-06, "loss": 0.6948, "step": 16851 }, { "epoch": 0.6984126984126984, "grad_norm": 0.38327160477638245, "learning_rate": 1.5081437274648764e-06, "loss": 0.6569, "step": 16852 
}, { "epoch": 0.6984541423183721, "grad_norm": 0.4063495099544525, "learning_rate": 1.507936507936508e-06, "loss": 0.645, "step": 16853 }, { "epoch": 0.6984955862240457, "grad_norm": 0.3928200602531433, "learning_rate": 1.5077292884081396e-06, "loss": 0.6973, "step": 16854 }, { "epoch": 0.6985370301297195, "grad_norm": 0.41994142532348633, "learning_rate": 1.5075220688797714e-06, "loss": 0.6743, "step": 16855 }, { "epoch": 0.6985784740353931, "grad_norm": 0.38647305965423584, "learning_rate": 1.5073148493514032e-06, "loss": 0.6689, "step": 16856 }, { "epoch": 0.6986199179410668, "grad_norm": 0.43377214670181274, "learning_rate": 1.5071076298230348e-06, "loss": 0.7073, "step": 16857 }, { "epoch": 0.6986613618467404, "grad_norm": 0.432190865278244, "learning_rate": 1.5069004102946664e-06, "loss": 0.6925, "step": 16858 }, { "epoch": 0.6987028057524141, "grad_norm": 0.4318296015262604, "learning_rate": 1.506693190766298e-06, "loss": 0.6904, "step": 16859 }, { "epoch": 0.6987442496580878, "grad_norm": 0.4050523340702057, "learning_rate": 1.5064859712379296e-06, "loss": 0.6589, "step": 16860 }, { "epoch": 0.6987856935637614, "grad_norm": 0.40000247955322266, "learning_rate": 1.5062787517095612e-06, "loss": 0.6682, "step": 16861 }, { "epoch": 0.6988271374694351, "grad_norm": 0.406726598739624, "learning_rate": 1.5060715321811928e-06, "loss": 0.6846, "step": 16862 }, { "epoch": 0.6988685813751088, "grad_norm": 0.4119182825088501, "learning_rate": 1.5058643126528244e-06, "loss": 0.658, "step": 16863 }, { "epoch": 0.6989100252807825, "grad_norm": 0.4022960364818573, "learning_rate": 1.5056570931244564e-06, "loss": 0.6583, "step": 16864 }, { "epoch": 0.6989514691864561, "grad_norm": 0.3845854699611664, "learning_rate": 1.505449873596088e-06, "loss": 0.6912, "step": 16865 }, { "epoch": 0.6989929130921299, "grad_norm": 0.3953777849674225, "learning_rate": 1.5052426540677196e-06, "loss": 0.6174, "step": 16866 }, { "epoch": 0.6990343569978035, "grad_norm": 0.4566531181335449, 
"learning_rate": 1.5050354345393512e-06, "loss": 0.6931, "step": 16867 }, { "epoch": 0.6990758009034771, "grad_norm": 0.4375271201133728, "learning_rate": 1.5048282150109828e-06, "loss": 0.6443, "step": 16868 }, { "epoch": 0.6991172448091508, "grad_norm": 0.42125600576400757, "learning_rate": 1.5046209954826144e-06, "loss": 0.6792, "step": 16869 }, { "epoch": 0.6991586887148245, "grad_norm": 0.40954530239105225, "learning_rate": 1.504413775954246e-06, "loss": 0.6616, "step": 16870 }, { "epoch": 0.6992001326204982, "grad_norm": 0.4000544250011444, "learning_rate": 1.5042065564258776e-06, "loss": 0.6407, "step": 16871 }, { "epoch": 0.6992415765261718, "grad_norm": 0.4300186038017273, "learning_rate": 1.5039993368975092e-06, "loss": 0.7112, "step": 16872 }, { "epoch": 0.6992830204318455, "grad_norm": 0.44238343834877014, "learning_rate": 1.5037921173691412e-06, "loss": 0.6779, "step": 16873 }, { "epoch": 0.6993244643375192, "grad_norm": 0.406878799200058, "learning_rate": 1.5035848978407728e-06, "loss": 0.6429, "step": 16874 }, { "epoch": 0.6993659082431929, "grad_norm": 0.4408002197742462, "learning_rate": 1.5033776783124044e-06, "loss": 0.684, "step": 16875 }, { "epoch": 0.6994073521488665, "grad_norm": 0.414987176656723, "learning_rate": 1.503170458784036e-06, "loss": 0.6971, "step": 16876 }, { "epoch": 0.6994487960545401, "grad_norm": 0.5833993554115295, "learning_rate": 1.5029632392556676e-06, "loss": 0.6301, "step": 16877 }, { "epoch": 0.6994902399602139, "grad_norm": 0.4130208492279053, "learning_rate": 1.5027560197272992e-06, "loss": 0.6592, "step": 16878 }, { "epoch": 0.6995316838658875, "grad_norm": 0.4715719223022461, "learning_rate": 1.5025488001989308e-06, "loss": 0.6772, "step": 16879 }, { "epoch": 0.6995731277715612, "grad_norm": 0.39166057109832764, "learning_rate": 1.5023415806705624e-06, "loss": 0.668, "step": 16880 }, { "epoch": 0.6996145716772348, "grad_norm": 0.41646552085876465, "learning_rate": 1.5021343611421944e-06, "loss": 0.687, "step": 
16881 }, { "epoch": 0.6996560155829086, "grad_norm": 0.43060335516929626, "learning_rate": 1.501927141613826e-06, "loss": 0.6909, "step": 16882 }, { "epoch": 0.6996974594885822, "grad_norm": 0.4006277322769165, "learning_rate": 1.5017199220854576e-06, "loss": 0.6444, "step": 16883 }, { "epoch": 0.6997389033942559, "grad_norm": 0.40410366654396057, "learning_rate": 1.5015127025570892e-06, "loss": 0.6575, "step": 16884 }, { "epoch": 0.6997803472999296, "grad_norm": 0.4085201919078827, "learning_rate": 1.5013054830287208e-06, "loss": 0.7256, "step": 16885 }, { "epoch": 0.6998217912056032, "grad_norm": 0.3965468108654022, "learning_rate": 1.5010982635003524e-06, "loss": 0.6622, "step": 16886 }, { "epoch": 0.6998632351112769, "grad_norm": 0.3922404646873474, "learning_rate": 1.500891043971984e-06, "loss": 0.6522, "step": 16887 }, { "epoch": 0.6999046790169505, "grad_norm": 0.4113626778125763, "learning_rate": 1.5006838244436156e-06, "loss": 0.6141, "step": 16888 }, { "epoch": 0.6999461229226243, "grad_norm": 0.3975231945514679, "learning_rate": 1.5004766049152472e-06, "loss": 0.6646, "step": 16889 }, { "epoch": 0.6999875668282979, "grad_norm": 0.40247538685798645, "learning_rate": 1.5002693853868792e-06, "loss": 0.6548, "step": 16890 }, { "epoch": 0.7000290107339716, "grad_norm": 0.3989601135253906, "learning_rate": 1.5000621658585108e-06, "loss": 0.6954, "step": 16891 }, { "epoch": 0.7000704546396452, "grad_norm": 0.42328518629074097, "learning_rate": 1.4998549463301424e-06, "loss": 0.6854, "step": 16892 }, { "epoch": 0.7001118985453189, "grad_norm": 0.40688613057136536, "learning_rate": 1.499647726801774e-06, "loss": 0.6627, "step": 16893 }, { "epoch": 0.7001533424509926, "grad_norm": 0.4467812180519104, "learning_rate": 1.4994405072734056e-06, "loss": 0.6686, "step": 16894 }, { "epoch": 0.7001947863566662, "grad_norm": 0.3951134979724884, "learning_rate": 1.4992332877450372e-06, "loss": 0.6543, "step": 16895 }, { "epoch": 0.7002362302623399, "grad_norm": 
0.3915548622608185, "learning_rate": 1.4990260682166688e-06, "loss": 0.6696, "step": 16896 }, { "epoch": 0.7002776741680136, "grad_norm": 0.4269884526729584, "learning_rate": 1.4988188486883004e-06, "loss": 0.6206, "step": 16897 }, { "epoch": 0.7003191180736873, "grad_norm": 0.42337566614151, "learning_rate": 1.498611629159932e-06, "loss": 0.6331, "step": 16898 }, { "epoch": 0.7003605619793609, "grad_norm": 0.4235631227493286, "learning_rate": 1.498404409631564e-06, "loss": 0.6876, "step": 16899 }, { "epoch": 0.7004020058850347, "grad_norm": 0.41312679648399353, "learning_rate": 1.4981971901031956e-06, "loss": 0.657, "step": 16900 }, { "epoch": 0.7004434497907083, "grad_norm": 0.46847036480903625, "learning_rate": 1.4979899705748272e-06, "loss": 0.6699, "step": 16901 }, { "epoch": 0.7004848936963819, "grad_norm": 0.42012014985084534, "learning_rate": 1.4977827510464588e-06, "loss": 0.6663, "step": 16902 }, { "epoch": 0.7005263376020556, "grad_norm": 0.5709120631217957, "learning_rate": 1.4975755315180904e-06, "loss": 0.7129, "step": 16903 }, { "epoch": 0.7005677815077292, "grad_norm": 0.4306354224681854, "learning_rate": 1.497368311989722e-06, "loss": 0.6906, "step": 16904 }, { "epoch": 0.700609225413403, "grad_norm": 0.4431023597717285, "learning_rate": 1.4971610924613536e-06, "loss": 0.6997, "step": 16905 }, { "epoch": 0.7006506693190766, "grad_norm": 0.43004193902015686, "learning_rate": 1.4969538729329852e-06, "loss": 0.6741, "step": 16906 }, { "epoch": 0.7006921132247503, "grad_norm": 0.40990740060806274, "learning_rate": 1.4967466534046172e-06, "loss": 0.6495, "step": 16907 }, { "epoch": 0.700733557130424, "grad_norm": 0.3952885568141937, "learning_rate": 1.4965394338762488e-06, "loss": 0.6335, "step": 16908 }, { "epoch": 0.7007750010360977, "grad_norm": 0.4075387120246887, "learning_rate": 1.4963322143478804e-06, "loss": 0.6484, "step": 16909 }, { "epoch": 0.7008164449417713, "grad_norm": 0.40629440546035767, "learning_rate": 1.496124994819512e-06, "loss": 
0.6713, "step": 16910 }, { "epoch": 0.7008578888474449, "grad_norm": 0.41931506991386414, "learning_rate": 1.4959177752911436e-06, "loss": 0.649, "step": 16911 }, { "epoch": 0.7008993327531187, "grad_norm": 0.41404739022254944, "learning_rate": 1.4957105557627752e-06, "loss": 0.6538, "step": 16912 }, { "epoch": 0.7009407766587923, "grad_norm": 0.4480193853378296, "learning_rate": 1.4955033362344068e-06, "loss": 0.7324, "step": 16913 }, { "epoch": 0.700982220564466, "grad_norm": 0.41701433062553406, "learning_rate": 1.4952961167060384e-06, "loss": 0.7278, "step": 16914 }, { "epoch": 0.7010236644701396, "grad_norm": 0.3988775312900543, "learning_rate": 1.49508889717767e-06, "loss": 0.6853, "step": 16915 }, { "epoch": 0.7010651083758134, "grad_norm": 0.4116692543029785, "learning_rate": 1.494881677649302e-06, "loss": 0.6633, "step": 16916 }, { "epoch": 0.701106552281487, "grad_norm": 0.43091800808906555, "learning_rate": 1.4946744581209336e-06, "loss": 0.6558, "step": 16917 }, { "epoch": 0.7011479961871607, "grad_norm": 0.4412301480770111, "learning_rate": 1.4944672385925652e-06, "loss": 0.6689, "step": 16918 }, { "epoch": 0.7011894400928343, "grad_norm": 0.43008169531822205, "learning_rate": 1.4942600190641968e-06, "loss": 0.7034, "step": 16919 }, { "epoch": 0.701230883998508, "grad_norm": 0.43792837858200073, "learning_rate": 1.4940527995358284e-06, "loss": 0.7284, "step": 16920 }, { "epoch": 0.7012723279041817, "grad_norm": 0.3991003930568695, "learning_rate": 1.49384558000746e-06, "loss": 0.6267, "step": 16921 }, { "epoch": 0.7013137718098553, "grad_norm": 0.40029212832450867, "learning_rate": 1.4936383604790916e-06, "loss": 0.6514, "step": 16922 }, { "epoch": 0.7013552157155291, "grad_norm": 0.4225305914878845, "learning_rate": 1.4934311409507232e-06, "loss": 0.6763, "step": 16923 }, { "epoch": 0.7013966596212027, "grad_norm": 0.4079318940639496, "learning_rate": 1.4932239214223548e-06, "loss": 0.6503, "step": 16924 }, { "epoch": 0.7014381035268764, "grad_norm": 
0.46222391724586487, "learning_rate": 1.4930167018939868e-06, "loss": 0.6248, "step": 16925 }, { "epoch": 0.70147954743255, "grad_norm": 0.41309854388237, "learning_rate": 1.4928094823656184e-06, "loss": 0.72, "step": 16926 }, { "epoch": 0.7015209913382238, "grad_norm": 0.4318609833717346, "learning_rate": 1.49260226283725e-06, "loss": 0.6915, "step": 16927 }, { "epoch": 0.7015624352438974, "grad_norm": 0.3991871178150177, "learning_rate": 1.4923950433088816e-06, "loss": 0.6627, "step": 16928 }, { "epoch": 0.701603879149571, "grad_norm": 0.4203322231769562, "learning_rate": 1.4921878237805132e-06, "loss": 0.7135, "step": 16929 }, { "epoch": 0.7016453230552447, "grad_norm": 0.44132688641548157, "learning_rate": 1.4919806042521448e-06, "loss": 0.6299, "step": 16930 }, { "epoch": 0.7016867669609184, "grad_norm": 0.43255603313446045, "learning_rate": 1.4917733847237764e-06, "loss": 0.6721, "step": 16931 }, { "epoch": 0.7017282108665921, "grad_norm": 0.4208125174045563, "learning_rate": 1.491566165195408e-06, "loss": 0.6793, "step": 16932 }, { "epoch": 0.7017696547722657, "grad_norm": 0.4287651479244232, "learning_rate": 1.4913589456670396e-06, "loss": 0.6636, "step": 16933 }, { "epoch": 0.7018110986779394, "grad_norm": 0.4042983055114746, "learning_rate": 1.4911517261386716e-06, "loss": 0.6798, "step": 16934 }, { "epoch": 0.7018525425836131, "grad_norm": 0.44416797161102295, "learning_rate": 1.4909445066103032e-06, "loss": 0.7109, "step": 16935 }, { "epoch": 0.7018939864892868, "grad_norm": 0.4055388569831848, "learning_rate": 1.4907372870819348e-06, "loss": 0.7058, "step": 16936 }, { "epoch": 0.7019354303949604, "grad_norm": 0.48726895451545715, "learning_rate": 1.4905300675535664e-06, "loss": 0.7354, "step": 16937 }, { "epoch": 0.701976874300634, "grad_norm": 0.40984928607940674, "learning_rate": 1.490322848025198e-06, "loss": 0.665, "step": 16938 }, { "epoch": 0.7020183182063078, "grad_norm": 0.4156568646430969, "learning_rate": 1.4901156284968296e-06, "loss": 
0.6515, "step": 16939 }, { "epoch": 0.7020597621119814, "grad_norm": 0.4230828881263733, "learning_rate": 1.4899084089684612e-06, "loss": 0.6665, "step": 16940 }, { "epoch": 0.7021012060176551, "grad_norm": 0.41777509450912476, "learning_rate": 1.4897011894400928e-06, "loss": 0.6549, "step": 16941 }, { "epoch": 0.7021426499233288, "grad_norm": 0.41468846797943115, "learning_rate": 1.4894939699117248e-06, "loss": 0.653, "step": 16942 }, { "epoch": 0.7021840938290025, "grad_norm": 0.41629305481910706, "learning_rate": 1.4892867503833564e-06, "loss": 0.6626, "step": 16943 }, { "epoch": 0.7022255377346761, "grad_norm": 0.39117059111595154, "learning_rate": 1.489079530854988e-06, "loss": 0.6298, "step": 16944 }, { "epoch": 0.7022669816403498, "grad_norm": 0.41990095376968384, "learning_rate": 1.4888723113266196e-06, "loss": 0.6995, "step": 16945 }, { "epoch": 0.7023084255460235, "grad_norm": 0.43583956360816956, "learning_rate": 1.4886650917982512e-06, "loss": 0.688, "step": 16946 }, { "epoch": 0.7023498694516971, "grad_norm": 0.4226759970188141, "learning_rate": 1.4884578722698828e-06, "loss": 0.6426, "step": 16947 }, { "epoch": 0.7023913133573708, "grad_norm": 0.38450413942337036, "learning_rate": 1.4882506527415144e-06, "loss": 0.6559, "step": 16948 }, { "epoch": 0.7024327572630444, "grad_norm": 0.42348501086235046, "learning_rate": 1.488043433213146e-06, "loss": 0.6617, "step": 16949 }, { "epoch": 0.7024742011687182, "grad_norm": 0.4035886526107788, "learning_rate": 1.4878362136847776e-06, "loss": 0.6581, "step": 16950 }, { "epoch": 0.7025156450743918, "grad_norm": 0.44935572147369385, "learning_rate": 1.4876289941564096e-06, "loss": 0.6742, "step": 16951 }, { "epoch": 0.7025570889800655, "grad_norm": 0.38853031396865845, "learning_rate": 1.4874217746280412e-06, "loss": 0.6335, "step": 16952 }, { "epoch": 0.7025985328857391, "grad_norm": 0.41631633043289185, "learning_rate": 1.4872145550996728e-06, "loss": 0.6658, "step": 16953 }, { "epoch": 0.7026399767914128, 
"grad_norm": 0.4389462172985077, "learning_rate": 1.4870073355713044e-06, "loss": 0.7032, "step": 16954 }, { "epoch": 0.7026814206970865, "grad_norm": 0.42487213015556335, "learning_rate": 1.486800116042936e-06, "loss": 0.6472, "step": 16955 }, { "epoch": 0.7027228646027601, "grad_norm": 0.41566380858421326, "learning_rate": 1.4865928965145676e-06, "loss": 0.6411, "step": 16956 }, { "epoch": 0.7027643085084339, "grad_norm": 0.3981218934059143, "learning_rate": 1.4863856769861992e-06, "loss": 0.6149, "step": 16957 }, { "epoch": 0.7028057524141075, "grad_norm": 0.44129303097724915, "learning_rate": 1.4861784574578308e-06, "loss": 0.6501, "step": 16958 }, { "epoch": 0.7028471963197812, "grad_norm": 0.4113207459449768, "learning_rate": 1.4859712379294624e-06, "loss": 0.6938, "step": 16959 }, { "epoch": 0.7028886402254548, "grad_norm": 0.41196146607398987, "learning_rate": 1.4857640184010944e-06, "loss": 0.6528, "step": 16960 }, { "epoch": 0.7029300841311286, "grad_norm": 0.4329908490180969, "learning_rate": 1.485556798872726e-06, "loss": 0.6578, "step": 16961 }, { "epoch": 0.7029715280368022, "grad_norm": 0.45901238918304443, "learning_rate": 1.4853495793443576e-06, "loss": 0.6781, "step": 16962 }, { "epoch": 0.7030129719424758, "grad_norm": 0.41950342059135437, "learning_rate": 1.4851423598159892e-06, "loss": 0.6868, "step": 16963 }, { "epoch": 0.7030544158481495, "grad_norm": 0.39756476879119873, "learning_rate": 1.4849351402876208e-06, "loss": 0.6584, "step": 16964 }, { "epoch": 0.7030958597538232, "grad_norm": 0.3989443778991699, "learning_rate": 1.4847279207592524e-06, "loss": 0.6432, "step": 16965 }, { "epoch": 0.7031373036594969, "grad_norm": 0.41724640130996704, "learning_rate": 1.484520701230884e-06, "loss": 0.6348, "step": 16966 }, { "epoch": 0.7031787475651705, "grad_norm": 0.39071863889694214, "learning_rate": 1.4843134817025156e-06, "loss": 0.6661, "step": 16967 }, { "epoch": 0.7032201914708442, "grad_norm": 0.393300324678421, "learning_rate": 
1.4841062621741476e-06, "loss": 0.6433, "step": 16968 }, { "epoch": 0.7032616353765179, "grad_norm": 0.3562166690826416, "learning_rate": 1.4838990426457792e-06, "loss": 0.611, "step": 16969 }, { "epoch": 0.7033030792821916, "grad_norm": 0.3994918167591095, "learning_rate": 1.4836918231174108e-06, "loss": 0.6146, "step": 16970 }, { "epoch": 0.7033445231878652, "grad_norm": 0.4227902889251709, "learning_rate": 1.4834846035890424e-06, "loss": 0.6611, "step": 16971 }, { "epoch": 0.7033859670935388, "grad_norm": 0.38050928711891174, "learning_rate": 1.483277384060674e-06, "loss": 0.6443, "step": 16972 }, { "epoch": 0.7034274109992126, "grad_norm": 0.410273939371109, "learning_rate": 1.4830701645323056e-06, "loss": 0.7322, "step": 16973 }, { "epoch": 0.7034688549048862, "grad_norm": 0.45212045311927795, "learning_rate": 1.4828629450039372e-06, "loss": 0.6539, "step": 16974 }, { "epoch": 0.7035102988105599, "grad_norm": 0.4202477037906647, "learning_rate": 1.4826557254755688e-06, "loss": 0.6753, "step": 16975 }, { "epoch": 0.7035517427162336, "grad_norm": 0.4380984306335449, "learning_rate": 1.4824485059472004e-06, "loss": 0.71, "step": 16976 }, { "epoch": 0.7035931866219073, "grad_norm": 0.42050158977508545, "learning_rate": 1.4822412864188324e-06, "loss": 0.6537, "step": 16977 }, { "epoch": 0.7036346305275809, "grad_norm": 0.388355553150177, "learning_rate": 1.482034066890464e-06, "loss": 0.6428, "step": 16978 }, { "epoch": 0.7036760744332546, "grad_norm": 0.40893515944480896, "learning_rate": 1.4818268473620956e-06, "loss": 0.6838, "step": 16979 }, { "epoch": 0.7037175183389283, "grad_norm": 0.3791602849960327, "learning_rate": 1.4816196278337272e-06, "loss": 0.6667, "step": 16980 }, { "epoch": 0.7037589622446019, "grad_norm": 0.3972429633140564, "learning_rate": 1.4814124083053588e-06, "loss": 0.6608, "step": 16981 }, { "epoch": 0.7038004061502756, "grad_norm": 0.39118531346321106, "learning_rate": 1.4812051887769904e-06, "loss": 0.6744, "step": 16982 }, { "epoch": 
0.7038418500559492, "grad_norm": 0.3992242217063904, "learning_rate": 1.480997969248622e-06, "loss": 0.6438, "step": 16983 }, { "epoch": 0.703883293961623, "grad_norm": 0.3970489203929901, "learning_rate": 1.4807907497202536e-06, "loss": 0.6727, "step": 16984 }, { "epoch": 0.7039247378672966, "grad_norm": 0.4176301658153534, "learning_rate": 1.4805835301918852e-06, "loss": 0.6736, "step": 16985 }, { "epoch": 0.7039661817729703, "grad_norm": 0.42490169405937195, "learning_rate": 1.4803763106635172e-06, "loss": 0.6642, "step": 16986 }, { "epoch": 0.704007625678644, "grad_norm": 0.4063190519809723, "learning_rate": 1.4801690911351488e-06, "loss": 0.7094, "step": 16987 }, { "epoch": 0.7040490695843177, "grad_norm": 0.42902156710624695, "learning_rate": 1.4799618716067804e-06, "loss": 0.6313, "step": 16988 }, { "epoch": 0.7040905134899913, "grad_norm": 0.4297036826610565, "learning_rate": 1.479754652078412e-06, "loss": 0.6306, "step": 16989 }, { "epoch": 0.7041319573956649, "grad_norm": 0.44268572330474854, "learning_rate": 1.4795474325500436e-06, "loss": 0.7174, "step": 16990 }, { "epoch": 0.7041734013013387, "grad_norm": 0.3757169544696808, "learning_rate": 1.4793402130216752e-06, "loss": 0.6384, "step": 16991 }, { "epoch": 0.7042148452070123, "grad_norm": 0.3899397552013397, "learning_rate": 1.4791329934933068e-06, "loss": 0.5986, "step": 16992 }, { "epoch": 0.704256289112686, "grad_norm": 0.46488726139068604, "learning_rate": 1.4789257739649384e-06, "loss": 0.6893, "step": 16993 }, { "epoch": 0.7042977330183596, "grad_norm": 0.4335681200027466, "learning_rate": 1.47871855443657e-06, "loss": 0.6301, "step": 16994 }, { "epoch": 0.7043391769240334, "grad_norm": 0.4316413104534149, "learning_rate": 1.478511334908202e-06, "loss": 0.6376, "step": 16995 }, { "epoch": 0.704380620829707, "grad_norm": 0.39548900723457336, "learning_rate": 1.4783041153798336e-06, "loss": 0.6609, "step": 16996 }, { "epoch": 0.7044220647353807, "grad_norm": 0.41640374064445496, "learning_rate": 
1.4780968958514652e-06, "loss": 0.7007, "step": 16997 }, { "epoch": 0.7044635086410543, "grad_norm": 0.40604373812675476, "learning_rate": 1.4778896763230968e-06, "loss": 0.6991, "step": 16998 }, { "epoch": 0.704504952546728, "grad_norm": 0.4296743869781494, "learning_rate": 1.4776824567947284e-06, "loss": 0.6903, "step": 16999 }, { "epoch": 0.7045463964524017, "grad_norm": 0.45505931973457336, "learning_rate": 1.47747523726636e-06, "loss": 0.7207, "step": 17000 }, { "epoch": 0.7045878403580753, "grad_norm": 0.4159430265426636, "learning_rate": 1.4772680177379916e-06, "loss": 0.6559, "step": 17001 }, { "epoch": 0.704629284263749, "grad_norm": 0.41849228739738464, "learning_rate": 1.4770607982096232e-06, "loss": 0.6816, "step": 17002 }, { "epoch": 0.7046707281694227, "grad_norm": 0.3905252516269684, "learning_rate": 1.4768535786812552e-06, "loss": 0.6168, "step": 17003 }, { "epoch": 0.7047121720750964, "grad_norm": 0.4428134858608246, "learning_rate": 1.4766463591528868e-06, "loss": 0.689, "step": 17004 }, { "epoch": 0.70475361598077, "grad_norm": 0.4219529330730438, "learning_rate": 1.4764391396245184e-06, "loss": 0.6653, "step": 17005 }, { "epoch": 0.7047950598864438, "grad_norm": 0.41161489486694336, "learning_rate": 1.47623192009615e-06, "loss": 0.7155, "step": 17006 }, { "epoch": 0.7048365037921174, "grad_norm": 0.42839911580085754, "learning_rate": 1.4760247005677816e-06, "loss": 0.6329, "step": 17007 }, { "epoch": 0.704877947697791, "grad_norm": 0.4130896031856537, "learning_rate": 1.4758174810394132e-06, "loss": 0.7053, "step": 17008 }, { "epoch": 0.7049193916034647, "grad_norm": 0.4535704553127289, "learning_rate": 1.4756102615110448e-06, "loss": 0.7151, "step": 17009 }, { "epoch": 0.7049608355091384, "grad_norm": 0.41720089316368103, "learning_rate": 1.4754030419826764e-06, "loss": 0.6949, "step": 17010 }, { "epoch": 0.7050022794148121, "grad_norm": 0.4244033396244049, "learning_rate": 1.475195822454308e-06, "loss": 0.6821, "step": 17011 }, { "epoch": 
0.7050437233204857, "grad_norm": 0.42856988310813904, "learning_rate": 1.47498860292594e-06, "loss": 0.6565, "step": 17012 }, { "epoch": 0.7050851672261594, "grad_norm": 0.4642801582813263, "learning_rate": 1.4747813833975716e-06, "loss": 0.7529, "step": 17013 }, { "epoch": 0.7051266111318331, "grad_norm": 0.4119976758956909, "learning_rate": 1.4745741638692032e-06, "loss": 0.6482, "step": 17014 }, { "epoch": 0.7051680550375067, "grad_norm": 0.38056135177612305, "learning_rate": 1.4743669443408348e-06, "loss": 0.6421, "step": 17015 }, { "epoch": 0.7052094989431804, "grad_norm": 0.40900319814682007, "learning_rate": 1.4741597248124664e-06, "loss": 0.6687, "step": 17016 }, { "epoch": 0.705250942848854, "grad_norm": 0.4239286780357361, "learning_rate": 1.473952505284098e-06, "loss": 0.6488, "step": 17017 }, { "epoch": 0.7052923867545278, "grad_norm": 0.4327636957168579, "learning_rate": 1.4737452857557296e-06, "loss": 0.6494, "step": 17018 }, { "epoch": 0.7053338306602014, "grad_norm": 0.5016306638717651, "learning_rate": 1.4735380662273612e-06, "loss": 0.6963, "step": 17019 }, { "epoch": 0.7053752745658751, "grad_norm": 0.43588921427726746, "learning_rate": 1.473330846698993e-06, "loss": 0.6738, "step": 17020 }, { "epoch": 0.7054167184715487, "grad_norm": 0.4024278223514557, "learning_rate": 1.4731236271706248e-06, "loss": 0.6494, "step": 17021 }, { "epoch": 0.7054581623772225, "grad_norm": 0.3988310694694519, "learning_rate": 1.4729164076422564e-06, "loss": 0.6169, "step": 17022 }, { "epoch": 0.7054996062828961, "grad_norm": 0.4274252951145172, "learning_rate": 1.472709188113888e-06, "loss": 0.6699, "step": 17023 }, { "epoch": 0.7055410501885697, "grad_norm": 0.43692880868911743, "learning_rate": 1.4725019685855196e-06, "loss": 0.6687, "step": 17024 }, { "epoch": 0.7055824940942435, "grad_norm": 0.4472159445285797, "learning_rate": 1.4722947490571512e-06, "loss": 0.7314, "step": 17025 }, { "epoch": 0.7056239379999171, "grad_norm": 0.47058236598968506, 
"learning_rate": 1.4720875295287828e-06, "loss": 0.6857, "step": 17026 }, { "epoch": 0.7056653819055908, "grad_norm": 0.4456748962402344, "learning_rate": 1.4718803100004144e-06, "loss": 0.6776, "step": 17027 }, { "epoch": 0.7057068258112644, "grad_norm": 0.396790087223053, "learning_rate": 1.471673090472046e-06, "loss": 0.6501, "step": 17028 }, { "epoch": 0.7057482697169382, "grad_norm": 0.4563027620315552, "learning_rate": 1.471465870943678e-06, "loss": 0.6908, "step": 17029 }, { "epoch": 0.7057897136226118, "grad_norm": 0.4618462324142456, "learning_rate": 1.4712586514153096e-06, "loss": 0.7585, "step": 17030 }, { "epoch": 0.7058311575282855, "grad_norm": 0.4485417306423187, "learning_rate": 1.4710514318869412e-06, "loss": 0.6924, "step": 17031 }, { "epoch": 0.7058726014339591, "grad_norm": 0.4422416090965271, "learning_rate": 1.4708442123585728e-06, "loss": 0.7087, "step": 17032 }, { "epoch": 0.7059140453396328, "grad_norm": 0.4310205578804016, "learning_rate": 1.4706369928302044e-06, "loss": 0.6956, "step": 17033 }, { "epoch": 0.7059554892453065, "grad_norm": 0.3919341564178467, "learning_rate": 1.470429773301836e-06, "loss": 0.6569, "step": 17034 }, { "epoch": 0.7059969331509801, "grad_norm": 0.4226980209350586, "learning_rate": 1.4702225537734676e-06, "loss": 0.6877, "step": 17035 }, { "epoch": 0.7060383770566538, "grad_norm": 0.40782156586647034, "learning_rate": 1.4700153342450992e-06, "loss": 0.6448, "step": 17036 }, { "epoch": 0.7060798209623275, "grad_norm": 0.40488287806510925, "learning_rate": 1.469808114716731e-06, "loss": 0.6787, "step": 17037 }, { "epoch": 0.7061212648680012, "grad_norm": 0.3945687711238861, "learning_rate": 1.4696008951883628e-06, "loss": 0.6329, "step": 17038 }, { "epoch": 0.7061627087736748, "grad_norm": 0.3957037627696991, "learning_rate": 1.4693936756599944e-06, "loss": 0.693, "step": 17039 }, { "epoch": 0.7062041526793486, "grad_norm": 0.37580937147140503, "learning_rate": 1.469186456131626e-06, "loss": 0.6639, "step": 17040 
}, { "epoch": 0.7062455965850222, "grad_norm": 0.43958523869514465, "learning_rate": 1.4689792366032576e-06, "loss": 0.689, "step": 17041 }, { "epoch": 0.7062870404906958, "grad_norm": 0.4280121624469757, "learning_rate": 1.4687720170748892e-06, "loss": 0.6523, "step": 17042 }, { "epoch": 0.7063284843963695, "grad_norm": 0.39378538727760315, "learning_rate": 1.4685647975465208e-06, "loss": 0.6317, "step": 17043 }, { "epoch": 0.7063699283020431, "grad_norm": 0.40225014090538025, "learning_rate": 1.4683575780181524e-06, "loss": 0.6304, "step": 17044 }, { "epoch": 0.7064113722077169, "grad_norm": 0.47212979197502136, "learning_rate": 1.4681503584897842e-06, "loss": 0.7336, "step": 17045 }, { "epoch": 0.7064528161133905, "grad_norm": 0.39396536350250244, "learning_rate": 1.4679431389614158e-06, "loss": 0.6509, "step": 17046 }, { "epoch": 0.7064942600190642, "grad_norm": 0.39323511719703674, "learning_rate": 1.4677359194330476e-06, "loss": 0.6667, "step": 17047 }, { "epoch": 0.7065357039247379, "grad_norm": 0.460580438375473, "learning_rate": 1.4675286999046792e-06, "loss": 0.715, "step": 17048 }, { "epoch": 0.7065771478304116, "grad_norm": 0.4254401624202728, "learning_rate": 1.4673214803763108e-06, "loss": 0.7581, "step": 17049 }, { "epoch": 0.7066185917360852, "grad_norm": 0.3968885540962219, "learning_rate": 1.4671142608479424e-06, "loss": 0.6185, "step": 17050 }, { "epoch": 0.7066600356417588, "grad_norm": 0.3994959890842438, "learning_rate": 1.466907041319574e-06, "loss": 0.6378, "step": 17051 }, { "epoch": 0.7067014795474326, "grad_norm": 0.4218335747718811, "learning_rate": 1.4666998217912056e-06, "loss": 0.7278, "step": 17052 }, { "epoch": 0.7067429234531062, "grad_norm": 0.4707273244857788, "learning_rate": 1.4664926022628372e-06, "loss": 0.6509, "step": 17053 }, { "epoch": 0.7067843673587799, "grad_norm": 0.3982565999031067, "learning_rate": 1.466285382734469e-06, "loss": 0.6462, "step": 17054 }, { "epoch": 0.7068258112644535, "grad_norm": 0.3947368562221527, 
"learning_rate": 1.4660781632061006e-06, "loss": 0.7189, "step": 17055 }, { "epoch": 0.7068672551701273, "grad_norm": 0.4142375886440277, "learning_rate": 1.4658709436777324e-06, "loss": 0.6594, "step": 17056 }, { "epoch": 0.7069086990758009, "grad_norm": 0.40377792716026306, "learning_rate": 1.465663724149364e-06, "loss": 0.6804, "step": 17057 }, { "epoch": 0.7069501429814746, "grad_norm": 0.40233200788497925, "learning_rate": 1.4654565046209956e-06, "loss": 0.6265, "step": 17058 }, { "epoch": 0.7069915868871482, "grad_norm": 0.3936782479286194, "learning_rate": 1.4652492850926272e-06, "loss": 0.6267, "step": 17059 }, { "epoch": 0.7070330307928219, "grad_norm": 0.4427107572555542, "learning_rate": 1.4650420655642588e-06, "loss": 0.698, "step": 17060 }, { "epoch": 0.7070744746984956, "grad_norm": 0.4007430672645569, "learning_rate": 1.4648348460358904e-06, "loss": 0.6765, "step": 17061 }, { "epoch": 0.7071159186041692, "grad_norm": 0.39708080887794495, "learning_rate": 1.4646276265075222e-06, "loss": 0.6517, "step": 17062 }, { "epoch": 0.707157362509843, "grad_norm": 0.4321609437465668, "learning_rate": 1.4644204069791538e-06, "loss": 0.7058, "step": 17063 }, { "epoch": 0.7071988064155166, "grad_norm": 0.4112613797187805, "learning_rate": 1.4642131874507856e-06, "loss": 0.6758, "step": 17064 }, { "epoch": 0.7072402503211903, "grad_norm": 0.4398932158946991, "learning_rate": 1.4640059679224172e-06, "loss": 0.687, "step": 17065 }, { "epoch": 0.7072816942268639, "grad_norm": 0.4064832031726837, "learning_rate": 1.4637987483940488e-06, "loss": 0.6769, "step": 17066 }, { "epoch": 0.7073231381325377, "grad_norm": 0.4149221181869507, "learning_rate": 1.4635915288656804e-06, "loss": 0.613, "step": 17067 }, { "epoch": 0.7073645820382113, "grad_norm": 0.4063790440559387, "learning_rate": 1.463384309337312e-06, "loss": 0.6802, "step": 17068 }, { "epoch": 0.7074060259438849, "grad_norm": 0.42275065183639526, "learning_rate": 1.4631770898089436e-06, "loss": 0.6938, "step": 
17069 }, { "epoch": 0.7074474698495586, "grad_norm": 0.4306286573410034, "learning_rate": 1.4629698702805752e-06, "loss": 0.6743, "step": 17070 }, { "epoch": 0.7074889137552323, "grad_norm": 0.41605764627456665, "learning_rate": 1.462762650752207e-06, "loss": 0.6355, "step": 17071 }, { "epoch": 0.707530357660906, "grad_norm": 0.4109753668308258, "learning_rate": 1.4625554312238386e-06, "loss": 0.6904, "step": 17072 }, { "epoch": 0.7075718015665796, "grad_norm": 0.3909955322742462, "learning_rate": 1.4623482116954704e-06, "loss": 0.6299, "step": 17073 }, { "epoch": 0.7076132454722533, "grad_norm": 0.3955157399177551, "learning_rate": 1.462140992167102e-06, "loss": 0.6339, "step": 17074 }, { "epoch": 0.707654689377927, "grad_norm": 0.4407317340373993, "learning_rate": 1.4619337726387336e-06, "loss": 0.6602, "step": 17075 }, { "epoch": 0.7076961332836006, "grad_norm": 0.44630587100982666, "learning_rate": 1.4617265531103652e-06, "loss": 0.6783, "step": 17076 }, { "epoch": 0.7077375771892743, "grad_norm": 0.43595075607299805, "learning_rate": 1.4615193335819968e-06, "loss": 0.7275, "step": 17077 }, { "epoch": 0.707779021094948, "grad_norm": 0.3888038098812103, "learning_rate": 1.4613121140536284e-06, "loss": 0.6553, "step": 17078 }, { "epoch": 0.7078204650006217, "grad_norm": 0.4243892431259155, "learning_rate": 1.4611048945252602e-06, "loss": 0.6301, "step": 17079 }, { "epoch": 0.7078619089062953, "grad_norm": 0.44253790378570557, "learning_rate": 1.4608976749968918e-06, "loss": 0.6824, "step": 17080 }, { "epoch": 0.707903352811969, "grad_norm": 0.37912118434906006, "learning_rate": 1.4606904554685234e-06, "loss": 0.6451, "step": 17081 }, { "epoch": 0.7079447967176427, "grad_norm": 0.4390753507614136, "learning_rate": 1.4604832359401552e-06, "loss": 0.6846, "step": 17082 }, { "epoch": 0.7079862406233164, "grad_norm": 0.3990453779697418, "learning_rate": 1.4602760164117868e-06, "loss": 0.6683, "step": 17083 }, { "epoch": 0.70802768452899, "grad_norm": 
0.3845057785511017, "learning_rate": 1.4600687968834184e-06, "loss": 0.6385, "step": 17084 }, { "epoch": 0.7080691284346636, "grad_norm": 0.41920962929725647, "learning_rate": 1.45986157735505e-06, "loss": 0.729, "step": 17085 }, { "epoch": 0.7081105723403374, "grad_norm": 0.3904169201850891, "learning_rate": 1.4596543578266816e-06, "loss": 0.658, "step": 17086 }, { "epoch": 0.708152016246011, "grad_norm": 0.38504117727279663, "learning_rate": 1.4594471382983132e-06, "loss": 0.6554, "step": 17087 }, { "epoch": 0.7081934601516847, "grad_norm": 0.3803388774394989, "learning_rate": 1.459239918769945e-06, "loss": 0.6277, "step": 17088 }, { "epoch": 0.7082349040573583, "grad_norm": 0.47713714838027954, "learning_rate": 1.4590326992415766e-06, "loss": 0.7305, "step": 17089 }, { "epoch": 0.7082763479630321, "grad_norm": 0.4066689908504486, "learning_rate": 1.4588254797132084e-06, "loss": 0.6588, "step": 17090 }, { "epoch": 0.7083177918687057, "grad_norm": 0.4041980803012848, "learning_rate": 1.45861826018484e-06, "loss": 0.6775, "step": 17091 }, { "epoch": 0.7083592357743794, "grad_norm": 0.4055139720439911, "learning_rate": 1.4584110406564716e-06, "loss": 0.6799, "step": 17092 }, { "epoch": 0.708400679680053, "grad_norm": 0.3667786121368408, "learning_rate": 1.4582038211281032e-06, "loss": 0.6416, "step": 17093 }, { "epoch": 0.7084421235857267, "grad_norm": 0.41678696870803833, "learning_rate": 1.4579966015997348e-06, "loss": 0.6559, "step": 17094 }, { "epoch": 0.7084835674914004, "grad_norm": 0.3906075656414032, "learning_rate": 1.4577893820713664e-06, "loss": 0.6453, "step": 17095 }, { "epoch": 0.708525011397074, "grad_norm": 0.41323980689048767, "learning_rate": 1.4575821625429982e-06, "loss": 0.6798, "step": 17096 }, { "epoch": 0.7085664553027478, "grad_norm": 0.3746788203716278, "learning_rate": 1.4573749430146298e-06, "loss": 0.6724, "step": 17097 }, { "epoch": 0.7086078992084214, "grad_norm": 0.45917192101478577, "learning_rate": 1.4571677234862614e-06, "loss": 
0.6748, "step": 17098 }, { "epoch": 0.7086493431140951, "grad_norm": 0.39618241786956787, "learning_rate": 1.4569605039578932e-06, "loss": 0.6692, "step": 17099 }, { "epoch": 0.7086907870197687, "grad_norm": 0.4390541613101959, "learning_rate": 1.4567532844295248e-06, "loss": 0.641, "step": 17100 }, { "epoch": 0.7087322309254425, "grad_norm": 0.43311506509780884, "learning_rate": 1.4565460649011564e-06, "loss": 0.6797, "step": 17101 }, { "epoch": 0.7087736748311161, "grad_norm": 0.38658228516578674, "learning_rate": 1.456338845372788e-06, "loss": 0.6289, "step": 17102 }, { "epoch": 0.7088151187367897, "grad_norm": 0.43291813135147095, "learning_rate": 1.4561316258444196e-06, "loss": 0.6902, "step": 17103 }, { "epoch": 0.7088565626424634, "grad_norm": 0.41580280661582947, "learning_rate": 1.4559244063160512e-06, "loss": 0.6732, "step": 17104 }, { "epoch": 0.7088980065481371, "grad_norm": 0.4244527220726013, "learning_rate": 1.455717186787683e-06, "loss": 0.6561, "step": 17105 }, { "epoch": 0.7089394504538108, "grad_norm": 0.41677457094192505, "learning_rate": 1.4555099672593146e-06, "loss": 0.6775, "step": 17106 }, { "epoch": 0.7089808943594844, "grad_norm": 0.43259790539741516, "learning_rate": 1.4553027477309462e-06, "loss": 0.6995, "step": 17107 }, { "epoch": 0.7090223382651581, "grad_norm": 0.38651514053344727, "learning_rate": 1.455095528202578e-06, "loss": 0.6445, "step": 17108 }, { "epoch": 0.7090637821708318, "grad_norm": 0.429196834564209, "learning_rate": 1.4548883086742096e-06, "loss": 0.7271, "step": 17109 }, { "epoch": 0.7091052260765055, "grad_norm": 0.4176829159259796, "learning_rate": 1.4546810891458412e-06, "loss": 0.6772, "step": 17110 }, { "epoch": 0.7091466699821791, "grad_norm": 0.460344523191452, "learning_rate": 1.4544738696174728e-06, "loss": 0.7085, "step": 17111 }, { "epoch": 0.7091881138878527, "grad_norm": 0.3839628994464874, "learning_rate": 1.4542666500891044e-06, "loss": 0.7034, "step": 17112 }, { "epoch": 0.7092295577935265, 
"grad_norm": 0.4124516248703003, "learning_rate": 1.4540594305607362e-06, "loss": 0.6892, "step": 17113 }, { "epoch": 0.7092710016992001, "grad_norm": 0.4253595471382141, "learning_rate": 1.4538522110323678e-06, "loss": 0.6819, "step": 17114 }, { "epoch": 0.7093124456048738, "grad_norm": 0.4169895648956299, "learning_rate": 1.4536449915039994e-06, "loss": 0.7084, "step": 17115 }, { "epoch": 0.7093538895105475, "grad_norm": 0.4271676242351532, "learning_rate": 1.4534377719756312e-06, "loss": 0.7556, "step": 17116 }, { "epoch": 0.7093953334162212, "grad_norm": 0.4364808201789856, "learning_rate": 1.4532305524472628e-06, "loss": 0.6449, "step": 17117 }, { "epoch": 0.7094367773218948, "grad_norm": 0.44272512197494507, "learning_rate": 1.4530233329188944e-06, "loss": 0.6505, "step": 17118 }, { "epoch": 0.7094782212275685, "grad_norm": 0.42349377274513245, "learning_rate": 1.452816113390526e-06, "loss": 0.6873, "step": 17119 }, { "epoch": 0.7095196651332422, "grad_norm": 0.41405540704727173, "learning_rate": 1.4526088938621576e-06, "loss": 0.6627, "step": 17120 }, { "epoch": 0.7095611090389158, "grad_norm": 0.39842119812965393, "learning_rate": 1.4524016743337892e-06, "loss": 0.6957, "step": 17121 }, { "epoch": 0.7096025529445895, "grad_norm": 0.4041319191455841, "learning_rate": 1.452194454805421e-06, "loss": 0.6924, "step": 17122 }, { "epoch": 0.7096439968502631, "grad_norm": 0.44509357213974, "learning_rate": 1.4519872352770526e-06, "loss": 0.6395, "step": 17123 }, { "epoch": 0.7096854407559369, "grad_norm": 0.47146058082580566, "learning_rate": 1.4517800157486842e-06, "loss": 0.7155, "step": 17124 }, { "epoch": 0.7097268846616105, "grad_norm": 0.40336716175079346, "learning_rate": 1.451572796220316e-06, "loss": 0.679, "step": 17125 }, { "epoch": 0.7097683285672842, "grad_norm": 0.3946537971496582, "learning_rate": 1.4513655766919476e-06, "loss": 0.637, "step": 17126 }, { "epoch": 0.7098097724729578, "grad_norm": 0.41641882061958313, "learning_rate": 
1.4511583571635792e-06, "loss": 0.7004, "step": 17127 }, { "epoch": 0.7098512163786316, "grad_norm": 0.38467055559158325, "learning_rate": 1.4509511376352108e-06, "loss": 0.7043, "step": 17128 }, { "epoch": 0.7098926602843052, "grad_norm": 0.4395214021205902, "learning_rate": 1.4507439181068424e-06, "loss": 0.6794, "step": 17129 }, { "epoch": 0.7099341041899788, "grad_norm": 0.4153783917427063, "learning_rate": 1.4505366985784742e-06, "loss": 0.6769, "step": 17130 }, { "epoch": 0.7099755480956526, "grad_norm": 0.44801390171051025, "learning_rate": 1.4503294790501058e-06, "loss": 0.6903, "step": 17131 }, { "epoch": 0.7100169920013262, "grad_norm": 0.44084593653678894, "learning_rate": 1.4501222595217374e-06, "loss": 0.6797, "step": 17132 }, { "epoch": 0.7100584359069999, "grad_norm": 0.43808794021606445, "learning_rate": 1.449915039993369e-06, "loss": 0.7211, "step": 17133 }, { "epoch": 0.7100998798126735, "grad_norm": 0.4207003712654114, "learning_rate": 1.4497078204650008e-06, "loss": 0.6899, "step": 17134 }, { "epoch": 0.7101413237183473, "grad_norm": 0.38609179854393005, "learning_rate": 1.4495006009366324e-06, "loss": 0.6251, "step": 17135 }, { "epoch": 0.7101827676240209, "grad_norm": 0.4192427694797516, "learning_rate": 1.449293381408264e-06, "loss": 0.6793, "step": 17136 }, { "epoch": 0.7102242115296945, "grad_norm": 0.42130979895591736, "learning_rate": 1.4490861618798956e-06, "loss": 0.6743, "step": 17137 }, { "epoch": 0.7102656554353682, "grad_norm": 0.4154570996761322, "learning_rate": 1.4488789423515274e-06, "loss": 0.6456, "step": 17138 }, { "epoch": 0.7103070993410419, "grad_norm": 0.40507709980010986, "learning_rate": 1.448671722823159e-06, "loss": 0.7285, "step": 17139 }, { "epoch": 0.7103485432467156, "grad_norm": 0.42757031321525574, "learning_rate": 1.4484645032947906e-06, "loss": 0.6388, "step": 17140 }, { "epoch": 0.7103899871523892, "grad_norm": 0.3910718560218811, "learning_rate": 1.4482572837664222e-06, "loss": 0.661, "step": 17141 }, { 
"epoch": 0.710431431058063, "grad_norm": 0.4253712594509125, "learning_rate": 1.4480500642380538e-06, "loss": 0.6499, "step": 17142 }, { "epoch": 0.7104728749637366, "grad_norm": 0.3651489019393921, "learning_rate": 1.4478428447096856e-06, "loss": 0.6265, "step": 17143 }, { "epoch": 0.7105143188694103, "grad_norm": 0.407944917678833, "learning_rate": 1.4476356251813172e-06, "loss": 0.7019, "step": 17144 }, { "epoch": 0.7105557627750839, "grad_norm": 0.4259357154369354, "learning_rate": 1.4474284056529488e-06, "loss": 0.7537, "step": 17145 }, { "epoch": 0.7105972066807575, "grad_norm": 0.40328076481819153, "learning_rate": 1.4472211861245804e-06, "loss": 0.6704, "step": 17146 }, { "epoch": 0.7106386505864313, "grad_norm": 0.43832820653915405, "learning_rate": 1.4470139665962122e-06, "loss": 0.6389, "step": 17147 }, { "epoch": 0.7106800944921049, "grad_norm": 0.41793403029441833, "learning_rate": 1.4468067470678438e-06, "loss": 0.7264, "step": 17148 }, { "epoch": 0.7107215383977786, "grad_norm": 0.42761752009391785, "learning_rate": 1.4465995275394754e-06, "loss": 0.7018, "step": 17149 }, { "epoch": 0.7107629823034523, "grad_norm": 0.41330400109291077, "learning_rate": 1.446392308011107e-06, "loss": 0.6656, "step": 17150 }, { "epoch": 0.710804426209126, "grad_norm": 0.4136297106742859, "learning_rate": 1.4461850884827388e-06, "loss": 0.6294, "step": 17151 }, { "epoch": 0.7108458701147996, "grad_norm": 0.4349151849746704, "learning_rate": 1.4459778689543704e-06, "loss": 0.6934, "step": 17152 }, { "epoch": 0.7108873140204733, "grad_norm": 0.4104061722755432, "learning_rate": 1.445770649426002e-06, "loss": 0.6287, "step": 17153 }, { "epoch": 0.710928757926147, "grad_norm": 0.4348065257072449, "learning_rate": 1.4455634298976336e-06, "loss": 0.7236, "step": 17154 }, { "epoch": 0.7109702018318206, "grad_norm": 0.41015976667404175, "learning_rate": 1.4453562103692654e-06, "loss": 0.6581, "step": 17155 }, { "epoch": 0.7110116457374943, "grad_norm": 0.3860315978527069, 
"learning_rate": 1.445148990840897e-06, "loss": 0.6606, "step": 17156 }, { "epoch": 0.7110530896431679, "grad_norm": 0.42880141735076904, "learning_rate": 1.4449417713125286e-06, "loss": 0.6698, "step": 17157 }, { "epoch": 0.7110945335488417, "grad_norm": 0.39286312460899353, "learning_rate": 1.4447345517841602e-06, "loss": 0.6528, "step": 17158 }, { "epoch": 0.7111359774545153, "grad_norm": 0.3640916347503662, "learning_rate": 1.4445273322557918e-06, "loss": 0.5814, "step": 17159 }, { "epoch": 0.711177421360189, "grad_norm": 0.4011084735393524, "learning_rate": 1.4443201127274236e-06, "loss": 0.6968, "step": 17160 }, { "epoch": 0.7112188652658626, "grad_norm": 0.38492611050605774, "learning_rate": 1.4441128931990552e-06, "loss": 0.6752, "step": 17161 }, { "epoch": 0.7112603091715364, "grad_norm": 0.423849493265152, "learning_rate": 1.4439056736706868e-06, "loss": 0.7087, "step": 17162 }, { "epoch": 0.71130175307721, "grad_norm": 0.39665883779525757, "learning_rate": 1.4436984541423184e-06, "loss": 0.6516, "step": 17163 }, { "epoch": 0.7113431969828836, "grad_norm": 0.40594109892845154, "learning_rate": 1.4434912346139502e-06, "loss": 0.6578, "step": 17164 }, { "epoch": 0.7113846408885574, "grad_norm": 0.3938899636268616, "learning_rate": 1.4432840150855818e-06, "loss": 0.6234, "step": 17165 }, { "epoch": 0.711426084794231, "grad_norm": 0.4268961548805237, "learning_rate": 1.4430767955572134e-06, "loss": 0.6715, "step": 17166 }, { "epoch": 0.7114675286999047, "grad_norm": 0.3874188959598541, "learning_rate": 1.442869576028845e-06, "loss": 0.6031, "step": 17167 }, { "epoch": 0.7115089726055783, "grad_norm": 0.4064997434616089, "learning_rate": 1.4426623565004766e-06, "loss": 0.6934, "step": 17168 }, { "epoch": 0.7115504165112521, "grad_norm": 0.47106385231018066, "learning_rate": 1.4424551369721084e-06, "loss": 0.6675, "step": 17169 }, { "epoch": 0.7115918604169257, "grad_norm": 0.40281856060028076, "learning_rate": 1.44224791744374e-06, "loss": 0.6554, "step": 
17170 }, { "epoch": 0.7116333043225994, "grad_norm": 0.48936718702316284, "learning_rate": 1.4420406979153716e-06, "loss": 0.7, "step": 17171 }, { "epoch": 0.711674748228273, "grad_norm": 0.41593289375305176, "learning_rate": 1.4418334783870034e-06, "loss": 0.6951, "step": 17172 }, { "epoch": 0.7117161921339467, "grad_norm": 0.39680179953575134, "learning_rate": 1.441626258858635e-06, "loss": 0.6985, "step": 17173 }, { "epoch": 0.7117576360396204, "grad_norm": 0.4302290976047516, "learning_rate": 1.4414190393302666e-06, "loss": 0.696, "step": 17174 }, { "epoch": 0.711799079945294, "grad_norm": 0.45055392384529114, "learning_rate": 1.4412118198018982e-06, "loss": 0.7241, "step": 17175 }, { "epoch": 0.7118405238509677, "grad_norm": 0.41072559356689453, "learning_rate": 1.4410046002735298e-06, "loss": 0.6782, "step": 17176 }, { "epoch": 0.7118819677566414, "grad_norm": 0.41165706515312195, "learning_rate": 1.4407973807451616e-06, "loss": 0.6366, "step": 17177 }, { "epoch": 0.7119234116623151, "grad_norm": 0.4233558773994446, "learning_rate": 1.4405901612167932e-06, "loss": 0.7349, "step": 17178 }, { "epoch": 0.7119648555679887, "grad_norm": 0.4239099621772766, "learning_rate": 1.4403829416884248e-06, "loss": 0.6941, "step": 17179 }, { "epoch": 0.7120062994736625, "grad_norm": 0.4017597436904907, "learning_rate": 1.4401757221600564e-06, "loss": 0.6864, "step": 17180 }, { "epoch": 0.7120477433793361, "grad_norm": 0.41544127464294434, "learning_rate": 1.4399685026316882e-06, "loss": 0.6912, "step": 17181 }, { "epoch": 0.7120891872850097, "grad_norm": 0.41935113072395325, "learning_rate": 1.4397612831033198e-06, "loss": 0.7124, "step": 17182 }, { "epoch": 0.7121306311906834, "grad_norm": 0.4116427004337311, "learning_rate": 1.4395540635749514e-06, "loss": 0.6257, "step": 17183 }, { "epoch": 0.712172075096357, "grad_norm": 0.3983500003814697, "learning_rate": 1.439346844046583e-06, "loss": 0.6316, "step": 17184 }, { "epoch": 0.7122135190020308, "grad_norm": 
0.4283016622066498, "learning_rate": 1.4391396245182146e-06, "loss": 0.6697, "step": 17185 }, { "epoch": 0.7122549629077044, "grad_norm": 0.4285280108451843, "learning_rate": 1.4389324049898464e-06, "loss": 0.6304, "step": 17186 }, { "epoch": 0.7122964068133781, "grad_norm": 0.3857199549674988, "learning_rate": 1.438725185461478e-06, "loss": 0.6162, "step": 17187 }, { "epoch": 0.7123378507190518, "grad_norm": 0.42128753662109375, "learning_rate": 1.4385179659331096e-06, "loss": 0.7351, "step": 17188 }, { "epoch": 0.7123792946247254, "grad_norm": 0.4033653140068054, "learning_rate": 1.4383107464047414e-06, "loss": 0.6455, "step": 17189 }, { "epoch": 0.7124207385303991, "grad_norm": 0.39610305428504944, "learning_rate": 1.438103526876373e-06, "loss": 0.6907, "step": 17190 }, { "epoch": 0.7124621824360727, "grad_norm": 0.3711470365524292, "learning_rate": 1.4378963073480046e-06, "loss": 0.6562, "step": 17191 }, { "epoch": 0.7125036263417465, "grad_norm": 0.4262102544307709, "learning_rate": 1.4376890878196362e-06, "loss": 0.6953, "step": 17192 }, { "epoch": 0.7125450702474201, "grad_norm": 0.4469776153564453, "learning_rate": 1.4374818682912678e-06, "loss": 0.6724, "step": 17193 }, { "epoch": 0.7125865141530938, "grad_norm": 0.3950137495994568, "learning_rate": 1.4372746487628994e-06, "loss": 0.6243, "step": 17194 }, { "epoch": 0.7126279580587674, "grad_norm": 0.3959225118160248, "learning_rate": 1.4370674292345312e-06, "loss": 0.6711, "step": 17195 }, { "epoch": 0.7126694019644412, "grad_norm": 0.42926108837127686, "learning_rate": 1.4368602097061628e-06, "loss": 0.7019, "step": 17196 }, { "epoch": 0.7127108458701148, "grad_norm": 0.4032299816608429, "learning_rate": 1.4366529901777944e-06, "loss": 0.658, "step": 17197 }, { "epoch": 0.7127522897757884, "grad_norm": 0.403743177652359, "learning_rate": 1.4364457706494262e-06, "loss": 0.6777, "step": 17198 }, { "epoch": 0.7127937336814621, "grad_norm": 0.39242538809776306, "learning_rate": 1.4362385511210578e-06, 
"loss": 0.6731, "step": 17199 }, { "epoch": 0.7128351775871358, "grad_norm": 0.4026254713535309, "learning_rate": 1.4360313315926894e-06, "loss": 0.6554, "step": 17200 }, { "epoch": 0.7128766214928095, "grad_norm": 0.39283907413482666, "learning_rate": 1.435824112064321e-06, "loss": 0.6555, "step": 17201 }, { "epoch": 0.7129180653984831, "grad_norm": 0.4268520176410675, "learning_rate": 1.4356168925359526e-06, "loss": 0.673, "step": 17202 }, { "epoch": 0.7129595093041569, "grad_norm": 0.3949146568775177, "learning_rate": 1.4354096730075842e-06, "loss": 0.6725, "step": 17203 }, { "epoch": 0.7130009532098305, "grad_norm": 0.4011481702327728, "learning_rate": 1.435202453479216e-06, "loss": 0.6685, "step": 17204 }, { "epoch": 0.7130423971155042, "grad_norm": 0.4004656672477722, "learning_rate": 1.4349952339508476e-06, "loss": 0.658, "step": 17205 }, { "epoch": 0.7130838410211778, "grad_norm": 0.38832539319992065, "learning_rate": 1.4347880144224794e-06, "loss": 0.6816, "step": 17206 }, { "epoch": 0.7131252849268515, "grad_norm": 0.4332389235496521, "learning_rate": 1.434580794894111e-06, "loss": 0.6782, "step": 17207 }, { "epoch": 0.7131667288325252, "grad_norm": 0.4023863673210144, "learning_rate": 1.4343735753657426e-06, "loss": 0.6721, "step": 17208 }, { "epoch": 0.7132081727381988, "grad_norm": 0.40926694869995117, "learning_rate": 1.4341663558373742e-06, "loss": 0.6533, "step": 17209 }, { "epoch": 0.7132496166438725, "grad_norm": 0.45090505480766296, "learning_rate": 1.4339591363090058e-06, "loss": 0.6849, "step": 17210 }, { "epoch": 0.7132910605495462, "grad_norm": 0.41792985796928406, "learning_rate": 1.4337519167806374e-06, "loss": 0.5996, "step": 17211 }, { "epoch": 0.7133325044552199, "grad_norm": 0.4013620615005493, "learning_rate": 1.4335446972522692e-06, "loss": 0.6077, "step": 17212 }, { "epoch": 0.7133739483608935, "grad_norm": 0.4184472858905792, "learning_rate": 1.4333374777239008e-06, "loss": 0.7068, "step": 17213 }, { "epoch": 0.7134153922665672, 
"grad_norm": 0.3798205256462097, "learning_rate": 1.4331302581955324e-06, "loss": 0.6216, "step": 17214 }, { "epoch": 0.7134568361722409, "grad_norm": 0.41796883940696716, "learning_rate": 1.4329230386671642e-06, "loss": 0.7, "step": 17215 }, { "epoch": 0.7134982800779145, "grad_norm": 0.4258931875228882, "learning_rate": 1.4327158191387958e-06, "loss": 0.6682, "step": 17216 }, { "epoch": 0.7135397239835882, "grad_norm": 0.42291250824928284, "learning_rate": 1.4325085996104274e-06, "loss": 0.6877, "step": 17217 }, { "epoch": 0.7135811678892618, "grad_norm": 0.4162435233592987, "learning_rate": 1.432301380082059e-06, "loss": 0.7123, "step": 17218 }, { "epoch": 0.7136226117949356, "grad_norm": 0.4066869616508484, "learning_rate": 1.4320941605536906e-06, "loss": 0.6738, "step": 17219 }, { "epoch": 0.7136640557006092, "grad_norm": 0.39517447352409363, "learning_rate": 1.4318869410253222e-06, "loss": 0.6409, "step": 17220 }, { "epoch": 0.7137054996062829, "grad_norm": 0.42086559534072876, "learning_rate": 1.431679721496954e-06, "loss": 0.6699, "step": 17221 }, { "epoch": 0.7137469435119566, "grad_norm": 0.3757900297641754, "learning_rate": 1.4314725019685856e-06, "loss": 0.6602, "step": 17222 }, { "epoch": 0.7137883874176303, "grad_norm": 0.3955202102661133, "learning_rate": 1.4312652824402174e-06, "loss": 0.6301, "step": 17223 }, { "epoch": 0.7138298313233039, "grad_norm": 0.4265369772911072, "learning_rate": 1.431058062911849e-06, "loss": 0.6851, "step": 17224 }, { "epoch": 0.7138712752289775, "grad_norm": 0.4501838684082031, "learning_rate": 1.4308508433834806e-06, "loss": 0.6528, "step": 17225 }, { "epoch": 0.7139127191346513, "grad_norm": 0.38698020577430725, "learning_rate": 1.4306436238551122e-06, "loss": 0.6587, "step": 17226 }, { "epoch": 0.7139541630403249, "grad_norm": 0.4086521863937378, "learning_rate": 1.4304364043267438e-06, "loss": 0.6498, "step": 17227 }, { "epoch": 0.7139956069459986, "grad_norm": 0.4133686423301697, "learning_rate": 
1.4302291847983754e-06, "loss": 0.6514, "step": 17228 }, { "epoch": 0.7140370508516722, "grad_norm": 0.4076843857765198, "learning_rate": 1.430021965270007e-06, "loss": 0.6722, "step": 17229 }, { "epoch": 0.714078494757346, "grad_norm": 0.38485851883888245, "learning_rate": 1.4298147457416388e-06, "loss": 0.6227, "step": 17230 }, { "epoch": 0.7141199386630196, "grad_norm": 0.388555645942688, "learning_rate": 1.4296075262132706e-06, "loss": 0.7004, "step": 17231 }, { "epoch": 0.7141613825686933, "grad_norm": 0.41111770272254944, "learning_rate": 1.4294003066849022e-06, "loss": 0.6534, "step": 17232 }, { "epoch": 0.714202826474367, "grad_norm": 0.41902655363082886, "learning_rate": 1.4291930871565338e-06, "loss": 0.6831, "step": 17233 }, { "epoch": 0.7142442703800406, "grad_norm": 0.4162984788417816, "learning_rate": 1.4289858676281654e-06, "loss": 0.6687, "step": 17234 }, { "epoch": 0.7142857142857143, "grad_norm": 0.4067620635032654, "learning_rate": 1.428778648099797e-06, "loss": 0.6279, "step": 17235 }, { "epoch": 0.7143271581913879, "grad_norm": 0.38521045446395874, "learning_rate": 1.4285714285714286e-06, "loss": 0.6353, "step": 17236 }, { "epoch": 0.7143686020970617, "grad_norm": 0.45835113525390625, "learning_rate": 1.4283642090430602e-06, "loss": 0.7493, "step": 17237 }, { "epoch": 0.7144100460027353, "grad_norm": 0.3873526155948639, "learning_rate": 1.428156989514692e-06, "loss": 0.6814, "step": 17238 }, { "epoch": 0.714451489908409, "grad_norm": 0.3855592608451843, "learning_rate": 1.4279497699863236e-06, "loss": 0.6904, "step": 17239 }, { "epoch": 0.7144929338140826, "grad_norm": 0.4106155037879944, "learning_rate": 1.4277425504579554e-06, "loss": 0.6538, "step": 17240 }, { "epoch": 0.7145343777197564, "grad_norm": 0.4222412705421448, "learning_rate": 1.427535330929587e-06, "loss": 0.7169, "step": 17241 }, { "epoch": 0.71457582162543, "grad_norm": 0.4217895567417145, "learning_rate": 1.4273281114012186e-06, "loss": 0.7034, "step": 17242 }, { "epoch": 
0.7146172655311036, "grad_norm": 0.39750197529792786, "learning_rate": 1.4271208918728502e-06, "loss": 0.6324, "step": 17243 }, { "epoch": 0.7146587094367773, "grad_norm": 0.4085679054260254, "learning_rate": 1.4269136723444818e-06, "loss": 0.6622, "step": 17244 }, { "epoch": 0.714700153342451, "grad_norm": 0.45610931515693665, "learning_rate": 1.4267064528161134e-06, "loss": 0.679, "step": 17245 }, { "epoch": 0.7147415972481247, "grad_norm": 0.4353027641773224, "learning_rate": 1.426499233287745e-06, "loss": 0.6956, "step": 17246 }, { "epoch": 0.7147830411537983, "grad_norm": 0.4007241427898407, "learning_rate": 1.4262920137593768e-06, "loss": 0.6664, "step": 17247 }, { "epoch": 0.714824485059472, "grad_norm": 0.432422012090683, "learning_rate": 1.4260847942310086e-06, "loss": 0.6848, "step": 17248 }, { "epoch": 0.7148659289651457, "grad_norm": 0.4303676187992096, "learning_rate": 1.4258775747026402e-06, "loss": 0.7233, "step": 17249 }, { "epoch": 0.7149073728708193, "grad_norm": 0.38789495825767517, "learning_rate": 1.4256703551742718e-06, "loss": 0.658, "step": 17250 }, { "epoch": 0.714948816776493, "grad_norm": 0.4143153131008148, "learning_rate": 1.4254631356459034e-06, "loss": 0.6671, "step": 17251 }, { "epoch": 0.7149902606821666, "grad_norm": 0.4264090359210968, "learning_rate": 1.425255916117535e-06, "loss": 0.6056, "step": 17252 }, { "epoch": 0.7150317045878404, "grad_norm": 0.40375521779060364, "learning_rate": 1.4250486965891666e-06, "loss": 0.6565, "step": 17253 }, { "epoch": 0.715073148493514, "grad_norm": 0.4146019518375397, "learning_rate": 1.4248414770607982e-06, "loss": 0.6548, "step": 17254 }, { "epoch": 0.7151145923991877, "grad_norm": 0.3905279338359833, "learning_rate": 1.4246342575324298e-06, "loss": 0.613, "step": 17255 }, { "epoch": 0.7151560363048614, "grad_norm": 0.39962294697761536, "learning_rate": 1.4244270380040616e-06, "loss": 0.6721, "step": 17256 }, { "epoch": 0.7151974802105351, "grad_norm": 0.37589672207832336, "learning_rate": 
1.4242198184756934e-06, "loss": 0.6001, "step": 17257 }, { "epoch": 0.7152389241162087, "grad_norm": 0.390352338552475, "learning_rate": 1.424012598947325e-06, "loss": 0.6802, "step": 17258 }, { "epoch": 0.7152803680218823, "grad_norm": 0.4050734043121338, "learning_rate": 1.4238053794189566e-06, "loss": 0.678, "step": 17259 }, { "epoch": 0.7153218119275561, "grad_norm": 0.41170430183410645, "learning_rate": 1.4235981598905882e-06, "loss": 0.6812, "step": 17260 }, { "epoch": 0.7153632558332297, "grad_norm": 0.4183063209056854, "learning_rate": 1.4233909403622198e-06, "loss": 0.7076, "step": 17261 }, { "epoch": 0.7154046997389034, "grad_norm": 0.4043883681297302, "learning_rate": 1.4231837208338514e-06, "loss": 0.6229, "step": 17262 }, { "epoch": 0.715446143644577, "grad_norm": 0.41671022772789, "learning_rate": 1.422976501305483e-06, "loss": 0.679, "step": 17263 }, { "epoch": 0.7154875875502508, "grad_norm": 0.457964152097702, "learning_rate": 1.4227692817771148e-06, "loss": 0.6597, "step": 17264 }, { "epoch": 0.7155290314559244, "grad_norm": 0.3986662030220032, "learning_rate": 1.4225620622487466e-06, "loss": 0.6687, "step": 17265 }, { "epoch": 0.7155704753615981, "grad_norm": 0.4151467978954315, "learning_rate": 1.4223548427203782e-06, "loss": 0.6589, "step": 17266 }, { "epoch": 0.7156119192672717, "grad_norm": 0.4237269163131714, "learning_rate": 1.4221476231920098e-06, "loss": 0.7004, "step": 17267 }, { "epoch": 0.7156533631729454, "grad_norm": 0.38333553075790405, "learning_rate": 1.4219404036636414e-06, "loss": 0.6654, "step": 17268 }, { "epoch": 0.7156948070786191, "grad_norm": 0.41093388199806213, "learning_rate": 1.421733184135273e-06, "loss": 0.7029, "step": 17269 }, { "epoch": 0.7157362509842927, "grad_norm": 0.40810373425483704, "learning_rate": 1.4215259646069046e-06, "loss": 0.6208, "step": 17270 }, { "epoch": 0.7157776948899665, "grad_norm": 0.42177239060401917, "learning_rate": 1.4213187450785362e-06, "loss": 0.6782, "step": 17271 }, { "epoch": 
0.7158191387956401, "grad_norm": 0.4325515627861023, "learning_rate": 1.4211115255501678e-06, "loss": 0.7255, "step": 17272 }, { "epoch": 0.7158605827013138, "grad_norm": 0.40350043773651123, "learning_rate": 1.4209043060217996e-06, "loss": 0.6281, "step": 17273 }, { "epoch": 0.7159020266069874, "grad_norm": 0.4112646281719208, "learning_rate": 1.4206970864934314e-06, "loss": 0.6873, "step": 17274 }, { "epoch": 0.7159434705126612, "grad_norm": 0.4365899860858917, "learning_rate": 1.420489866965063e-06, "loss": 0.7063, "step": 17275 }, { "epoch": 0.7159849144183348, "grad_norm": 0.4231681227684021, "learning_rate": 1.4202826474366946e-06, "loss": 0.739, "step": 17276 }, { "epoch": 0.7160263583240084, "grad_norm": 0.41030699014663696, "learning_rate": 1.4200754279083262e-06, "loss": 0.6469, "step": 17277 }, { "epoch": 0.7160678022296821, "grad_norm": 0.40123194456100464, "learning_rate": 1.4198682083799578e-06, "loss": 0.6342, "step": 17278 }, { "epoch": 0.7161092461353558, "grad_norm": 0.42994168400764465, "learning_rate": 1.4196609888515894e-06, "loss": 0.6526, "step": 17279 }, { "epoch": 0.7161506900410295, "grad_norm": 0.39958655834198, "learning_rate": 1.419453769323221e-06, "loss": 0.6462, "step": 17280 }, { "epoch": 0.7161921339467031, "grad_norm": 0.41059520840644836, "learning_rate": 1.4192465497948526e-06, "loss": 0.6014, "step": 17281 }, { "epoch": 0.7162335778523768, "grad_norm": 0.38499975204467773, "learning_rate": 1.4190393302664846e-06, "loss": 0.644, "step": 17282 }, { "epoch": 0.7162750217580505, "grad_norm": 0.3916405737400055, "learning_rate": 1.4188321107381162e-06, "loss": 0.5753, "step": 17283 }, { "epoch": 0.7163164656637242, "grad_norm": 0.3972073197364807, "learning_rate": 1.4186248912097478e-06, "loss": 0.6235, "step": 17284 }, { "epoch": 0.7163579095693978, "grad_norm": 0.430368036031723, "learning_rate": 1.4184176716813794e-06, "loss": 0.7181, "step": 17285 }, { "epoch": 0.7163993534750714, "grad_norm": 0.40028277039527893, 
"learning_rate": 1.418210452153011e-06, "loss": 0.6892, "step": 17286 }, { "epoch": 0.7164407973807452, "grad_norm": 0.4451952874660492, "learning_rate": 1.4180032326246426e-06, "loss": 0.6951, "step": 17287 }, { "epoch": 0.7164822412864188, "grad_norm": 0.38671648502349854, "learning_rate": 1.4177960130962742e-06, "loss": 0.718, "step": 17288 }, { "epoch": 0.7165236851920925, "grad_norm": 0.4029669165611267, "learning_rate": 1.4175887935679058e-06, "loss": 0.6615, "step": 17289 }, { "epoch": 0.7165651290977662, "grad_norm": 0.42925021052360535, "learning_rate": 1.4173815740395374e-06, "loss": 0.668, "step": 17290 }, { "epoch": 0.7166065730034399, "grad_norm": 0.403926819562912, "learning_rate": 1.4171743545111695e-06, "loss": 0.6506, "step": 17291 }, { "epoch": 0.7166480169091135, "grad_norm": 0.3993407189846039, "learning_rate": 1.416967134982801e-06, "loss": 0.6747, "step": 17292 }, { "epoch": 0.7166894608147872, "grad_norm": 0.4468505084514618, "learning_rate": 1.4167599154544326e-06, "loss": 0.6531, "step": 17293 }, { "epoch": 0.7167309047204609, "grad_norm": 0.4041764736175537, "learning_rate": 1.4165526959260642e-06, "loss": 0.6957, "step": 17294 }, { "epoch": 0.7167723486261345, "grad_norm": 0.4140792191028595, "learning_rate": 1.4163454763976958e-06, "loss": 0.7102, "step": 17295 }, { "epoch": 0.7168137925318082, "grad_norm": 0.4193328022956848, "learning_rate": 1.4161382568693274e-06, "loss": 0.7029, "step": 17296 }, { "epoch": 0.7168552364374818, "grad_norm": 0.41624847054481506, "learning_rate": 1.415931037340959e-06, "loss": 0.7363, "step": 17297 }, { "epoch": 0.7168966803431556, "grad_norm": 0.41988906264305115, "learning_rate": 1.4157238178125906e-06, "loss": 0.7302, "step": 17298 }, { "epoch": 0.7169381242488292, "grad_norm": 0.4258204996585846, "learning_rate": 1.4155165982842227e-06, "loss": 0.675, "step": 17299 }, { "epoch": 0.7169795681545029, "grad_norm": 0.4020434617996216, "learning_rate": 1.4153093787558543e-06, "loss": 0.6556, "step": 17300 
}, { "epoch": 0.7170210120601765, "grad_norm": 0.39730560779571533, "learning_rate": 1.4151021592274858e-06, "loss": 0.6381, "step": 17301 }, { "epoch": 0.7170624559658503, "grad_norm": 0.44688811898231506, "learning_rate": 1.4148949396991174e-06, "loss": 0.7285, "step": 17302 }, { "epoch": 0.7171038998715239, "grad_norm": 0.40782302618026733, "learning_rate": 1.414687720170749e-06, "loss": 0.7, "step": 17303 }, { "epoch": 0.7171453437771975, "grad_norm": 0.4457809329032898, "learning_rate": 1.4144805006423806e-06, "loss": 0.7, "step": 17304 }, { "epoch": 0.7171867876828713, "grad_norm": 0.42020443081855774, "learning_rate": 1.4142732811140122e-06, "loss": 0.7075, "step": 17305 }, { "epoch": 0.7172282315885449, "grad_norm": 0.41487154364585876, "learning_rate": 1.4140660615856438e-06, "loss": 0.6418, "step": 17306 }, { "epoch": 0.7172696754942186, "grad_norm": 0.4093276560306549, "learning_rate": 1.4138588420572754e-06, "loss": 0.665, "step": 17307 }, { "epoch": 0.7173111193998922, "grad_norm": 0.4286203384399414, "learning_rate": 1.4136516225289075e-06, "loss": 0.6326, "step": 17308 }, { "epoch": 0.717352563305566, "grad_norm": 0.39587169885635376, "learning_rate": 1.413444403000539e-06, "loss": 0.7081, "step": 17309 }, { "epoch": 0.7173940072112396, "grad_norm": 0.4468456506729126, "learning_rate": 1.4132371834721706e-06, "loss": 0.6846, "step": 17310 }, { "epoch": 0.7174354511169132, "grad_norm": 0.3953080177307129, "learning_rate": 1.4130299639438022e-06, "loss": 0.676, "step": 17311 }, { "epoch": 0.7174768950225869, "grad_norm": 0.3981952369213104, "learning_rate": 1.4128227444154338e-06, "loss": 0.6567, "step": 17312 }, { "epoch": 0.7175183389282606, "grad_norm": 0.3728790879249573, "learning_rate": 1.4126155248870654e-06, "loss": 0.6091, "step": 17313 }, { "epoch": 0.7175597828339343, "grad_norm": 0.38949382305145264, "learning_rate": 1.412408305358697e-06, "loss": 0.6062, "step": 17314 }, { "epoch": 0.7176012267396079, "grad_norm": 0.38943153619766235, 
"learning_rate": 1.4122010858303286e-06, "loss": 0.6392, "step": 17315 }, { "epoch": 0.7176426706452816, "grad_norm": 0.4260689914226532, "learning_rate": 1.4119938663019602e-06, "loss": 0.7114, "step": 17316 }, { "epoch": 0.7176841145509553, "grad_norm": 0.38348516821861267, "learning_rate": 1.4117866467735923e-06, "loss": 0.6091, "step": 17317 }, { "epoch": 0.717725558456629, "grad_norm": 0.41743743419647217, "learning_rate": 1.4115794272452239e-06, "loss": 0.6361, "step": 17318 }, { "epoch": 0.7177670023623026, "grad_norm": 0.3997074067592621, "learning_rate": 1.4113722077168554e-06, "loss": 0.653, "step": 17319 }, { "epoch": 0.7178084462679762, "grad_norm": 0.40662381052970886, "learning_rate": 1.411164988188487e-06, "loss": 0.6714, "step": 17320 }, { "epoch": 0.71784989017365, "grad_norm": 0.4267542362213135, "learning_rate": 1.4109577686601186e-06, "loss": 0.671, "step": 17321 }, { "epoch": 0.7178913340793236, "grad_norm": 0.4198610782623291, "learning_rate": 1.4107505491317502e-06, "loss": 0.6573, "step": 17322 }, { "epoch": 0.7179327779849973, "grad_norm": 0.40717971324920654, "learning_rate": 1.4105433296033818e-06, "loss": 0.6753, "step": 17323 }, { "epoch": 0.717974221890671, "grad_norm": 0.4537484645843506, "learning_rate": 1.4103361100750134e-06, "loss": 0.6842, "step": 17324 }, { "epoch": 0.7180156657963447, "grad_norm": 0.4328794479370117, "learning_rate": 1.4101288905466455e-06, "loss": 0.6232, "step": 17325 }, { "epoch": 0.7180571097020183, "grad_norm": 0.43901193141937256, "learning_rate": 1.409921671018277e-06, "loss": 0.7168, "step": 17326 }, { "epoch": 0.718098553607692, "grad_norm": 0.4031955897808075, "learning_rate": 1.4097144514899087e-06, "loss": 0.7026, "step": 17327 }, { "epoch": 0.7181399975133657, "grad_norm": 0.43417415022850037, "learning_rate": 1.4095072319615402e-06, "loss": 0.6554, "step": 17328 }, { "epoch": 0.7181814414190393, "grad_norm": 0.4246547222137451, "learning_rate": 1.4093000124331718e-06, "loss": 0.6907, "step": 17329 
}, { "epoch": 0.718222885324713, "grad_norm": 0.4580361843109131, "learning_rate": 1.4090927929048034e-06, "loss": 0.6958, "step": 17330 }, { "epoch": 0.7182643292303866, "grad_norm": 0.44306379556655884, "learning_rate": 1.408885573376435e-06, "loss": 0.7146, "step": 17331 }, { "epoch": 0.7183057731360604, "grad_norm": 0.42164909839630127, "learning_rate": 1.4086783538480666e-06, "loss": 0.7065, "step": 17332 }, { "epoch": 0.718347217041734, "grad_norm": 0.4326670467853546, "learning_rate": 1.4084711343196982e-06, "loss": 0.6595, "step": 17333 }, { "epoch": 0.7183886609474077, "grad_norm": 0.4053807556629181, "learning_rate": 1.4082639147913303e-06, "loss": 0.6731, "step": 17334 }, { "epoch": 0.7184301048530813, "grad_norm": 0.4254748225212097, "learning_rate": 1.4080566952629619e-06, "loss": 0.6477, "step": 17335 }, { "epoch": 0.7184715487587551, "grad_norm": 0.39833804965019226, "learning_rate": 1.4078494757345935e-06, "loss": 0.6852, "step": 17336 }, { "epoch": 0.7185129926644287, "grad_norm": 0.41857248544692993, "learning_rate": 1.407642256206225e-06, "loss": 0.6484, "step": 17337 }, { "epoch": 0.7185544365701023, "grad_norm": 0.40050336718559265, "learning_rate": 1.4074350366778566e-06, "loss": 0.6471, "step": 17338 }, { "epoch": 0.718595880475776, "grad_norm": 0.39058953523635864, "learning_rate": 1.4072278171494882e-06, "loss": 0.6655, "step": 17339 }, { "epoch": 0.7186373243814497, "grad_norm": 0.39312630891799927, "learning_rate": 1.4070205976211198e-06, "loss": 0.6481, "step": 17340 }, { "epoch": 0.7186787682871234, "grad_norm": 0.3944080173969269, "learning_rate": 1.4068133780927514e-06, "loss": 0.6212, "step": 17341 }, { "epoch": 0.718720212192797, "grad_norm": 0.3970586359500885, "learning_rate": 1.406606158564383e-06, "loss": 0.6305, "step": 17342 }, { "epoch": 0.7187616560984708, "grad_norm": 0.43630170822143555, "learning_rate": 1.406398939036015e-06, "loss": 0.707, "step": 17343 }, { "epoch": 0.7188031000041444, "grad_norm": 0.40251633524894714, 
"learning_rate": 1.4061917195076467e-06, "loss": 0.646, "step": 17344 }, { "epoch": 0.7188445439098181, "grad_norm": 0.42355361580848694, "learning_rate": 1.4059844999792783e-06, "loss": 0.707, "step": 17345 }, { "epoch": 0.7188859878154917, "grad_norm": 0.3875163495540619, "learning_rate": 1.4057772804509098e-06, "loss": 0.6731, "step": 17346 }, { "epoch": 0.7189274317211654, "grad_norm": 0.42768093943595886, "learning_rate": 1.4055700609225414e-06, "loss": 0.6456, "step": 17347 }, { "epoch": 0.7189688756268391, "grad_norm": 0.42245447635650635, "learning_rate": 1.405362841394173e-06, "loss": 0.6521, "step": 17348 }, { "epoch": 0.7190103195325127, "grad_norm": 0.4353671967983246, "learning_rate": 1.4051556218658046e-06, "loss": 0.7292, "step": 17349 }, { "epoch": 0.7190517634381864, "grad_norm": 0.45536428689956665, "learning_rate": 1.4049484023374362e-06, "loss": 0.6943, "step": 17350 }, { "epoch": 0.7190932073438601, "grad_norm": 0.4157581627368927, "learning_rate": 1.4047411828090678e-06, "loss": 0.6274, "step": 17351 }, { "epoch": 0.7191346512495338, "grad_norm": 0.42575499415397644, "learning_rate": 1.4045339632806999e-06, "loss": 0.7222, "step": 17352 }, { "epoch": 0.7191760951552074, "grad_norm": 0.44246596097946167, "learning_rate": 1.4043267437523315e-06, "loss": 0.6748, "step": 17353 }, { "epoch": 0.7192175390608812, "grad_norm": 0.40144142508506775, "learning_rate": 1.404119524223963e-06, "loss": 0.6716, "step": 17354 }, { "epoch": 0.7192589829665548, "grad_norm": 0.4067979156970978, "learning_rate": 1.4039123046955946e-06, "loss": 0.6147, "step": 17355 }, { "epoch": 0.7193004268722284, "grad_norm": 0.408854603767395, "learning_rate": 1.4037050851672262e-06, "loss": 0.6663, "step": 17356 }, { "epoch": 0.7193418707779021, "grad_norm": 0.42338109016418457, "learning_rate": 1.4034978656388578e-06, "loss": 0.6508, "step": 17357 }, { "epoch": 0.7193833146835757, "grad_norm": 0.4001927673816681, "learning_rate": 1.4032906461104894e-06, "loss": 0.6415, "step": 
17358 }, { "epoch": 0.7194247585892495, "grad_norm": 0.41887569427490234, "learning_rate": 1.403083426582121e-06, "loss": 0.6768, "step": 17359 }, { "epoch": 0.7194662024949231, "grad_norm": 0.39018580317497253, "learning_rate": 1.402876207053753e-06, "loss": 0.6296, "step": 17360 }, { "epoch": 0.7195076464005968, "grad_norm": 0.44853100180625916, "learning_rate": 1.4026689875253847e-06, "loss": 0.7053, "step": 17361 }, { "epoch": 0.7195490903062705, "grad_norm": 0.39521580934524536, "learning_rate": 1.4024617679970163e-06, "loss": 0.665, "step": 17362 }, { "epoch": 0.7195905342119442, "grad_norm": 0.389913946390152, "learning_rate": 1.4022545484686479e-06, "loss": 0.6477, "step": 17363 }, { "epoch": 0.7196319781176178, "grad_norm": 0.3807331621646881, "learning_rate": 1.4020473289402794e-06, "loss": 0.6251, "step": 17364 }, { "epoch": 0.7196734220232914, "grad_norm": 0.4107075035572052, "learning_rate": 1.401840109411911e-06, "loss": 0.6401, "step": 17365 }, { "epoch": 0.7197148659289652, "grad_norm": 0.4643884301185608, "learning_rate": 1.4016328898835426e-06, "loss": 0.7307, "step": 17366 }, { "epoch": 0.7197563098346388, "grad_norm": 0.4370321035385132, "learning_rate": 1.4014256703551742e-06, "loss": 0.6801, "step": 17367 }, { "epoch": 0.7197977537403125, "grad_norm": 0.43240052461624146, "learning_rate": 1.4012184508268058e-06, "loss": 0.6348, "step": 17368 }, { "epoch": 0.7198391976459861, "grad_norm": 0.41039302945137024, "learning_rate": 1.4010112312984379e-06, "loss": 0.6488, "step": 17369 }, { "epoch": 0.7198806415516599, "grad_norm": 0.42606598138809204, "learning_rate": 1.4008040117700695e-06, "loss": 0.6907, "step": 17370 }, { "epoch": 0.7199220854573335, "grad_norm": 0.3793336749076843, "learning_rate": 1.400596792241701e-06, "loss": 0.6514, "step": 17371 }, { "epoch": 0.7199635293630071, "grad_norm": 0.4357008635997772, "learning_rate": 1.4003895727133327e-06, "loss": 0.6796, "step": 17372 }, { "epoch": 0.7200049732686808, "grad_norm": 
0.38215315341949463, "learning_rate": 1.4001823531849642e-06, "loss": 0.6132, "step": 17373 }, { "epoch": 0.7200464171743545, "grad_norm": 0.4166969954967499, "learning_rate": 1.3999751336565958e-06, "loss": 0.679, "step": 17374 }, { "epoch": 0.7200878610800282, "grad_norm": 0.4241994321346283, "learning_rate": 1.3997679141282274e-06, "loss": 0.6874, "step": 17375 }, { "epoch": 0.7201293049857018, "grad_norm": 0.42964982986450195, "learning_rate": 1.399560694599859e-06, "loss": 0.6931, "step": 17376 }, { "epoch": 0.7201707488913756, "grad_norm": 0.4299412965774536, "learning_rate": 1.3993534750714906e-06, "loss": 0.6727, "step": 17377 }, { "epoch": 0.7202121927970492, "grad_norm": 0.39656901359558105, "learning_rate": 1.3991462555431227e-06, "loss": 0.6135, "step": 17378 }, { "epoch": 0.7202536367027229, "grad_norm": 0.4515514671802521, "learning_rate": 1.3989390360147543e-06, "loss": 0.6731, "step": 17379 }, { "epoch": 0.7202950806083965, "grad_norm": 0.4034215211868286, "learning_rate": 1.3987318164863859e-06, "loss": 0.6526, "step": 17380 }, { "epoch": 0.7203365245140702, "grad_norm": 0.4588306248188019, "learning_rate": 1.3985245969580175e-06, "loss": 0.7166, "step": 17381 }, { "epoch": 0.7203779684197439, "grad_norm": 0.4249785244464874, "learning_rate": 1.398317377429649e-06, "loss": 0.6841, "step": 17382 }, { "epoch": 0.7204194123254175, "grad_norm": 0.40178245306015015, "learning_rate": 1.3981101579012806e-06, "loss": 0.6841, "step": 17383 }, { "epoch": 0.7204608562310912, "grad_norm": 0.3789336681365967, "learning_rate": 1.3979029383729122e-06, "loss": 0.6211, "step": 17384 }, { "epoch": 0.7205023001367649, "grad_norm": 0.4140903651714325, "learning_rate": 1.3976957188445438e-06, "loss": 0.6425, "step": 17385 }, { "epoch": 0.7205437440424386, "grad_norm": 0.404009073972702, "learning_rate": 1.3974884993161759e-06, "loss": 0.6903, "step": 17386 }, { "epoch": 0.7205851879481122, "grad_norm": 0.45678526163101196, "learning_rate": 1.3972812797878075e-06, 
"loss": 0.6659, "step": 17387 }, { "epoch": 0.720626631853786, "grad_norm": 0.4432964324951172, "learning_rate": 1.397074060259439e-06, "loss": 0.7249, "step": 17388 }, { "epoch": 0.7206680757594596, "grad_norm": 0.41829541325569153, "learning_rate": 1.3968668407310707e-06, "loss": 0.6658, "step": 17389 }, { "epoch": 0.7207095196651332, "grad_norm": 0.42822718620300293, "learning_rate": 1.3966596212027023e-06, "loss": 0.6453, "step": 17390 }, { "epoch": 0.7207509635708069, "grad_norm": 0.3907715976238251, "learning_rate": 1.3964524016743338e-06, "loss": 0.6774, "step": 17391 }, { "epoch": 0.7207924074764805, "grad_norm": 0.4679035544395447, "learning_rate": 1.3962451821459654e-06, "loss": 0.7021, "step": 17392 }, { "epoch": 0.7208338513821543, "grad_norm": 0.45747002959251404, "learning_rate": 1.396037962617597e-06, "loss": 0.7384, "step": 17393 }, { "epoch": 0.7208752952878279, "grad_norm": 0.4033137857913971, "learning_rate": 1.3958307430892286e-06, "loss": 0.6794, "step": 17394 }, { "epoch": 0.7209167391935016, "grad_norm": 0.45067644119262695, "learning_rate": 1.3956235235608607e-06, "loss": 0.7114, "step": 17395 }, { "epoch": 0.7209581830991753, "grad_norm": 0.3820972740650177, "learning_rate": 1.3954163040324923e-06, "loss": 0.6348, "step": 17396 }, { "epoch": 0.720999627004849, "grad_norm": 0.42485058307647705, "learning_rate": 1.3952090845041239e-06, "loss": 0.6766, "step": 17397 }, { "epoch": 0.7210410709105226, "grad_norm": 0.4396957457065582, "learning_rate": 1.3950018649757555e-06, "loss": 0.6793, "step": 17398 }, { "epoch": 0.7210825148161962, "grad_norm": 0.3938605487346649, "learning_rate": 1.394794645447387e-06, "loss": 0.6862, "step": 17399 }, { "epoch": 0.72112395872187, "grad_norm": 0.4191858172416687, "learning_rate": 1.3945874259190187e-06, "loss": 0.7136, "step": 17400 }, { "epoch": 0.7211654026275436, "grad_norm": 0.4656555652618408, "learning_rate": 1.3943802063906502e-06, "loss": 0.7263, "step": 17401 }, { "epoch": 0.7212068465332173, 
"grad_norm": 0.4497932195663452, "learning_rate": 1.3941729868622818e-06, "loss": 0.6488, "step": 17402 }, { "epoch": 0.7212482904388909, "grad_norm": 0.45916157960891724, "learning_rate": 1.3939657673339134e-06, "loss": 0.6682, "step": 17403 }, { "epoch": 0.7212897343445647, "grad_norm": 0.4112190902233124, "learning_rate": 1.3937585478055455e-06, "loss": 0.6881, "step": 17404 }, { "epoch": 0.7213311782502383, "grad_norm": 0.38071346282958984, "learning_rate": 1.393551328277177e-06, "loss": 0.6703, "step": 17405 }, { "epoch": 0.721372622155912, "grad_norm": 0.4386751055717468, "learning_rate": 1.3933441087488087e-06, "loss": 0.6342, "step": 17406 }, { "epoch": 0.7214140660615856, "grad_norm": 0.4358382225036621, "learning_rate": 1.3931368892204403e-06, "loss": 0.7031, "step": 17407 }, { "epoch": 0.7214555099672593, "grad_norm": 0.4239400029182434, "learning_rate": 1.3929296696920719e-06, "loss": 0.6095, "step": 17408 }, { "epoch": 0.721496953872933, "grad_norm": 0.3753144443035126, "learning_rate": 1.3927224501637035e-06, "loss": 0.6804, "step": 17409 }, { "epoch": 0.7215383977786066, "grad_norm": 0.38834258913993835, "learning_rate": 1.392515230635335e-06, "loss": 0.6718, "step": 17410 }, { "epoch": 0.7215798416842804, "grad_norm": 0.40292462706565857, "learning_rate": 1.3923080111069666e-06, "loss": 0.6414, "step": 17411 }, { "epoch": 0.721621285589954, "grad_norm": 0.43153274059295654, "learning_rate": 1.3921007915785982e-06, "loss": 0.7106, "step": 17412 }, { "epoch": 0.7216627294956277, "grad_norm": 0.39198118448257446, "learning_rate": 1.3918935720502303e-06, "loss": 0.6172, "step": 17413 }, { "epoch": 0.7217041734013013, "grad_norm": 0.4350663721561432, "learning_rate": 1.3916863525218619e-06, "loss": 0.6273, "step": 17414 }, { "epoch": 0.7217456173069751, "grad_norm": 0.39641499519348145, "learning_rate": 1.3914791329934935e-06, "loss": 0.6427, "step": 17415 }, { "epoch": 0.7217870612126487, "grad_norm": 0.43966054916381836, "learning_rate": 
1.391271913465125e-06, "loss": 0.7163, "step": 17416 }, { "epoch": 0.7218285051183223, "grad_norm": 0.40090662240982056, "learning_rate": 1.3910646939367567e-06, "loss": 0.6404, "step": 17417 }, { "epoch": 0.721869949023996, "grad_norm": 0.42804622650146484, "learning_rate": 1.3908574744083883e-06, "loss": 0.6621, "step": 17418 }, { "epoch": 0.7219113929296697, "grad_norm": 0.39885470271110535, "learning_rate": 1.3906502548800198e-06, "loss": 0.656, "step": 17419 }, { "epoch": 0.7219528368353434, "grad_norm": 0.4252878427505493, "learning_rate": 1.3904430353516514e-06, "loss": 0.7488, "step": 17420 }, { "epoch": 0.721994280741017, "grad_norm": 0.424329936504364, "learning_rate": 1.3902358158232835e-06, "loss": 0.6688, "step": 17421 }, { "epoch": 0.7220357246466907, "grad_norm": 0.4304868280887604, "learning_rate": 1.390028596294915e-06, "loss": 0.6442, "step": 17422 }, { "epoch": 0.7220771685523644, "grad_norm": 0.46468403935432434, "learning_rate": 1.3898213767665467e-06, "loss": 0.6812, "step": 17423 }, { "epoch": 0.7221186124580381, "grad_norm": 0.4022739827632904, "learning_rate": 1.3896141572381783e-06, "loss": 0.6503, "step": 17424 }, { "epoch": 0.7221600563637117, "grad_norm": 0.4108484983444214, "learning_rate": 1.3894069377098099e-06, "loss": 0.678, "step": 17425 }, { "epoch": 0.7222015002693853, "grad_norm": 0.40491557121276855, "learning_rate": 1.3891997181814415e-06, "loss": 0.6445, "step": 17426 }, { "epoch": 0.7222429441750591, "grad_norm": 0.4266613721847534, "learning_rate": 1.388992498653073e-06, "loss": 0.6813, "step": 17427 }, { "epoch": 0.7222843880807327, "grad_norm": 0.4279089570045471, "learning_rate": 1.3887852791247046e-06, "loss": 0.7163, "step": 17428 }, { "epoch": 0.7223258319864064, "grad_norm": 0.3997454345226288, "learning_rate": 1.3885780595963362e-06, "loss": 0.6721, "step": 17429 }, { "epoch": 0.72236727589208, "grad_norm": 0.4164501130580902, "learning_rate": 1.3883708400679683e-06, "loss": 0.6871, "step": 17430 }, { "epoch": 
0.7224087197977538, "grad_norm": 0.39458733797073364, "learning_rate": 1.3881636205395999e-06, "loss": 0.6342, "step": 17431 }, { "epoch": 0.7224501637034274, "grad_norm": 0.42602452635765076, "learning_rate": 1.3879564010112315e-06, "loss": 0.6781, "step": 17432 }, { "epoch": 0.722491607609101, "grad_norm": 0.40976107120513916, "learning_rate": 1.387749181482863e-06, "loss": 0.6068, "step": 17433 }, { "epoch": 0.7225330515147748, "grad_norm": 0.4227009415626526, "learning_rate": 1.3875419619544947e-06, "loss": 0.6719, "step": 17434 }, { "epoch": 0.7225744954204484, "grad_norm": 0.4550377428531647, "learning_rate": 1.3873347424261263e-06, "loss": 0.7133, "step": 17435 }, { "epoch": 0.7226159393261221, "grad_norm": 0.4322700500488281, "learning_rate": 1.3871275228977579e-06, "loss": 0.6755, "step": 17436 }, { "epoch": 0.7226573832317957, "grad_norm": 0.4307835102081299, "learning_rate": 1.3869203033693894e-06, "loss": 0.6885, "step": 17437 }, { "epoch": 0.7226988271374695, "grad_norm": 0.4248655140399933, "learning_rate": 1.3867130838410213e-06, "loss": 0.6736, "step": 17438 }, { "epoch": 0.7227402710431431, "grad_norm": 0.4186408519744873, "learning_rate": 1.386505864312653e-06, "loss": 0.6873, "step": 17439 }, { "epoch": 0.7227817149488168, "grad_norm": 0.40119022130966187, "learning_rate": 1.3862986447842847e-06, "loss": 0.6685, "step": 17440 }, { "epoch": 0.7228231588544904, "grad_norm": 0.4152214229106903, "learning_rate": 1.3860914252559163e-06, "loss": 0.6788, "step": 17441 }, { "epoch": 0.7228646027601641, "grad_norm": 0.3952624201774597, "learning_rate": 1.3858842057275479e-06, "loss": 0.652, "step": 17442 }, { "epoch": 0.7229060466658378, "grad_norm": 0.3806418776512146, "learning_rate": 1.3856769861991795e-06, "loss": 0.6597, "step": 17443 }, { "epoch": 0.7229474905715114, "grad_norm": 0.3794182538986206, "learning_rate": 1.385469766670811e-06, "loss": 0.637, "step": 17444 }, { "epoch": 0.7229889344771852, "grad_norm": 0.42944058775901794, 
"learning_rate": 1.3852625471424427e-06, "loss": 0.6852, "step": 17445 }, { "epoch": 0.7230303783828588, "grad_norm": 0.43929627537727356, "learning_rate": 1.3850553276140742e-06, "loss": 0.6161, "step": 17446 }, { "epoch": 0.7230718222885325, "grad_norm": 0.4343337416648865, "learning_rate": 1.3848481080857063e-06, "loss": 0.6794, "step": 17447 }, { "epoch": 0.7231132661942061, "grad_norm": 0.4136296212673187, "learning_rate": 1.3846408885573379e-06, "loss": 0.6921, "step": 17448 }, { "epoch": 0.7231547100998799, "grad_norm": 0.4051271080970764, "learning_rate": 1.3844336690289695e-06, "loss": 0.6455, "step": 17449 }, { "epoch": 0.7231961540055535, "grad_norm": 0.46531012654304504, "learning_rate": 1.384226449500601e-06, "loss": 0.6897, "step": 17450 }, { "epoch": 0.7232375979112271, "grad_norm": 0.3971078097820282, "learning_rate": 1.3840192299722327e-06, "loss": 0.6283, "step": 17451 }, { "epoch": 0.7232790418169008, "grad_norm": 0.4227045476436615, "learning_rate": 1.3838120104438643e-06, "loss": 0.668, "step": 17452 }, { "epoch": 0.7233204857225745, "grad_norm": 0.38663744926452637, "learning_rate": 1.3836047909154959e-06, "loss": 0.6387, "step": 17453 }, { "epoch": 0.7233619296282482, "grad_norm": 0.39325106143951416, "learning_rate": 1.3833975713871275e-06, "loss": 0.6163, "step": 17454 }, { "epoch": 0.7234033735339218, "grad_norm": 0.4134993255138397, "learning_rate": 1.3831903518587593e-06, "loss": 0.6733, "step": 17455 }, { "epoch": 0.7234448174395955, "grad_norm": 0.41026341915130615, "learning_rate": 1.382983132330391e-06, "loss": 0.6637, "step": 17456 }, { "epoch": 0.7234862613452692, "grad_norm": 0.4146248698234558, "learning_rate": 1.3827759128020227e-06, "loss": 0.6873, "step": 17457 }, { "epoch": 0.7235277052509429, "grad_norm": 0.4182088375091553, "learning_rate": 1.3825686932736543e-06, "loss": 0.6658, "step": 17458 }, { "epoch": 0.7235691491566165, "grad_norm": 0.37575212121009827, "learning_rate": 1.3823614737452859e-06, "loss": 0.6208, "step": 
17459 }, { "epoch": 0.7236105930622901, "grad_norm": 0.4248543083667755, "learning_rate": 1.3821542542169175e-06, "loss": 0.6736, "step": 17460 }, { "epoch": 0.7236520369679639, "grad_norm": 0.3931140899658203, "learning_rate": 1.381947034688549e-06, "loss": 0.6249, "step": 17461 }, { "epoch": 0.7236934808736375, "grad_norm": 0.47457680106163025, "learning_rate": 1.3817398151601807e-06, "loss": 0.7053, "step": 17462 }, { "epoch": 0.7237349247793112, "grad_norm": 0.3681237995624542, "learning_rate": 1.3815325956318123e-06, "loss": 0.6289, "step": 17463 }, { "epoch": 0.7237763686849848, "grad_norm": 0.39734208583831787, "learning_rate": 1.381325376103444e-06, "loss": 0.6608, "step": 17464 }, { "epoch": 0.7238178125906586, "grad_norm": 0.4295814633369446, "learning_rate": 1.3811181565750759e-06, "loss": 0.7007, "step": 17465 }, { "epoch": 0.7238592564963322, "grad_norm": 0.4449225068092346, "learning_rate": 1.3809109370467075e-06, "loss": 0.705, "step": 17466 }, { "epoch": 0.7239007004020059, "grad_norm": 0.3936707377433777, "learning_rate": 1.380703717518339e-06, "loss": 0.6432, "step": 17467 }, { "epoch": 0.7239421443076796, "grad_norm": 0.4330900013446808, "learning_rate": 1.3804964979899707e-06, "loss": 0.6963, "step": 17468 }, { "epoch": 0.7239835882133532, "grad_norm": 0.40855249762535095, "learning_rate": 1.3802892784616023e-06, "loss": 0.6946, "step": 17469 }, { "epoch": 0.7240250321190269, "grad_norm": 0.40544137358665466, "learning_rate": 1.3800820589332339e-06, "loss": 0.6625, "step": 17470 }, { "epoch": 0.7240664760247005, "grad_norm": 0.4054180681705475, "learning_rate": 1.3798748394048655e-06, "loss": 0.6678, "step": 17471 }, { "epoch": 0.7241079199303743, "grad_norm": 0.4636663794517517, "learning_rate": 1.3796676198764973e-06, "loss": 0.7051, "step": 17472 }, { "epoch": 0.7241493638360479, "grad_norm": 0.39145541191101074, "learning_rate": 1.379460400348129e-06, "loss": 0.7006, "step": 17473 }, { "epoch": 0.7241908077417216, "grad_norm": 
0.3848302364349365, "learning_rate": 1.3792531808197607e-06, "loss": 0.6835, "step": 17474 }, { "epoch": 0.7242322516473952, "grad_norm": 0.40552088618278503, "learning_rate": 1.3790459612913923e-06, "loss": 0.6522, "step": 17475 }, { "epoch": 0.724273695553069, "grad_norm": 0.3503236472606659, "learning_rate": 1.3788387417630239e-06, "loss": 0.5884, "step": 17476 }, { "epoch": 0.7243151394587426, "grad_norm": 0.4048171043395996, "learning_rate": 1.3786315222346555e-06, "loss": 0.6774, "step": 17477 }, { "epoch": 0.7243565833644162, "grad_norm": 0.42481541633605957, "learning_rate": 1.378424302706287e-06, "loss": 0.6759, "step": 17478 }, { "epoch": 0.72439802727009, "grad_norm": 0.39376363158226013, "learning_rate": 1.3782170831779187e-06, "loss": 0.6152, "step": 17479 }, { "epoch": 0.7244394711757636, "grad_norm": 0.393154114484787, "learning_rate": 1.3780098636495505e-06, "loss": 0.6886, "step": 17480 }, { "epoch": 0.7244809150814373, "grad_norm": 0.42169472575187683, "learning_rate": 1.377802644121182e-06, "loss": 0.6632, "step": 17481 }, { "epoch": 0.7245223589871109, "grad_norm": 0.44769179821014404, "learning_rate": 1.3775954245928139e-06, "loss": 0.6904, "step": 17482 }, { "epoch": 0.7245638028927847, "grad_norm": 0.41905510425567627, "learning_rate": 1.3773882050644455e-06, "loss": 0.6106, "step": 17483 }, { "epoch": 0.7246052467984583, "grad_norm": 0.4448399245738983, "learning_rate": 1.377180985536077e-06, "loss": 0.701, "step": 17484 }, { "epoch": 0.724646690704132, "grad_norm": 0.4232357144355774, "learning_rate": 1.3769737660077087e-06, "loss": 0.6691, "step": 17485 }, { "epoch": 0.7246881346098056, "grad_norm": 0.43226203322410583, "learning_rate": 1.3767665464793403e-06, "loss": 0.6943, "step": 17486 }, { "epoch": 0.7247295785154793, "grad_norm": 0.4052192270755768, "learning_rate": 1.3765593269509719e-06, "loss": 0.6826, "step": 17487 }, { "epoch": 0.724771022421153, "grad_norm": 0.3941108286380768, "learning_rate": 1.3763521074226035e-06, "loss": 
0.5892, "step": 17488 }, { "epoch": 0.7248124663268266, "grad_norm": 0.40341296792030334, "learning_rate": 1.3761448878942353e-06, "loss": 0.6149, "step": 17489 }, { "epoch": 0.7248539102325003, "grad_norm": 0.41525524854660034, "learning_rate": 1.3759376683658669e-06, "loss": 0.6555, "step": 17490 }, { "epoch": 0.724895354138174, "grad_norm": 0.45307183265686035, "learning_rate": 1.3757304488374987e-06, "loss": 0.7219, "step": 17491 }, { "epoch": 0.7249367980438477, "grad_norm": 0.41568824648857117, "learning_rate": 1.3755232293091303e-06, "loss": 0.6758, "step": 17492 }, { "epoch": 0.7249782419495213, "grad_norm": 0.38459259271621704, "learning_rate": 1.3753160097807619e-06, "loss": 0.6287, "step": 17493 }, { "epoch": 0.7250196858551949, "grad_norm": 0.39925843477249146, "learning_rate": 1.3751087902523935e-06, "loss": 0.6636, "step": 17494 }, { "epoch": 0.7250611297608687, "grad_norm": 0.37855085730552673, "learning_rate": 1.374901570724025e-06, "loss": 0.6729, "step": 17495 }, { "epoch": 0.7251025736665423, "grad_norm": 0.4009250998497009, "learning_rate": 1.3746943511956567e-06, "loss": 0.6204, "step": 17496 }, { "epoch": 0.725144017572216, "grad_norm": 0.4213401675224304, "learning_rate": 1.3744871316672885e-06, "loss": 0.6658, "step": 17497 }, { "epoch": 0.7251854614778896, "grad_norm": 0.4160116910934448, "learning_rate": 1.37427991213892e-06, "loss": 0.6577, "step": 17498 }, { "epoch": 0.7252269053835634, "grad_norm": 0.40867945551872253, "learning_rate": 1.3740726926105517e-06, "loss": 0.6694, "step": 17499 }, { "epoch": 0.725268349289237, "grad_norm": 0.41269201040267944, "learning_rate": 1.3738654730821835e-06, "loss": 0.7019, "step": 17500 }, { "epoch": 0.7253097931949107, "grad_norm": 0.43230336904525757, "learning_rate": 1.373658253553815e-06, "loss": 0.6516, "step": 17501 }, { "epoch": 0.7253512371005844, "grad_norm": 0.4137008488178253, "learning_rate": 1.3734510340254467e-06, "loss": 0.6697, "step": 17502 }, { "epoch": 0.725392681006258, 
"grad_norm": 0.4572915732860565, "learning_rate": 1.3732438144970783e-06, "loss": 0.7029, "step": 17503 }, { "epoch": 0.7254341249119317, "grad_norm": 0.43488359451293945, "learning_rate": 1.3730365949687099e-06, "loss": 0.6803, "step": 17504 }, { "epoch": 0.7254755688176053, "grad_norm": 0.39924558997154236, "learning_rate": 1.3728293754403415e-06, "loss": 0.6279, "step": 17505 }, { "epoch": 0.7255170127232791, "grad_norm": 0.4372648000717163, "learning_rate": 1.3726221559119733e-06, "loss": 0.6963, "step": 17506 }, { "epoch": 0.7255584566289527, "grad_norm": 0.4213685393333435, "learning_rate": 1.3724149363836049e-06, "loss": 0.7173, "step": 17507 }, { "epoch": 0.7255999005346264, "grad_norm": 0.405083566904068, "learning_rate": 1.3722077168552367e-06, "loss": 0.6528, "step": 17508 }, { "epoch": 0.7256413444403, "grad_norm": 0.4007328450679779, "learning_rate": 1.3720004973268683e-06, "loss": 0.661, "step": 17509 }, { "epoch": 0.7256827883459738, "grad_norm": 0.44553348422050476, "learning_rate": 1.3717932777984999e-06, "loss": 0.7239, "step": 17510 }, { "epoch": 0.7257242322516474, "grad_norm": 0.4048614203929901, "learning_rate": 1.3715860582701315e-06, "loss": 0.6403, "step": 17511 }, { "epoch": 0.725765676157321, "grad_norm": 0.4508402347564697, "learning_rate": 1.371378838741763e-06, "loss": 0.7141, "step": 17512 }, { "epoch": 0.7258071200629947, "grad_norm": 0.41269198060035706, "learning_rate": 1.3711716192133947e-06, "loss": 0.6573, "step": 17513 }, { "epoch": 0.7258485639686684, "grad_norm": 0.442448228597641, "learning_rate": 1.3709643996850265e-06, "loss": 0.6686, "step": 17514 }, { "epoch": 0.7258900078743421, "grad_norm": 0.3905901610851288, "learning_rate": 1.370757180156658e-06, "loss": 0.7144, "step": 17515 }, { "epoch": 0.7259314517800157, "grad_norm": 0.42178258299827576, "learning_rate": 1.3705499606282897e-06, "loss": 0.624, "step": 17516 }, { "epoch": 0.7259728956856895, "grad_norm": 0.42789697647094727, "learning_rate": 
1.3703427410999215e-06, "loss": 0.717, "step": 17517 }, { "epoch": 0.7260143395913631, "grad_norm": 0.40790408849716187, "learning_rate": 1.370135521571553e-06, "loss": 0.6204, "step": 17518 }, { "epoch": 0.7260557834970368, "grad_norm": 0.4066314995288849, "learning_rate": 1.3699283020431847e-06, "loss": 0.6973, "step": 17519 }, { "epoch": 0.7260972274027104, "grad_norm": 0.4601806700229645, "learning_rate": 1.3697210825148163e-06, "loss": 0.6786, "step": 17520 }, { "epoch": 0.726138671308384, "grad_norm": 0.41776859760284424, "learning_rate": 1.3695138629864479e-06, "loss": 0.6711, "step": 17521 }, { "epoch": 0.7261801152140578, "grad_norm": 0.3985465168952942, "learning_rate": 1.3693066434580795e-06, "loss": 0.6703, "step": 17522 }, { "epoch": 0.7262215591197314, "grad_norm": 0.43471282720565796, "learning_rate": 1.3690994239297113e-06, "loss": 0.6799, "step": 17523 }, { "epoch": 0.7262630030254051, "grad_norm": 0.4013284146785736, "learning_rate": 1.3688922044013429e-06, "loss": 0.6896, "step": 17524 }, { "epoch": 0.7263044469310788, "grad_norm": 0.4212099313735962, "learning_rate": 1.3686849848729745e-06, "loss": 0.6815, "step": 17525 }, { "epoch": 0.7263458908367525, "grad_norm": 0.4064970314502716, "learning_rate": 1.3684777653446063e-06, "loss": 0.6553, "step": 17526 }, { "epoch": 0.7263873347424261, "grad_norm": 0.42406266927719116, "learning_rate": 1.3682705458162379e-06, "loss": 0.72, "step": 17527 }, { "epoch": 0.7264287786480998, "grad_norm": 0.4300520718097687, "learning_rate": 1.3680633262878695e-06, "loss": 0.6919, "step": 17528 }, { "epoch": 0.7264702225537735, "grad_norm": 0.4189138114452362, "learning_rate": 1.367856106759501e-06, "loss": 0.6626, "step": 17529 }, { "epoch": 0.7265116664594471, "grad_norm": 0.40742337703704834, "learning_rate": 1.3676488872311327e-06, "loss": 0.6467, "step": 17530 }, { "epoch": 0.7265531103651208, "grad_norm": 0.39453136920928955, "learning_rate": 1.3674416677027645e-06, "loss": 0.6093, "step": 17531 }, { "epoch": 
0.7265945542707944, "grad_norm": 0.3954002559185028, "learning_rate": 1.367234448174396e-06, "loss": 0.6655, "step": 17532 }, { "epoch": 0.7266359981764682, "grad_norm": 0.3810215890407562, "learning_rate": 1.3670272286460277e-06, "loss": 0.6763, "step": 17533 }, { "epoch": 0.7266774420821418, "grad_norm": 0.41593435406684875, "learning_rate": 1.3668200091176595e-06, "loss": 0.646, "step": 17534 }, { "epoch": 0.7267188859878155, "grad_norm": 0.4103779196739197, "learning_rate": 1.366612789589291e-06, "loss": 0.7041, "step": 17535 }, { "epoch": 0.7267603298934892, "grad_norm": 0.4070605933666229, "learning_rate": 1.3664055700609227e-06, "loss": 0.6302, "step": 17536 }, { "epoch": 0.7268017737991629, "grad_norm": 0.39627110958099365, "learning_rate": 1.3661983505325543e-06, "loss": 0.6998, "step": 17537 }, { "epoch": 0.7268432177048365, "grad_norm": 0.3947973847389221, "learning_rate": 1.3659911310041859e-06, "loss": 0.6761, "step": 17538 }, { "epoch": 0.7268846616105101, "grad_norm": 0.41158583760261536, "learning_rate": 1.3657839114758175e-06, "loss": 0.6313, "step": 17539 }, { "epoch": 0.7269261055161839, "grad_norm": 0.4412710964679718, "learning_rate": 1.3655766919474493e-06, "loss": 0.6859, "step": 17540 }, { "epoch": 0.7269675494218575, "grad_norm": 0.3694186210632324, "learning_rate": 1.3653694724190809e-06, "loss": 0.6188, "step": 17541 }, { "epoch": 0.7270089933275312, "grad_norm": 0.4904603362083435, "learning_rate": 1.3651622528907125e-06, "loss": 0.6853, "step": 17542 }, { "epoch": 0.7270504372332048, "grad_norm": 0.42320895195007324, "learning_rate": 1.3649550333623443e-06, "loss": 0.7369, "step": 17543 }, { "epoch": 0.7270918811388786, "grad_norm": 0.4059513807296753, "learning_rate": 1.3647478138339759e-06, "loss": 0.6735, "step": 17544 }, { "epoch": 0.7271333250445522, "grad_norm": 0.3872096836566925, "learning_rate": 1.3645405943056075e-06, "loss": 0.6746, "step": 17545 }, { "epoch": 0.7271747689502258, "grad_norm": 0.39571961760520935, 
"learning_rate": 1.364333374777239e-06, "loss": 0.7006, "step": 17546 }, { "epoch": 0.7272162128558995, "grad_norm": 0.44586309790611267, "learning_rate": 1.3641261552488707e-06, "loss": 0.7292, "step": 17547 }, { "epoch": 0.7272576567615732, "grad_norm": 0.43289631605148315, "learning_rate": 1.3639189357205025e-06, "loss": 0.6934, "step": 17548 }, { "epoch": 0.7272991006672469, "grad_norm": 0.4167085587978363, "learning_rate": 1.363711716192134e-06, "loss": 0.6963, "step": 17549 }, { "epoch": 0.7273405445729205, "grad_norm": 0.4044798016548157, "learning_rate": 1.3635044966637657e-06, "loss": 0.6321, "step": 17550 }, { "epoch": 0.7273819884785943, "grad_norm": 0.4146758019924164, "learning_rate": 1.3632972771353973e-06, "loss": 0.6246, "step": 17551 }, { "epoch": 0.7274234323842679, "grad_norm": 0.397849977016449, "learning_rate": 1.363090057607029e-06, "loss": 0.6382, "step": 17552 }, { "epoch": 0.7274648762899416, "grad_norm": 0.40415680408477783, "learning_rate": 1.3628828380786607e-06, "loss": 0.6494, "step": 17553 }, { "epoch": 0.7275063201956152, "grad_norm": 0.42717739939689636, "learning_rate": 1.3626756185502923e-06, "loss": 0.6932, "step": 17554 }, { "epoch": 0.7275477641012889, "grad_norm": 0.41101178526878357, "learning_rate": 1.3624683990219239e-06, "loss": 0.7151, "step": 17555 }, { "epoch": 0.7275892080069626, "grad_norm": 0.40928471088409424, "learning_rate": 1.3622611794935555e-06, "loss": 0.6447, "step": 17556 }, { "epoch": 0.7276306519126362, "grad_norm": 0.4402622878551483, "learning_rate": 1.3620539599651873e-06, "loss": 0.6796, "step": 17557 }, { "epoch": 0.7276720958183099, "grad_norm": 0.4760821461677551, "learning_rate": 1.3618467404368189e-06, "loss": 0.7002, "step": 17558 }, { "epoch": 0.7277135397239836, "grad_norm": 0.38029366731643677, "learning_rate": 1.3616395209084505e-06, "loss": 0.6292, "step": 17559 }, { "epoch": 0.7277549836296573, "grad_norm": 0.4174540340900421, "learning_rate": 1.361432301380082e-06, "loss": 0.658, "step": 
17560 }, { "epoch": 0.7277964275353309, "grad_norm": 0.4315658509731293, "learning_rate": 1.3612250818517139e-06, "loss": 0.6744, "step": 17561 }, { "epoch": 0.7278378714410046, "grad_norm": 0.3992787003517151, "learning_rate": 1.3610178623233455e-06, "loss": 0.6451, "step": 17562 }, { "epoch": 0.7278793153466783, "grad_norm": 0.42945557832717896, "learning_rate": 1.360810642794977e-06, "loss": 0.7207, "step": 17563 }, { "epoch": 0.7279207592523519, "grad_norm": 0.4137974977493286, "learning_rate": 1.3606034232666087e-06, "loss": 0.656, "step": 17564 }, { "epoch": 0.7279622031580256, "grad_norm": 0.4560147821903229, "learning_rate": 1.3603962037382405e-06, "loss": 0.6602, "step": 17565 }, { "epoch": 0.7280036470636992, "grad_norm": 0.4137943387031555, "learning_rate": 1.360188984209872e-06, "loss": 0.7002, "step": 17566 }, { "epoch": 0.728045090969373, "grad_norm": 0.4310949742794037, "learning_rate": 1.3599817646815037e-06, "loss": 0.6711, "step": 17567 }, { "epoch": 0.7280865348750466, "grad_norm": 0.41603103280067444, "learning_rate": 1.3597745451531353e-06, "loss": 0.657, "step": 17568 }, { "epoch": 0.7281279787807203, "grad_norm": 0.4125109612941742, "learning_rate": 1.359567325624767e-06, "loss": 0.6594, "step": 17569 }, { "epoch": 0.728169422686394, "grad_norm": 0.4280267059803009, "learning_rate": 1.3593601060963987e-06, "loss": 0.6871, "step": 17570 }, { "epoch": 0.7282108665920677, "grad_norm": 0.38965243101119995, "learning_rate": 1.3591528865680303e-06, "loss": 0.6276, "step": 17571 }, { "epoch": 0.7282523104977413, "grad_norm": 0.4093649983406067, "learning_rate": 1.3589456670396619e-06, "loss": 0.689, "step": 17572 }, { "epoch": 0.7282937544034149, "grad_norm": 0.4561555087566376, "learning_rate": 1.3587384475112937e-06, "loss": 0.6891, "step": 17573 }, { "epoch": 0.7283351983090887, "grad_norm": 0.42749494314193726, "learning_rate": 1.3585312279829253e-06, "loss": 0.6737, "step": 17574 }, { "epoch": 0.7283766422147623, "grad_norm": 
0.4202386140823364, "learning_rate": 1.3583240084545569e-06, "loss": 0.6113, "step": 17575 }, { "epoch": 0.728418086120436, "grad_norm": 0.3888832628726959, "learning_rate": 1.3581167889261885e-06, "loss": 0.7107, "step": 17576 }, { "epoch": 0.7284595300261096, "grad_norm": 0.4538652002811432, "learning_rate": 1.35790956939782e-06, "loss": 0.6643, "step": 17577 }, { "epoch": 0.7285009739317834, "grad_norm": 0.43097323179244995, "learning_rate": 1.3577023498694519e-06, "loss": 0.6478, "step": 17578 }, { "epoch": 0.728542417837457, "grad_norm": 0.43960240483283997, "learning_rate": 1.3574951303410835e-06, "loss": 0.6627, "step": 17579 }, { "epoch": 0.7285838617431307, "grad_norm": 0.4214801788330078, "learning_rate": 1.357287910812715e-06, "loss": 0.6461, "step": 17580 }, { "epoch": 0.7286253056488043, "grad_norm": 0.44164422154426575, "learning_rate": 1.3570806912843467e-06, "loss": 0.702, "step": 17581 }, { "epoch": 0.728666749554478, "grad_norm": 0.41146859526634216, "learning_rate": 1.3568734717559785e-06, "loss": 0.618, "step": 17582 }, { "epoch": 0.7287081934601517, "grad_norm": 0.42332640290260315, "learning_rate": 1.35666625222761e-06, "loss": 0.686, "step": 17583 }, { "epoch": 0.7287496373658253, "grad_norm": 0.4047934114933014, "learning_rate": 1.3564590326992417e-06, "loss": 0.6536, "step": 17584 }, { "epoch": 0.728791081271499, "grad_norm": 0.39197513461112976, "learning_rate": 1.3562518131708733e-06, "loss": 0.6379, "step": 17585 }, { "epoch": 0.7288325251771727, "grad_norm": 0.4133414626121521, "learning_rate": 1.3560445936425049e-06, "loss": 0.6965, "step": 17586 }, { "epoch": 0.7288739690828464, "grad_norm": 0.4677366614341736, "learning_rate": 1.3558373741141367e-06, "loss": 0.6368, "step": 17587 }, { "epoch": 0.72891541298852, "grad_norm": 0.383446604013443, "learning_rate": 1.3556301545857683e-06, "loss": 0.6034, "step": 17588 }, { "epoch": 0.7289568568941938, "grad_norm": 0.46991679072380066, "learning_rate": 1.3554229350573999e-06, "loss": 0.719, 
"step": 17589 }, { "epoch": 0.7289983007998674, "grad_norm": 0.4198354482650757, "learning_rate": 1.3552157155290317e-06, "loss": 0.6687, "step": 17590 }, { "epoch": 0.729039744705541, "grad_norm": 0.40832632780075073, "learning_rate": 1.3550084960006633e-06, "loss": 0.6702, "step": 17591 }, { "epoch": 0.7290811886112147, "grad_norm": 0.3995884656906128, "learning_rate": 1.3548012764722949e-06, "loss": 0.6315, "step": 17592 }, { "epoch": 0.7291226325168884, "grad_norm": 0.37485557794570923, "learning_rate": 1.3545940569439265e-06, "loss": 0.636, "step": 17593 }, { "epoch": 0.7291640764225621, "grad_norm": 0.39036667346954346, "learning_rate": 1.354386837415558e-06, "loss": 0.6782, "step": 17594 }, { "epoch": 0.7292055203282357, "grad_norm": 0.4236733317375183, "learning_rate": 1.3541796178871899e-06, "loss": 0.6836, "step": 17595 }, { "epoch": 0.7292469642339094, "grad_norm": 0.41850417852401733, "learning_rate": 1.3539723983588215e-06, "loss": 0.6467, "step": 17596 }, { "epoch": 0.7292884081395831, "grad_norm": 0.4037933051586151, "learning_rate": 1.353765178830453e-06, "loss": 0.6322, "step": 17597 }, { "epoch": 0.7293298520452568, "grad_norm": 0.4398779273033142, "learning_rate": 1.3535579593020847e-06, "loss": 0.6364, "step": 17598 }, { "epoch": 0.7293712959509304, "grad_norm": 0.3938024938106537, "learning_rate": 1.3533507397737165e-06, "loss": 0.6942, "step": 17599 }, { "epoch": 0.729412739856604, "grad_norm": 0.39998334646224976, "learning_rate": 1.353143520245348e-06, "loss": 0.6399, "step": 17600 }, { "epoch": 0.7294541837622778, "grad_norm": 0.42932459712028503, "learning_rate": 1.3529363007169797e-06, "loss": 0.6669, "step": 17601 }, { "epoch": 0.7294956276679514, "grad_norm": 0.39297598600387573, "learning_rate": 1.3527290811886113e-06, "loss": 0.6851, "step": 17602 }, { "epoch": 0.7295370715736251, "grad_norm": 0.42736852169036865, "learning_rate": 1.3525218616602429e-06, "loss": 0.7251, "step": 17603 }, { "epoch": 0.7295785154792988, "grad_norm": 
0.4003267288208008, "learning_rate": 1.3523146421318747e-06, "loss": 0.6398, "step": 17604 }, { "epoch": 0.7296199593849725, "grad_norm": 0.38511863350868225, "learning_rate": 1.3521074226035063e-06, "loss": 0.6622, "step": 17605 }, { "epoch": 0.7296614032906461, "grad_norm": 0.41178011894226074, "learning_rate": 1.3519002030751379e-06, "loss": 0.6613, "step": 17606 }, { "epoch": 0.7297028471963197, "grad_norm": 0.4080874025821686, "learning_rate": 1.3516929835467697e-06, "loss": 0.6743, "step": 17607 }, { "epoch": 0.7297442911019935, "grad_norm": 0.4005504548549652, "learning_rate": 1.3514857640184013e-06, "loss": 0.6632, "step": 17608 }, { "epoch": 0.7297857350076671, "grad_norm": 0.39349642395973206, "learning_rate": 1.3512785444900329e-06, "loss": 0.6863, "step": 17609 }, { "epoch": 0.7298271789133408, "grad_norm": 0.3907824456691742, "learning_rate": 1.3510713249616645e-06, "loss": 0.6852, "step": 17610 }, { "epoch": 0.7298686228190144, "grad_norm": 0.3895411491394043, "learning_rate": 1.350864105433296e-06, "loss": 0.6487, "step": 17611 }, { "epoch": 0.7299100667246882, "grad_norm": 0.41018497943878174, "learning_rate": 1.3506568859049277e-06, "loss": 0.6935, "step": 17612 }, { "epoch": 0.7299515106303618, "grad_norm": 0.3805798888206482, "learning_rate": 1.3504496663765595e-06, "loss": 0.6595, "step": 17613 }, { "epoch": 0.7299929545360355, "grad_norm": 0.44125795364379883, "learning_rate": 1.350242446848191e-06, "loss": 0.7202, "step": 17614 }, { "epoch": 0.7300343984417091, "grad_norm": 0.4274123013019562, "learning_rate": 1.3500352273198227e-06, "loss": 0.6914, "step": 17615 }, { "epoch": 0.7300758423473828, "grad_norm": 0.4148405194282532, "learning_rate": 1.3498280077914545e-06, "loss": 0.6628, "step": 17616 }, { "epoch": 0.7301172862530565, "grad_norm": 0.43427303433418274, "learning_rate": 1.349620788263086e-06, "loss": 0.7036, "step": 17617 }, { "epoch": 0.7301587301587301, "grad_norm": 0.42457371950149536, "learning_rate": 1.3494135687347177e-06, 
"loss": 0.6797, "step": 17618 }, { "epoch": 0.7302001740644039, "grad_norm": 0.41605550050735474, "learning_rate": 1.3492063492063493e-06, "loss": 0.6931, "step": 17619 }, { "epoch": 0.7302416179700775, "grad_norm": 0.46486198902130127, "learning_rate": 1.3489991296779809e-06, "loss": 0.73, "step": 17620 }, { "epoch": 0.7302830618757512, "grad_norm": 0.4173734784126282, "learning_rate": 1.3487919101496125e-06, "loss": 0.6841, "step": 17621 }, { "epoch": 0.7303245057814248, "grad_norm": 0.45792585611343384, "learning_rate": 1.3485846906212443e-06, "loss": 0.7151, "step": 17622 }, { "epoch": 0.7303659496870986, "grad_norm": 0.4369369447231293, "learning_rate": 1.3483774710928759e-06, "loss": 0.6807, "step": 17623 }, { "epoch": 0.7304073935927722, "grad_norm": 0.4144294261932373, "learning_rate": 1.3481702515645077e-06, "loss": 0.6727, "step": 17624 }, { "epoch": 0.7304488374984458, "grad_norm": 0.39227136969566345, "learning_rate": 1.3479630320361393e-06, "loss": 0.6464, "step": 17625 }, { "epoch": 0.7304902814041195, "grad_norm": 0.4045243561267853, "learning_rate": 1.3477558125077709e-06, "loss": 0.661, "step": 17626 }, { "epoch": 0.7305317253097932, "grad_norm": 0.46463215351104736, "learning_rate": 1.3475485929794025e-06, "loss": 0.668, "step": 17627 }, { "epoch": 0.7305731692154669, "grad_norm": 0.3973020613193512, "learning_rate": 1.347341373451034e-06, "loss": 0.6691, "step": 17628 }, { "epoch": 0.7306146131211405, "grad_norm": 0.4016871154308319, "learning_rate": 1.3471341539226657e-06, "loss": 0.6472, "step": 17629 }, { "epoch": 0.7306560570268142, "grad_norm": 0.42157474160194397, "learning_rate": 1.3469269343942975e-06, "loss": 0.6981, "step": 17630 }, { "epoch": 0.7306975009324879, "grad_norm": 0.4324299395084381, "learning_rate": 1.346719714865929e-06, "loss": 0.6753, "step": 17631 }, { "epoch": 0.7307389448381616, "grad_norm": 0.40075892210006714, "learning_rate": 1.3465124953375607e-06, "loss": 0.7078, "step": 17632 }, { "epoch": 0.7307803887438352, 
"grad_norm": 0.4062173068523407, "learning_rate": 1.3463052758091925e-06, "loss": 0.6398, "step": 17633 }, { "epoch": 0.7308218326495088, "grad_norm": 0.40266022086143494, "learning_rate": 1.346098056280824e-06, "loss": 0.6924, "step": 17634 }, { "epoch": 0.7308632765551826, "grad_norm": 0.41210249066352844, "learning_rate": 1.3458908367524557e-06, "loss": 0.6958, "step": 17635 }, { "epoch": 0.7309047204608562, "grad_norm": 0.48352545499801636, "learning_rate": 1.3456836172240873e-06, "loss": 0.7185, "step": 17636 }, { "epoch": 0.7309461643665299, "grad_norm": 0.3903990089893341, "learning_rate": 1.3454763976957189e-06, "loss": 0.689, "step": 17637 }, { "epoch": 0.7309876082722035, "grad_norm": 0.4011651575565338, "learning_rate": 1.3452691781673505e-06, "loss": 0.666, "step": 17638 }, { "epoch": 0.7310290521778773, "grad_norm": 0.43048587441444397, "learning_rate": 1.3450619586389823e-06, "loss": 0.6989, "step": 17639 }, { "epoch": 0.7310704960835509, "grad_norm": 0.41844263672828674, "learning_rate": 1.3448547391106139e-06, "loss": 0.7045, "step": 17640 }, { "epoch": 0.7311119399892246, "grad_norm": 0.41407373547554016, "learning_rate": 1.3446475195822457e-06, "loss": 0.6934, "step": 17641 }, { "epoch": 0.7311533838948983, "grad_norm": 0.4160930812358856, "learning_rate": 1.3444403000538773e-06, "loss": 0.7498, "step": 17642 }, { "epoch": 0.7311948278005719, "grad_norm": 0.42814838886260986, "learning_rate": 1.3442330805255089e-06, "loss": 0.6853, "step": 17643 }, { "epoch": 0.7312362717062456, "grad_norm": 0.4032931625843048, "learning_rate": 1.3440258609971405e-06, "loss": 0.6472, "step": 17644 }, { "epoch": 0.7312777156119192, "grad_norm": 0.41552022099494934, "learning_rate": 1.343818641468772e-06, "loss": 0.606, "step": 17645 }, { "epoch": 0.731319159517593, "grad_norm": 0.4641438126564026, "learning_rate": 1.3436114219404037e-06, "loss": 0.6931, "step": 17646 }, { "epoch": 0.7313606034232666, "grad_norm": 0.40952804684638977, "learning_rate": 
1.3434042024120353e-06, "loss": 0.6746, "step": 17647 }, { "epoch": 0.7314020473289403, "grad_norm": 0.38006022572517395, "learning_rate": 1.343196982883667e-06, "loss": 0.6178, "step": 17648 }, { "epoch": 0.7314434912346139, "grad_norm": 0.3994552195072174, "learning_rate": 1.3429897633552987e-06, "loss": 0.6553, "step": 17649 }, { "epoch": 0.7314849351402877, "grad_norm": 0.41635221242904663, "learning_rate": 1.3427825438269305e-06, "loss": 0.6617, "step": 17650 }, { "epoch": 0.7315263790459613, "grad_norm": 0.3968122899532318, "learning_rate": 1.342575324298562e-06, "loss": 0.6456, "step": 17651 }, { "epoch": 0.7315678229516349, "grad_norm": 0.4343999922275543, "learning_rate": 1.3423681047701937e-06, "loss": 0.709, "step": 17652 }, { "epoch": 0.7316092668573086, "grad_norm": 0.4228668808937073, "learning_rate": 1.3421608852418253e-06, "loss": 0.6821, "step": 17653 }, { "epoch": 0.7316507107629823, "grad_norm": 0.36743924021720886, "learning_rate": 1.3419536657134569e-06, "loss": 0.592, "step": 17654 }, { "epoch": 0.731692154668656, "grad_norm": 0.4461619555950165, "learning_rate": 1.3417464461850885e-06, "loss": 0.7029, "step": 17655 }, { "epoch": 0.7317335985743296, "grad_norm": 0.40960729122161865, "learning_rate": 1.3415392266567203e-06, "loss": 0.6041, "step": 17656 }, { "epoch": 0.7317750424800034, "grad_norm": 0.4142327904701233, "learning_rate": 1.3413320071283519e-06, "loss": 0.7129, "step": 17657 }, { "epoch": 0.731816486385677, "grad_norm": 0.43525567650794983, "learning_rate": 1.3411247875999837e-06, "loss": 0.6698, "step": 17658 }, { "epoch": 0.7318579302913507, "grad_norm": 0.4089859426021576, "learning_rate": 1.3409175680716153e-06, "loss": 0.6976, "step": 17659 }, { "epoch": 0.7318993741970243, "grad_norm": 0.39958620071411133, "learning_rate": 1.340710348543247e-06, "loss": 0.6334, "step": 17660 }, { "epoch": 0.731940818102698, "grad_norm": 0.43112292885780334, "learning_rate": 1.3405031290148785e-06, "loss": 0.7085, "step": 17661 }, { "epoch": 
0.7319822620083717, "grad_norm": 0.4369018077850342, "learning_rate": 1.34029590948651e-06, "loss": 0.7159, "step": 17662 }, { "epoch": 0.7320237059140453, "grad_norm": 0.40243029594421387, "learning_rate": 1.3400886899581417e-06, "loss": 0.6298, "step": 17663 }, { "epoch": 0.732065149819719, "grad_norm": 0.3876744210720062, "learning_rate": 1.3398814704297733e-06, "loss": 0.6379, "step": 17664 }, { "epoch": 0.7321065937253927, "grad_norm": 0.4240640103816986, "learning_rate": 1.339674250901405e-06, "loss": 0.6869, "step": 17665 }, { "epoch": 0.7321480376310664, "grad_norm": 0.3975006639957428, "learning_rate": 1.339467031373037e-06, "loss": 0.6504, "step": 17666 }, { "epoch": 0.73218948153674, "grad_norm": 0.41036680340766907, "learning_rate": 1.3392598118446685e-06, "loss": 0.6185, "step": 17667 }, { "epoch": 0.7322309254424136, "grad_norm": 0.4121469557285309, "learning_rate": 1.3390525923163e-06, "loss": 0.6896, "step": 17668 }, { "epoch": 0.7322723693480874, "grad_norm": 0.4292762875556946, "learning_rate": 1.3388453727879317e-06, "loss": 0.66, "step": 17669 }, { "epoch": 0.732313813253761, "grad_norm": 0.4432283937931061, "learning_rate": 1.3386381532595633e-06, "loss": 0.7134, "step": 17670 }, { "epoch": 0.7323552571594347, "grad_norm": 0.4057601988315582, "learning_rate": 1.3384309337311949e-06, "loss": 0.635, "step": 17671 }, { "epoch": 0.7323967010651083, "grad_norm": 0.41985541582107544, "learning_rate": 1.3382237142028265e-06, "loss": 0.6897, "step": 17672 }, { "epoch": 0.7324381449707821, "grad_norm": 0.41044801473617554, "learning_rate": 1.338016494674458e-06, "loss": 0.666, "step": 17673 }, { "epoch": 0.7324795888764557, "grad_norm": 0.4280877113342285, "learning_rate": 1.3378092751460899e-06, "loss": 0.6909, "step": 17674 }, { "epoch": 0.7325210327821294, "grad_norm": 0.4212762117385864, "learning_rate": 1.3376020556177217e-06, "loss": 0.6967, "step": 17675 }, { "epoch": 0.732562476687803, "grad_norm": 0.4144017696380615, "learning_rate": 
1.3373948360893533e-06, "loss": 0.6558, "step": 17676 }, { "epoch": 0.7326039205934767, "grad_norm": 0.4300427734851837, "learning_rate": 1.337187616560985e-06, "loss": 0.7228, "step": 17677 }, { "epoch": 0.7326453644991504, "grad_norm": 0.4473978281021118, "learning_rate": 1.3369803970326165e-06, "loss": 0.6409, "step": 17678 }, { "epoch": 0.732686808404824, "grad_norm": 0.41310301423072815, "learning_rate": 1.336773177504248e-06, "loss": 0.6411, "step": 17679 }, { "epoch": 0.7327282523104978, "grad_norm": 0.42003491520881653, "learning_rate": 1.3365659579758797e-06, "loss": 0.7157, "step": 17680 }, { "epoch": 0.7327696962161714, "grad_norm": 0.42352771759033203, "learning_rate": 1.3363587384475113e-06, "loss": 0.6516, "step": 17681 }, { "epoch": 0.7328111401218451, "grad_norm": 0.41207677125930786, "learning_rate": 1.336151518919143e-06, "loss": 0.634, "step": 17682 }, { "epoch": 0.7328525840275187, "grad_norm": 0.44245922565460205, "learning_rate": 1.335944299390775e-06, "loss": 0.749, "step": 17683 }, { "epoch": 0.7328940279331925, "grad_norm": 0.40678828954696655, "learning_rate": 1.3357370798624065e-06, "loss": 0.689, "step": 17684 }, { "epoch": 0.7329354718388661, "grad_norm": 0.40013769268989563, "learning_rate": 1.335529860334038e-06, "loss": 0.6642, "step": 17685 }, { "epoch": 0.7329769157445397, "grad_norm": 0.42995795607566833, "learning_rate": 1.3353226408056697e-06, "loss": 0.665, "step": 17686 }, { "epoch": 0.7330183596502134, "grad_norm": 0.45039281249046326, "learning_rate": 1.3351154212773013e-06, "loss": 0.6732, "step": 17687 }, { "epoch": 0.7330598035558871, "grad_norm": 0.43420520424842834, "learning_rate": 1.3349082017489329e-06, "loss": 0.6678, "step": 17688 }, { "epoch": 0.7331012474615608, "grad_norm": 0.4256800711154938, "learning_rate": 1.3347009822205645e-06, "loss": 0.6322, "step": 17689 }, { "epoch": 0.7331426913672344, "grad_norm": 0.39028656482696533, "learning_rate": 1.334493762692196e-06, "loss": 0.6454, "step": 17690 }, { "epoch": 
0.7331841352729082, "grad_norm": 0.4413905739784241, "learning_rate": 1.334286543163828e-06, "loss": 0.6537, "step": 17691 }, { "epoch": 0.7332255791785818, "grad_norm": 0.4470168650150299, "learning_rate": 1.3340793236354597e-06, "loss": 0.689, "step": 17692 }, { "epoch": 0.7332670230842555, "grad_norm": 0.43094855546951294, "learning_rate": 1.3338721041070913e-06, "loss": 0.6716, "step": 17693 }, { "epoch": 0.7333084669899291, "grad_norm": 0.4629989564418793, "learning_rate": 1.333664884578723e-06, "loss": 0.7798, "step": 17694 }, { "epoch": 0.7333499108956028, "grad_norm": 0.43301618099212646, "learning_rate": 1.3334576650503545e-06, "loss": 0.6951, "step": 17695 }, { "epoch": 0.7333913548012765, "grad_norm": 0.4382196068763733, "learning_rate": 1.333250445521986e-06, "loss": 0.6774, "step": 17696 }, { "epoch": 0.7334327987069501, "grad_norm": 0.4087275266647339, "learning_rate": 1.3330432259936177e-06, "loss": 0.6686, "step": 17697 }, { "epoch": 0.7334742426126238, "grad_norm": 0.3955472707748413, "learning_rate": 1.3328360064652493e-06, "loss": 0.5831, "step": 17698 }, { "epoch": 0.7335156865182975, "grad_norm": 0.37742936611175537, "learning_rate": 1.3326287869368809e-06, "loss": 0.6017, "step": 17699 }, { "epoch": 0.7335571304239712, "grad_norm": 0.42822912335395813, "learning_rate": 1.332421567408513e-06, "loss": 0.6592, "step": 17700 }, { "epoch": 0.7335985743296448, "grad_norm": 0.421403169631958, "learning_rate": 1.3322143478801445e-06, "loss": 0.6464, "step": 17701 }, { "epoch": 0.7336400182353185, "grad_norm": 0.42078259587287903, "learning_rate": 1.332007128351776e-06, "loss": 0.6348, "step": 17702 }, { "epoch": 0.7336814621409922, "grad_norm": 0.4165763258934021, "learning_rate": 1.3317999088234077e-06, "loss": 0.6729, "step": 17703 }, { "epoch": 0.7337229060466658, "grad_norm": 0.4066922664642334, "learning_rate": 1.3315926892950393e-06, "loss": 0.6567, "step": 17704 }, { "epoch": 0.7337643499523395, "grad_norm": 0.42212092876434326, 
"learning_rate": 1.331385469766671e-06, "loss": 0.6749, "step": 17705 }, { "epoch": 0.7338057938580131, "grad_norm": 0.4051072597503662, "learning_rate": 1.3311782502383025e-06, "loss": 0.6683, "step": 17706 }, { "epoch": 0.7338472377636869, "grad_norm": 0.4200262725353241, "learning_rate": 1.330971030709934e-06, "loss": 0.7274, "step": 17707 }, { "epoch": 0.7338886816693605, "grad_norm": 0.45273247361183167, "learning_rate": 1.3307638111815657e-06, "loss": 0.6904, "step": 17708 }, { "epoch": 0.7339301255750342, "grad_norm": 0.4229804575443268, "learning_rate": 1.3305565916531977e-06, "loss": 0.6708, "step": 17709 }, { "epoch": 0.7339715694807079, "grad_norm": 0.4317142963409424, "learning_rate": 1.3303493721248293e-06, "loss": 0.7004, "step": 17710 }, { "epoch": 0.7340130133863816, "grad_norm": 0.43891704082489014, "learning_rate": 1.330142152596461e-06, "loss": 0.6663, "step": 17711 }, { "epoch": 0.7340544572920552, "grad_norm": 0.4107625484466553, "learning_rate": 1.3299349330680925e-06, "loss": 0.693, "step": 17712 }, { "epoch": 0.7340959011977288, "grad_norm": 0.4235994517803192, "learning_rate": 1.329727713539724e-06, "loss": 0.6418, "step": 17713 }, { "epoch": 0.7341373451034026, "grad_norm": 0.418617844581604, "learning_rate": 1.3295204940113557e-06, "loss": 0.6763, "step": 17714 }, { "epoch": 0.7341787890090762, "grad_norm": 0.41659700870513916, "learning_rate": 1.3293132744829873e-06, "loss": 0.6938, "step": 17715 }, { "epoch": 0.7342202329147499, "grad_norm": 0.42446398735046387, "learning_rate": 1.3291060549546189e-06, "loss": 0.6687, "step": 17716 }, { "epoch": 0.7342616768204235, "grad_norm": 0.3918251395225525, "learning_rate": 1.328898835426251e-06, "loss": 0.7107, "step": 17717 }, { "epoch": 0.7343031207260973, "grad_norm": 0.40055739879608154, "learning_rate": 1.3286916158978825e-06, "loss": 0.6331, "step": 17718 }, { "epoch": 0.7343445646317709, "grad_norm": 0.39058953523635864, "learning_rate": 1.3284843963695141e-06, "loss": 0.6235, "step": 
17719 }, { "epoch": 0.7343860085374446, "grad_norm": 0.425293892621994, "learning_rate": 1.3282771768411457e-06, "loss": 0.6321, "step": 17720 }, { "epoch": 0.7344274524431182, "grad_norm": 0.4224386513233185, "learning_rate": 1.3280699573127773e-06, "loss": 0.6627, "step": 17721 }, { "epoch": 0.7344688963487919, "grad_norm": 0.4451560378074646, "learning_rate": 1.327862737784409e-06, "loss": 0.6941, "step": 17722 }, { "epoch": 0.7345103402544656, "grad_norm": 0.41918396949768066, "learning_rate": 1.3276555182560405e-06, "loss": 0.6611, "step": 17723 }, { "epoch": 0.7345517841601392, "grad_norm": 0.4141343832015991, "learning_rate": 1.327448298727672e-06, "loss": 0.6503, "step": 17724 }, { "epoch": 0.734593228065813, "grad_norm": 0.4357835054397583, "learning_rate": 1.3272410791993037e-06, "loss": 0.7053, "step": 17725 }, { "epoch": 0.7346346719714866, "grad_norm": 0.40157321095466614, "learning_rate": 1.3270338596709357e-06, "loss": 0.616, "step": 17726 }, { "epoch": 0.7346761158771603, "grad_norm": 0.4196839928627014, "learning_rate": 1.3268266401425673e-06, "loss": 0.6702, "step": 17727 }, { "epoch": 0.7347175597828339, "grad_norm": 0.4422299861907959, "learning_rate": 1.326619420614199e-06, "loss": 0.709, "step": 17728 }, { "epoch": 0.7347590036885076, "grad_norm": 0.376321405172348, "learning_rate": 1.3264122010858305e-06, "loss": 0.6165, "step": 17729 }, { "epoch": 0.7348004475941813, "grad_norm": 0.39355507493019104, "learning_rate": 1.326204981557462e-06, "loss": 0.6024, "step": 17730 }, { "epoch": 0.7348418914998549, "grad_norm": 0.43227970600128174, "learning_rate": 1.3259977620290937e-06, "loss": 0.6986, "step": 17731 }, { "epoch": 0.7348833354055286, "grad_norm": 0.424333781003952, "learning_rate": 1.3257905425007253e-06, "loss": 0.7397, "step": 17732 }, { "epoch": 0.7349247793112023, "grad_norm": 0.46666836738586426, "learning_rate": 1.3255833229723569e-06, "loss": 0.6829, "step": 17733 }, { "epoch": 0.734966223216876, "grad_norm": 0.39106830954551697, 
"learning_rate": 1.3253761034439885e-06, "loss": 0.6382, "step": 17734 }, { "epoch": 0.7350076671225496, "grad_norm": 0.42597174644470215, "learning_rate": 1.3251688839156205e-06, "loss": 0.6682, "step": 17735 }, { "epoch": 0.7350491110282233, "grad_norm": 0.3749087154865265, "learning_rate": 1.3249616643872521e-06, "loss": 0.6415, "step": 17736 }, { "epoch": 0.735090554933897, "grad_norm": 0.5266129970550537, "learning_rate": 1.3247544448588837e-06, "loss": 0.7036, "step": 17737 }, { "epoch": 0.7351319988395706, "grad_norm": 0.43422597646713257, "learning_rate": 1.3245472253305153e-06, "loss": 0.6479, "step": 17738 }, { "epoch": 0.7351734427452443, "grad_norm": 0.41420039534568787, "learning_rate": 1.324340005802147e-06, "loss": 0.6653, "step": 17739 }, { "epoch": 0.7352148866509179, "grad_norm": 0.3859080970287323, "learning_rate": 1.3241327862737785e-06, "loss": 0.6262, "step": 17740 }, { "epoch": 0.7352563305565917, "grad_norm": 0.43890058994293213, "learning_rate": 1.32392556674541e-06, "loss": 0.6637, "step": 17741 }, { "epoch": 0.7352977744622653, "grad_norm": 0.43299928307533264, "learning_rate": 1.3237183472170417e-06, "loss": 0.6539, "step": 17742 }, { "epoch": 0.735339218367939, "grad_norm": 0.394838809967041, "learning_rate": 1.3235111276886737e-06, "loss": 0.6952, "step": 17743 }, { "epoch": 0.7353806622736127, "grad_norm": 0.40062767267227173, "learning_rate": 1.3233039081603053e-06, "loss": 0.663, "step": 17744 }, { "epoch": 0.7354221061792864, "grad_norm": 0.4012412130832672, "learning_rate": 1.323096688631937e-06, "loss": 0.6458, "step": 17745 }, { "epoch": 0.73546355008496, "grad_norm": 0.40164998173713684, "learning_rate": 1.3228894691035685e-06, "loss": 0.661, "step": 17746 }, { "epoch": 0.7355049939906336, "grad_norm": 0.40151891112327576, "learning_rate": 1.3226822495752e-06, "loss": 0.6573, "step": 17747 }, { "epoch": 0.7355464378963074, "grad_norm": 0.40262463688850403, "learning_rate": 1.3224750300468317e-06, "loss": 0.6587, "step": 17748 
}, { "epoch": 0.735587881801981, "grad_norm": 0.44928982853889465, "learning_rate": 1.3222678105184633e-06, "loss": 0.6361, "step": 17749 }, { "epoch": 0.7356293257076547, "grad_norm": 0.4095138907432556, "learning_rate": 1.322060590990095e-06, "loss": 0.6519, "step": 17750 }, { "epoch": 0.7356707696133283, "grad_norm": 0.42883414030075073, "learning_rate": 1.3218533714617265e-06, "loss": 0.6818, "step": 17751 }, { "epoch": 0.7357122135190021, "grad_norm": 0.3955925703048706, "learning_rate": 1.3216461519333585e-06, "loss": 0.6384, "step": 17752 }, { "epoch": 0.7357536574246757, "grad_norm": 0.41250914335250854, "learning_rate": 1.3214389324049901e-06, "loss": 0.6998, "step": 17753 }, { "epoch": 0.7357951013303494, "grad_norm": 0.4283217191696167, "learning_rate": 1.3212317128766217e-06, "loss": 0.7072, "step": 17754 }, { "epoch": 0.735836545236023, "grad_norm": 0.4297679364681244, "learning_rate": 1.3210244933482533e-06, "loss": 0.6514, "step": 17755 }, { "epoch": 0.7358779891416967, "grad_norm": 0.4424307644367218, "learning_rate": 1.320817273819885e-06, "loss": 0.6594, "step": 17756 }, { "epoch": 0.7359194330473704, "grad_norm": 0.43793049454689026, "learning_rate": 1.3206100542915165e-06, "loss": 0.6619, "step": 17757 }, { "epoch": 0.735960876953044, "grad_norm": 0.40330901741981506, "learning_rate": 1.320402834763148e-06, "loss": 0.6523, "step": 17758 }, { "epoch": 0.7360023208587178, "grad_norm": 0.4621729552745819, "learning_rate": 1.3201956152347797e-06, "loss": 0.7006, "step": 17759 }, { "epoch": 0.7360437647643914, "grad_norm": 0.4140281081199646, "learning_rate": 1.3199883957064113e-06, "loss": 0.6938, "step": 17760 }, { "epoch": 0.7360852086700651, "grad_norm": 0.3956485390663147, "learning_rate": 1.3197811761780433e-06, "loss": 0.6387, "step": 17761 }, { "epoch": 0.7361266525757387, "grad_norm": 0.44890764355659485, "learning_rate": 1.319573956649675e-06, "loss": 0.7249, "step": 17762 }, { "epoch": 0.7361680964814125, "grad_norm": 0.4246966242790222, 
"learning_rate": 1.3193667371213065e-06, "loss": 0.6782, "step": 17763 }, { "epoch": 0.7362095403870861, "grad_norm": 0.4175665080547333, "learning_rate": 1.3191595175929381e-06, "loss": 0.6437, "step": 17764 }, { "epoch": 0.7362509842927597, "grad_norm": 0.4314151406288147, "learning_rate": 1.3189522980645697e-06, "loss": 0.7461, "step": 17765 }, { "epoch": 0.7362924281984334, "grad_norm": 0.47177544236183167, "learning_rate": 1.3187450785362013e-06, "loss": 0.6191, "step": 17766 }, { "epoch": 0.7363338721041071, "grad_norm": 0.3895148038864136, "learning_rate": 1.318537859007833e-06, "loss": 0.6273, "step": 17767 }, { "epoch": 0.7363753160097808, "grad_norm": 0.38011813163757324, "learning_rate": 1.3183306394794645e-06, "loss": 0.6434, "step": 17768 }, { "epoch": 0.7364167599154544, "grad_norm": 0.44096285104751587, "learning_rate": 1.318123419951096e-06, "loss": 0.7034, "step": 17769 }, { "epoch": 0.7364582038211281, "grad_norm": 0.40571075677871704, "learning_rate": 1.3179162004227281e-06, "loss": 0.6322, "step": 17770 }, { "epoch": 0.7364996477268018, "grad_norm": 0.43218541145324707, "learning_rate": 1.3177089808943597e-06, "loss": 0.7397, "step": 17771 }, { "epoch": 0.7365410916324755, "grad_norm": 0.4716487526893616, "learning_rate": 1.3175017613659913e-06, "loss": 0.6415, "step": 17772 }, { "epoch": 0.7365825355381491, "grad_norm": 0.3943225145339966, "learning_rate": 1.317294541837623e-06, "loss": 0.6624, "step": 17773 }, { "epoch": 0.7366239794438227, "grad_norm": 0.3842735290527344, "learning_rate": 1.3170873223092545e-06, "loss": 0.6616, "step": 17774 }, { "epoch": 0.7366654233494965, "grad_norm": 0.4227282404899597, "learning_rate": 1.316880102780886e-06, "loss": 0.6594, "step": 17775 }, { "epoch": 0.7367068672551701, "grad_norm": 0.45664024353027344, "learning_rate": 1.3166728832525177e-06, "loss": 0.6428, "step": 17776 }, { "epoch": 0.7367483111608438, "grad_norm": 0.3969261050224304, "learning_rate": 1.3164656637241493e-06, "loss": 0.665, "step": 
17777 }, { "epoch": 0.7367897550665174, "grad_norm": 0.4578079879283905, "learning_rate": 1.3162584441957813e-06, "loss": 0.6499, "step": 17778 }, { "epoch": 0.7368311989721912, "grad_norm": 0.38991278409957886, "learning_rate": 1.316051224667413e-06, "loss": 0.6611, "step": 17779 }, { "epoch": 0.7368726428778648, "grad_norm": 0.41884148120880127, "learning_rate": 1.3158440051390445e-06, "loss": 0.6992, "step": 17780 }, { "epoch": 0.7369140867835385, "grad_norm": 0.3985823690891266, "learning_rate": 1.3156367856106761e-06, "loss": 0.641, "step": 17781 }, { "epoch": 0.7369555306892122, "grad_norm": 0.47274893522262573, "learning_rate": 1.3154295660823077e-06, "loss": 0.7241, "step": 17782 }, { "epoch": 0.7369969745948858, "grad_norm": 0.4704985022544861, "learning_rate": 1.3152223465539393e-06, "loss": 0.6985, "step": 17783 }, { "epoch": 0.7370384185005595, "grad_norm": 0.43171727657318115, "learning_rate": 1.315015127025571e-06, "loss": 0.6492, "step": 17784 }, { "epoch": 0.7370798624062331, "grad_norm": 0.4260539710521698, "learning_rate": 1.3148079074972025e-06, "loss": 0.6875, "step": 17785 }, { "epoch": 0.7371213063119069, "grad_norm": 0.4236333668231964, "learning_rate": 1.314600687968834e-06, "loss": 0.6624, "step": 17786 }, { "epoch": 0.7371627502175805, "grad_norm": 0.4468648135662079, "learning_rate": 1.3143934684404661e-06, "loss": 0.6665, "step": 17787 }, { "epoch": 0.7372041941232542, "grad_norm": 0.3918130397796631, "learning_rate": 1.3141862489120977e-06, "loss": 0.6626, "step": 17788 }, { "epoch": 0.7372456380289278, "grad_norm": 0.4088038504123688, "learning_rate": 1.3139790293837293e-06, "loss": 0.6394, "step": 17789 }, { "epoch": 0.7372870819346015, "grad_norm": 0.3960227370262146, "learning_rate": 1.313771809855361e-06, "loss": 0.6317, "step": 17790 }, { "epoch": 0.7373285258402752, "grad_norm": 0.4270232319831848, "learning_rate": 1.3135645903269925e-06, "loss": 0.6884, "step": 17791 }, { "epoch": 0.7373699697459488, "grad_norm": 
0.41873347759246826, "learning_rate": 1.313357370798624e-06, "loss": 0.6772, "step": 17792 }, { "epoch": 0.7374114136516225, "grad_norm": 0.4039892256259918, "learning_rate": 1.3131501512702557e-06, "loss": 0.6594, "step": 17793 }, { "epoch": 0.7374528575572962, "grad_norm": 0.41404640674591064, "learning_rate": 1.3129429317418873e-06, "loss": 0.6627, "step": 17794 }, { "epoch": 0.7374943014629699, "grad_norm": 0.4048686921596527, "learning_rate": 1.312735712213519e-06, "loss": 0.6337, "step": 17795 }, { "epoch": 0.7375357453686435, "grad_norm": 0.43747371435165405, "learning_rate": 1.312528492685151e-06, "loss": 0.6777, "step": 17796 }, { "epoch": 0.7375771892743173, "grad_norm": 0.37258535623550415, "learning_rate": 1.3123212731567825e-06, "loss": 0.6621, "step": 17797 }, { "epoch": 0.7376186331799909, "grad_norm": 0.42875465750694275, "learning_rate": 1.3121140536284141e-06, "loss": 0.6936, "step": 17798 }, { "epoch": 0.7376600770856645, "grad_norm": 0.45931416749954224, "learning_rate": 1.3119068341000457e-06, "loss": 0.7268, "step": 17799 }, { "epoch": 0.7377015209913382, "grad_norm": 0.43424221873283386, "learning_rate": 1.3116996145716773e-06, "loss": 0.6775, "step": 17800 }, { "epoch": 0.7377429648970119, "grad_norm": 0.430513471364975, "learning_rate": 1.311492395043309e-06, "loss": 0.66, "step": 17801 }, { "epoch": 0.7377844088026856, "grad_norm": 0.3857762813568115, "learning_rate": 1.3112851755149405e-06, "loss": 0.6638, "step": 17802 }, { "epoch": 0.7378258527083592, "grad_norm": 0.42836034297943115, "learning_rate": 1.311077955986572e-06, "loss": 0.65, "step": 17803 }, { "epoch": 0.7378672966140329, "grad_norm": 0.403128057718277, "learning_rate": 1.3108707364582041e-06, "loss": 0.7083, "step": 17804 }, { "epoch": 0.7379087405197066, "grad_norm": 0.41026806831359863, "learning_rate": 1.3106635169298357e-06, "loss": 0.6323, "step": 17805 }, { "epoch": 0.7379501844253803, "grad_norm": 0.43012359738349915, "learning_rate": 1.3104562974014673e-06, "loss": 
0.6992, "step": 17806 }, { "epoch": 0.7379916283310539, "grad_norm": 0.3895985782146454, "learning_rate": 1.310249077873099e-06, "loss": 0.6317, "step": 17807 }, { "epoch": 0.7380330722367275, "grad_norm": 0.42365992069244385, "learning_rate": 1.3100418583447305e-06, "loss": 0.6577, "step": 17808 }, { "epoch": 0.7380745161424013, "grad_norm": 0.4378974735736847, "learning_rate": 1.3098346388163621e-06, "loss": 0.6804, "step": 17809 }, { "epoch": 0.7381159600480749, "grad_norm": 0.4388437569141388, "learning_rate": 1.3096274192879937e-06, "loss": 0.7006, "step": 17810 }, { "epoch": 0.7381574039537486, "grad_norm": 0.4295605421066284, "learning_rate": 1.3094201997596253e-06, "loss": 0.7024, "step": 17811 }, { "epoch": 0.7381988478594222, "grad_norm": 0.44909030199050903, "learning_rate": 1.309212980231257e-06, "loss": 0.6926, "step": 17812 }, { "epoch": 0.738240291765096, "grad_norm": 0.4091121554374695, "learning_rate": 1.309005760702889e-06, "loss": 0.6702, "step": 17813 }, { "epoch": 0.7382817356707696, "grad_norm": 0.4011324346065521, "learning_rate": 1.3087985411745205e-06, "loss": 0.6848, "step": 17814 }, { "epoch": 0.7383231795764433, "grad_norm": 0.4642154276371002, "learning_rate": 1.3085913216461521e-06, "loss": 0.656, "step": 17815 }, { "epoch": 0.738364623482117, "grad_norm": 0.4398166537284851, "learning_rate": 1.3083841021177837e-06, "loss": 0.6774, "step": 17816 }, { "epoch": 0.7384060673877906, "grad_norm": 0.4138544499874115, "learning_rate": 1.3081768825894153e-06, "loss": 0.6895, "step": 17817 }, { "epoch": 0.7384475112934643, "grad_norm": 0.4339129626750946, "learning_rate": 1.307969663061047e-06, "loss": 0.688, "step": 17818 }, { "epoch": 0.7384889551991379, "grad_norm": 0.39784836769104004, "learning_rate": 1.3077624435326785e-06, "loss": 0.6719, "step": 17819 }, { "epoch": 0.7385303991048117, "grad_norm": 0.41233399510383606, "learning_rate": 1.30755522400431e-06, "loss": 0.6079, "step": 17820 }, { "epoch": 0.7385718430104853, "grad_norm": 
0.45124754309654236, "learning_rate": 1.3073480044759417e-06, "loss": 0.7363, "step": 17821 }, { "epoch": 0.738613286916159, "grad_norm": 0.39800024032592773, "learning_rate": 1.3071407849475737e-06, "loss": 0.6488, "step": 17822 }, { "epoch": 0.7386547308218326, "grad_norm": 0.41092902421951294, "learning_rate": 1.3069335654192053e-06, "loss": 0.6541, "step": 17823 }, { "epoch": 0.7386961747275064, "grad_norm": 0.39009666442871094, "learning_rate": 1.306726345890837e-06, "loss": 0.6707, "step": 17824 }, { "epoch": 0.73873761863318, "grad_norm": 0.3923057019710541, "learning_rate": 1.3065191263624685e-06, "loss": 0.6541, "step": 17825 }, { "epoch": 0.7387790625388536, "grad_norm": 0.3749821186065674, "learning_rate": 1.3063119068341001e-06, "loss": 0.6619, "step": 17826 }, { "epoch": 0.7388205064445273, "grad_norm": 0.4053066074848175, "learning_rate": 1.3061046873057317e-06, "loss": 0.6917, "step": 17827 }, { "epoch": 0.738861950350201, "grad_norm": 0.4341716766357422, "learning_rate": 1.3058974677773633e-06, "loss": 0.6584, "step": 17828 }, { "epoch": 0.7389033942558747, "grad_norm": 0.36815115809440613, "learning_rate": 1.305690248248995e-06, "loss": 0.6223, "step": 17829 }, { "epoch": 0.7389448381615483, "grad_norm": 0.3982944190502167, "learning_rate": 1.305483028720627e-06, "loss": 0.6804, "step": 17830 }, { "epoch": 0.7389862820672221, "grad_norm": 0.4061671197414398, "learning_rate": 1.3052758091922585e-06, "loss": 0.6686, "step": 17831 }, { "epoch": 0.7390277259728957, "grad_norm": 0.4052245020866394, "learning_rate": 1.3050685896638901e-06, "loss": 0.6683, "step": 17832 }, { "epoch": 0.7390691698785694, "grad_norm": 0.39896178245544434, "learning_rate": 1.3048613701355217e-06, "loss": 0.6899, "step": 17833 }, { "epoch": 0.739110613784243, "grad_norm": 0.4067554771900177, "learning_rate": 1.3046541506071533e-06, "loss": 0.6266, "step": 17834 }, { "epoch": 0.7391520576899167, "grad_norm": 0.44535553455352783, "learning_rate": 1.304446931078785e-06, "loss": 
0.7183, "step": 17835 }, { "epoch": 0.7391935015955904, "grad_norm": 0.39614152908325195, "learning_rate": 1.3042397115504165e-06, "loss": 0.6327, "step": 17836 }, { "epoch": 0.739234945501264, "grad_norm": 0.4104803800582886, "learning_rate": 1.3040324920220481e-06, "loss": 0.6754, "step": 17837 }, { "epoch": 0.7392763894069377, "grad_norm": 0.44602707028388977, "learning_rate": 1.3038252724936797e-06, "loss": 0.6852, "step": 17838 }, { "epoch": 0.7393178333126114, "grad_norm": 0.4120709300041199, "learning_rate": 1.3036180529653117e-06, "loss": 0.6488, "step": 17839 }, { "epoch": 0.7393592772182851, "grad_norm": 0.40208005905151367, "learning_rate": 1.3034108334369433e-06, "loss": 0.6582, "step": 17840 }, { "epoch": 0.7394007211239587, "grad_norm": 0.3729678690433502, "learning_rate": 1.303203613908575e-06, "loss": 0.6875, "step": 17841 }, { "epoch": 0.7394421650296324, "grad_norm": 0.3816264867782593, "learning_rate": 1.3029963943802065e-06, "loss": 0.667, "step": 17842 }, { "epoch": 0.7394836089353061, "grad_norm": 0.4855034649372101, "learning_rate": 1.3027891748518381e-06, "loss": 0.7576, "step": 17843 }, { "epoch": 0.7395250528409797, "grad_norm": 0.4666666090488434, "learning_rate": 1.3025819553234697e-06, "loss": 0.7017, "step": 17844 }, { "epoch": 0.7395664967466534, "grad_norm": 0.4072892367839813, "learning_rate": 1.3023747357951013e-06, "loss": 0.6514, "step": 17845 }, { "epoch": 0.739607940652327, "grad_norm": 0.464616060256958, "learning_rate": 1.302167516266733e-06, "loss": 0.6953, "step": 17846 }, { "epoch": 0.7396493845580008, "grad_norm": 0.4091990888118744, "learning_rate": 1.3019602967383645e-06, "loss": 0.6558, "step": 17847 }, { "epoch": 0.7396908284636744, "grad_norm": 0.4112918972969055, "learning_rate": 1.3017530772099965e-06, "loss": 0.6617, "step": 17848 }, { "epoch": 0.7397322723693481, "grad_norm": 0.39194589853286743, "learning_rate": 1.3015458576816281e-06, "loss": 0.6643, "step": 17849 }, { "epoch": 0.7397737162750218, "grad_norm": 
0.438322514295578, "learning_rate": 1.3013386381532597e-06, "loss": 0.6544, "step": 17850 }, { "epoch": 0.7398151601806954, "grad_norm": 0.4379824697971344, "learning_rate": 1.3011314186248913e-06, "loss": 0.6677, "step": 17851 }, { "epoch": 0.7398566040863691, "grad_norm": 0.4417119324207306, "learning_rate": 1.300924199096523e-06, "loss": 0.7056, "step": 17852 }, { "epoch": 0.7398980479920427, "grad_norm": 0.4286735951900482, "learning_rate": 1.3007169795681545e-06, "loss": 0.7324, "step": 17853 }, { "epoch": 0.7399394918977165, "grad_norm": 0.4006495773792267, "learning_rate": 1.3005097600397861e-06, "loss": 0.6689, "step": 17854 }, { "epoch": 0.7399809358033901, "grad_norm": 0.5095283389091492, "learning_rate": 1.3003025405114177e-06, "loss": 0.671, "step": 17855 }, { "epoch": 0.7400223797090638, "grad_norm": 0.421403706073761, "learning_rate": 1.3000953209830495e-06, "loss": 0.6909, "step": 17856 }, { "epoch": 0.7400638236147374, "grad_norm": 0.4157628118991852, "learning_rate": 1.2998881014546813e-06, "loss": 0.729, "step": 17857 }, { "epoch": 0.7401052675204112, "grad_norm": 0.3860557973384857, "learning_rate": 1.299680881926313e-06, "loss": 0.6342, "step": 17858 }, { "epoch": 0.7401467114260848, "grad_norm": 0.4295955002307892, "learning_rate": 1.2994736623979445e-06, "loss": 0.6633, "step": 17859 }, { "epoch": 0.7401881553317584, "grad_norm": 0.44841268658638, "learning_rate": 1.2992664428695761e-06, "loss": 0.6821, "step": 17860 }, { "epoch": 0.7402295992374321, "grad_norm": 0.4424310028553009, "learning_rate": 1.2990592233412077e-06, "loss": 0.7317, "step": 17861 }, { "epoch": 0.7402710431431058, "grad_norm": 0.42950788140296936, "learning_rate": 1.2988520038128393e-06, "loss": 0.6353, "step": 17862 }, { "epoch": 0.7403124870487795, "grad_norm": 0.459308385848999, "learning_rate": 1.298644784284471e-06, "loss": 0.7374, "step": 17863 }, { "epoch": 0.7403539309544531, "grad_norm": 0.41612425446510315, "learning_rate": 1.2984375647561025e-06, "loss": 
0.6201, "step": 17864 }, { "epoch": 0.7403953748601269, "grad_norm": 0.3812253773212433, "learning_rate": 1.2982303452277345e-06, "loss": 0.6882, "step": 17865 }, { "epoch": 0.7404368187658005, "grad_norm": 0.4150495231151581, "learning_rate": 1.2980231256993661e-06, "loss": 0.6058, "step": 17866 }, { "epoch": 0.7404782626714742, "grad_norm": 0.43041861057281494, "learning_rate": 1.2978159061709977e-06, "loss": 0.6814, "step": 17867 }, { "epoch": 0.7405197065771478, "grad_norm": 0.4236215054988861, "learning_rate": 1.2976086866426293e-06, "loss": 0.6477, "step": 17868 }, { "epoch": 0.7405611504828215, "grad_norm": 0.40639829635620117, "learning_rate": 1.297401467114261e-06, "loss": 0.6544, "step": 17869 }, { "epoch": 0.7406025943884952, "grad_norm": 0.4288565218448639, "learning_rate": 1.2971942475858925e-06, "loss": 0.6696, "step": 17870 }, { "epoch": 0.7406440382941688, "grad_norm": 0.4309433102607727, "learning_rate": 1.2969870280575241e-06, "loss": 0.6738, "step": 17871 }, { "epoch": 0.7406854821998425, "grad_norm": 0.4066268801689148, "learning_rate": 1.2967798085291557e-06, "loss": 0.6578, "step": 17872 }, { "epoch": 0.7407269261055162, "grad_norm": 0.400766521692276, "learning_rate": 1.2965725890007875e-06, "loss": 0.6633, "step": 17873 }, { "epoch": 0.7407683700111899, "grad_norm": 0.4134844243526459, "learning_rate": 1.2963653694724193e-06, "loss": 0.6411, "step": 17874 }, { "epoch": 0.7408098139168635, "grad_norm": 0.4513557255268097, "learning_rate": 1.296158149944051e-06, "loss": 0.6978, "step": 17875 }, { "epoch": 0.7408512578225372, "grad_norm": 0.4455469250679016, "learning_rate": 1.2959509304156825e-06, "loss": 0.6899, "step": 17876 }, { "epoch": 0.7408927017282109, "grad_norm": 0.401604026556015, "learning_rate": 1.2957437108873141e-06, "loss": 0.6682, "step": 17877 }, { "epoch": 0.7409341456338845, "grad_norm": 0.42244118452072144, "learning_rate": 1.2955364913589457e-06, "loss": 0.6348, "step": 17878 }, { "epoch": 0.7409755895395582, "grad_norm": 
0.3995290696620941, "learning_rate": 1.2953292718305773e-06, "loss": 0.63, "step": 17879 }, { "epoch": 0.7410170334452318, "grad_norm": 0.4102252125740051, "learning_rate": 1.295122052302209e-06, "loss": 0.6632, "step": 17880 }, { "epoch": 0.7410584773509056, "grad_norm": 0.36665990948677063, "learning_rate": 1.2949148327738405e-06, "loss": 0.6511, "step": 17881 }, { "epoch": 0.7410999212565792, "grad_norm": 0.3950020968914032, "learning_rate": 1.2947076132454723e-06, "loss": 0.629, "step": 17882 }, { "epoch": 0.7411413651622529, "grad_norm": 0.40033072233200073, "learning_rate": 1.2945003937171041e-06, "loss": 0.6572, "step": 17883 }, { "epoch": 0.7411828090679266, "grad_norm": 0.40632957220077515, "learning_rate": 1.2942931741887357e-06, "loss": 0.6794, "step": 17884 }, { "epoch": 0.7412242529736003, "grad_norm": 0.45877355337142944, "learning_rate": 1.2940859546603673e-06, "loss": 0.7332, "step": 17885 }, { "epoch": 0.7412656968792739, "grad_norm": 0.4210985004901886, "learning_rate": 1.293878735131999e-06, "loss": 0.6479, "step": 17886 }, { "epoch": 0.7413071407849475, "grad_norm": 0.40694260597229004, "learning_rate": 1.2936715156036305e-06, "loss": 0.6807, "step": 17887 }, { "epoch": 0.7413485846906213, "grad_norm": 0.4106813967227936, "learning_rate": 1.2934642960752621e-06, "loss": 0.7045, "step": 17888 }, { "epoch": 0.7413900285962949, "grad_norm": 0.4084658920764923, "learning_rate": 1.2932570765468937e-06, "loss": 0.7092, "step": 17889 }, { "epoch": 0.7414314725019686, "grad_norm": 0.39607176184654236, "learning_rate": 1.2930498570185255e-06, "loss": 0.6851, "step": 17890 }, { "epoch": 0.7414729164076422, "grad_norm": 0.38920292258262634, "learning_rate": 1.2928426374901573e-06, "loss": 0.6582, "step": 17891 }, { "epoch": 0.741514360313316, "grad_norm": 0.38739362359046936, "learning_rate": 1.292635417961789e-06, "loss": 0.6216, "step": 17892 }, { "epoch": 0.7415558042189896, "grad_norm": 0.4156036078929901, "learning_rate": 1.2924281984334205e-06, 
"loss": 0.6426, "step": 17893 }, { "epoch": 0.7415972481246633, "grad_norm": 0.39145782589912415, "learning_rate": 1.2922209789050521e-06, "loss": 0.6056, "step": 17894 }, { "epoch": 0.7416386920303369, "grad_norm": 0.4178645610809326, "learning_rate": 1.2920137593766837e-06, "loss": 0.72, "step": 17895 }, { "epoch": 0.7416801359360106, "grad_norm": 0.5193049907684326, "learning_rate": 1.2918065398483153e-06, "loss": 0.6686, "step": 17896 }, { "epoch": 0.7417215798416843, "grad_norm": 0.4042234420776367, "learning_rate": 1.291599320319947e-06, "loss": 0.6798, "step": 17897 }, { "epoch": 0.7417630237473579, "grad_norm": 0.41255268454551697, "learning_rate": 1.2913921007915785e-06, "loss": 0.6931, "step": 17898 }, { "epoch": 0.7418044676530317, "grad_norm": 0.3810236155986786, "learning_rate": 1.2911848812632103e-06, "loss": 0.6436, "step": 17899 }, { "epoch": 0.7418459115587053, "grad_norm": 0.394985169172287, "learning_rate": 1.2909776617348421e-06, "loss": 0.6833, "step": 17900 }, { "epoch": 0.741887355464379, "grad_norm": 0.4578222334384918, "learning_rate": 1.2907704422064737e-06, "loss": 0.6598, "step": 17901 }, { "epoch": 0.7419287993700526, "grad_norm": 0.4051802456378937, "learning_rate": 1.2905632226781053e-06, "loss": 0.7041, "step": 17902 }, { "epoch": 0.7419702432757264, "grad_norm": 0.44669273495674133, "learning_rate": 1.290356003149737e-06, "loss": 0.6682, "step": 17903 }, { "epoch": 0.7420116871814, "grad_norm": 0.40941309928894043, "learning_rate": 1.2901487836213685e-06, "loss": 0.6798, "step": 17904 }, { "epoch": 0.7420531310870736, "grad_norm": 0.4025413393974304, "learning_rate": 1.2899415640930001e-06, "loss": 0.6277, "step": 17905 }, { "epoch": 0.7420945749927473, "grad_norm": 0.4081211984157562, "learning_rate": 1.2897343445646317e-06, "loss": 0.6469, "step": 17906 }, { "epoch": 0.742136018898421, "grad_norm": 0.44304975867271423, "learning_rate": 1.2895271250362635e-06, "loss": 0.6666, "step": 17907 }, { "epoch": 0.7421774628040947, 
"grad_norm": 0.4066527187824249, "learning_rate": 1.2893199055078951e-06, "loss": 0.6685, "step": 17908 }, { "epoch": 0.7422189067097683, "grad_norm": 0.42509549856185913, "learning_rate": 1.289112685979527e-06, "loss": 0.7051, "step": 17909 }, { "epoch": 0.742260350615442, "grad_norm": 0.406710147857666, "learning_rate": 1.2889054664511585e-06, "loss": 0.637, "step": 17910 }, { "epoch": 0.7423017945211157, "grad_norm": 0.399396687746048, "learning_rate": 1.2886982469227901e-06, "loss": 0.6361, "step": 17911 }, { "epoch": 0.7423432384267893, "grad_norm": 0.4481818377971649, "learning_rate": 1.2884910273944217e-06, "loss": 0.6251, "step": 17912 }, { "epoch": 0.742384682332463, "grad_norm": 0.40548276901245117, "learning_rate": 1.2882838078660533e-06, "loss": 0.637, "step": 17913 }, { "epoch": 0.7424261262381366, "grad_norm": 0.4383731186389923, "learning_rate": 1.288076588337685e-06, "loss": 0.6917, "step": 17914 }, { "epoch": 0.7424675701438104, "grad_norm": 0.4054107367992401, "learning_rate": 1.2878693688093165e-06, "loss": 0.6633, "step": 17915 }, { "epoch": 0.742509014049484, "grad_norm": 0.4100343883037567, "learning_rate": 1.2876621492809483e-06, "loss": 0.687, "step": 17916 }, { "epoch": 0.7425504579551577, "grad_norm": 0.45086705684661865, "learning_rate": 1.28745492975258e-06, "loss": 0.7053, "step": 17917 }, { "epoch": 0.7425919018608313, "grad_norm": 0.37883898615837097, "learning_rate": 1.2872477102242117e-06, "loss": 0.6555, "step": 17918 }, { "epoch": 0.7426333457665051, "grad_norm": 0.39629262685775757, "learning_rate": 1.2870404906958433e-06, "loss": 0.6755, "step": 17919 }, { "epoch": 0.7426747896721787, "grad_norm": 0.41545966267585754, "learning_rate": 1.286833271167475e-06, "loss": 0.6841, "step": 17920 }, { "epoch": 0.7427162335778523, "grad_norm": 0.40257102251052856, "learning_rate": 1.2866260516391065e-06, "loss": 0.6635, "step": 17921 }, { "epoch": 0.7427576774835261, "grad_norm": 0.44758176803588867, "learning_rate": 
1.2864188321107381e-06, "loss": 0.6904, "step": 17922 }, { "epoch": 0.7427991213891997, "grad_norm": 0.444850891828537, "learning_rate": 1.2862116125823697e-06, "loss": 0.6779, "step": 17923 }, { "epoch": 0.7428405652948734, "grad_norm": 0.440321147441864, "learning_rate": 1.2860043930540015e-06, "loss": 0.683, "step": 17924 }, { "epoch": 0.742882009200547, "grad_norm": 0.38065704703330994, "learning_rate": 1.2857971735256331e-06, "loss": 0.6257, "step": 17925 }, { "epoch": 0.7429234531062208, "grad_norm": 0.48940539360046387, "learning_rate": 1.285589953997265e-06, "loss": 0.7411, "step": 17926 }, { "epoch": 0.7429648970118944, "grad_norm": 0.40706580877304077, "learning_rate": 1.2853827344688965e-06, "loss": 0.698, "step": 17927 }, { "epoch": 0.7430063409175681, "grad_norm": 0.45417356491088867, "learning_rate": 1.2851755149405281e-06, "loss": 0.6814, "step": 17928 }, { "epoch": 0.7430477848232417, "grad_norm": 0.41347941756248474, "learning_rate": 1.2849682954121597e-06, "loss": 0.7095, "step": 17929 }, { "epoch": 0.7430892287289154, "grad_norm": 0.41316214203834534, "learning_rate": 1.2847610758837913e-06, "loss": 0.644, "step": 17930 }, { "epoch": 0.7431306726345891, "grad_norm": 0.4080352187156677, "learning_rate": 1.284553856355423e-06, "loss": 0.7072, "step": 17931 }, { "epoch": 0.7431721165402627, "grad_norm": 0.4291748106479645, "learning_rate": 1.2843466368270547e-06, "loss": 0.6503, "step": 17932 }, { "epoch": 0.7432135604459364, "grad_norm": 0.40580302476882935, "learning_rate": 1.2841394172986863e-06, "loss": 0.6953, "step": 17933 }, { "epoch": 0.7432550043516101, "grad_norm": 0.441336452960968, "learning_rate": 1.283932197770318e-06, "loss": 0.7158, "step": 17934 }, { "epoch": 0.7432964482572838, "grad_norm": 0.42175135016441345, "learning_rate": 1.2837249782419497e-06, "loss": 0.6641, "step": 17935 }, { "epoch": 0.7433378921629574, "grad_norm": 0.4386596381664276, "learning_rate": 1.2835177587135813e-06, "loss": 0.7073, "step": 17936 }, { "epoch": 
0.7433793360686312, "grad_norm": 0.3928120732307434, "learning_rate": 1.283310539185213e-06, "loss": 0.6169, "step": 17937 }, { "epoch": 0.7434207799743048, "grad_norm": 0.38779523968696594, "learning_rate": 1.2831033196568445e-06, "loss": 0.6965, "step": 17938 }, { "epoch": 0.7434622238799784, "grad_norm": 0.4124859869480133, "learning_rate": 1.2828961001284761e-06, "loss": 0.691, "step": 17939 }, { "epoch": 0.7435036677856521, "grad_norm": 0.4071972370147705, "learning_rate": 1.2826888806001077e-06, "loss": 0.6802, "step": 17940 }, { "epoch": 0.7435451116913258, "grad_norm": 0.40728041529655457, "learning_rate": 1.2824816610717395e-06, "loss": 0.6719, "step": 17941 }, { "epoch": 0.7435865555969995, "grad_norm": 0.4035493731498718, "learning_rate": 1.2822744415433711e-06, "loss": 0.6348, "step": 17942 }, { "epoch": 0.7436279995026731, "grad_norm": 0.48423340916633606, "learning_rate": 1.2820672220150027e-06, "loss": 0.6938, "step": 17943 }, { "epoch": 0.7436694434083468, "grad_norm": 0.42305925488471985, "learning_rate": 1.2818600024866345e-06, "loss": 0.6431, "step": 17944 }, { "epoch": 0.7437108873140205, "grad_norm": 0.4159092605113983, "learning_rate": 1.2816527829582661e-06, "loss": 0.6229, "step": 17945 }, { "epoch": 0.7437523312196942, "grad_norm": 0.445028692483902, "learning_rate": 1.2814455634298977e-06, "loss": 0.6569, "step": 17946 }, { "epoch": 0.7437937751253678, "grad_norm": 0.4062625467777252, "learning_rate": 1.2812383439015293e-06, "loss": 0.6719, "step": 17947 }, { "epoch": 0.7438352190310414, "grad_norm": 0.4103319048881531, "learning_rate": 1.281031124373161e-06, "loss": 0.6799, "step": 17948 }, { "epoch": 0.7438766629367152, "grad_norm": 0.4249497950077057, "learning_rate": 1.2808239048447927e-06, "loss": 0.7114, "step": 17949 }, { "epoch": 0.7439181068423888, "grad_norm": 0.44548988342285156, "learning_rate": 1.2806166853164243e-06, "loss": 0.6892, "step": 17950 }, { "epoch": 0.7439595507480625, "grad_norm": 0.37914037704467773, 
"learning_rate": 1.280409465788056e-06, "loss": 0.6487, "step": 17951 }, { "epoch": 0.7440009946537361, "grad_norm": 0.47239696979522705, "learning_rate": 1.2802022462596877e-06, "loss": 0.7035, "step": 17952 }, { "epoch": 0.7440424385594099, "grad_norm": 0.4493926763534546, "learning_rate": 1.2799950267313193e-06, "loss": 0.7102, "step": 17953 }, { "epoch": 0.7440838824650835, "grad_norm": 0.4170963168144226, "learning_rate": 1.279787807202951e-06, "loss": 0.6731, "step": 17954 }, { "epoch": 0.7441253263707572, "grad_norm": 0.3886983096599579, "learning_rate": 1.2795805876745825e-06, "loss": 0.6208, "step": 17955 }, { "epoch": 0.7441667702764309, "grad_norm": 0.4078763723373413, "learning_rate": 1.2793733681462141e-06, "loss": 0.6466, "step": 17956 }, { "epoch": 0.7442082141821045, "grad_norm": 0.40421056747436523, "learning_rate": 1.2791661486178457e-06, "loss": 0.6508, "step": 17957 }, { "epoch": 0.7442496580877782, "grad_norm": 0.4042975604534149, "learning_rate": 1.2789589290894775e-06, "loss": 0.6443, "step": 17958 }, { "epoch": 0.7442911019934518, "grad_norm": 0.4701588749885559, "learning_rate": 1.2787517095611091e-06, "loss": 0.6626, "step": 17959 }, { "epoch": 0.7443325458991256, "grad_norm": 0.43352964520454407, "learning_rate": 1.2785444900327407e-06, "loss": 0.7665, "step": 17960 }, { "epoch": 0.7443739898047992, "grad_norm": 0.3970031440258026, "learning_rate": 1.2783372705043725e-06, "loss": 0.6658, "step": 17961 }, { "epoch": 0.7444154337104729, "grad_norm": 0.4194445312023163, "learning_rate": 1.2781300509760041e-06, "loss": 0.6653, "step": 17962 }, { "epoch": 0.7444568776161465, "grad_norm": 0.40324634313583374, "learning_rate": 1.2779228314476357e-06, "loss": 0.6177, "step": 17963 }, { "epoch": 0.7444983215218202, "grad_norm": 0.38328710198402405, "learning_rate": 1.2777156119192673e-06, "loss": 0.6924, "step": 17964 }, { "epoch": 0.7445397654274939, "grad_norm": 0.43711209297180176, "learning_rate": 1.277508392390899e-06, "loss": 0.6967, "step": 
17965 }, { "epoch": 0.7445812093331675, "grad_norm": 0.45097804069519043, "learning_rate": 1.2773011728625307e-06, "loss": 0.7024, "step": 17966 }, { "epoch": 0.7446226532388412, "grad_norm": 0.40400218963623047, "learning_rate": 1.2770939533341623e-06, "loss": 0.6613, "step": 17967 }, { "epoch": 0.7446640971445149, "grad_norm": 0.43957170844078064, "learning_rate": 1.276886733805794e-06, "loss": 0.6565, "step": 17968 }, { "epoch": 0.7447055410501886, "grad_norm": 0.3820628821849823, "learning_rate": 1.2766795142774255e-06, "loss": 0.6412, "step": 17969 }, { "epoch": 0.7447469849558622, "grad_norm": 0.39659222960472107, "learning_rate": 1.2764722947490573e-06, "loss": 0.6184, "step": 17970 }, { "epoch": 0.744788428861536, "grad_norm": 0.4141824543476105, "learning_rate": 1.276265075220689e-06, "loss": 0.6173, "step": 17971 }, { "epoch": 0.7448298727672096, "grad_norm": 0.4708532691001892, "learning_rate": 1.2760578556923205e-06, "loss": 0.6914, "step": 17972 }, { "epoch": 0.7448713166728832, "grad_norm": 0.40130093693733215, "learning_rate": 1.2758506361639521e-06, "loss": 0.6447, "step": 17973 }, { "epoch": 0.7449127605785569, "grad_norm": 0.431858092546463, "learning_rate": 1.2756434166355837e-06, "loss": 0.6677, "step": 17974 }, { "epoch": 0.7449542044842306, "grad_norm": 0.3850264549255371, "learning_rate": 1.2754361971072155e-06, "loss": 0.6567, "step": 17975 }, { "epoch": 0.7449956483899043, "grad_norm": 0.40212851762771606, "learning_rate": 1.2752289775788471e-06, "loss": 0.6533, "step": 17976 }, { "epoch": 0.7450370922955779, "grad_norm": 0.40164652466773987, "learning_rate": 1.2750217580504787e-06, "loss": 0.6368, "step": 17977 }, { "epoch": 0.7450785362012516, "grad_norm": 0.42696163058280945, "learning_rate": 1.2748145385221103e-06, "loss": 0.6661, "step": 17978 }, { "epoch": 0.7451199801069253, "grad_norm": 0.4164913594722748, "learning_rate": 1.2746073189937421e-06, "loss": 0.6803, "step": 17979 }, { "epoch": 0.745161424012599, "grad_norm": 
0.39903637766838074, "learning_rate": 1.2744000994653737e-06, "loss": 0.6675, "step": 17980 }, { "epoch": 0.7452028679182726, "grad_norm": 0.4060525596141815, "learning_rate": 1.2741928799370053e-06, "loss": 0.6534, "step": 17981 }, { "epoch": 0.7452443118239462, "grad_norm": 0.46491873264312744, "learning_rate": 1.273985660408637e-06, "loss": 0.7365, "step": 17982 }, { "epoch": 0.74528575572962, "grad_norm": 0.3856048583984375, "learning_rate": 1.2737784408802687e-06, "loss": 0.656, "step": 17983 }, { "epoch": 0.7453271996352936, "grad_norm": 0.3953703045845032, "learning_rate": 1.2735712213519003e-06, "loss": 0.6663, "step": 17984 }, { "epoch": 0.7453686435409673, "grad_norm": 0.39112481474876404, "learning_rate": 1.273364001823532e-06, "loss": 0.687, "step": 17985 }, { "epoch": 0.745410087446641, "grad_norm": 0.44401276111602783, "learning_rate": 1.2731567822951635e-06, "loss": 0.6697, "step": 17986 }, { "epoch": 0.7454515313523147, "grad_norm": 0.41151028871536255, "learning_rate": 1.2729495627667953e-06, "loss": 0.6749, "step": 17987 }, { "epoch": 0.7454929752579883, "grad_norm": 0.4557395279407501, "learning_rate": 1.272742343238427e-06, "loss": 0.7485, "step": 17988 }, { "epoch": 0.745534419163662, "grad_norm": 0.3865765631198883, "learning_rate": 1.2725351237100585e-06, "loss": 0.5995, "step": 17989 }, { "epoch": 0.7455758630693357, "grad_norm": 0.4425818920135498, "learning_rate": 1.2723279041816901e-06, "loss": 0.6631, "step": 17990 }, { "epoch": 0.7456173069750093, "grad_norm": 0.3990689814090729, "learning_rate": 1.2721206846533217e-06, "loss": 0.741, "step": 17991 }, { "epoch": 0.745658750880683, "grad_norm": 0.41478872299194336, "learning_rate": 1.2719134651249535e-06, "loss": 0.6328, "step": 17992 }, { "epoch": 0.7457001947863566, "grad_norm": 0.4028747081756592, "learning_rate": 1.2717062455965851e-06, "loss": 0.6755, "step": 17993 }, { "epoch": 0.7457416386920304, "grad_norm": 0.3823970556259155, "learning_rate": 1.2714990260682167e-06, "loss": 
0.671, "step": 17994 }, { "epoch": 0.745783082597704, "grad_norm": 0.39892029762268066, "learning_rate": 1.2712918065398483e-06, "loss": 0.6694, "step": 17995 }, { "epoch": 0.7458245265033777, "grad_norm": 0.424058198928833, "learning_rate": 1.2710845870114801e-06, "loss": 0.6779, "step": 17996 }, { "epoch": 0.7458659704090513, "grad_norm": 0.41004472970962524, "learning_rate": 1.2708773674831117e-06, "loss": 0.6887, "step": 17997 }, { "epoch": 0.7459074143147251, "grad_norm": 0.39678674936294556, "learning_rate": 1.2706701479547433e-06, "loss": 0.5887, "step": 17998 }, { "epoch": 0.7459488582203987, "grad_norm": 0.3990894556045532, "learning_rate": 1.270462928426375e-06, "loss": 0.6483, "step": 17999 }, { "epoch": 0.7459903021260723, "grad_norm": 0.4123969078063965, "learning_rate": 1.2702557088980067e-06, "loss": 0.7126, "step": 18000 }, { "epoch": 0.746031746031746, "grad_norm": 0.43104955554008484, "learning_rate": 1.2700484893696383e-06, "loss": 0.7192, "step": 18001 }, { "epoch": 0.7460731899374197, "grad_norm": 0.3994036614894867, "learning_rate": 1.26984126984127e-06, "loss": 0.693, "step": 18002 }, { "epoch": 0.7461146338430934, "grad_norm": 0.4198873043060303, "learning_rate": 1.2696340503129015e-06, "loss": 0.6831, "step": 18003 }, { "epoch": 0.746156077748767, "grad_norm": 0.44471997022628784, "learning_rate": 1.2694268307845331e-06, "loss": 0.6573, "step": 18004 }, { "epoch": 0.7461975216544408, "grad_norm": 0.4223025143146515, "learning_rate": 1.269219611256165e-06, "loss": 0.686, "step": 18005 }, { "epoch": 0.7462389655601144, "grad_norm": 0.40300053358078003, "learning_rate": 1.2690123917277965e-06, "loss": 0.696, "step": 18006 }, { "epoch": 0.7462804094657881, "grad_norm": 0.39616361260414124, "learning_rate": 1.2688051721994281e-06, "loss": 0.6848, "step": 18007 }, { "epoch": 0.7463218533714617, "grad_norm": 0.40903550386428833, "learning_rate": 1.2685979526710597e-06, "loss": 0.6792, "step": 18008 }, { "epoch": 0.7463632972771354, "grad_norm": 
0.44018179178237915, "learning_rate": 1.2683907331426915e-06, "loss": 0.66, "step": 18009 }, { "epoch": 0.7464047411828091, "grad_norm": 0.43438011407852173, "learning_rate": 1.2681835136143231e-06, "loss": 0.7068, "step": 18010 }, { "epoch": 0.7464461850884827, "grad_norm": 0.42794495820999146, "learning_rate": 1.2679762940859547e-06, "loss": 0.6783, "step": 18011 }, { "epoch": 0.7464876289941564, "grad_norm": 0.4277538061141968, "learning_rate": 1.2677690745575863e-06, "loss": 0.6783, "step": 18012 }, { "epoch": 0.7465290728998301, "grad_norm": 0.41058090329170227, "learning_rate": 1.2675618550292181e-06, "loss": 0.6754, "step": 18013 }, { "epoch": 0.7465705168055038, "grad_norm": 0.449241042137146, "learning_rate": 1.2673546355008497e-06, "loss": 0.7073, "step": 18014 }, { "epoch": 0.7466119607111774, "grad_norm": 0.4004034698009491, "learning_rate": 1.2671474159724813e-06, "loss": 0.6575, "step": 18015 }, { "epoch": 0.7466534046168511, "grad_norm": 0.38303661346435547, "learning_rate": 1.266940196444113e-06, "loss": 0.6738, "step": 18016 }, { "epoch": 0.7466948485225248, "grad_norm": 0.3967627286911011, "learning_rate": 1.2667329769157447e-06, "loss": 0.6816, "step": 18017 }, { "epoch": 0.7467362924281984, "grad_norm": 0.39240753650665283, "learning_rate": 1.2665257573873763e-06, "loss": 0.6499, "step": 18018 }, { "epoch": 0.7467777363338721, "grad_norm": 0.37365520000457764, "learning_rate": 1.266318537859008e-06, "loss": 0.6182, "step": 18019 }, { "epoch": 0.7468191802395457, "grad_norm": 0.39595508575439453, "learning_rate": 1.2661113183306395e-06, "loss": 0.6383, "step": 18020 }, { "epoch": 0.7468606241452195, "grad_norm": 0.4256483018398285, "learning_rate": 1.2659040988022711e-06, "loss": 0.6814, "step": 18021 }, { "epoch": 0.7469020680508931, "grad_norm": 0.4003860056400299, "learning_rate": 1.265696879273903e-06, "loss": 0.6572, "step": 18022 }, { "epoch": 0.7469435119565668, "grad_norm": 0.44488412141799927, "learning_rate": 1.2654896597455345e-06, 
"loss": 0.7125, "step": 18023 }, { "epoch": 0.7469849558622405, "grad_norm": 0.41488760709762573, "learning_rate": 1.2652824402171661e-06, "loss": 0.6648, "step": 18024 }, { "epoch": 0.7470263997679141, "grad_norm": 0.41693955659866333, "learning_rate": 1.265075220688798e-06, "loss": 0.7166, "step": 18025 }, { "epoch": 0.7470678436735878, "grad_norm": 0.47288286685943604, "learning_rate": 1.2648680011604295e-06, "loss": 0.689, "step": 18026 }, { "epoch": 0.7471092875792614, "grad_norm": 0.4486538767814636, "learning_rate": 1.2646607816320611e-06, "loss": 0.6997, "step": 18027 }, { "epoch": 0.7471507314849352, "grad_norm": 0.38557592034339905, "learning_rate": 1.2644535621036927e-06, "loss": 0.6643, "step": 18028 }, { "epoch": 0.7471921753906088, "grad_norm": 0.42697638273239136, "learning_rate": 1.2642463425753243e-06, "loss": 0.6384, "step": 18029 }, { "epoch": 0.7472336192962825, "grad_norm": 0.4298316538333893, "learning_rate": 1.264039123046956e-06, "loss": 0.7166, "step": 18030 }, { "epoch": 0.7472750632019561, "grad_norm": 0.4366237223148346, "learning_rate": 1.2638319035185877e-06, "loss": 0.774, "step": 18031 }, { "epoch": 0.7473165071076299, "grad_norm": 0.3822805881500244, "learning_rate": 1.2636246839902193e-06, "loss": 0.6539, "step": 18032 }, { "epoch": 0.7473579510133035, "grad_norm": 0.4416252374649048, "learning_rate": 1.263417464461851e-06, "loss": 0.6661, "step": 18033 }, { "epoch": 0.7473993949189771, "grad_norm": 0.41685259342193604, "learning_rate": 1.2632102449334827e-06, "loss": 0.6807, "step": 18034 }, { "epoch": 0.7474408388246508, "grad_norm": 0.4027719497680664, "learning_rate": 1.2630030254051143e-06, "loss": 0.6689, "step": 18035 }, { "epoch": 0.7474822827303245, "grad_norm": 0.3914279639720917, "learning_rate": 1.262795805876746e-06, "loss": 0.6635, "step": 18036 }, { "epoch": 0.7475237266359982, "grad_norm": 0.4317137598991394, "learning_rate": 1.2625885863483775e-06, "loss": 0.6521, "step": 18037 }, { "epoch": 0.7475651705416718, 
"grad_norm": 0.4281472861766815, "learning_rate": 1.2623813668200091e-06, "loss": 0.6542, "step": 18038 }, { "epoch": 0.7476066144473456, "grad_norm": 0.4050513207912445, "learning_rate": 1.262174147291641e-06, "loss": 0.6332, "step": 18039 }, { "epoch": 0.7476480583530192, "grad_norm": 0.42278122901916504, "learning_rate": 1.2619669277632725e-06, "loss": 0.6649, "step": 18040 }, { "epoch": 0.7476895022586929, "grad_norm": 0.4415608048439026, "learning_rate": 1.2617597082349041e-06, "loss": 0.6833, "step": 18041 }, { "epoch": 0.7477309461643665, "grad_norm": 0.4337461292743683, "learning_rate": 1.261552488706536e-06, "loss": 0.6838, "step": 18042 }, { "epoch": 0.7477723900700401, "grad_norm": 0.41062915325164795, "learning_rate": 1.2613452691781675e-06, "loss": 0.6548, "step": 18043 }, { "epoch": 0.7478138339757139, "grad_norm": 0.4038822054862976, "learning_rate": 1.2611380496497991e-06, "loss": 0.6841, "step": 18044 }, { "epoch": 0.7478552778813875, "grad_norm": 0.40963801741600037, "learning_rate": 1.2609308301214307e-06, "loss": 0.6713, "step": 18045 }, { "epoch": 0.7478967217870612, "grad_norm": 0.4199795126914978, "learning_rate": 1.2607236105930623e-06, "loss": 0.7594, "step": 18046 }, { "epoch": 0.7479381656927349, "grad_norm": 0.399190217256546, "learning_rate": 1.260516391064694e-06, "loss": 0.6516, "step": 18047 }, { "epoch": 0.7479796095984086, "grad_norm": 0.4055478870868683, "learning_rate": 1.2603091715363257e-06, "loss": 0.6759, "step": 18048 }, { "epoch": 0.7480210535040822, "grad_norm": 0.41443192958831787, "learning_rate": 1.2601019520079573e-06, "loss": 0.6418, "step": 18049 }, { "epoch": 0.7480624974097559, "grad_norm": 0.38799428939819336, "learning_rate": 1.259894732479589e-06, "loss": 0.6208, "step": 18050 }, { "epoch": 0.7481039413154296, "grad_norm": 0.4371360242366791, "learning_rate": 1.2596875129512208e-06, "loss": 0.6873, "step": 18051 }, { "epoch": 0.7481453852211032, "grad_norm": 0.3942939341068268, "learning_rate": 
1.2594802934228524e-06, "loss": 0.6792, "step": 18052 }, { "epoch": 0.7481868291267769, "grad_norm": 0.408089280128479, "learning_rate": 1.259273073894484e-06, "loss": 0.7136, "step": 18053 }, { "epoch": 0.7482282730324505, "grad_norm": 0.393614798784256, "learning_rate": 1.2590658543661155e-06, "loss": 0.6497, "step": 18054 }, { "epoch": 0.7482697169381243, "grad_norm": 0.3956488072872162, "learning_rate": 1.2588586348377471e-06, "loss": 0.6613, "step": 18055 }, { "epoch": 0.7483111608437979, "grad_norm": 0.3749770522117615, "learning_rate": 1.2586514153093787e-06, "loss": 0.6278, "step": 18056 }, { "epoch": 0.7483526047494716, "grad_norm": 0.3833193778991699, "learning_rate": 1.2584441957810105e-06, "loss": 0.593, "step": 18057 }, { "epoch": 0.7483940486551452, "grad_norm": 0.42074131965637207, "learning_rate": 1.2582369762526421e-06, "loss": 0.6802, "step": 18058 }, { "epoch": 0.748435492560819, "grad_norm": 0.42019370198249817, "learning_rate": 1.258029756724274e-06, "loss": 0.6914, "step": 18059 }, { "epoch": 0.7484769364664926, "grad_norm": 0.4084164798259735, "learning_rate": 1.2578225371959056e-06, "loss": 0.6626, "step": 18060 }, { "epoch": 0.7485183803721662, "grad_norm": 0.43233099579811096, "learning_rate": 1.2576153176675372e-06, "loss": 0.6737, "step": 18061 }, { "epoch": 0.74855982427784, "grad_norm": 0.4396106004714966, "learning_rate": 1.2574080981391687e-06, "loss": 0.6855, "step": 18062 }, { "epoch": 0.7486012681835136, "grad_norm": 0.4270753562450409, "learning_rate": 1.2572008786108003e-06, "loss": 0.6877, "step": 18063 }, { "epoch": 0.7486427120891873, "grad_norm": 0.3926583528518677, "learning_rate": 1.256993659082432e-06, "loss": 0.6663, "step": 18064 }, { "epoch": 0.7486841559948609, "grad_norm": 0.40158137679100037, "learning_rate": 1.2567864395540635e-06, "loss": 0.6919, "step": 18065 }, { "epoch": 0.7487255999005347, "grad_norm": 0.39559832215309143, "learning_rate": 1.2565792200256953e-06, "loss": 0.7156, "step": 18066 }, { "epoch": 
0.7487670438062083, "grad_norm": 0.3891668915748596, "learning_rate": 1.256372000497327e-06, "loss": 0.5944, "step": 18067 }, { "epoch": 0.748808487711882, "grad_norm": 0.39854809641838074, "learning_rate": 1.2561647809689588e-06, "loss": 0.667, "step": 18068 }, { "epoch": 0.7488499316175556, "grad_norm": 0.48829442262649536, "learning_rate": 1.2559575614405904e-06, "loss": 0.7683, "step": 18069 }, { "epoch": 0.7488913755232293, "grad_norm": 0.41078442335128784, "learning_rate": 1.255750341912222e-06, "loss": 0.6406, "step": 18070 }, { "epoch": 0.748932819428903, "grad_norm": 0.40055447816848755, "learning_rate": 1.2555431223838535e-06, "loss": 0.6487, "step": 18071 }, { "epoch": 0.7489742633345766, "grad_norm": 0.424227237701416, "learning_rate": 1.2553359028554851e-06, "loss": 0.6575, "step": 18072 }, { "epoch": 0.7490157072402504, "grad_norm": 0.4163024425506592, "learning_rate": 1.2551286833271167e-06, "loss": 0.6853, "step": 18073 }, { "epoch": 0.749057151145924, "grad_norm": 0.4982098937034607, "learning_rate": 1.2549214637987486e-06, "loss": 0.6943, "step": 18074 }, { "epoch": 0.7490985950515977, "grad_norm": 0.42212527990341187, "learning_rate": 1.2547142442703801e-06, "loss": 0.6694, "step": 18075 }, { "epoch": 0.7491400389572713, "grad_norm": 0.41553330421447754, "learning_rate": 1.254507024742012e-06, "loss": 0.611, "step": 18076 }, { "epoch": 0.7491814828629451, "grad_norm": 0.3973764181137085, "learning_rate": 1.2542998052136436e-06, "loss": 0.6399, "step": 18077 }, { "epoch": 0.7492229267686187, "grad_norm": 0.49291864037513733, "learning_rate": 1.2540925856852752e-06, "loss": 0.7334, "step": 18078 }, { "epoch": 0.7492643706742923, "grad_norm": 0.42273131012916565, "learning_rate": 1.2538853661569068e-06, "loss": 0.6779, "step": 18079 }, { "epoch": 0.749305814579966, "grad_norm": 0.41080018877983093, "learning_rate": 1.2536781466285383e-06, "loss": 0.6726, "step": 18080 }, { "epoch": 0.7493472584856397, "grad_norm": 0.41369596123695374, 
"learning_rate": 1.25347092710017e-06, "loss": 0.6833, "step": 18081 }, { "epoch": 0.7493887023913134, "grad_norm": 0.41463503241539, "learning_rate": 1.2532637075718015e-06, "loss": 0.6587, "step": 18082 }, { "epoch": 0.749430146296987, "grad_norm": 0.47034165263175964, "learning_rate": 1.2530564880434334e-06, "loss": 0.6359, "step": 18083 }, { "epoch": 0.7494715902026607, "grad_norm": 0.4203554093837738, "learning_rate": 1.252849268515065e-06, "loss": 0.6602, "step": 18084 }, { "epoch": 0.7495130341083344, "grad_norm": 0.40883710980415344, "learning_rate": 1.2526420489866968e-06, "loss": 0.6598, "step": 18085 }, { "epoch": 0.749554478014008, "grad_norm": 0.43225979804992676, "learning_rate": 1.2524348294583284e-06, "loss": 0.6248, "step": 18086 }, { "epoch": 0.7495959219196817, "grad_norm": 0.4079810380935669, "learning_rate": 1.25222760992996e-06, "loss": 0.6628, "step": 18087 }, { "epoch": 0.7496373658253553, "grad_norm": 0.42913246154785156, "learning_rate": 1.2520203904015916e-06, "loss": 0.7161, "step": 18088 }, { "epoch": 0.7496788097310291, "grad_norm": 0.41871094703674316, "learning_rate": 1.2518131708732231e-06, "loss": 0.6808, "step": 18089 }, { "epoch": 0.7497202536367027, "grad_norm": 0.43216896057128906, "learning_rate": 1.2516059513448547e-06, "loss": 0.6924, "step": 18090 }, { "epoch": 0.7497616975423764, "grad_norm": 0.4284150004386902, "learning_rate": 1.2513987318164863e-06, "loss": 0.6167, "step": 18091 }, { "epoch": 0.74980314144805, "grad_norm": 0.4093926250934601, "learning_rate": 1.2511915122881182e-06, "loss": 0.698, "step": 18092 }, { "epoch": 0.7498445853537238, "grad_norm": 0.4449462592601776, "learning_rate": 1.25098429275975e-06, "loss": 0.6599, "step": 18093 }, { "epoch": 0.7498860292593974, "grad_norm": 0.39144787192344666, "learning_rate": 1.2507770732313816e-06, "loss": 0.6229, "step": 18094 }, { "epoch": 0.749927473165071, "grad_norm": 0.42938515543937683, "learning_rate": 1.2505698537030132e-06, "loss": 0.7241, "step": 18095 }, 
{ "epoch": 0.7499689170707448, "grad_norm": 0.40155020356178284, "learning_rate": 1.2503626341746448e-06, "loss": 0.6609, "step": 18096 }, { "epoch": 0.7500103609764184, "grad_norm": 0.4145388603210449, "learning_rate": 1.2501554146462764e-06, "loss": 0.7012, "step": 18097 }, { "epoch": 0.7500518048820921, "grad_norm": 0.4284125864505768, "learning_rate": 1.2499481951179082e-06, "loss": 0.6949, "step": 18098 }, { "epoch": 0.7500932487877657, "grad_norm": 0.4008921682834625, "learning_rate": 1.2497409755895398e-06, "loss": 0.6569, "step": 18099 }, { "epoch": 0.7501346926934395, "grad_norm": 0.40656721591949463, "learning_rate": 1.2495337560611714e-06, "loss": 0.6879, "step": 18100 }, { "epoch": 0.7501761365991131, "grad_norm": 0.3756991922855377, "learning_rate": 1.249326536532803e-06, "loss": 0.6637, "step": 18101 }, { "epoch": 0.7502175805047868, "grad_norm": 0.4266617000102997, "learning_rate": 1.2491193170044346e-06, "loss": 0.707, "step": 18102 }, { "epoch": 0.7502590244104604, "grad_norm": 0.3844257593154907, "learning_rate": 1.2489120974760664e-06, "loss": 0.6318, "step": 18103 }, { "epoch": 0.7503004683161341, "grad_norm": 0.44730669260025024, "learning_rate": 1.248704877947698e-06, "loss": 0.6829, "step": 18104 }, { "epoch": 0.7503419122218078, "grad_norm": 0.40021583437919617, "learning_rate": 1.2484976584193296e-06, "loss": 0.699, "step": 18105 }, { "epoch": 0.7503833561274814, "grad_norm": 0.37868693470954895, "learning_rate": 1.2482904388909612e-06, "loss": 0.6517, "step": 18106 }, { "epoch": 0.7504248000331551, "grad_norm": 0.3893684446811676, "learning_rate": 1.248083219362593e-06, "loss": 0.6608, "step": 18107 }, { "epoch": 0.7504662439388288, "grad_norm": 0.41274234652519226, "learning_rate": 1.2478759998342246e-06, "loss": 0.6819, "step": 18108 }, { "epoch": 0.7505076878445025, "grad_norm": 0.4035905599594116, "learning_rate": 1.2476687803058562e-06, "loss": 0.6495, "step": 18109 }, { "epoch": 0.7505491317501761, "grad_norm": 0.415272980928421, 
"learning_rate": 1.2474615607774878e-06, "loss": 0.6495, "step": 18110 }, { "epoch": 0.7505905756558499, "grad_norm": 0.43477997183799744, "learning_rate": 1.2472543412491196e-06, "loss": 0.6788, "step": 18111 }, { "epoch": 0.7506320195615235, "grad_norm": 0.43373411893844604, "learning_rate": 1.2470471217207512e-06, "loss": 0.6726, "step": 18112 }, { "epoch": 0.7506734634671971, "grad_norm": 0.39661386609077454, "learning_rate": 1.2468399021923828e-06, "loss": 0.7021, "step": 18113 }, { "epoch": 0.7507149073728708, "grad_norm": 0.4066760241985321, "learning_rate": 1.2466326826640144e-06, "loss": 0.6407, "step": 18114 }, { "epoch": 0.7507563512785445, "grad_norm": 0.4205028712749481, "learning_rate": 1.246425463135646e-06, "loss": 0.6793, "step": 18115 }, { "epoch": 0.7507977951842182, "grad_norm": 0.4091859757900238, "learning_rate": 1.2462182436072778e-06, "loss": 0.6455, "step": 18116 }, { "epoch": 0.7508392390898918, "grad_norm": 0.44470667839050293, "learning_rate": 1.2460110240789094e-06, "loss": 0.6698, "step": 18117 }, { "epoch": 0.7508806829955655, "grad_norm": 0.40063872933387756, "learning_rate": 1.245803804550541e-06, "loss": 0.6523, "step": 18118 }, { "epoch": 0.7509221269012392, "grad_norm": 0.40479418635368347, "learning_rate": 1.2455965850221726e-06, "loss": 0.6649, "step": 18119 }, { "epoch": 0.7509635708069129, "grad_norm": 0.4068812429904938, "learning_rate": 1.2453893654938044e-06, "loss": 0.696, "step": 18120 }, { "epoch": 0.7510050147125865, "grad_norm": 0.4208543300628662, "learning_rate": 1.245182145965436e-06, "loss": 0.6858, "step": 18121 }, { "epoch": 0.7510464586182601, "grad_norm": 0.4267224669456482, "learning_rate": 1.2449749264370676e-06, "loss": 0.6775, "step": 18122 }, { "epoch": 0.7510879025239339, "grad_norm": 0.4049726128578186, "learning_rate": 1.2447677069086992e-06, "loss": 0.7092, "step": 18123 }, { "epoch": 0.7511293464296075, "grad_norm": 0.43642547726631165, "learning_rate": 1.244560487380331e-06, "loss": 0.6289, "step": 
18124 }, { "epoch": 0.7511707903352812, "grad_norm": 0.4001486003398895, "learning_rate": 1.2443532678519626e-06, "loss": 0.6582, "step": 18125 }, { "epoch": 0.7512122342409548, "grad_norm": 0.4406491816043854, "learning_rate": 1.2441460483235942e-06, "loss": 0.7123, "step": 18126 }, { "epoch": 0.7512536781466286, "grad_norm": 0.38485848903656006, "learning_rate": 1.2439388287952258e-06, "loss": 0.6326, "step": 18127 }, { "epoch": 0.7512951220523022, "grad_norm": 0.4575856626033783, "learning_rate": 1.2437316092668574e-06, "loss": 0.6323, "step": 18128 }, { "epoch": 0.7513365659579759, "grad_norm": 0.36781466007232666, "learning_rate": 1.2435243897384892e-06, "loss": 0.6643, "step": 18129 }, { "epoch": 0.7513780098636496, "grad_norm": 0.41730374097824097, "learning_rate": 1.2433171702101208e-06, "loss": 0.6599, "step": 18130 }, { "epoch": 0.7514194537693232, "grad_norm": 0.4034154415130615, "learning_rate": 1.2431099506817524e-06, "loss": 0.6648, "step": 18131 }, { "epoch": 0.7514608976749969, "grad_norm": 0.3895029127597809, "learning_rate": 1.242902731153384e-06, "loss": 0.5907, "step": 18132 }, { "epoch": 0.7515023415806705, "grad_norm": 0.38178855180740356, "learning_rate": 1.2426955116250158e-06, "loss": 0.6859, "step": 18133 }, { "epoch": 0.7515437854863443, "grad_norm": 0.4604616165161133, "learning_rate": 1.2424882920966474e-06, "loss": 0.7395, "step": 18134 }, { "epoch": 0.7515852293920179, "grad_norm": 0.41205278038978577, "learning_rate": 1.242281072568279e-06, "loss": 0.6758, "step": 18135 }, { "epoch": 0.7516266732976916, "grad_norm": 0.3968134820461273, "learning_rate": 1.2420738530399106e-06, "loss": 0.6614, "step": 18136 }, { "epoch": 0.7516681172033652, "grad_norm": 0.4180302321910858, "learning_rate": 1.2418666335115424e-06, "loss": 0.6379, "step": 18137 }, { "epoch": 0.751709561109039, "grad_norm": 0.4360416531562805, "learning_rate": 1.241659413983174e-06, "loss": 0.7087, "step": 18138 }, { "epoch": 0.7517510050147126, "grad_norm": 
0.4283049404621124, "learning_rate": 1.2414521944548056e-06, "loss": 0.6364, "step": 18139 }, { "epoch": 0.7517924489203862, "grad_norm": 0.43910738825798035, "learning_rate": 1.2412449749264372e-06, "loss": 0.7266, "step": 18140 }, { "epoch": 0.75183389282606, "grad_norm": 0.42952442169189453, "learning_rate": 1.2410377553980688e-06, "loss": 0.686, "step": 18141 }, { "epoch": 0.7518753367317336, "grad_norm": 0.39954304695129395, "learning_rate": 1.2408305358697006e-06, "loss": 0.6956, "step": 18142 }, { "epoch": 0.7519167806374073, "grad_norm": 0.43185874819755554, "learning_rate": 1.2406233163413322e-06, "loss": 0.637, "step": 18143 }, { "epoch": 0.7519582245430809, "grad_norm": 0.420297235250473, "learning_rate": 1.2404160968129638e-06, "loss": 0.644, "step": 18144 }, { "epoch": 0.7519996684487547, "grad_norm": 0.4051879644393921, "learning_rate": 1.2402088772845954e-06, "loss": 0.6731, "step": 18145 }, { "epoch": 0.7520411123544283, "grad_norm": 0.4346986711025238, "learning_rate": 1.2400016577562272e-06, "loss": 0.7161, "step": 18146 }, { "epoch": 0.7520825562601019, "grad_norm": 0.39401689171791077, "learning_rate": 1.2397944382278588e-06, "loss": 0.6234, "step": 18147 }, { "epoch": 0.7521240001657756, "grad_norm": 0.4115256667137146, "learning_rate": 1.2395872186994904e-06, "loss": 0.6378, "step": 18148 }, { "epoch": 0.7521654440714493, "grad_norm": 0.41027265787124634, "learning_rate": 1.239379999171122e-06, "loss": 0.6699, "step": 18149 }, { "epoch": 0.752206887977123, "grad_norm": 0.42018914222717285, "learning_rate": 1.2391727796427538e-06, "loss": 0.6675, "step": 18150 }, { "epoch": 0.7522483318827966, "grad_norm": 0.4364220201969147, "learning_rate": 1.2389655601143854e-06, "loss": 0.6323, "step": 18151 }, { "epoch": 0.7522897757884703, "grad_norm": 0.3931707441806793, "learning_rate": 1.238758340586017e-06, "loss": 0.6407, "step": 18152 }, { "epoch": 0.752331219694144, "grad_norm": 0.4244634211063385, "learning_rate": 1.2385511210576486e-06, "loss": 
0.6783, "step": 18153 }, { "epoch": 0.7523726635998177, "grad_norm": 0.4203733503818512, "learning_rate": 1.2383439015292802e-06, "loss": 0.6515, "step": 18154 }, { "epoch": 0.7524141075054913, "grad_norm": 0.4208974242210388, "learning_rate": 1.238136682000912e-06, "loss": 0.6531, "step": 18155 }, { "epoch": 0.7524555514111649, "grad_norm": 0.4043782353401184, "learning_rate": 1.2379294624725436e-06, "loss": 0.6619, "step": 18156 }, { "epoch": 0.7524969953168387, "grad_norm": 0.42378875613212585, "learning_rate": 1.2377222429441752e-06, "loss": 0.6334, "step": 18157 }, { "epoch": 0.7525384392225123, "grad_norm": 0.3850429654121399, "learning_rate": 1.2375150234158068e-06, "loss": 0.6434, "step": 18158 }, { "epoch": 0.752579883128186, "grad_norm": 0.43882259726524353, "learning_rate": 1.2373078038874386e-06, "loss": 0.678, "step": 18159 }, { "epoch": 0.7526213270338596, "grad_norm": 0.42201563715934753, "learning_rate": 1.2371005843590702e-06, "loss": 0.6846, "step": 18160 }, { "epoch": 0.7526627709395334, "grad_norm": 0.4154178500175476, "learning_rate": 1.2368933648307018e-06, "loss": 0.7286, "step": 18161 }, { "epoch": 0.752704214845207, "grad_norm": 0.3873840868473053, "learning_rate": 1.2366861453023334e-06, "loss": 0.6908, "step": 18162 }, { "epoch": 0.7527456587508807, "grad_norm": 0.4223342835903168, "learning_rate": 1.236478925773965e-06, "loss": 0.6902, "step": 18163 }, { "epoch": 0.7527871026565544, "grad_norm": 0.4386080205440521, "learning_rate": 1.2362717062455968e-06, "loss": 0.6694, "step": 18164 }, { "epoch": 0.752828546562228, "grad_norm": 0.4245038330554962, "learning_rate": 1.2360644867172284e-06, "loss": 0.6783, "step": 18165 }, { "epoch": 0.7528699904679017, "grad_norm": 0.4469040632247925, "learning_rate": 1.23585726718886e-06, "loss": 0.6506, "step": 18166 }, { "epoch": 0.7529114343735753, "grad_norm": 0.4272489547729492, "learning_rate": 1.2356500476604916e-06, "loss": 0.6946, "step": 18167 }, { "epoch": 0.7529528782792491, "grad_norm": 
0.43511152267456055, "learning_rate": 1.2354428281321234e-06, "loss": 0.6968, "step": 18168 }, { "epoch": 0.7529943221849227, "grad_norm": 0.4406329393386841, "learning_rate": 1.235235608603755e-06, "loss": 0.7256, "step": 18169 }, { "epoch": 0.7530357660905964, "grad_norm": 0.3928154706954956, "learning_rate": 1.2350283890753866e-06, "loss": 0.6577, "step": 18170 }, { "epoch": 0.75307720999627, "grad_norm": 0.4237310588359833, "learning_rate": 1.2348211695470182e-06, "loss": 0.6931, "step": 18171 }, { "epoch": 0.7531186539019438, "grad_norm": 0.5531282424926758, "learning_rate": 1.23461395001865e-06, "loss": 0.7029, "step": 18172 }, { "epoch": 0.7531600978076174, "grad_norm": 0.4064716398715973, "learning_rate": 1.2344067304902816e-06, "loss": 0.6449, "step": 18173 }, { "epoch": 0.753201541713291, "grad_norm": 0.3712570369243622, "learning_rate": 1.2341995109619132e-06, "loss": 0.6521, "step": 18174 }, { "epoch": 0.7532429856189647, "grad_norm": 0.3940562903881073, "learning_rate": 1.2339922914335448e-06, "loss": 0.7268, "step": 18175 }, { "epoch": 0.7532844295246384, "grad_norm": 0.39350855350494385, "learning_rate": 1.2337850719051764e-06, "loss": 0.6851, "step": 18176 }, { "epoch": 0.7533258734303121, "grad_norm": 0.4416342079639435, "learning_rate": 1.2335778523768082e-06, "loss": 0.6606, "step": 18177 }, { "epoch": 0.7533673173359857, "grad_norm": 0.40940192341804504, "learning_rate": 1.2333706328484398e-06, "loss": 0.6306, "step": 18178 }, { "epoch": 0.7534087612416595, "grad_norm": 0.4198229908943176, "learning_rate": 1.2331634133200714e-06, "loss": 0.642, "step": 18179 }, { "epoch": 0.7534502051473331, "grad_norm": 0.4120827913284302, "learning_rate": 1.232956193791703e-06, "loss": 0.7017, "step": 18180 }, { "epoch": 0.7534916490530068, "grad_norm": 0.38771459460258484, "learning_rate": 1.2327489742633348e-06, "loss": 0.5988, "step": 18181 }, { "epoch": 0.7535330929586804, "grad_norm": 0.44402164220809937, "learning_rate": 1.2325417547349664e-06, "loss": 
0.696, "step": 18182 }, { "epoch": 0.753574536864354, "grad_norm": 0.44455617666244507, "learning_rate": 1.232334535206598e-06, "loss": 0.6869, "step": 18183 }, { "epoch": 0.7536159807700278, "grad_norm": 0.39785236120224, "learning_rate": 1.2321273156782296e-06, "loss": 0.6753, "step": 18184 }, { "epoch": 0.7536574246757014, "grad_norm": 0.38366401195526123, "learning_rate": 1.2319200961498614e-06, "loss": 0.604, "step": 18185 }, { "epoch": 0.7536988685813751, "grad_norm": 0.4519854187965393, "learning_rate": 1.231712876621493e-06, "loss": 0.7439, "step": 18186 }, { "epoch": 0.7537403124870488, "grad_norm": 0.4359987676143646, "learning_rate": 1.2315056570931246e-06, "loss": 0.6548, "step": 18187 }, { "epoch": 0.7537817563927225, "grad_norm": 0.42344972491264343, "learning_rate": 1.2312984375647562e-06, "loss": 0.6278, "step": 18188 }, { "epoch": 0.7538232002983961, "grad_norm": 0.4253320097923279, "learning_rate": 1.2310912180363878e-06, "loss": 0.6747, "step": 18189 }, { "epoch": 0.7538646442040698, "grad_norm": 0.42661672830581665, "learning_rate": 1.2308839985080196e-06, "loss": 0.6863, "step": 18190 }, { "epoch": 0.7539060881097435, "grad_norm": 0.41609615087509155, "learning_rate": 1.2306767789796512e-06, "loss": 0.6807, "step": 18191 }, { "epoch": 0.7539475320154171, "grad_norm": 0.4082472622394562, "learning_rate": 1.2304695594512828e-06, "loss": 0.6409, "step": 18192 }, { "epoch": 0.7539889759210908, "grad_norm": 0.40783607959747314, "learning_rate": 1.2302623399229144e-06, "loss": 0.7031, "step": 18193 }, { "epoch": 0.7540304198267644, "grad_norm": 0.414442241191864, "learning_rate": 1.2300551203945462e-06, "loss": 0.6461, "step": 18194 }, { "epoch": 0.7540718637324382, "grad_norm": 0.42578059434890747, "learning_rate": 1.2298479008661778e-06, "loss": 0.7225, "step": 18195 }, { "epoch": 0.7541133076381118, "grad_norm": 0.42140722274780273, "learning_rate": 1.2296406813378094e-06, "loss": 0.668, "step": 18196 }, { "epoch": 0.7541547515437855, "grad_norm": 
0.44172757863998413, "learning_rate": 1.229433461809441e-06, "loss": 0.7159, "step": 18197 }, { "epoch": 0.7541961954494592, "grad_norm": 0.4471127688884735, "learning_rate": 1.2292262422810728e-06, "loss": 0.6715, "step": 18198 }, { "epoch": 0.7542376393551329, "grad_norm": 0.48301398754119873, "learning_rate": 1.2290190227527044e-06, "loss": 0.7354, "step": 18199 }, { "epoch": 0.7542790832608065, "grad_norm": 0.38618719577789307, "learning_rate": 1.228811803224336e-06, "loss": 0.6012, "step": 18200 }, { "epoch": 0.7543205271664801, "grad_norm": 0.4431675374507904, "learning_rate": 1.2286045836959676e-06, "loss": 0.7437, "step": 18201 }, { "epoch": 0.7543619710721539, "grad_norm": 0.42908886075019836, "learning_rate": 1.2283973641675992e-06, "loss": 0.6815, "step": 18202 }, { "epoch": 0.7544034149778275, "grad_norm": 0.4171474874019623, "learning_rate": 1.228190144639231e-06, "loss": 0.6636, "step": 18203 }, { "epoch": 0.7544448588835012, "grad_norm": 0.43771809339523315, "learning_rate": 1.2279829251108626e-06, "loss": 0.6777, "step": 18204 }, { "epoch": 0.7544863027891748, "grad_norm": 0.3687293231487274, "learning_rate": 1.2277757055824942e-06, "loss": 0.6604, "step": 18205 }, { "epoch": 0.7545277466948486, "grad_norm": 0.40466344356536865, "learning_rate": 1.2275684860541258e-06, "loss": 0.6802, "step": 18206 }, { "epoch": 0.7545691906005222, "grad_norm": 0.4073166251182556, "learning_rate": 1.2273612665257576e-06, "loss": 0.6816, "step": 18207 }, { "epoch": 0.7546106345061958, "grad_norm": 0.4285399615764618, "learning_rate": 1.2271540469973892e-06, "loss": 0.7079, "step": 18208 }, { "epoch": 0.7546520784118695, "grad_norm": 0.4258655309677124, "learning_rate": 1.2269468274690208e-06, "loss": 0.6793, "step": 18209 }, { "epoch": 0.7546935223175432, "grad_norm": 0.4392007291316986, "learning_rate": 1.2267396079406524e-06, "loss": 0.6782, "step": 18210 }, { "epoch": 0.7547349662232169, "grad_norm": 0.4320068657398224, "learning_rate": 1.2265323884122842e-06, 
"loss": 0.6587, "step": 18211 }, { "epoch": 0.7547764101288905, "grad_norm": 0.4275197982788086, "learning_rate": 1.2263251688839158e-06, "loss": 0.6915, "step": 18212 }, { "epoch": 0.7548178540345643, "grad_norm": 0.4353669285774231, "learning_rate": 1.2261179493555474e-06, "loss": 0.6586, "step": 18213 }, { "epoch": 0.7548592979402379, "grad_norm": 0.39792516827583313, "learning_rate": 1.225910729827179e-06, "loss": 0.6119, "step": 18214 }, { "epoch": 0.7549007418459116, "grad_norm": 0.46582213044166565, "learning_rate": 1.2257035102988106e-06, "loss": 0.6675, "step": 18215 }, { "epoch": 0.7549421857515852, "grad_norm": 0.37943220138549805, "learning_rate": 1.2254962907704424e-06, "loss": 0.6373, "step": 18216 }, { "epoch": 0.7549836296572588, "grad_norm": 0.43685299158096313, "learning_rate": 1.225289071242074e-06, "loss": 0.6526, "step": 18217 }, { "epoch": 0.7550250735629326, "grad_norm": 0.40441015362739563, "learning_rate": 1.2250818517137056e-06, "loss": 0.626, "step": 18218 }, { "epoch": 0.7550665174686062, "grad_norm": 0.42112988233566284, "learning_rate": 1.2248746321853372e-06, "loss": 0.6433, "step": 18219 }, { "epoch": 0.7551079613742799, "grad_norm": 0.4789241552352905, "learning_rate": 1.224667412656969e-06, "loss": 0.7092, "step": 18220 }, { "epoch": 0.7551494052799536, "grad_norm": 0.4105861186981201, "learning_rate": 1.2244601931286006e-06, "loss": 0.6697, "step": 18221 }, { "epoch": 0.7551908491856273, "grad_norm": 0.3997950553894043, "learning_rate": 1.2242529736002322e-06, "loss": 0.6599, "step": 18222 }, { "epoch": 0.7552322930913009, "grad_norm": 0.4342069625854492, "learning_rate": 1.2240457540718638e-06, "loss": 0.7213, "step": 18223 }, { "epoch": 0.7552737369969746, "grad_norm": 0.4278947412967682, "learning_rate": 1.2238385345434954e-06, "loss": 0.6559, "step": 18224 }, { "epoch": 0.7553151809026483, "grad_norm": 0.4252760410308838, "learning_rate": 1.2236313150151272e-06, "loss": 0.6602, "step": 18225 }, { "epoch": 0.7553566248083219, 
"grad_norm": 0.3982548117637634, "learning_rate": 1.2234240954867588e-06, "loss": 0.6593, "step": 18226 }, { "epoch": 0.7553980687139956, "grad_norm": 0.4037231206893921, "learning_rate": 1.2232168759583904e-06, "loss": 0.6345, "step": 18227 }, { "epoch": 0.7554395126196692, "grad_norm": 0.4367417097091675, "learning_rate": 1.223009656430022e-06, "loss": 0.6672, "step": 18228 }, { "epoch": 0.755480956525343, "grad_norm": 0.40735286474227905, "learning_rate": 1.2228024369016538e-06, "loss": 0.6902, "step": 18229 }, { "epoch": 0.7555224004310166, "grad_norm": 0.397272527217865, "learning_rate": 1.2225952173732854e-06, "loss": 0.6533, "step": 18230 }, { "epoch": 0.7555638443366903, "grad_norm": 0.4441736042499542, "learning_rate": 1.222387997844917e-06, "loss": 0.6996, "step": 18231 }, { "epoch": 0.755605288242364, "grad_norm": 0.43984255194664, "learning_rate": 1.2221807783165486e-06, "loss": 0.6748, "step": 18232 }, { "epoch": 0.7556467321480377, "grad_norm": 0.4073004424571991, "learning_rate": 1.2219735587881804e-06, "loss": 0.6951, "step": 18233 }, { "epoch": 0.7556881760537113, "grad_norm": 0.4917154908180237, "learning_rate": 1.221766339259812e-06, "loss": 0.6804, "step": 18234 }, { "epoch": 0.7557296199593849, "grad_norm": 0.3972444534301758, "learning_rate": 1.2215591197314436e-06, "loss": 0.6576, "step": 18235 }, { "epoch": 0.7557710638650587, "grad_norm": 0.422237753868103, "learning_rate": 1.2213519002030752e-06, "loss": 0.6764, "step": 18236 }, { "epoch": 0.7558125077707323, "grad_norm": 0.37819766998291016, "learning_rate": 1.2211446806747068e-06, "loss": 0.6469, "step": 18237 }, { "epoch": 0.755853951676406, "grad_norm": 0.42419740557670593, "learning_rate": 1.2209374611463386e-06, "loss": 0.7075, "step": 18238 }, { "epoch": 0.7558953955820796, "grad_norm": 0.39829257130622864, "learning_rate": 1.2207302416179702e-06, "loss": 0.6418, "step": 18239 }, { "epoch": 0.7559368394877534, "grad_norm": 0.4384540319442749, "learning_rate": 1.2205230220896018e-06, 
"loss": 0.6525, "step": 18240 }, { "epoch": 0.755978283393427, "grad_norm": 0.3931398093700409, "learning_rate": 1.2203158025612334e-06, "loss": 0.6881, "step": 18241 }, { "epoch": 0.7560197272991007, "grad_norm": 0.4000490605831146, "learning_rate": 1.2201085830328652e-06, "loss": 0.6613, "step": 18242 }, { "epoch": 0.7560611712047743, "grad_norm": 0.4376785159111023, "learning_rate": 1.2199013635044968e-06, "loss": 0.6873, "step": 18243 }, { "epoch": 0.756102615110448, "grad_norm": 0.41143882274627686, "learning_rate": 1.2196941439761284e-06, "loss": 0.6169, "step": 18244 }, { "epoch": 0.7561440590161217, "grad_norm": 0.4073342978954315, "learning_rate": 1.21948692444776e-06, "loss": 0.663, "step": 18245 }, { "epoch": 0.7561855029217953, "grad_norm": 0.4308210015296936, "learning_rate": 1.2192797049193918e-06, "loss": 0.6985, "step": 18246 }, { "epoch": 0.756226946827469, "grad_norm": 0.3765134811401367, "learning_rate": 1.2190724853910234e-06, "loss": 0.6425, "step": 18247 }, { "epoch": 0.7562683907331427, "grad_norm": 0.4012081027030945, "learning_rate": 1.218865265862655e-06, "loss": 0.6855, "step": 18248 }, { "epoch": 0.7563098346388164, "grad_norm": 0.3964138329029083, "learning_rate": 1.2186580463342866e-06, "loss": 0.6771, "step": 18249 }, { "epoch": 0.75635127854449, "grad_norm": 0.41609707474708557, "learning_rate": 1.2184508268059182e-06, "loss": 0.6288, "step": 18250 }, { "epoch": 0.7563927224501638, "grad_norm": 0.40993911027908325, "learning_rate": 1.21824360727755e-06, "loss": 0.6721, "step": 18251 }, { "epoch": 0.7564341663558374, "grad_norm": 0.40015077590942383, "learning_rate": 1.2180363877491816e-06, "loss": 0.6396, "step": 18252 }, { "epoch": 0.756475610261511, "grad_norm": 0.4193427562713623, "learning_rate": 1.2178291682208132e-06, "loss": 0.6763, "step": 18253 }, { "epoch": 0.7565170541671847, "grad_norm": 0.3957098126411438, "learning_rate": 1.2176219486924448e-06, "loss": 0.6892, "step": 18254 }, { "epoch": 0.7565584980728584, 
"grad_norm": 0.42201468348503113, "learning_rate": 1.2174147291640766e-06, "loss": 0.6495, "step": 18255 }, { "epoch": 0.7565999419785321, "grad_norm": 0.4222988784313202, "learning_rate": 1.2172075096357082e-06, "loss": 0.6743, "step": 18256 }, { "epoch": 0.7566413858842057, "grad_norm": 0.40637075901031494, "learning_rate": 1.2170002901073398e-06, "loss": 0.6611, "step": 18257 }, { "epoch": 0.7566828297898794, "grad_norm": 0.42748862504959106, "learning_rate": 1.2167930705789714e-06, "loss": 0.674, "step": 18258 }, { "epoch": 0.7567242736955531, "grad_norm": 0.42896154522895813, "learning_rate": 1.2165858510506032e-06, "loss": 0.6821, "step": 18259 }, { "epoch": 0.7567657176012268, "grad_norm": 0.40537121891975403, "learning_rate": 1.2163786315222348e-06, "loss": 0.6561, "step": 18260 }, { "epoch": 0.7568071615069004, "grad_norm": 0.46956291794776917, "learning_rate": 1.2161714119938664e-06, "loss": 0.741, "step": 18261 }, { "epoch": 0.756848605412574, "grad_norm": 0.43676742911338806, "learning_rate": 1.215964192465498e-06, "loss": 0.6871, "step": 18262 }, { "epoch": 0.7568900493182478, "grad_norm": 0.47088801860809326, "learning_rate": 1.2157569729371296e-06, "loss": 0.7427, "step": 18263 }, { "epoch": 0.7569314932239214, "grad_norm": 0.40483951568603516, "learning_rate": 1.2155497534087614e-06, "loss": 0.7, "step": 18264 }, { "epoch": 0.7569729371295951, "grad_norm": 0.4142512083053589, "learning_rate": 1.215342533880393e-06, "loss": 0.679, "step": 18265 }, { "epoch": 0.7570143810352687, "grad_norm": 0.4070943593978882, "learning_rate": 1.2151353143520246e-06, "loss": 0.6826, "step": 18266 }, { "epoch": 0.7570558249409425, "grad_norm": 0.37281253933906555, "learning_rate": 1.2149280948236562e-06, "loss": 0.6375, "step": 18267 }, { "epoch": 0.7570972688466161, "grad_norm": 0.4313116669654846, "learning_rate": 1.214720875295288e-06, "loss": 0.6458, "step": 18268 }, { "epoch": 0.7571387127522897, "grad_norm": 0.4331933259963989, "learning_rate": 
1.2145136557669196e-06, "loss": 0.6594, "step": 18269 }, { "epoch": 0.7571801566579635, "grad_norm": 0.4115048050880432, "learning_rate": 1.2143064362385512e-06, "loss": 0.677, "step": 18270 }, { "epoch": 0.7572216005636371, "grad_norm": 0.44158029556274414, "learning_rate": 1.2140992167101828e-06, "loss": 0.6884, "step": 18271 }, { "epoch": 0.7572630444693108, "grad_norm": 0.4029923677444458, "learning_rate": 1.2138919971818146e-06, "loss": 0.6719, "step": 18272 }, { "epoch": 0.7573044883749844, "grad_norm": 0.42345482110977173, "learning_rate": 1.2136847776534462e-06, "loss": 0.6584, "step": 18273 }, { "epoch": 0.7573459322806582, "grad_norm": 0.4061579406261444, "learning_rate": 1.2134775581250778e-06, "loss": 0.6648, "step": 18274 }, { "epoch": 0.7573873761863318, "grad_norm": 0.4210527241230011, "learning_rate": 1.2132703385967094e-06, "loss": 0.6815, "step": 18275 }, { "epoch": 0.7574288200920055, "grad_norm": 0.37955883145332336, "learning_rate": 1.213063119068341e-06, "loss": 0.6475, "step": 18276 }, { "epoch": 0.7574702639976791, "grad_norm": 0.40023282170295715, "learning_rate": 1.2128558995399728e-06, "loss": 0.6849, "step": 18277 }, { "epoch": 0.7575117079033528, "grad_norm": 0.40901604294776917, "learning_rate": 1.2126486800116044e-06, "loss": 0.687, "step": 18278 }, { "epoch": 0.7575531518090265, "grad_norm": 0.4351852834224701, "learning_rate": 1.212441460483236e-06, "loss": 0.6682, "step": 18279 }, { "epoch": 0.7575945957147001, "grad_norm": 0.40971407294273376, "learning_rate": 1.2122342409548676e-06, "loss": 0.6212, "step": 18280 }, { "epoch": 0.7576360396203738, "grad_norm": 0.42162537574768066, "learning_rate": 1.2120270214264994e-06, "loss": 0.691, "step": 18281 }, { "epoch": 0.7576774835260475, "grad_norm": 0.40328946709632874, "learning_rate": 1.211819801898131e-06, "loss": 0.6921, "step": 18282 }, { "epoch": 0.7577189274317212, "grad_norm": 0.4126608073711395, "learning_rate": 1.2116125823697626e-06, "loss": 0.6707, "step": 18283 }, { 
"epoch": 0.7577603713373948, "grad_norm": 0.4197458028793335, "learning_rate": 1.2114053628413942e-06, "loss": 0.6921, "step": 18284 }, { "epoch": 0.7578018152430686, "grad_norm": 0.40786877274513245, "learning_rate": 1.211198143313026e-06, "loss": 0.6714, "step": 18285 }, { "epoch": 0.7578432591487422, "grad_norm": 0.4516717493534088, "learning_rate": 1.2109909237846576e-06, "loss": 0.6602, "step": 18286 }, { "epoch": 0.7578847030544158, "grad_norm": 0.44697824120521545, "learning_rate": 1.2107837042562892e-06, "loss": 0.6938, "step": 18287 }, { "epoch": 0.7579261469600895, "grad_norm": 0.397945374250412, "learning_rate": 1.2105764847279208e-06, "loss": 0.6517, "step": 18288 }, { "epoch": 0.7579675908657632, "grad_norm": 0.4477408826351166, "learning_rate": 1.2103692651995524e-06, "loss": 0.7263, "step": 18289 }, { "epoch": 0.7580090347714369, "grad_norm": 0.40338435769081116, "learning_rate": 1.2101620456711842e-06, "loss": 0.6824, "step": 18290 }, { "epoch": 0.7580504786771105, "grad_norm": 0.46308666467666626, "learning_rate": 1.2099548261428158e-06, "loss": 0.6993, "step": 18291 }, { "epoch": 0.7580919225827842, "grad_norm": 0.4168059527873993, "learning_rate": 1.2097476066144474e-06, "loss": 0.634, "step": 18292 }, { "epoch": 0.7581333664884579, "grad_norm": 0.40961647033691406, "learning_rate": 1.209540387086079e-06, "loss": 0.655, "step": 18293 }, { "epoch": 0.7581748103941316, "grad_norm": 0.4179156720638275, "learning_rate": 1.2093331675577108e-06, "loss": 0.6958, "step": 18294 }, { "epoch": 0.7582162542998052, "grad_norm": 0.43189772963523865, "learning_rate": 1.2091259480293424e-06, "loss": 0.7048, "step": 18295 }, { "epoch": 0.7582576982054788, "grad_norm": 0.3966561555862427, "learning_rate": 1.208918728500974e-06, "loss": 0.6561, "step": 18296 }, { "epoch": 0.7582991421111526, "grad_norm": 0.4419609606266022, "learning_rate": 1.2087115089726056e-06, "loss": 0.6429, "step": 18297 }, { "epoch": 0.7583405860168262, "grad_norm": 0.3821149170398712, 
"learning_rate": 1.2085042894442372e-06, "loss": 0.6206, "step": 18298 }, { "epoch": 0.7583820299224999, "grad_norm": 0.3998735547065735, "learning_rate": 1.208297069915869e-06, "loss": 0.691, "step": 18299 }, { "epoch": 0.7584234738281735, "grad_norm": 0.40938466787338257, "learning_rate": 1.2080898503875006e-06, "loss": 0.6595, "step": 18300 }, { "epoch": 0.7584649177338473, "grad_norm": 0.3774470090866089, "learning_rate": 1.2078826308591322e-06, "loss": 0.6379, "step": 18301 }, { "epoch": 0.7585063616395209, "grad_norm": 0.39445528388023376, "learning_rate": 1.2076754113307638e-06, "loss": 0.6323, "step": 18302 }, { "epoch": 0.7585478055451946, "grad_norm": 0.3761560916900635, "learning_rate": 1.2074681918023956e-06, "loss": 0.5693, "step": 18303 }, { "epoch": 0.7585892494508683, "grad_norm": 0.4001929461956024, "learning_rate": 1.2072609722740272e-06, "loss": 0.6836, "step": 18304 }, { "epoch": 0.7586306933565419, "grad_norm": 0.42420458793640137, "learning_rate": 1.2070537527456588e-06, "loss": 0.6941, "step": 18305 }, { "epoch": 0.7586721372622156, "grad_norm": 0.44530537724494934, "learning_rate": 1.2068465332172904e-06, "loss": 0.6677, "step": 18306 }, { "epoch": 0.7587135811678892, "grad_norm": 0.408319354057312, "learning_rate": 1.2066393136889222e-06, "loss": 0.678, "step": 18307 }, { "epoch": 0.758755025073563, "grad_norm": 0.421370267868042, "learning_rate": 1.2064320941605538e-06, "loss": 0.7195, "step": 18308 }, { "epoch": 0.7587964689792366, "grad_norm": 0.37915274500846863, "learning_rate": 1.2062248746321854e-06, "loss": 0.6678, "step": 18309 }, { "epoch": 0.7588379128849103, "grad_norm": 0.46476539969444275, "learning_rate": 1.206017655103817e-06, "loss": 0.7148, "step": 18310 }, { "epoch": 0.7588793567905839, "grad_norm": 0.4160342216491699, "learning_rate": 1.2058104355754486e-06, "loss": 0.6646, "step": 18311 }, { "epoch": 0.7589208006962577, "grad_norm": 0.4177989661693573, "learning_rate": 1.2056032160470804e-06, "loss": 0.6849, "step": 
18312 }, { "epoch": 0.7589622446019313, "grad_norm": 0.3905975818634033, "learning_rate": 1.205395996518712e-06, "loss": 0.6335, "step": 18313 }, { "epoch": 0.7590036885076049, "grad_norm": 0.40524470806121826, "learning_rate": 1.2051887769903436e-06, "loss": 0.6509, "step": 18314 }, { "epoch": 0.7590451324132786, "grad_norm": 0.401454359292984, "learning_rate": 1.2049815574619752e-06, "loss": 0.649, "step": 18315 }, { "epoch": 0.7590865763189523, "grad_norm": 0.42186182737350464, "learning_rate": 1.204774337933607e-06, "loss": 0.6375, "step": 18316 }, { "epoch": 0.759128020224626, "grad_norm": 0.41850799322128296, "learning_rate": 1.2045671184052386e-06, "loss": 0.7015, "step": 18317 }, { "epoch": 0.7591694641302996, "grad_norm": 0.45139169692993164, "learning_rate": 1.2043598988768702e-06, "loss": 0.6967, "step": 18318 }, { "epoch": 0.7592109080359734, "grad_norm": 0.4083717465400696, "learning_rate": 1.2041526793485018e-06, "loss": 0.6482, "step": 18319 }, { "epoch": 0.759252351941647, "grad_norm": 0.3886021077632904, "learning_rate": 1.2039454598201336e-06, "loss": 0.666, "step": 18320 }, { "epoch": 0.7592937958473206, "grad_norm": 0.4052678048610687, "learning_rate": 1.2037382402917652e-06, "loss": 0.6685, "step": 18321 }, { "epoch": 0.7593352397529943, "grad_norm": 0.41507044434547424, "learning_rate": 1.2035310207633968e-06, "loss": 0.6888, "step": 18322 }, { "epoch": 0.759376683658668, "grad_norm": 0.40326935052871704, "learning_rate": 1.2033238012350284e-06, "loss": 0.6558, "step": 18323 }, { "epoch": 0.7594181275643417, "grad_norm": 0.4302123188972473, "learning_rate": 1.20311658170666e-06, "loss": 0.7517, "step": 18324 }, { "epoch": 0.7594595714700153, "grad_norm": 0.4274311065673828, "learning_rate": 1.2029093621782918e-06, "loss": 0.6716, "step": 18325 }, { "epoch": 0.759501015375689, "grad_norm": 0.4138660728931427, "learning_rate": 1.2027021426499234e-06, "loss": 0.6858, "step": 18326 }, { "epoch": 0.7595424592813627, "grad_norm": 0.4584431052207947, 
"learning_rate": 1.202494923121555e-06, "loss": 0.6763, "step": 18327 }, { "epoch": 0.7595839031870364, "grad_norm": 0.4378434121608734, "learning_rate": 1.2022877035931866e-06, "loss": 0.7101, "step": 18328 }, { "epoch": 0.75962534709271, "grad_norm": 0.43136072158813477, "learning_rate": 1.2020804840648184e-06, "loss": 0.6812, "step": 18329 }, { "epoch": 0.7596667909983836, "grad_norm": 0.4279765486717224, "learning_rate": 1.20187326453645e-06, "loss": 0.6841, "step": 18330 }, { "epoch": 0.7597082349040574, "grad_norm": 0.41779157519340515, "learning_rate": 1.2016660450080816e-06, "loss": 0.7, "step": 18331 }, { "epoch": 0.759749678809731, "grad_norm": 0.40034160017967224, "learning_rate": 1.2014588254797132e-06, "loss": 0.6382, "step": 18332 }, { "epoch": 0.7597911227154047, "grad_norm": 0.4380475878715515, "learning_rate": 1.201251605951345e-06, "loss": 0.6812, "step": 18333 }, { "epoch": 0.7598325666210783, "grad_norm": 0.47501856088638306, "learning_rate": 1.2010443864229766e-06, "loss": 0.6825, "step": 18334 }, { "epoch": 0.7598740105267521, "grad_norm": 0.4194132685661316, "learning_rate": 1.2008371668946082e-06, "loss": 0.6776, "step": 18335 }, { "epoch": 0.7599154544324257, "grad_norm": 0.4407571852207184, "learning_rate": 1.2006299473662398e-06, "loss": 0.668, "step": 18336 }, { "epoch": 0.7599568983380994, "grad_norm": 0.42548617720603943, "learning_rate": 1.2004227278378714e-06, "loss": 0.6802, "step": 18337 }, { "epoch": 0.759998342243773, "grad_norm": 0.4193524122238159, "learning_rate": 1.2002155083095032e-06, "loss": 0.6853, "step": 18338 }, { "epoch": 0.7600397861494467, "grad_norm": 0.4022304117679596, "learning_rate": 1.2000082887811348e-06, "loss": 0.6377, "step": 18339 }, { "epoch": 0.7600812300551204, "grad_norm": 0.4295608401298523, "learning_rate": 1.1998010692527664e-06, "loss": 0.7039, "step": 18340 }, { "epoch": 0.760122673960794, "grad_norm": 0.39926278591156006, "learning_rate": 1.199593849724398e-06, "loss": 0.6918, "step": 18341 }, { 
"epoch": 0.7601641178664678, "grad_norm": 0.38446712493896484, "learning_rate": 1.1993866301960298e-06, "loss": 0.6587, "step": 18342 }, { "epoch": 0.7602055617721414, "grad_norm": 0.41846922039985657, "learning_rate": 1.1991794106676614e-06, "loss": 0.6566, "step": 18343 }, { "epoch": 0.7602470056778151, "grad_norm": 0.4314047694206238, "learning_rate": 1.198972191139293e-06, "loss": 0.73, "step": 18344 }, { "epoch": 0.7602884495834887, "grad_norm": 0.4201318919658661, "learning_rate": 1.1987649716109246e-06, "loss": 0.6624, "step": 18345 }, { "epoch": 0.7603298934891625, "grad_norm": 0.45999568700790405, "learning_rate": 1.1985577520825564e-06, "loss": 0.6759, "step": 18346 }, { "epoch": 0.7603713373948361, "grad_norm": 0.4237552881240845, "learning_rate": 1.198350532554188e-06, "loss": 0.6609, "step": 18347 }, { "epoch": 0.7604127813005097, "grad_norm": 0.40629950165748596, "learning_rate": 1.1981433130258196e-06, "loss": 0.7065, "step": 18348 }, { "epoch": 0.7604542252061834, "grad_norm": 0.43076226115226746, "learning_rate": 1.1979360934974512e-06, "loss": 0.6921, "step": 18349 }, { "epoch": 0.7604956691118571, "grad_norm": 0.46152153611183167, "learning_rate": 1.1977288739690828e-06, "loss": 0.7126, "step": 18350 }, { "epoch": 0.7605371130175308, "grad_norm": 0.40662476420402527, "learning_rate": 1.1975216544407146e-06, "loss": 0.6907, "step": 18351 }, { "epoch": 0.7605785569232044, "grad_norm": 0.39291083812713623, "learning_rate": 1.1973144349123462e-06, "loss": 0.6345, "step": 18352 }, { "epoch": 0.7606200008288782, "grad_norm": 0.41597530245780945, "learning_rate": 1.1971072153839778e-06, "loss": 0.6819, "step": 18353 }, { "epoch": 0.7606614447345518, "grad_norm": 0.3715381622314453, "learning_rate": 1.1968999958556094e-06, "loss": 0.6166, "step": 18354 }, { "epoch": 0.7607028886402255, "grad_norm": 0.3759414851665497, "learning_rate": 1.1966927763272412e-06, "loss": 0.6288, "step": 18355 }, { "epoch": 0.7607443325458991, "grad_norm": 0.4244822859764099, 
"learning_rate": 1.1964855567988728e-06, "loss": 0.6931, "step": 18356 }, { "epoch": 0.7607857764515727, "grad_norm": 0.41699737310409546, "learning_rate": 1.1962783372705044e-06, "loss": 0.7198, "step": 18357 }, { "epoch": 0.7608272203572465, "grad_norm": 0.38307371735572815, "learning_rate": 1.196071117742136e-06, "loss": 0.6262, "step": 18358 }, { "epoch": 0.7608686642629201, "grad_norm": 0.4011707007884979, "learning_rate": 1.1958638982137678e-06, "loss": 0.658, "step": 18359 }, { "epoch": 0.7609101081685938, "grad_norm": 0.43034785985946655, "learning_rate": 1.1956566786853994e-06, "loss": 0.7031, "step": 18360 }, { "epoch": 0.7609515520742675, "grad_norm": 0.43973734974861145, "learning_rate": 1.195449459157031e-06, "loss": 0.6937, "step": 18361 }, { "epoch": 0.7609929959799412, "grad_norm": 0.38501331210136414, "learning_rate": 1.1952422396286626e-06, "loss": 0.6895, "step": 18362 }, { "epoch": 0.7610344398856148, "grad_norm": 0.4142957925796509, "learning_rate": 1.1950350201002944e-06, "loss": 0.6885, "step": 18363 }, { "epoch": 0.7610758837912885, "grad_norm": 0.4294329285621643, "learning_rate": 1.194827800571926e-06, "loss": 0.7302, "step": 18364 }, { "epoch": 0.7611173276969622, "grad_norm": 0.4126407504081726, "learning_rate": 1.1946205810435576e-06, "loss": 0.6763, "step": 18365 }, { "epoch": 0.7611587716026358, "grad_norm": 0.40298980474472046, "learning_rate": 1.1944133615151892e-06, "loss": 0.676, "step": 18366 }, { "epoch": 0.7612002155083095, "grad_norm": 0.40391093492507935, "learning_rate": 1.194206141986821e-06, "loss": 0.6821, "step": 18367 }, { "epoch": 0.7612416594139831, "grad_norm": 0.43327081203460693, "learning_rate": 1.1939989224584526e-06, "loss": 0.6842, "step": 18368 }, { "epoch": 0.7612831033196569, "grad_norm": 0.4301246702671051, "learning_rate": 1.1937917029300842e-06, "loss": 0.6619, "step": 18369 }, { "epoch": 0.7613245472253305, "grad_norm": 0.4142049252986908, "learning_rate": 1.1935844834017158e-06, "loss": 0.6592, "step": 
18370 }, { "epoch": 0.7613659911310042, "grad_norm": 0.4257356822490692, "learning_rate": 1.1933772638733474e-06, "loss": 0.6987, "step": 18371 }, { "epoch": 0.7614074350366778, "grad_norm": 0.4078195095062256, "learning_rate": 1.1931700443449792e-06, "loss": 0.7117, "step": 18372 }, { "epoch": 0.7614488789423516, "grad_norm": 0.4216165542602539, "learning_rate": 1.1929628248166108e-06, "loss": 0.7107, "step": 18373 }, { "epoch": 0.7614903228480252, "grad_norm": 0.4060041606426239, "learning_rate": 1.1927556052882424e-06, "loss": 0.6427, "step": 18374 }, { "epoch": 0.7615317667536988, "grad_norm": 0.44099846482276917, "learning_rate": 1.192548385759874e-06, "loss": 0.7004, "step": 18375 }, { "epoch": 0.7615732106593726, "grad_norm": 0.4212326407432556, "learning_rate": 1.1923411662315058e-06, "loss": 0.7178, "step": 18376 }, { "epoch": 0.7616146545650462, "grad_norm": 0.4202856123447418, "learning_rate": 1.1921339467031374e-06, "loss": 0.6892, "step": 18377 }, { "epoch": 0.7616560984707199, "grad_norm": 0.43056002259254456, "learning_rate": 1.191926727174769e-06, "loss": 0.7247, "step": 18378 }, { "epoch": 0.7616975423763935, "grad_norm": 0.4548676311969757, "learning_rate": 1.1917195076464006e-06, "loss": 0.6635, "step": 18379 }, { "epoch": 0.7617389862820673, "grad_norm": 0.4552077054977417, "learning_rate": 1.1915122881180324e-06, "loss": 0.7075, "step": 18380 }, { "epoch": 0.7617804301877409, "grad_norm": 0.40756136178970337, "learning_rate": 1.191305068589664e-06, "loss": 0.6361, "step": 18381 }, { "epoch": 0.7618218740934145, "grad_norm": 0.4407983422279358, "learning_rate": 1.1910978490612956e-06, "loss": 0.6906, "step": 18382 }, { "epoch": 0.7618633179990882, "grad_norm": 0.40262487530708313, "learning_rate": 1.1908906295329272e-06, "loss": 0.6904, "step": 18383 }, { "epoch": 0.7619047619047619, "grad_norm": 0.38154393434524536, "learning_rate": 1.190683410004559e-06, "loss": 0.6598, "step": 18384 }, { "epoch": 0.7619462058104356, "grad_norm": 
0.3998153805732727, "learning_rate": 1.1904761904761906e-06, "loss": 0.6675, "step": 18385 }, { "epoch": 0.7619876497161092, "grad_norm": 0.4057264029979706, "learning_rate": 1.1902689709478222e-06, "loss": 0.6885, "step": 18386 }, { "epoch": 0.762029093621783, "grad_norm": 0.40950122475624084, "learning_rate": 1.1900617514194538e-06, "loss": 0.6589, "step": 18387 }, { "epoch": 0.7620705375274566, "grad_norm": 0.43461716175079346, "learning_rate": 1.1898545318910854e-06, "loss": 0.6902, "step": 18388 }, { "epoch": 0.7621119814331303, "grad_norm": 0.4395633935928345, "learning_rate": 1.1896473123627172e-06, "loss": 0.6974, "step": 18389 }, { "epoch": 0.7621534253388039, "grad_norm": 0.3979886770248413, "learning_rate": 1.1894400928343488e-06, "loss": 0.6149, "step": 18390 }, { "epoch": 0.7621948692444775, "grad_norm": 0.48363152146339417, "learning_rate": 1.1892328733059804e-06, "loss": 0.7729, "step": 18391 }, { "epoch": 0.7622363131501513, "grad_norm": 0.43511006236076355, "learning_rate": 1.189025653777612e-06, "loss": 0.6926, "step": 18392 }, { "epoch": 0.7622777570558249, "grad_norm": 0.41377049684524536, "learning_rate": 1.1888184342492438e-06, "loss": 0.6338, "step": 18393 }, { "epoch": 0.7623192009614986, "grad_norm": 0.43079859018325806, "learning_rate": 1.1886112147208754e-06, "loss": 0.7416, "step": 18394 }, { "epoch": 0.7623606448671723, "grad_norm": 0.4158470034599304, "learning_rate": 1.188403995192507e-06, "loss": 0.684, "step": 18395 }, { "epoch": 0.762402088772846, "grad_norm": 0.3965252637863159, "learning_rate": 1.1881967756641386e-06, "loss": 0.6682, "step": 18396 }, { "epoch": 0.7624435326785196, "grad_norm": 0.4280250668525696, "learning_rate": 1.1879895561357704e-06, "loss": 0.7129, "step": 18397 }, { "epoch": 0.7624849765841933, "grad_norm": 0.3984662592411041, "learning_rate": 1.187782336607402e-06, "loss": 0.7018, "step": 18398 }, { "epoch": 0.762526420489867, "grad_norm": 0.40761762857437134, "learning_rate": 1.1875751170790336e-06, 
"loss": 0.6538, "step": 18399 }, { "epoch": 0.7625678643955406, "grad_norm": 0.4361613988876343, "learning_rate": 1.1873678975506652e-06, "loss": 0.6909, "step": 18400 }, { "epoch": 0.7626093083012143, "grad_norm": 0.3918582797050476, "learning_rate": 1.187160678022297e-06, "loss": 0.6599, "step": 18401 }, { "epoch": 0.7626507522068879, "grad_norm": 0.39397314190864563, "learning_rate": 1.1869534584939286e-06, "loss": 0.6316, "step": 18402 }, { "epoch": 0.7626921961125617, "grad_norm": 0.4001719653606415, "learning_rate": 1.1867462389655602e-06, "loss": 0.6482, "step": 18403 }, { "epoch": 0.7627336400182353, "grad_norm": 0.39547988772392273, "learning_rate": 1.1865390194371918e-06, "loss": 0.615, "step": 18404 }, { "epoch": 0.762775083923909, "grad_norm": 0.3844884932041168, "learning_rate": 1.1863317999088234e-06, "loss": 0.6655, "step": 18405 }, { "epoch": 0.7628165278295826, "grad_norm": 0.41524896025657654, "learning_rate": 1.1861245803804552e-06, "loss": 0.6772, "step": 18406 }, { "epoch": 0.7628579717352564, "grad_norm": 0.42783123254776, "learning_rate": 1.1859173608520868e-06, "loss": 0.6722, "step": 18407 }, { "epoch": 0.76289941564093, "grad_norm": 0.43278905749320984, "learning_rate": 1.1857101413237184e-06, "loss": 0.6589, "step": 18408 }, { "epoch": 0.7629408595466036, "grad_norm": 0.4240676760673523, "learning_rate": 1.18550292179535e-06, "loss": 0.6602, "step": 18409 }, { "epoch": 0.7629823034522774, "grad_norm": 0.41042715311050415, "learning_rate": 1.1852957022669818e-06, "loss": 0.6708, "step": 18410 }, { "epoch": 0.763023747357951, "grad_norm": 0.39893192052841187, "learning_rate": 1.1850884827386134e-06, "loss": 0.6569, "step": 18411 }, { "epoch": 0.7630651912636247, "grad_norm": 0.41617295145988464, "learning_rate": 1.184881263210245e-06, "loss": 0.6455, "step": 18412 }, { "epoch": 0.7631066351692983, "grad_norm": 0.42428210377693176, "learning_rate": 1.1846740436818766e-06, "loss": 0.698, "step": 18413 }, { "epoch": 0.7631480790749721, 
"grad_norm": 0.44222119450569153, "learning_rate": 1.1844668241535084e-06, "loss": 0.6927, "step": 18414 }, { "epoch": 0.7631895229806457, "grad_norm": 0.43572643399238586, "learning_rate": 1.18425960462514e-06, "loss": 0.7134, "step": 18415 }, { "epoch": 0.7632309668863194, "grad_norm": 0.42910078167915344, "learning_rate": 1.1840523850967716e-06, "loss": 0.7007, "step": 18416 }, { "epoch": 0.763272410791993, "grad_norm": 0.41192805767059326, "learning_rate": 1.1838451655684032e-06, "loss": 0.6672, "step": 18417 }, { "epoch": 0.7633138546976667, "grad_norm": 0.4022907316684723, "learning_rate": 1.183637946040035e-06, "loss": 0.6373, "step": 18418 }, { "epoch": 0.7633552986033404, "grad_norm": 0.4238658547401428, "learning_rate": 1.1834307265116666e-06, "loss": 0.6873, "step": 18419 }, { "epoch": 0.763396742509014, "grad_norm": 0.40032732486724854, "learning_rate": 1.1832235069832982e-06, "loss": 0.6631, "step": 18420 }, { "epoch": 0.7634381864146877, "grad_norm": 0.4157845377922058, "learning_rate": 1.1830162874549298e-06, "loss": 0.6398, "step": 18421 }, { "epoch": 0.7634796303203614, "grad_norm": 0.4107213616371155, "learning_rate": 1.1828090679265616e-06, "loss": 0.7031, "step": 18422 }, { "epoch": 0.7635210742260351, "grad_norm": 0.444105327129364, "learning_rate": 1.1826018483981932e-06, "loss": 0.718, "step": 18423 }, { "epoch": 0.7635625181317087, "grad_norm": 0.4231577515602112, "learning_rate": 1.1823946288698248e-06, "loss": 0.6942, "step": 18424 }, { "epoch": 0.7636039620373825, "grad_norm": 0.4133392572402954, "learning_rate": 1.1821874093414564e-06, "loss": 0.6575, "step": 18425 }, { "epoch": 0.7636454059430561, "grad_norm": 0.4107622802257538, "learning_rate": 1.181980189813088e-06, "loss": 0.6644, "step": 18426 }, { "epoch": 0.7636868498487297, "grad_norm": 0.43001824617385864, "learning_rate": 1.1817729702847198e-06, "loss": 0.6426, "step": 18427 }, { "epoch": 0.7637282937544034, "grad_norm": 0.4136884808540344, "learning_rate": 
1.1815657507563514e-06, "loss": 0.7131, "step": 18428 }, { "epoch": 0.763769737660077, "grad_norm": 0.3851294219493866, "learning_rate": 1.181358531227983e-06, "loss": 0.6063, "step": 18429 }, { "epoch": 0.7638111815657508, "grad_norm": 0.4164036214351654, "learning_rate": 1.1811513116996146e-06, "loss": 0.7271, "step": 18430 }, { "epoch": 0.7638526254714244, "grad_norm": 0.4233842194080353, "learning_rate": 1.1809440921712464e-06, "loss": 0.6768, "step": 18431 }, { "epoch": 0.7638940693770981, "grad_norm": 0.4609467089176178, "learning_rate": 1.180736872642878e-06, "loss": 0.6528, "step": 18432 }, { "epoch": 0.7639355132827718, "grad_norm": 0.42858242988586426, "learning_rate": 1.1805296531145096e-06, "loss": 0.6564, "step": 18433 }, { "epoch": 0.7639769571884455, "grad_norm": 0.4150758385658264, "learning_rate": 1.1803224335861412e-06, "loss": 0.6674, "step": 18434 }, { "epoch": 0.7640184010941191, "grad_norm": 0.4111219346523285, "learning_rate": 1.180115214057773e-06, "loss": 0.6182, "step": 18435 }, { "epoch": 0.7640598449997927, "grad_norm": 0.413320928812027, "learning_rate": 1.1799079945294046e-06, "loss": 0.6726, "step": 18436 }, { "epoch": 0.7641012889054665, "grad_norm": 0.4055173397064209, "learning_rate": 1.1797007750010362e-06, "loss": 0.6526, "step": 18437 }, { "epoch": 0.7641427328111401, "grad_norm": 0.4086606800556183, "learning_rate": 1.1794935554726678e-06, "loss": 0.6664, "step": 18438 }, { "epoch": 0.7641841767168138, "grad_norm": 0.43589872121810913, "learning_rate": 1.1792863359442996e-06, "loss": 0.7075, "step": 18439 }, { "epoch": 0.7642256206224874, "grad_norm": 0.40485116839408875, "learning_rate": 1.1790791164159312e-06, "loss": 0.6895, "step": 18440 }, { "epoch": 0.7642670645281612, "grad_norm": 0.42566031217575073, "learning_rate": 1.1788718968875628e-06, "loss": 0.7073, "step": 18441 }, { "epoch": 0.7643085084338348, "grad_norm": 0.42485877871513367, "learning_rate": 1.1786646773591944e-06, "loss": 0.6808, "step": 18442 }, { "epoch": 
0.7643499523395084, "grad_norm": 0.3711492419242859, "learning_rate": 1.178457457830826e-06, "loss": 0.5986, "step": 18443 }, { "epoch": 0.7643913962451822, "grad_norm": 0.43041500449180603, "learning_rate": 1.1782502383024578e-06, "loss": 0.6887, "step": 18444 }, { "epoch": 0.7644328401508558, "grad_norm": 0.4731006324291229, "learning_rate": 1.1780430187740894e-06, "loss": 0.665, "step": 18445 }, { "epoch": 0.7644742840565295, "grad_norm": 0.42910102009773254, "learning_rate": 1.177835799245721e-06, "loss": 0.6924, "step": 18446 }, { "epoch": 0.7645157279622031, "grad_norm": 0.455193430185318, "learning_rate": 1.1776285797173526e-06, "loss": 0.6975, "step": 18447 }, { "epoch": 0.7645571718678769, "grad_norm": 0.42143282294273376, "learning_rate": 1.1774213601889844e-06, "loss": 0.6909, "step": 18448 }, { "epoch": 0.7645986157735505, "grad_norm": 0.38527652621269226, "learning_rate": 1.177214140660616e-06, "loss": 0.6627, "step": 18449 }, { "epoch": 0.7646400596792242, "grad_norm": 0.3927207291126251, "learning_rate": 1.1770069211322476e-06, "loss": 0.6536, "step": 18450 }, { "epoch": 0.7646815035848978, "grad_norm": 0.4658842980861664, "learning_rate": 1.1767997016038792e-06, "loss": 0.6792, "step": 18451 }, { "epoch": 0.7647229474905715, "grad_norm": 0.41170987486839294, "learning_rate": 1.176592482075511e-06, "loss": 0.6454, "step": 18452 }, { "epoch": 0.7647643913962452, "grad_norm": 0.44729188084602356, "learning_rate": 1.1763852625471426e-06, "loss": 0.6562, "step": 18453 }, { "epoch": 0.7648058353019188, "grad_norm": 0.4143582880496979, "learning_rate": 1.1761780430187742e-06, "loss": 0.6487, "step": 18454 }, { "epoch": 0.7648472792075925, "grad_norm": 0.41327160596847534, "learning_rate": 1.1759708234904058e-06, "loss": 0.6753, "step": 18455 }, { "epoch": 0.7648887231132662, "grad_norm": 0.4269656836986542, "learning_rate": 1.1757636039620376e-06, "loss": 0.7073, "step": 18456 }, { "epoch": 0.7649301670189399, "grad_norm": 0.38702014088630676, 
"learning_rate": 1.1755563844336692e-06, "loss": 0.6414, "step": 18457 }, { "epoch": 0.7649716109246135, "grad_norm": 0.3879673182964325, "learning_rate": 1.1753491649053008e-06, "loss": 0.644, "step": 18458 }, { "epoch": 0.7650130548302873, "grad_norm": 0.39750558137893677, "learning_rate": 1.1751419453769324e-06, "loss": 0.6602, "step": 18459 }, { "epoch": 0.7650544987359609, "grad_norm": 0.395530104637146, "learning_rate": 1.1749347258485642e-06, "loss": 0.6448, "step": 18460 }, { "epoch": 0.7650959426416345, "grad_norm": 0.42407405376434326, "learning_rate": 1.1747275063201958e-06, "loss": 0.6112, "step": 18461 }, { "epoch": 0.7651373865473082, "grad_norm": 0.43486905097961426, "learning_rate": 1.1745202867918274e-06, "loss": 0.6525, "step": 18462 }, { "epoch": 0.7651788304529819, "grad_norm": 0.4844317138195038, "learning_rate": 1.174313067263459e-06, "loss": 0.7026, "step": 18463 }, { "epoch": 0.7652202743586556, "grad_norm": 0.38536015152931213, "learning_rate": 1.1741058477350906e-06, "loss": 0.6443, "step": 18464 }, { "epoch": 0.7652617182643292, "grad_norm": 0.3893115520477295, "learning_rate": 1.1738986282067224e-06, "loss": 0.6158, "step": 18465 }, { "epoch": 0.7653031621700029, "grad_norm": 0.4041883945465088, "learning_rate": 1.173691408678354e-06, "loss": 0.6658, "step": 18466 }, { "epoch": 0.7653446060756766, "grad_norm": 0.4559694826602936, "learning_rate": 1.1734841891499856e-06, "loss": 0.6636, "step": 18467 }, { "epoch": 0.7653860499813503, "grad_norm": 0.37963739037513733, "learning_rate": 1.1732769696216172e-06, "loss": 0.6163, "step": 18468 }, { "epoch": 0.7654274938870239, "grad_norm": 0.3712363541126251, "learning_rate": 1.173069750093249e-06, "loss": 0.6195, "step": 18469 }, { "epoch": 0.7654689377926975, "grad_norm": 0.4640085995197296, "learning_rate": 1.1728625305648806e-06, "loss": 0.6665, "step": 18470 }, { "epoch": 0.7655103816983713, "grad_norm": 0.4123537540435791, "learning_rate": 1.1726553110365122e-06, "loss": 0.6627, "step": 
18471 }, { "epoch": 0.7655518256040449, "grad_norm": 0.3673608601093292, "learning_rate": 1.1724480915081438e-06, "loss": 0.6521, "step": 18472 }, { "epoch": 0.7655932695097186, "grad_norm": 0.39640936255455017, "learning_rate": 1.1722408719797756e-06, "loss": 0.6624, "step": 18473 }, { "epoch": 0.7656347134153922, "grad_norm": 0.37369483709335327, "learning_rate": 1.1720336524514072e-06, "loss": 0.6525, "step": 18474 }, { "epoch": 0.765676157321066, "grad_norm": 0.38504764437675476, "learning_rate": 1.1718264329230388e-06, "loss": 0.6244, "step": 18475 }, { "epoch": 0.7657176012267396, "grad_norm": 0.4304869472980499, "learning_rate": 1.1716192133946704e-06, "loss": 0.6904, "step": 18476 }, { "epoch": 0.7657590451324133, "grad_norm": 0.37363943457603455, "learning_rate": 1.1714119938663022e-06, "loss": 0.6263, "step": 18477 }, { "epoch": 0.765800489038087, "grad_norm": 0.3924352526664734, "learning_rate": 1.1712047743379338e-06, "loss": 0.6182, "step": 18478 }, { "epoch": 0.7658419329437606, "grad_norm": 0.406818151473999, "learning_rate": 1.1709975548095654e-06, "loss": 0.6556, "step": 18479 }, { "epoch": 0.7658833768494343, "grad_norm": 0.3982245922088623, "learning_rate": 1.170790335281197e-06, "loss": 0.6305, "step": 18480 }, { "epoch": 0.7659248207551079, "grad_norm": 0.4387570321559906, "learning_rate": 1.1705831157528286e-06, "loss": 0.6798, "step": 18481 }, { "epoch": 0.7659662646607817, "grad_norm": 0.40468600392341614, "learning_rate": 1.1703758962244604e-06, "loss": 0.6432, "step": 18482 }, { "epoch": 0.7660077085664553, "grad_norm": 0.4335421919822693, "learning_rate": 1.170168676696092e-06, "loss": 0.6388, "step": 18483 }, { "epoch": 0.766049152472129, "grad_norm": 0.4113207459449768, "learning_rate": 1.1699614571677236e-06, "loss": 0.6445, "step": 18484 }, { "epoch": 0.7660905963778026, "grad_norm": 0.41679611802101135, "learning_rate": 1.1697542376393552e-06, "loss": 0.6608, "step": 18485 }, { "epoch": 0.7661320402834764, "grad_norm": 
0.44585224986076355, "learning_rate": 1.169547018110987e-06, "loss": 0.7113, "step": 18486 }, { "epoch": 0.76617348418915, "grad_norm": 0.45966386795043945, "learning_rate": 1.1693397985826186e-06, "loss": 0.7385, "step": 18487 }, { "epoch": 0.7662149280948236, "grad_norm": 0.45024600625038147, "learning_rate": 1.1691325790542502e-06, "loss": 0.7181, "step": 18488 }, { "epoch": 0.7662563720004973, "grad_norm": 0.40605056285858154, "learning_rate": 1.1689253595258818e-06, "loss": 0.6372, "step": 18489 }, { "epoch": 0.766297815906171, "grad_norm": 0.4288059175014496, "learning_rate": 1.1687181399975136e-06, "loss": 0.694, "step": 18490 }, { "epoch": 0.7663392598118447, "grad_norm": 0.4235321581363678, "learning_rate": 1.1685109204691452e-06, "loss": 0.6887, "step": 18491 }, { "epoch": 0.7663807037175183, "grad_norm": 0.4153278172016144, "learning_rate": 1.1683037009407768e-06, "loss": 0.6536, "step": 18492 }, { "epoch": 0.766422147623192, "grad_norm": 0.3742377460002899, "learning_rate": 1.1680964814124084e-06, "loss": 0.6241, "step": 18493 }, { "epoch": 0.7664635915288657, "grad_norm": 0.4051739573478699, "learning_rate": 1.1678892618840402e-06, "loss": 0.6682, "step": 18494 }, { "epoch": 0.7665050354345394, "grad_norm": 0.41767990589141846, "learning_rate": 1.1676820423556718e-06, "loss": 0.6536, "step": 18495 }, { "epoch": 0.766546479340213, "grad_norm": 0.4442001283168793, "learning_rate": 1.1674748228273034e-06, "loss": 0.6741, "step": 18496 }, { "epoch": 0.7665879232458866, "grad_norm": 0.44730162620544434, "learning_rate": 1.167267603298935e-06, "loss": 0.7124, "step": 18497 }, { "epoch": 0.7666293671515604, "grad_norm": 0.4113895297050476, "learning_rate": 1.1670603837705666e-06, "loss": 0.6086, "step": 18498 }, { "epoch": 0.766670811057234, "grad_norm": 0.3887301981449127, "learning_rate": 1.1668531642421984e-06, "loss": 0.6648, "step": 18499 }, { "epoch": 0.7667122549629077, "grad_norm": 0.42825308442115784, "learning_rate": 1.16664594471383e-06, "loss": 
0.6331, "step": 18500 }, { "epoch": 0.7667536988685814, "grad_norm": 0.4334266185760498, "learning_rate": 1.1664387251854616e-06, "loss": 0.7128, "step": 18501 }, { "epoch": 0.7667951427742551, "grad_norm": 0.41184303164482117, "learning_rate": 1.1662315056570932e-06, "loss": 0.6627, "step": 18502 }, { "epoch": 0.7668365866799287, "grad_norm": 0.42716002464294434, "learning_rate": 1.166024286128725e-06, "loss": 0.657, "step": 18503 }, { "epoch": 0.7668780305856023, "grad_norm": 0.4353923201560974, "learning_rate": 1.1658170666003566e-06, "loss": 0.6699, "step": 18504 }, { "epoch": 0.7669194744912761, "grad_norm": 0.40347740054130554, "learning_rate": 1.1656098470719882e-06, "loss": 0.6812, "step": 18505 }, { "epoch": 0.7669609183969497, "grad_norm": 0.41010141372680664, "learning_rate": 1.1654026275436198e-06, "loss": 0.6416, "step": 18506 }, { "epoch": 0.7670023623026234, "grad_norm": 0.4176444709300995, "learning_rate": 1.1651954080152514e-06, "loss": 0.6664, "step": 18507 }, { "epoch": 0.767043806208297, "grad_norm": 0.3686901032924652, "learning_rate": 1.1649881884868832e-06, "loss": 0.6389, "step": 18508 }, { "epoch": 0.7670852501139708, "grad_norm": 0.43918144702911377, "learning_rate": 1.1647809689585148e-06, "loss": 0.6884, "step": 18509 }, { "epoch": 0.7671266940196444, "grad_norm": 0.4338878393173218, "learning_rate": 1.1645737494301464e-06, "loss": 0.7166, "step": 18510 }, { "epoch": 0.7671681379253181, "grad_norm": 0.4349721372127533, "learning_rate": 1.164366529901778e-06, "loss": 0.6688, "step": 18511 }, { "epoch": 0.7672095818309917, "grad_norm": 0.40066230297088623, "learning_rate": 1.1641593103734098e-06, "loss": 0.6277, "step": 18512 }, { "epoch": 0.7672510257366654, "grad_norm": 0.39259424805641174, "learning_rate": 1.1639520908450414e-06, "loss": 0.6562, "step": 18513 }, { "epoch": 0.7672924696423391, "grad_norm": 0.4056980013847351, "learning_rate": 1.163744871316673e-06, "loss": 0.6245, "step": 18514 }, { "epoch": 0.7673339135480127, 
"grad_norm": 0.41037675738334656, "learning_rate": 1.1635376517883046e-06, "loss": 0.6943, "step": 18515 }, { "epoch": 0.7673753574536865, "grad_norm": 0.40309709310531616, "learning_rate": 1.1633304322599364e-06, "loss": 0.6935, "step": 18516 }, { "epoch": 0.7674168013593601, "grad_norm": 0.3900870680809021, "learning_rate": 1.163123212731568e-06, "loss": 0.655, "step": 18517 }, { "epoch": 0.7674582452650338, "grad_norm": 0.443907231092453, "learning_rate": 1.1629159932031996e-06, "loss": 0.7085, "step": 18518 }, { "epoch": 0.7674996891707074, "grad_norm": 0.3788963854312897, "learning_rate": 1.1627087736748312e-06, "loss": 0.6425, "step": 18519 }, { "epoch": 0.7675411330763812, "grad_norm": 0.3821406960487366, "learning_rate": 1.1625015541464628e-06, "loss": 0.6785, "step": 18520 }, { "epoch": 0.7675825769820548, "grad_norm": 0.4338715672492981, "learning_rate": 1.1622943346180946e-06, "loss": 0.6886, "step": 18521 }, { "epoch": 0.7676240208877284, "grad_norm": 0.40271928906440735, "learning_rate": 1.1620871150897262e-06, "loss": 0.6396, "step": 18522 }, { "epoch": 0.7676654647934021, "grad_norm": 0.4060998558998108, "learning_rate": 1.1618798955613578e-06, "loss": 0.6859, "step": 18523 }, { "epoch": 0.7677069086990758, "grad_norm": 0.42858368158340454, "learning_rate": 1.1616726760329894e-06, "loss": 0.6621, "step": 18524 }, { "epoch": 0.7677483526047495, "grad_norm": 0.367145299911499, "learning_rate": 1.1614654565046212e-06, "loss": 0.6489, "step": 18525 }, { "epoch": 0.7677897965104231, "grad_norm": 0.4178408980369568, "learning_rate": 1.1612582369762528e-06, "loss": 0.7119, "step": 18526 }, { "epoch": 0.7678312404160968, "grad_norm": 0.4580093324184418, "learning_rate": 1.1610510174478844e-06, "loss": 0.7937, "step": 18527 }, { "epoch": 0.7678726843217705, "grad_norm": 0.4395301043987274, "learning_rate": 1.160843797919516e-06, "loss": 0.7051, "step": 18528 }, { "epoch": 0.7679141282274442, "grad_norm": 0.39206668734550476, "learning_rate": 
1.1606365783911478e-06, "loss": 0.6702, "step": 18529 }, { "epoch": 0.7679555721331178, "grad_norm": 0.409267783164978, "learning_rate": 1.1604293588627794e-06, "loss": 0.6729, "step": 18530 }, { "epoch": 0.7679970160387914, "grad_norm": 0.43972328305244446, "learning_rate": 1.160222139334411e-06, "loss": 0.6597, "step": 18531 }, { "epoch": 0.7680384599444652, "grad_norm": 0.40109190344810486, "learning_rate": 1.1600149198060426e-06, "loss": 0.6602, "step": 18532 }, { "epoch": 0.7680799038501388, "grad_norm": 0.39063355326652527, "learning_rate": 1.1598077002776742e-06, "loss": 0.6273, "step": 18533 }, { "epoch": 0.7681213477558125, "grad_norm": 0.44438230991363525, "learning_rate": 1.159600480749306e-06, "loss": 0.6354, "step": 18534 }, { "epoch": 0.7681627916614862, "grad_norm": 0.4230665862560272, "learning_rate": 1.1593932612209376e-06, "loss": 0.637, "step": 18535 }, { "epoch": 0.7682042355671599, "grad_norm": 0.43769678473472595, "learning_rate": 1.1591860416925692e-06, "loss": 0.7104, "step": 18536 }, { "epoch": 0.7682456794728335, "grad_norm": 0.41632193326950073, "learning_rate": 1.1589788221642008e-06, "loss": 0.7039, "step": 18537 }, { "epoch": 0.7682871233785072, "grad_norm": 0.3924875259399414, "learning_rate": 1.1587716026358326e-06, "loss": 0.6177, "step": 18538 }, { "epoch": 0.7683285672841809, "grad_norm": 0.4123399555683136, "learning_rate": 1.1585643831074642e-06, "loss": 0.6517, "step": 18539 }, { "epoch": 0.7683700111898545, "grad_norm": 0.3976581394672394, "learning_rate": 1.1583571635790958e-06, "loss": 0.6951, "step": 18540 }, { "epoch": 0.7684114550955282, "grad_norm": 0.42206886410713196, "learning_rate": 1.1581499440507274e-06, "loss": 0.6785, "step": 18541 }, { "epoch": 0.7684528990012018, "grad_norm": 0.4087122976779938, "learning_rate": 1.1579427245223592e-06, "loss": 0.6641, "step": 18542 }, { "epoch": 0.7684943429068756, "grad_norm": 0.39679965376853943, "learning_rate": 1.1577355049939908e-06, "loss": 0.6572, "step": 18543 }, { 
"epoch": 0.7685357868125492, "grad_norm": 0.42872047424316406, "learning_rate": 1.1575282854656224e-06, "loss": 0.6372, "step": 18544 }, { "epoch": 0.7685772307182229, "grad_norm": 0.44230228662490845, "learning_rate": 1.157321065937254e-06, "loss": 0.6653, "step": 18545 }, { "epoch": 0.7686186746238965, "grad_norm": 0.4191790521144867, "learning_rate": 1.1571138464088856e-06, "loss": 0.6431, "step": 18546 }, { "epoch": 0.7686601185295703, "grad_norm": 0.4560861885547638, "learning_rate": 1.1569066268805174e-06, "loss": 0.6555, "step": 18547 }, { "epoch": 0.7687015624352439, "grad_norm": 0.390288770198822, "learning_rate": 1.156699407352149e-06, "loss": 0.6685, "step": 18548 }, { "epoch": 0.7687430063409175, "grad_norm": 0.41414180397987366, "learning_rate": 1.1564921878237806e-06, "loss": 0.7012, "step": 18549 }, { "epoch": 0.7687844502465913, "grad_norm": 0.4463585913181305, "learning_rate": 1.1562849682954122e-06, "loss": 0.6968, "step": 18550 }, { "epoch": 0.7688258941522649, "grad_norm": 0.45496705174446106, "learning_rate": 1.156077748767044e-06, "loss": 0.6447, "step": 18551 }, { "epoch": 0.7688673380579386, "grad_norm": 0.405990332365036, "learning_rate": 1.1558705292386756e-06, "loss": 0.6873, "step": 18552 }, { "epoch": 0.7689087819636122, "grad_norm": 0.4221707880496979, "learning_rate": 1.1556633097103072e-06, "loss": 0.6873, "step": 18553 }, { "epoch": 0.768950225869286, "grad_norm": 0.40096721053123474, "learning_rate": 1.1554560901819388e-06, "loss": 0.6316, "step": 18554 }, { "epoch": 0.7689916697749596, "grad_norm": 0.41256675124168396, "learning_rate": 1.1552488706535706e-06, "loss": 0.678, "step": 18555 }, { "epoch": 0.7690331136806333, "grad_norm": 0.4267052710056305, "learning_rate": 1.1550416511252022e-06, "loss": 0.6801, "step": 18556 }, { "epoch": 0.7690745575863069, "grad_norm": 0.43435922265052795, "learning_rate": 1.1548344315968338e-06, "loss": 0.7118, "step": 18557 }, { "epoch": 0.7691160014919806, "grad_norm": 0.45345303416252136, 
"learning_rate": 1.1546272120684654e-06, "loss": 0.6426, "step": 18558 }, { "epoch": 0.7691574453976543, "grad_norm": 0.3971325159072876, "learning_rate": 1.154419992540097e-06, "loss": 0.6333, "step": 18559 }, { "epoch": 0.7691988893033279, "grad_norm": 0.4306395351886749, "learning_rate": 1.1542127730117288e-06, "loss": 0.6558, "step": 18560 }, { "epoch": 0.7692403332090016, "grad_norm": 0.41357091069221497, "learning_rate": 1.1540055534833604e-06, "loss": 0.6914, "step": 18561 }, { "epoch": 0.7692817771146753, "grad_norm": 0.41950181126594543, "learning_rate": 1.153798333954992e-06, "loss": 0.6665, "step": 18562 }, { "epoch": 0.769323221020349, "grad_norm": 0.43289950489997864, "learning_rate": 1.1535911144266236e-06, "loss": 0.6517, "step": 18563 }, { "epoch": 0.7693646649260226, "grad_norm": 0.4054003059864044, "learning_rate": 1.1533838948982554e-06, "loss": 0.6503, "step": 18564 }, { "epoch": 0.7694061088316962, "grad_norm": 0.3784058094024658, "learning_rate": 1.153176675369887e-06, "loss": 0.641, "step": 18565 }, { "epoch": 0.76944755273737, "grad_norm": 0.4092369079589844, "learning_rate": 1.1529694558415186e-06, "loss": 0.6354, "step": 18566 }, { "epoch": 0.7694889966430436, "grad_norm": 0.42733046412467957, "learning_rate": 1.1527622363131502e-06, "loss": 0.678, "step": 18567 }, { "epoch": 0.7695304405487173, "grad_norm": 0.4132389724254608, "learning_rate": 1.152555016784782e-06, "loss": 0.6943, "step": 18568 }, { "epoch": 0.769571884454391, "grad_norm": 0.4011296331882477, "learning_rate": 1.1523477972564136e-06, "loss": 0.5869, "step": 18569 }, { "epoch": 0.7696133283600647, "grad_norm": 0.4523649513721466, "learning_rate": 1.1521405777280452e-06, "loss": 0.7327, "step": 18570 }, { "epoch": 0.7696547722657383, "grad_norm": 0.3923201262950897, "learning_rate": 1.1519333581996768e-06, "loss": 0.6693, "step": 18571 }, { "epoch": 0.769696216171412, "grad_norm": 0.4153224527835846, "learning_rate": 1.1517261386713084e-06, "loss": 0.6848, "step": 18572 }, 
{ "epoch": 0.7697376600770857, "grad_norm": 0.5003382563591003, "learning_rate": 1.1515189191429402e-06, "loss": 0.6887, "step": 18573 }, { "epoch": 0.7697791039827593, "grad_norm": 0.4181358218193054, "learning_rate": 1.1513116996145718e-06, "loss": 0.6614, "step": 18574 }, { "epoch": 0.769820547888433, "grad_norm": 0.4148607850074768, "learning_rate": 1.1511044800862034e-06, "loss": 0.642, "step": 18575 }, { "epoch": 0.7698619917941066, "grad_norm": 0.3932316303253174, "learning_rate": 1.150897260557835e-06, "loss": 0.5818, "step": 18576 }, { "epoch": 0.7699034356997804, "grad_norm": 0.4383046627044678, "learning_rate": 1.1506900410294668e-06, "loss": 0.7273, "step": 18577 }, { "epoch": 0.769944879605454, "grad_norm": 0.4460391402244568, "learning_rate": 1.1504828215010984e-06, "loss": 0.6467, "step": 18578 }, { "epoch": 0.7699863235111277, "grad_norm": 0.4331652522087097, "learning_rate": 1.15027560197273e-06, "loss": 0.7046, "step": 18579 }, { "epoch": 0.7700277674168013, "grad_norm": 0.4264139235019684, "learning_rate": 1.1500683824443616e-06, "loss": 0.6946, "step": 18580 }, { "epoch": 0.7700692113224751, "grad_norm": 0.41391050815582275, "learning_rate": 1.1498611629159932e-06, "loss": 0.6637, "step": 18581 }, { "epoch": 0.7701106552281487, "grad_norm": 0.45625627040863037, "learning_rate": 1.149653943387625e-06, "loss": 0.7258, "step": 18582 }, { "epoch": 0.7701520991338223, "grad_norm": 0.4479356110095978, "learning_rate": 1.1494467238592566e-06, "loss": 0.6826, "step": 18583 }, { "epoch": 0.770193543039496, "grad_norm": 0.4043210446834564, "learning_rate": 1.1492395043308882e-06, "loss": 0.6442, "step": 18584 }, { "epoch": 0.7702349869451697, "grad_norm": 0.40128234028816223, "learning_rate": 1.1490322848025198e-06, "loss": 0.6525, "step": 18585 }, { "epoch": 0.7702764308508434, "grad_norm": 0.4102870523929596, "learning_rate": 1.1488250652741516e-06, "loss": 0.6726, "step": 18586 }, { "epoch": 0.770317874756517, "grad_norm": 0.4233836233615875, 
"learning_rate": 1.1486178457457832e-06, "loss": 0.6914, "step": 18587 }, { "epoch": 0.7703593186621908, "grad_norm": 0.38220882415771484, "learning_rate": 1.1484106262174148e-06, "loss": 0.6382, "step": 18588 }, { "epoch": 0.7704007625678644, "grad_norm": 0.40496397018432617, "learning_rate": 1.1482034066890464e-06, "loss": 0.682, "step": 18589 }, { "epoch": 0.7704422064735381, "grad_norm": 0.4491124451160431, "learning_rate": 1.1479961871606782e-06, "loss": 0.7189, "step": 18590 }, { "epoch": 0.7704836503792117, "grad_norm": 0.40522831678390503, "learning_rate": 1.1477889676323098e-06, "loss": 0.6604, "step": 18591 }, { "epoch": 0.7705250942848854, "grad_norm": 0.433199018239975, "learning_rate": 1.1475817481039414e-06, "loss": 0.6809, "step": 18592 }, { "epoch": 0.7705665381905591, "grad_norm": 0.4345639646053314, "learning_rate": 1.147374528575573e-06, "loss": 0.6847, "step": 18593 }, { "epoch": 0.7706079820962327, "grad_norm": 0.44261783361434937, "learning_rate": 1.1471673090472046e-06, "loss": 0.7377, "step": 18594 }, { "epoch": 0.7706494260019064, "grad_norm": 0.41557615995407104, "learning_rate": 1.1469600895188364e-06, "loss": 0.6368, "step": 18595 }, { "epoch": 0.7706908699075801, "grad_norm": 0.40809065103530884, "learning_rate": 1.146752869990468e-06, "loss": 0.7234, "step": 18596 }, { "epoch": 0.7707323138132538, "grad_norm": 0.40976637601852417, "learning_rate": 1.1465456504620996e-06, "loss": 0.6381, "step": 18597 }, { "epoch": 0.7707737577189274, "grad_norm": 0.4206528663635254, "learning_rate": 1.1463384309337312e-06, "loss": 0.64, "step": 18598 }, { "epoch": 0.7708152016246012, "grad_norm": 0.4338268041610718, "learning_rate": 1.146131211405363e-06, "loss": 0.6899, "step": 18599 }, { "epoch": 0.7708566455302748, "grad_norm": 0.4137064814567566, "learning_rate": 1.1459239918769946e-06, "loss": 0.6779, "step": 18600 }, { "epoch": 0.7708980894359484, "grad_norm": 0.4082860052585602, "learning_rate": 1.1457167723486262e-06, "loss": 0.6826, "step": 
18601 }, { "epoch": 0.7709395333416221, "grad_norm": 0.4037609398365021, "learning_rate": 1.1455095528202578e-06, "loss": 0.662, "step": 18602 }, { "epoch": 0.7709809772472958, "grad_norm": 0.3922029137611389, "learning_rate": 1.1453023332918896e-06, "loss": 0.6444, "step": 18603 }, { "epoch": 0.7710224211529695, "grad_norm": 0.4038182199001312, "learning_rate": 1.1450951137635212e-06, "loss": 0.677, "step": 18604 }, { "epoch": 0.7710638650586431, "grad_norm": 0.4116866886615753, "learning_rate": 1.1448878942351528e-06, "loss": 0.6696, "step": 18605 }, { "epoch": 0.7711053089643168, "grad_norm": 0.419552206993103, "learning_rate": 1.1446806747067844e-06, "loss": 0.7444, "step": 18606 }, { "epoch": 0.7711467528699905, "grad_norm": 0.40074682235717773, "learning_rate": 1.144473455178416e-06, "loss": 0.6091, "step": 18607 }, { "epoch": 0.7711881967756642, "grad_norm": 0.41989514231681824, "learning_rate": 1.1442662356500478e-06, "loss": 0.6621, "step": 18608 }, { "epoch": 0.7712296406813378, "grad_norm": 0.42825815081596375, "learning_rate": 1.1440590161216794e-06, "loss": 0.6545, "step": 18609 }, { "epoch": 0.7712710845870114, "grad_norm": 0.4134232699871063, "learning_rate": 1.143851796593311e-06, "loss": 0.6871, "step": 18610 }, { "epoch": 0.7713125284926852, "grad_norm": 0.3758434057235718, "learning_rate": 1.1436445770649426e-06, "loss": 0.6089, "step": 18611 }, { "epoch": 0.7713539723983588, "grad_norm": 0.3733452260494232, "learning_rate": 1.1434373575365744e-06, "loss": 0.6907, "step": 18612 }, { "epoch": 0.7713954163040325, "grad_norm": 0.4429336190223694, "learning_rate": 1.143230138008206e-06, "loss": 0.6959, "step": 18613 }, { "epoch": 0.7714368602097061, "grad_norm": 0.37851688265800476, "learning_rate": 1.1430229184798376e-06, "loss": 0.65, "step": 18614 }, { "epoch": 0.7714783041153799, "grad_norm": 0.4046182930469513, "learning_rate": 1.1428156989514692e-06, "loss": 0.6455, "step": 18615 }, { "epoch": 0.7715197480210535, "grad_norm": 
0.44494199752807617, "learning_rate": 1.142608479423101e-06, "loss": 0.7251, "step": 18616 }, { "epoch": 0.7715611919267272, "grad_norm": 0.41044414043426514, "learning_rate": 1.1424012598947326e-06, "loss": 0.6527, "step": 18617 }, { "epoch": 0.7716026358324009, "grad_norm": 0.4215778112411499, "learning_rate": 1.1421940403663642e-06, "loss": 0.6835, "step": 18618 }, { "epoch": 0.7716440797380745, "grad_norm": 0.3994966149330139, "learning_rate": 1.1419868208379958e-06, "loss": 0.6703, "step": 18619 }, { "epoch": 0.7716855236437482, "grad_norm": 0.45467814803123474, "learning_rate": 1.1417796013096274e-06, "loss": 0.6818, "step": 18620 }, { "epoch": 0.7717269675494218, "grad_norm": 0.49117904901504517, "learning_rate": 1.1415723817812592e-06, "loss": 0.7037, "step": 18621 }, { "epoch": 0.7717684114550956, "grad_norm": 0.43307989835739136, "learning_rate": 1.1413651622528908e-06, "loss": 0.7048, "step": 18622 }, { "epoch": 0.7718098553607692, "grad_norm": 0.4177330434322357, "learning_rate": 1.1411579427245224e-06, "loss": 0.6305, "step": 18623 }, { "epoch": 0.7718512992664429, "grad_norm": 0.4392470717430115, "learning_rate": 1.140950723196154e-06, "loss": 0.6896, "step": 18624 }, { "epoch": 0.7718927431721165, "grad_norm": 0.39668378233909607, "learning_rate": 1.1407435036677858e-06, "loss": 0.66, "step": 18625 }, { "epoch": 0.7719341870777902, "grad_norm": 0.40316227078437805, "learning_rate": 1.1405362841394174e-06, "loss": 0.6266, "step": 18626 }, { "epoch": 0.7719756309834639, "grad_norm": 0.44575291872024536, "learning_rate": 1.140329064611049e-06, "loss": 0.7234, "step": 18627 }, { "epoch": 0.7720170748891375, "grad_norm": 0.44293880462646484, "learning_rate": 1.1401218450826806e-06, "loss": 0.7629, "step": 18628 }, { "epoch": 0.7720585187948112, "grad_norm": 0.4704482853412628, "learning_rate": 1.1399146255543124e-06, "loss": 0.666, "step": 18629 }, { "epoch": 0.7720999627004849, "grad_norm": 0.4036056697368622, "learning_rate": 1.139707406025944e-06, 
"loss": 0.6243, "step": 18630 }, { "epoch": 0.7721414066061586, "grad_norm": 0.41639819741249084, "learning_rate": 1.1395001864975756e-06, "loss": 0.6492, "step": 18631 }, { "epoch": 0.7721828505118322, "grad_norm": 0.42895159125328064, "learning_rate": 1.1392929669692072e-06, "loss": 0.6853, "step": 18632 }, { "epoch": 0.772224294417506, "grad_norm": 0.37037062644958496, "learning_rate": 1.1390857474408388e-06, "loss": 0.6193, "step": 18633 }, { "epoch": 0.7722657383231796, "grad_norm": 0.49012070894241333, "learning_rate": 1.1388785279124706e-06, "loss": 0.7498, "step": 18634 }, { "epoch": 0.7723071822288532, "grad_norm": 0.40547385811805725, "learning_rate": 1.1386713083841022e-06, "loss": 0.6821, "step": 18635 }, { "epoch": 0.7723486261345269, "grad_norm": 0.4003818929195404, "learning_rate": 1.1384640888557338e-06, "loss": 0.7014, "step": 18636 }, { "epoch": 0.7723900700402005, "grad_norm": 0.40989387035369873, "learning_rate": 1.1382568693273654e-06, "loss": 0.6407, "step": 18637 }, { "epoch": 0.7724315139458743, "grad_norm": 0.4199989140033722, "learning_rate": 1.1380496497989972e-06, "loss": 0.6506, "step": 18638 }, { "epoch": 0.7724729578515479, "grad_norm": 0.4276945888996124, "learning_rate": 1.1378424302706288e-06, "loss": 0.6554, "step": 18639 }, { "epoch": 0.7725144017572216, "grad_norm": 0.4385223984718323, "learning_rate": 1.1376352107422604e-06, "loss": 0.6053, "step": 18640 }, { "epoch": 0.7725558456628953, "grad_norm": 0.4457826018333435, "learning_rate": 1.137427991213892e-06, "loss": 0.7139, "step": 18641 }, { "epoch": 0.772597289568569, "grad_norm": 0.4264000654220581, "learning_rate": 1.1372207716855238e-06, "loss": 0.6613, "step": 18642 }, { "epoch": 0.7726387334742426, "grad_norm": 0.400253564119339, "learning_rate": 1.1370135521571554e-06, "loss": 0.6965, "step": 18643 }, { "epoch": 0.7726801773799162, "grad_norm": 0.4083994925022125, "learning_rate": 1.136806332628787e-06, "loss": 0.6445, "step": 18644 }, { "epoch": 0.77272162128559, 
"grad_norm": 0.4834294319152832, "learning_rate": 1.1365991131004186e-06, "loss": 0.7148, "step": 18645 }, { "epoch": 0.7727630651912636, "grad_norm": 0.41728124022483826, "learning_rate": 1.1363918935720502e-06, "loss": 0.6461, "step": 18646 }, { "epoch": 0.7728045090969373, "grad_norm": 0.3866616487503052, "learning_rate": 1.136184674043682e-06, "loss": 0.6781, "step": 18647 }, { "epoch": 0.7728459530026109, "grad_norm": 0.3816164433956146, "learning_rate": 1.1359774545153136e-06, "loss": 0.6248, "step": 18648 }, { "epoch": 0.7728873969082847, "grad_norm": 0.40736693143844604, "learning_rate": 1.1357702349869452e-06, "loss": 0.6743, "step": 18649 }, { "epoch": 0.7729288408139583, "grad_norm": 0.4172896146774292, "learning_rate": 1.1355630154585768e-06, "loss": 0.7017, "step": 18650 }, { "epoch": 0.772970284719632, "grad_norm": 0.4151565432548523, "learning_rate": 1.1353557959302086e-06, "loss": 0.6658, "step": 18651 }, { "epoch": 0.7730117286253056, "grad_norm": 0.41716209053993225, "learning_rate": 1.1351485764018402e-06, "loss": 0.6277, "step": 18652 }, { "epoch": 0.7730531725309793, "grad_norm": 0.3743089437484741, "learning_rate": 1.1349413568734718e-06, "loss": 0.6421, "step": 18653 }, { "epoch": 0.773094616436653, "grad_norm": 0.4839318096637726, "learning_rate": 1.1347341373451034e-06, "loss": 0.7744, "step": 18654 }, { "epoch": 0.7731360603423266, "grad_norm": 0.44469866156578064, "learning_rate": 1.134526917816735e-06, "loss": 0.6509, "step": 18655 }, { "epoch": 0.7731775042480004, "grad_norm": 0.4358755946159363, "learning_rate": 1.1343196982883668e-06, "loss": 0.6907, "step": 18656 }, { "epoch": 0.773218948153674, "grad_norm": 0.4016987681388855, "learning_rate": 1.1341124787599984e-06, "loss": 0.6802, "step": 18657 }, { "epoch": 0.7732603920593477, "grad_norm": 0.3807734549045563, "learning_rate": 1.13390525923163e-06, "loss": 0.6167, "step": 18658 }, { "epoch": 0.7733018359650213, "grad_norm": 0.427251398563385, "learning_rate": 
1.1336980397032616e-06, "loss": 0.6669, "step": 18659 }, { "epoch": 0.7733432798706951, "grad_norm": 0.41537249088287354, "learning_rate": 1.1334908201748934e-06, "loss": 0.6511, "step": 18660 }, { "epoch": 0.7733847237763687, "grad_norm": 0.4192321002483368, "learning_rate": 1.133283600646525e-06, "loss": 0.6682, "step": 18661 }, { "epoch": 0.7734261676820423, "grad_norm": 0.40378138422966003, "learning_rate": 1.1330763811181566e-06, "loss": 0.6658, "step": 18662 }, { "epoch": 0.773467611587716, "grad_norm": 0.45574843883514404, "learning_rate": 1.1328691615897882e-06, "loss": 0.6722, "step": 18663 }, { "epoch": 0.7735090554933897, "grad_norm": 0.40728622674942017, "learning_rate": 1.13266194206142e-06, "loss": 0.6637, "step": 18664 }, { "epoch": 0.7735504993990634, "grad_norm": 0.4360423982143402, "learning_rate": 1.1324547225330516e-06, "loss": 0.672, "step": 18665 }, { "epoch": 0.773591943304737, "grad_norm": 0.39295539259910583, "learning_rate": 1.1322475030046832e-06, "loss": 0.6438, "step": 18666 }, { "epoch": 0.7736333872104108, "grad_norm": 0.43571820855140686, "learning_rate": 1.1320402834763148e-06, "loss": 0.704, "step": 18667 }, { "epoch": 0.7736748311160844, "grad_norm": 0.46852004528045654, "learning_rate": 1.1318330639479464e-06, "loss": 0.7096, "step": 18668 }, { "epoch": 0.7737162750217581, "grad_norm": 0.39040815830230713, "learning_rate": 1.1316258444195782e-06, "loss": 0.6171, "step": 18669 }, { "epoch": 0.7737577189274317, "grad_norm": 0.42720717191696167, "learning_rate": 1.1314186248912098e-06, "loss": 0.6484, "step": 18670 }, { "epoch": 0.7737991628331053, "grad_norm": 0.40399742126464844, "learning_rate": 1.1312114053628414e-06, "loss": 0.6472, "step": 18671 }, { "epoch": 0.7738406067387791, "grad_norm": 0.40572458505630493, "learning_rate": 1.131004185834473e-06, "loss": 0.6277, "step": 18672 }, { "epoch": 0.7738820506444527, "grad_norm": 0.4221031665802002, "learning_rate": 1.1307969663061048e-06, "loss": 0.7378, "step": 18673 }, { 
"epoch": 0.7739234945501264, "grad_norm": 0.4922735095024109, "learning_rate": 1.1305897467777364e-06, "loss": 0.6749, "step": 18674 }, { "epoch": 0.7739649384558001, "grad_norm": 0.38611453771591187, "learning_rate": 1.130382527249368e-06, "loss": 0.6338, "step": 18675 }, { "epoch": 0.7740063823614738, "grad_norm": 0.42773130536079407, "learning_rate": 1.1301753077209996e-06, "loss": 0.6699, "step": 18676 }, { "epoch": 0.7740478262671474, "grad_norm": 0.3943508267402649, "learning_rate": 1.1299680881926314e-06, "loss": 0.5994, "step": 18677 }, { "epoch": 0.774089270172821, "grad_norm": 0.4129193127155304, "learning_rate": 1.129760868664263e-06, "loss": 0.6511, "step": 18678 }, { "epoch": 0.7741307140784948, "grad_norm": 0.45398756861686707, "learning_rate": 1.1295536491358946e-06, "loss": 0.6577, "step": 18679 }, { "epoch": 0.7741721579841684, "grad_norm": 0.44203853607177734, "learning_rate": 1.1293464296075262e-06, "loss": 0.7103, "step": 18680 }, { "epoch": 0.7742136018898421, "grad_norm": 0.4224059581756592, "learning_rate": 1.1291392100791578e-06, "loss": 0.7003, "step": 18681 }, { "epoch": 0.7742550457955157, "grad_norm": 0.38337188959121704, "learning_rate": 1.1289319905507896e-06, "loss": 0.6665, "step": 18682 }, { "epoch": 0.7742964897011895, "grad_norm": 0.4135085940361023, "learning_rate": 1.1287247710224212e-06, "loss": 0.6583, "step": 18683 }, { "epoch": 0.7743379336068631, "grad_norm": 0.43615031242370605, "learning_rate": 1.1285175514940528e-06, "loss": 0.7256, "step": 18684 }, { "epoch": 0.7743793775125368, "grad_norm": 0.464132159948349, "learning_rate": 1.1283103319656844e-06, "loss": 0.6807, "step": 18685 }, { "epoch": 0.7744208214182104, "grad_norm": 0.4091275632381439, "learning_rate": 1.1281031124373162e-06, "loss": 0.6733, "step": 18686 }, { "epoch": 0.7744622653238841, "grad_norm": 0.4081200361251831, "learning_rate": 1.1278958929089478e-06, "loss": 0.6549, "step": 18687 }, { "epoch": 0.7745037092295578, "grad_norm": 0.4100535213947296, 
"learning_rate": 1.1276886733805794e-06, "loss": 0.6578, "step": 18688 }, { "epoch": 0.7745451531352314, "grad_norm": 0.4124448299407959, "learning_rate": 1.127481453852211e-06, "loss": 0.6598, "step": 18689 }, { "epoch": 0.7745865970409052, "grad_norm": 0.39033955335617065, "learning_rate": 1.1272742343238428e-06, "loss": 0.663, "step": 18690 }, { "epoch": 0.7746280409465788, "grad_norm": 0.3985194265842438, "learning_rate": 1.1270670147954744e-06, "loss": 0.6025, "step": 18691 }, { "epoch": 0.7746694848522525, "grad_norm": 0.44080355763435364, "learning_rate": 1.126859795267106e-06, "loss": 0.6925, "step": 18692 }, { "epoch": 0.7747109287579261, "grad_norm": 0.39983922243118286, "learning_rate": 1.1266525757387376e-06, "loss": 0.6099, "step": 18693 }, { "epoch": 0.7747523726635999, "grad_norm": 0.40150535106658936, "learning_rate": 1.1264453562103692e-06, "loss": 0.6621, "step": 18694 }, { "epoch": 0.7747938165692735, "grad_norm": 0.41600513458251953, "learning_rate": 1.126238136682001e-06, "loss": 0.6537, "step": 18695 }, { "epoch": 0.7748352604749471, "grad_norm": 0.44487765431404114, "learning_rate": 1.1260309171536326e-06, "loss": 0.6821, "step": 18696 }, { "epoch": 0.7748767043806208, "grad_norm": 0.3848533034324646, "learning_rate": 1.1258236976252642e-06, "loss": 0.6531, "step": 18697 }, { "epoch": 0.7749181482862945, "grad_norm": 0.3946925699710846, "learning_rate": 1.1256164780968958e-06, "loss": 0.6261, "step": 18698 }, { "epoch": 0.7749595921919682, "grad_norm": 0.3837633728981018, "learning_rate": 1.1254092585685276e-06, "loss": 0.6594, "step": 18699 }, { "epoch": 0.7750010360976418, "grad_norm": 0.4449877440929413, "learning_rate": 1.1252020390401592e-06, "loss": 0.6561, "step": 18700 }, { "epoch": 0.7750424800033155, "grad_norm": 0.39526981115341187, "learning_rate": 1.1249948195117908e-06, "loss": 0.6848, "step": 18701 }, { "epoch": 0.7750839239089892, "grad_norm": 0.42968806624412537, "learning_rate": 1.1247875999834224e-06, "loss": 0.6704, 
"step": 18702 }, { "epoch": 0.7751253678146629, "grad_norm": 0.4310893416404724, "learning_rate": 1.1245803804550542e-06, "loss": 0.6666, "step": 18703 }, { "epoch": 0.7751668117203365, "grad_norm": 0.4172687232494354, "learning_rate": 1.1243731609266858e-06, "loss": 0.6998, "step": 18704 }, { "epoch": 0.7752082556260101, "grad_norm": 0.43702420592308044, "learning_rate": 1.1241659413983174e-06, "loss": 0.6404, "step": 18705 }, { "epoch": 0.7752496995316839, "grad_norm": 0.4350883662700653, "learning_rate": 1.123958721869949e-06, "loss": 0.641, "step": 18706 }, { "epoch": 0.7752911434373575, "grad_norm": 0.4447561204433441, "learning_rate": 1.1237515023415806e-06, "loss": 0.7148, "step": 18707 }, { "epoch": 0.7753325873430312, "grad_norm": 0.46580421924591064, "learning_rate": 1.1235442828132124e-06, "loss": 0.6963, "step": 18708 }, { "epoch": 0.7753740312487049, "grad_norm": 0.42502662539482117, "learning_rate": 1.123337063284844e-06, "loss": 0.6982, "step": 18709 }, { "epoch": 0.7754154751543786, "grad_norm": 0.4118465781211853, "learning_rate": 1.1231298437564756e-06, "loss": 0.6707, "step": 18710 }, { "epoch": 0.7754569190600522, "grad_norm": 0.40466099977493286, "learning_rate": 1.1229226242281072e-06, "loss": 0.6643, "step": 18711 }, { "epoch": 0.7754983629657259, "grad_norm": 0.4172675907611847, "learning_rate": 1.122715404699739e-06, "loss": 0.698, "step": 18712 }, { "epoch": 0.7755398068713996, "grad_norm": 0.39812880754470825, "learning_rate": 1.1225081851713706e-06, "loss": 0.6582, "step": 18713 }, { "epoch": 0.7755812507770732, "grad_norm": 0.4064295291900635, "learning_rate": 1.1223009656430022e-06, "loss": 0.6532, "step": 18714 }, { "epoch": 0.7756226946827469, "grad_norm": 0.40281838178634644, "learning_rate": 1.1220937461146338e-06, "loss": 0.6597, "step": 18715 }, { "epoch": 0.7756641385884205, "grad_norm": 0.4151982367038727, "learning_rate": 1.1218865265862656e-06, "loss": 0.6963, "step": 18716 }, { "epoch": 0.7757055824940943, "grad_norm": 
0.4396878778934479, "learning_rate": 1.1216793070578972e-06, "loss": 0.6725, "step": 18717 }, { "epoch": 0.7757470263997679, "grad_norm": 0.42466995120048523, "learning_rate": 1.1214720875295288e-06, "loss": 0.6895, "step": 18718 }, { "epoch": 0.7757884703054416, "grad_norm": 0.385654479265213, "learning_rate": 1.1212648680011604e-06, "loss": 0.6349, "step": 18719 }, { "epoch": 0.7758299142111152, "grad_norm": 0.3998183608055115, "learning_rate": 1.121057648472792e-06, "loss": 0.6108, "step": 18720 }, { "epoch": 0.775871358116789, "grad_norm": 0.4541303813457489, "learning_rate": 1.1208504289444238e-06, "loss": 0.7129, "step": 18721 }, { "epoch": 0.7759128020224626, "grad_norm": 0.41898781061172485, "learning_rate": 1.1206432094160554e-06, "loss": 0.6553, "step": 18722 }, { "epoch": 0.7759542459281362, "grad_norm": 0.40791329741477966, "learning_rate": 1.120435989887687e-06, "loss": 0.67, "step": 18723 }, { "epoch": 0.77599568983381, "grad_norm": 0.4193266034126282, "learning_rate": 1.1202287703593186e-06, "loss": 0.673, "step": 18724 }, { "epoch": 0.7760371337394836, "grad_norm": 0.42473992705345154, "learning_rate": 1.1200215508309504e-06, "loss": 0.719, "step": 18725 }, { "epoch": 0.7760785776451573, "grad_norm": 0.46280011534690857, "learning_rate": 1.119814331302582e-06, "loss": 0.7314, "step": 18726 }, { "epoch": 0.7761200215508309, "grad_norm": 0.41773149371147156, "learning_rate": 1.1196071117742136e-06, "loss": 0.6775, "step": 18727 }, { "epoch": 0.7761614654565047, "grad_norm": 0.41040560603141785, "learning_rate": 1.1193998922458452e-06, "loss": 0.6718, "step": 18728 }, { "epoch": 0.7762029093621783, "grad_norm": 0.4290517568588257, "learning_rate": 1.1191926727174768e-06, "loss": 0.6504, "step": 18729 }, { "epoch": 0.776244353267852, "grad_norm": 0.4477480947971344, "learning_rate": 1.1189854531891086e-06, "loss": 0.6556, "step": 18730 }, { "epoch": 0.7762857971735256, "grad_norm": 0.41496020555496216, "learning_rate": 1.1187782336607402e-06, "loss": 
0.6484, "step": 18731 }, { "epoch": 0.7763272410791993, "grad_norm": 0.4248695373535156, "learning_rate": 1.1185710141323718e-06, "loss": 0.6479, "step": 18732 }, { "epoch": 0.776368684984873, "grad_norm": 0.41781800985336304, "learning_rate": 1.1183637946040034e-06, "loss": 0.6887, "step": 18733 }, { "epoch": 0.7764101288905466, "grad_norm": 0.4059661030769348, "learning_rate": 1.1181565750756352e-06, "loss": 0.6475, "step": 18734 }, { "epoch": 0.7764515727962203, "grad_norm": 0.4230389893054962, "learning_rate": 1.1179493555472668e-06, "loss": 0.6417, "step": 18735 }, { "epoch": 0.776493016701894, "grad_norm": 0.41161203384399414, "learning_rate": 1.1177421360188984e-06, "loss": 0.6526, "step": 18736 }, { "epoch": 0.7765344606075677, "grad_norm": 0.39863821864128113, "learning_rate": 1.11753491649053e-06, "loss": 0.658, "step": 18737 }, { "epoch": 0.7765759045132413, "grad_norm": 0.41206008195877075, "learning_rate": 1.1173276969621618e-06, "loss": 0.6914, "step": 18738 }, { "epoch": 0.7766173484189149, "grad_norm": 0.43053460121154785, "learning_rate": 1.1171204774337934e-06, "loss": 0.6831, "step": 18739 }, { "epoch": 0.7766587923245887, "grad_norm": 0.40393710136413574, "learning_rate": 1.116913257905425e-06, "loss": 0.6617, "step": 18740 }, { "epoch": 0.7767002362302623, "grad_norm": 0.4298970699310303, "learning_rate": 1.1167060383770566e-06, "loss": 0.6797, "step": 18741 }, { "epoch": 0.776741680135936, "grad_norm": 0.4310683608055115, "learning_rate": 1.1164988188486882e-06, "loss": 0.7332, "step": 18742 }, { "epoch": 0.7767831240416097, "grad_norm": 0.42302408814430237, "learning_rate": 1.11629159932032e-06, "loss": 0.6742, "step": 18743 }, { "epoch": 0.7768245679472834, "grad_norm": 0.42748138308525085, "learning_rate": 1.1160843797919516e-06, "loss": 0.6689, "step": 18744 }, { "epoch": 0.776866011852957, "grad_norm": 0.4327971339225769, "learning_rate": 1.1158771602635832e-06, "loss": 0.6919, "step": 18745 }, { "epoch": 0.7769074557586307, "grad_norm": 
0.39091575145721436, "learning_rate": 1.1156699407352148e-06, "loss": 0.6334, "step": 18746 }, { "epoch": 0.7769488996643044, "grad_norm": 0.4027308225631714, "learning_rate": 1.1154627212068466e-06, "loss": 0.6714, "step": 18747 }, { "epoch": 0.776990343569978, "grad_norm": 0.44414129853248596, "learning_rate": 1.1152555016784782e-06, "loss": 0.6854, "step": 18748 }, { "epoch": 0.7770317874756517, "grad_norm": 0.416934996843338, "learning_rate": 1.1150482821501098e-06, "loss": 0.6931, "step": 18749 }, { "epoch": 0.7770732313813253, "grad_norm": 0.4397965371608734, "learning_rate": 1.1148410626217414e-06, "loss": 0.6407, "step": 18750 }, { "epoch": 0.7771146752869991, "grad_norm": 0.42599260807037354, "learning_rate": 1.1146338430933732e-06, "loss": 0.67, "step": 18751 }, { "epoch": 0.7771561191926727, "grad_norm": 0.3895764648914337, "learning_rate": 1.1144266235650048e-06, "loss": 0.6943, "step": 18752 }, { "epoch": 0.7771975630983464, "grad_norm": 0.38975492119789124, "learning_rate": 1.1142194040366364e-06, "loss": 0.6648, "step": 18753 }, { "epoch": 0.77723900700402, "grad_norm": 0.4198983907699585, "learning_rate": 1.114012184508268e-06, "loss": 0.719, "step": 18754 }, { "epoch": 0.7772804509096938, "grad_norm": 0.43661805987358093, "learning_rate": 1.1138049649798996e-06, "loss": 0.6411, "step": 18755 }, { "epoch": 0.7773218948153674, "grad_norm": 0.4458347260951996, "learning_rate": 1.1135977454515314e-06, "loss": 0.6844, "step": 18756 }, { "epoch": 0.777363338721041, "grad_norm": 0.40382084250450134, "learning_rate": 1.113390525923163e-06, "loss": 0.6949, "step": 18757 }, { "epoch": 0.7774047826267148, "grad_norm": 0.41136816143989563, "learning_rate": 1.1131833063947946e-06, "loss": 0.6422, "step": 18758 }, { "epoch": 0.7774462265323884, "grad_norm": 0.4279707372188568, "learning_rate": 1.1129760868664262e-06, "loss": 0.7142, "step": 18759 }, { "epoch": 0.7774876704380621, "grad_norm": 0.4160175323486328, "learning_rate": 1.112768867338058e-06, "loss": 
0.6646, "step": 18760 }, { "epoch": 0.7775291143437357, "grad_norm": 0.39878320693969727, "learning_rate": 1.1125616478096896e-06, "loss": 0.626, "step": 18761 }, { "epoch": 0.7775705582494095, "grad_norm": 0.4324229061603546, "learning_rate": 1.1123544282813212e-06, "loss": 0.6831, "step": 18762 }, { "epoch": 0.7776120021550831, "grad_norm": 0.39504092931747437, "learning_rate": 1.1121472087529528e-06, "loss": 0.6339, "step": 18763 }, { "epoch": 0.7776534460607568, "grad_norm": 0.4084392189979553, "learning_rate": 1.1119399892245846e-06, "loss": 0.6624, "step": 18764 }, { "epoch": 0.7776948899664304, "grad_norm": 0.42251867055892944, "learning_rate": 1.1117327696962162e-06, "loss": 0.6842, "step": 18765 }, { "epoch": 0.7777363338721041, "grad_norm": 0.4526134133338928, "learning_rate": 1.1115255501678478e-06, "loss": 0.6309, "step": 18766 }, { "epoch": 0.7777777777777778, "grad_norm": 0.4456314742565155, "learning_rate": 1.1113183306394794e-06, "loss": 0.6407, "step": 18767 }, { "epoch": 0.7778192216834514, "grad_norm": 0.40142011642456055, "learning_rate": 1.111111111111111e-06, "loss": 0.6531, "step": 18768 }, { "epoch": 0.7778606655891251, "grad_norm": 0.4107346832752228, "learning_rate": 1.1109038915827428e-06, "loss": 0.6613, "step": 18769 }, { "epoch": 0.7779021094947988, "grad_norm": 0.3942998945713043, "learning_rate": 1.1106966720543744e-06, "loss": 0.6348, "step": 18770 }, { "epoch": 0.7779435534004725, "grad_norm": 0.4623507261276245, "learning_rate": 1.110489452526006e-06, "loss": 0.7013, "step": 18771 }, { "epoch": 0.7779849973061461, "grad_norm": 0.41602858901023865, "learning_rate": 1.1102822329976376e-06, "loss": 0.6753, "step": 18772 }, { "epoch": 0.7780264412118199, "grad_norm": 0.41368016600608826, "learning_rate": 1.1100750134692694e-06, "loss": 0.6238, "step": 18773 }, { "epoch": 0.7780678851174935, "grad_norm": 0.4194434881210327, "learning_rate": 1.109867793940901e-06, "loss": 0.6848, "step": 18774 }, { "epoch": 0.7781093290231671, 
"grad_norm": 0.47933462262153625, "learning_rate": 1.1096605744125326e-06, "loss": 0.7034, "step": 18775 }, { "epoch": 0.7781507729288408, "grad_norm": 0.4257299304008484, "learning_rate": 1.1094533548841642e-06, "loss": 0.6974, "step": 18776 }, { "epoch": 0.7781922168345144, "grad_norm": 0.4096745252609253, "learning_rate": 1.109246135355796e-06, "loss": 0.7034, "step": 18777 }, { "epoch": 0.7782336607401882, "grad_norm": 0.4046975076198578, "learning_rate": 1.1090389158274276e-06, "loss": 0.6281, "step": 18778 }, { "epoch": 0.7782751046458618, "grad_norm": 0.44408655166625977, "learning_rate": 1.1088316962990592e-06, "loss": 0.699, "step": 18779 }, { "epoch": 0.7783165485515355, "grad_norm": 0.4406507909297943, "learning_rate": 1.1086244767706908e-06, "loss": 0.6875, "step": 18780 }, { "epoch": 0.7783579924572092, "grad_norm": 0.40251192450523376, "learning_rate": 1.1084172572423227e-06, "loss": 0.6609, "step": 18781 }, { "epoch": 0.7783994363628829, "grad_norm": 0.38690513372421265, "learning_rate": 1.1082100377139542e-06, "loss": 0.6295, "step": 18782 }, { "epoch": 0.7784408802685565, "grad_norm": 0.4032716751098633, "learning_rate": 1.1080028181855858e-06, "loss": 0.6478, "step": 18783 }, { "epoch": 0.7784823241742301, "grad_norm": 0.391605406999588, "learning_rate": 1.1077955986572174e-06, "loss": 0.6782, "step": 18784 }, { "epoch": 0.7785237680799039, "grad_norm": 0.42940467596054077, "learning_rate": 1.107588379128849e-06, "loss": 0.6954, "step": 18785 }, { "epoch": 0.7785652119855775, "grad_norm": 0.47324827313423157, "learning_rate": 1.1073811596004808e-06, "loss": 0.7213, "step": 18786 }, { "epoch": 0.7786066558912512, "grad_norm": 0.4105950891971588, "learning_rate": 1.1071739400721124e-06, "loss": 0.6704, "step": 18787 }, { "epoch": 0.7786480997969248, "grad_norm": 0.43260952830314636, "learning_rate": 1.106966720543744e-06, "loss": 0.6833, "step": 18788 }, { "epoch": 0.7786895437025986, "grad_norm": 0.41201263666152954, "learning_rate": 
1.1067595010153756e-06, "loss": 0.6041, "step": 18789 }, { "epoch": 0.7787309876082722, "grad_norm": 0.4050266444683075, "learning_rate": 1.1065522814870075e-06, "loss": 0.675, "step": 18790 }, { "epoch": 0.7787724315139459, "grad_norm": 0.41364237666130066, "learning_rate": 1.106345061958639e-06, "loss": 0.6405, "step": 18791 }, { "epoch": 0.7788138754196196, "grad_norm": 0.3975664973258972, "learning_rate": 1.1061378424302706e-06, "loss": 0.6594, "step": 18792 }, { "epoch": 0.7788553193252932, "grad_norm": 0.4294144809246063, "learning_rate": 1.1059306229019022e-06, "loss": 0.6395, "step": 18793 }, { "epoch": 0.7788967632309669, "grad_norm": 0.39942771196365356, "learning_rate": 1.105723403373534e-06, "loss": 0.6631, "step": 18794 }, { "epoch": 0.7789382071366405, "grad_norm": 0.4126463830471039, "learning_rate": 1.1055161838451656e-06, "loss": 0.6553, "step": 18795 }, { "epoch": 0.7789796510423143, "grad_norm": 0.4053286910057068, "learning_rate": 1.1053089643167972e-06, "loss": 0.6354, "step": 18796 }, { "epoch": 0.7790210949479879, "grad_norm": 0.3916606605052948, "learning_rate": 1.1051017447884288e-06, "loss": 0.6975, "step": 18797 }, { "epoch": 0.7790625388536616, "grad_norm": 0.42304617166519165, "learning_rate": 1.1048945252600607e-06, "loss": 0.7141, "step": 18798 }, { "epoch": 0.7791039827593352, "grad_norm": 0.39214229583740234, "learning_rate": 1.1046873057316923e-06, "loss": 0.6514, "step": 18799 }, { "epoch": 0.7791454266650089, "grad_norm": 0.4053260087966919, "learning_rate": 1.1044800862033238e-06, "loss": 0.7063, "step": 18800 }, { "epoch": 0.7791868705706826, "grad_norm": 0.38826656341552734, "learning_rate": 1.1042728666749554e-06, "loss": 0.6624, "step": 18801 }, { "epoch": 0.7792283144763562, "grad_norm": 0.4081454575061798, "learning_rate": 1.1040656471465873e-06, "loss": 0.7229, "step": 18802 }, { "epoch": 0.7792697583820299, "grad_norm": 0.4236225485801697, "learning_rate": 1.1038584276182189e-06, "loss": 0.7153, "step": 18803 }, { 
"epoch": 0.7793112022877036, "grad_norm": 0.3959042429924011, "learning_rate": 1.1036512080898504e-06, "loss": 0.689, "step": 18804 }, { "epoch": 0.7793526461933773, "grad_norm": 0.41856518387794495, "learning_rate": 1.103443988561482e-06, "loss": 0.6355, "step": 18805 }, { "epoch": 0.7793940900990509, "grad_norm": 0.40799281001091003, "learning_rate": 1.1032367690331136e-06, "loss": 0.6873, "step": 18806 }, { "epoch": 0.7794355340047247, "grad_norm": 0.4445628523826599, "learning_rate": 1.1030295495047455e-06, "loss": 0.663, "step": 18807 }, { "epoch": 0.7794769779103983, "grad_norm": 0.3994918167591095, "learning_rate": 1.102822329976377e-06, "loss": 0.677, "step": 18808 }, { "epoch": 0.7795184218160719, "grad_norm": 0.4263248145580292, "learning_rate": 1.1026151104480086e-06, "loss": 0.6786, "step": 18809 }, { "epoch": 0.7795598657217456, "grad_norm": 0.3844582438468933, "learning_rate": 1.1024078909196402e-06, "loss": 0.6617, "step": 18810 }, { "epoch": 0.7796013096274192, "grad_norm": 0.4311043918132782, "learning_rate": 1.102200671391272e-06, "loss": 0.6512, "step": 18811 }, { "epoch": 0.779642753533093, "grad_norm": 0.41674917936325073, "learning_rate": 1.1019934518629037e-06, "loss": 0.6864, "step": 18812 }, { "epoch": 0.7796841974387666, "grad_norm": 0.4419926702976227, "learning_rate": 1.1017862323345353e-06, "loss": 0.6744, "step": 18813 }, { "epoch": 0.7797256413444403, "grad_norm": 0.3851330578327179, "learning_rate": 1.1015790128061668e-06, "loss": 0.6531, "step": 18814 }, { "epoch": 0.779767085250114, "grad_norm": 0.3952349126338959, "learning_rate": 1.1013717932777987e-06, "loss": 0.6786, "step": 18815 }, { "epoch": 0.7798085291557877, "grad_norm": 0.44760245084762573, "learning_rate": 1.1011645737494303e-06, "loss": 0.6631, "step": 18816 }, { "epoch": 0.7798499730614613, "grad_norm": 0.42007654905319214, "learning_rate": 1.1009573542210619e-06, "loss": 0.6587, "step": 18817 }, { "epoch": 0.7798914169671349, "grad_norm": 0.39179566502571106, 
"learning_rate": 1.1007501346926934e-06, "loss": 0.6769, "step": 18818 }, { "epoch": 0.7799328608728087, "grad_norm": 0.39047813415527344, "learning_rate": 1.1005429151643253e-06, "loss": 0.6605, "step": 18819 }, { "epoch": 0.7799743047784823, "grad_norm": 0.40700316429138184, "learning_rate": 1.1003356956359569e-06, "loss": 0.6892, "step": 18820 }, { "epoch": 0.780015748684156, "grad_norm": 0.3896768391132355, "learning_rate": 1.1001284761075885e-06, "loss": 0.6543, "step": 18821 }, { "epoch": 0.7800571925898296, "grad_norm": 0.3938610553741455, "learning_rate": 1.09992125657922e-06, "loss": 0.6461, "step": 18822 }, { "epoch": 0.7800986364955034, "grad_norm": 0.4198577404022217, "learning_rate": 1.0997140370508516e-06, "loss": 0.6597, "step": 18823 }, { "epoch": 0.780140080401177, "grad_norm": 0.41655710339546204, "learning_rate": 1.0995068175224835e-06, "loss": 0.5928, "step": 18824 }, { "epoch": 0.7801815243068507, "grad_norm": 0.3947398364543915, "learning_rate": 1.099299597994115e-06, "loss": 0.6196, "step": 18825 }, { "epoch": 0.7802229682125243, "grad_norm": 0.40593814849853516, "learning_rate": 1.0990923784657467e-06, "loss": 0.6841, "step": 18826 }, { "epoch": 0.780264412118198, "grad_norm": 0.4076291024684906, "learning_rate": 1.0988851589373782e-06, "loss": 0.6652, "step": 18827 }, { "epoch": 0.7803058560238717, "grad_norm": 0.44146642088890076, "learning_rate": 1.09867793940901e-06, "loss": 0.6241, "step": 18828 }, { "epoch": 0.7803472999295453, "grad_norm": 0.45738741755485535, "learning_rate": 1.0984707198806417e-06, "loss": 0.6708, "step": 18829 }, { "epoch": 0.7803887438352191, "grad_norm": 0.4379986822605133, "learning_rate": 1.0982635003522733e-06, "loss": 0.6901, "step": 18830 }, { "epoch": 0.7804301877408927, "grad_norm": 0.3872493803501129, "learning_rate": 1.0980562808239049e-06, "loss": 0.6398, "step": 18831 }, { "epoch": 0.7804716316465664, "grad_norm": 0.4121597707271576, "learning_rate": 1.0978490612955367e-06, "loss": 0.6982, "step": 
18832 }, { "epoch": 0.78051307555224, "grad_norm": 0.4347425401210785, "learning_rate": 1.0976418417671683e-06, "loss": 0.6331, "step": 18833 }, { "epoch": 0.7805545194579138, "grad_norm": 0.40419870615005493, "learning_rate": 1.0974346222387999e-06, "loss": 0.6638, "step": 18834 }, { "epoch": 0.7805959633635874, "grad_norm": 0.41031917929649353, "learning_rate": 1.0972274027104315e-06, "loss": 0.6846, "step": 18835 }, { "epoch": 0.780637407269261, "grad_norm": 0.411199688911438, "learning_rate": 1.0970201831820633e-06, "loss": 0.6553, "step": 18836 }, { "epoch": 0.7806788511749347, "grad_norm": 0.3981885612010956, "learning_rate": 1.0968129636536949e-06, "loss": 0.5975, "step": 18837 }, { "epoch": 0.7807202950806084, "grad_norm": 0.399404913187027, "learning_rate": 1.0966057441253265e-06, "loss": 0.6232, "step": 18838 }, { "epoch": 0.7807617389862821, "grad_norm": 0.4402633011341095, "learning_rate": 1.096398524596958e-06, "loss": 0.6982, "step": 18839 }, { "epoch": 0.7808031828919557, "grad_norm": 0.4260307848453522, "learning_rate": 1.0961913050685897e-06, "loss": 0.7595, "step": 18840 }, { "epoch": 0.7808446267976294, "grad_norm": 0.4659217596054077, "learning_rate": 1.0959840855402215e-06, "loss": 0.7234, "step": 18841 }, { "epoch": 0.7808860707033031, "grad_norm": 0.4123915433883667, "learning_rate": 1.095776866011853e-06, "loss": 0.6493, "step": 18842 }, { "epoch": 0.7809275146089768, "grad_norm": 0.4259832799434662, "learning_rate": 1.0955696464834847e-06, "loss": 0.6912, "step": 18843 }, { "epoch": 0.7809689585146504, "grad_norm": 0.4359184205532074, "learning_rate": 1.0953624269551163e-06, "loss": 0.6946, "step": 18844 }, { "epoch": 0.781010402420324, "grad_norm": 0.3814309239387512, "learning_rate": 1.095155207426748e-06, "loss": 0.6674, "step": 18845 }, { "epoch": 0.7810518463259978, "grad_norm": 0.4515441060066223, "learning_rate": 1.0949479878983797e-06, "loss": 0.6339, "step": 18846 }, { "epoch": 0.7810932902316714, "grad_norm": 0.4213552474975586, 
"learning_rate": 1.0947407683700113e-06, "loss": 0.7195, "step": 18847 }, { "epoch": 0.7811347341373451, "grad_norm": 0.42695024609565735, "learning_rate": 1.0945335488416429e-06, "loss": 0.6851, "step": 18848 }, { "epoch": 0.7811761780430188, "grad_norm": 0.39487844705581665, "learning_rate": 1.0943263293132747e-06, "loss": 0.6672, "step": 18849 }, { "epoch": 0.7812176219486925, "grad_norm": 0.41849663853645325, "learning_rate": 1.0941191097849063e-06, "loss": 0.6985, "step": 18850 }, { "epoch": 0.7812590658543661, "grad_norm": 0.3849886953830719, "learning_rate": 1.0939118902565379e-06, "loss": 0.6757, "step": 18851 }, { "epoch": 0.7813005097600398, "grad_norm": 0.4149416387081146, "learning_rate": 1.0937046707281695e-06, "loss": 0.709, "step": 18852 }, { "epoch": 0.7813419536657135, "grad_norm": 0.42889589071273804, "learning_rate": 1.0934974511998013e-06, "loss": 0.7078, "step": 18853 }, { "epoch": 0.7813833975713871, "grad_norm": 0.40220752358436584, "learning_rate": 1.0932902316714329e-06, "loss": 0.646, "step": 18854 }, { "epoch": 0.7814248414770608, "grad_norm": 0.4418937563896179, "learning_rate": 1.0930830121430645e-06, "loss": 0.6716, "step": 18855 }, { "epoch": 0.7814662853827344, "grad_norm": 0.3934852182865143, "learning_rate": 1.092875792614696e-06, "loss": 0.6556, "step": 18856 }, { "epoch": 0.7815077292884082, "grad_norm": 0.4429916739463806, "learning_rate": 1.0926685730863279e-06, "loss": 0.7279, "step": 18857 }, { "epoch": 0.7815491731940818, "grad_norm": 0.41080379486083984, "learning_rate": 1.0924613535579595e-06, "loss": 0.6807, "step": 18858 }, { "epoch": 0.7815906170997555, "grad_norm": 0.434968501329422, "learning_rate": 1.092254134029591e-06, "loss": 0.7236, "step": 18859 }, { "epoch": 0.7816320610054291, "grad_norm": 0.4013499319553375, "learning_rate": 1.0920469145012227e-06, "loss": 0.6716, "step": 18860 }, { "epoch": 0.7816735049111028, "grad_norm": 0.44486042857170105, "learning_rate": 1.0918396949728543e-06, "loss": 0.5999, "step": 
18861 }, { "epoch": 0.7817149488167765, "grad_norm": 0.4249608814716339, "learning_rate": 1.091632475444486e-06, "loss": 0.6539, "step": 18862 }, { "epoch": 0.7817563927224501, "grad_norm": 0.3992757499217987, "learning_rate": 1.0914252559161177e-06, "loss": 0.6785, "step": 18863 }, { "epoch": 0.7817978366281239, "grad_norm": 0.4159892201423645, "learning_rate": 1.0912180363877493e-06, "loss": 0.6508, "step": 18864 }, { "epoch": 0.7818392805337975, "grad_norm": 0.38451892137527466, "learning_rate": 1.0910108168593809e-06, "loss": 0.6392, "step": 18865 }, { "epoch": 0.7818807244394712, "grad_norm": 0.46010440587997437, "learning_rate": 1.0908035973310127e-06, "loss": 0.7175, "step": 18866 }, { "epoch": 0.7819221683451448, "grad_norm": 0.40840089321136475, "learning_rate": 1.0905963778026443e-06, "loss": 0.6538, "step": 18867 }, { "epoch": 0.7819636122508186, "grad_norm": 0.4650651514530182, "learning_rate": 1.0903891582742759e-06, "loss": 0.6581, "step": 18868 }, { "epoch": 0.7820050561564922, "grad_norm": 0.4152011573314667, "learning_rate": 1.0901819387459075e-06, "loss": 0.6545, "step": 18869 }, { "epoch": 0.7820465000621658, "grad_norm": 0.45975184440612793, "learning_rate": 1.0899747192175393e-06, "loss": 0.6414, "step": 18870 }, { "epoch": 0.7820879439678395, "grad_norm": 0.39851266145706177, "learning_rate": 1.0897674996891709e-06, "loss": 0.6505, "step": 18871 }, { "epoch": 0.7821293878735132, "grad_norm": 0.38592639565467834, "learning_rate": 1.0895602801608025e-06, "loss": 0.6677, "step": 18872 }, { "epoch": 0.7821708317791869, "grad_norm": 0.42993760108947754, "learning_rate": 1.089353060632434e-06, "loss": 0.6533, "step": 18873 }, { "epoch": 0.7822122756848605, "grad_norm": 0.4233139753341675, "learning_rate": 1.0891458411040659e-06, "loss": 0.7239, "step": 18874 }, { "epoch": 0.7822537195905342, "grad_norm": 0.4279825687408447, "learning_rate": 1.0889386215756975e-06, "loss": 0.6892, "step": 18875 }, { "epoch": 0.7822951634962079, "grad_norm": 
0.4484666883945465, "learning_rate": 1.088731402047329e-06, "loss": 0.7579, "step": 18876 }, { "epoch": 0.7823366074018816, "grad_norm": 0.4028109908103943, "learning_rate": 1.0885241825189607e-06, "loss": 0.6626, "step": 18877 }, { "epoch": 0.7823780513075552, "grad_norm": 0.43316707015037537, "learning_rate": 1.0883169629905923e-06, "loss": 0.6704, "step": 18878 }, { "epoch": 0.7824194952132288, "grad_norm": 0.4176410436630249, "learning_rate": 1.088109743462224e-06, "loss": 0.6703, "step": 18879 }, { "epoch": 0.7824609391189026, "grad_norm": 0.43516579270362854, "learning_rate": 1.0879025239338557e-06, "loss": 0.6943, "step": 18880 }, { "epoch": 0.7825023830245762, "grad_norm": 0.41555994749069214, "learning_rate": 1.0876953044054873e-06, "loss": 0.668, "step": 18881 }, { "epoch": 0.7825438269302499, "grad_norm": 0.40481501817703247, "learning_rate": 1.0874880848771189e-06, "loss": 0.6858, "step": 18882 }, { "epoch": 0.7825852708359236, "grad_norm": 0.42684054374694824, "learning_rate": 1.0872808653487507e-06, "loss": 0.6475, "step": 18883 }, { "epoch": 0.7826267147415973, "grad_norm": 0.4245835542678833, "learning_rate": 1.0870736458203823e-06, "loss": 0.6595, "step": 18884 }, { "epoch": 0.7826681586472709, "grad_norm": 0.38963648676872253, "learning_rate": 1.0868664262920139e-06, "loss": 0.6367, "step": 18885 }, { "epoch": 0.7827096025529446, "grad_norm": 0.4163943827152252, "learning_rate": 1.0866592067636455e-06, "loss": 0.6882, "step": 18886 }, { "epoch": 0.7827510464586183, "grad_norm": 0.4186316430568695, "learning_rate": 1.0864519872352773e-06, "loss": 0.6299, "step": 18887 }, { "epoch": 0.7827924903642919, "grad_norm": 0.4664421081542969, "learning_rate": 1.0862447677069089e-06, "loss": 0.7148, "step": 18888 }, { "epoch": 0.7828339342699656, "grad_norm": 0.4560980498790741, "learning_rate": 1.0860375481785405e-06, "loss": 0.6732, "step": 18889 }, { "epoch": 0.7828753781756392, "grad_norm": 0.41615772247314453, "learning_rate": 1.085830328650172e-06, 
"loss": 0.6533, "step": 18890 }, { "epoch": 0.782916822081313, "grad_norm": 0.42156779766082764, "learning_rate": 1.0856231091218039e-06, "loss": 0.6724, "step": 18891 }, { "epoch": 0.7829582659869866, "grad_norm": 0.39790910482406616, "learning_rate": 1.0854158895934355e-06, "loss": 0.6733, "step": 18892 }, { "epoch": 0.7829997098926603, "grad_norm": 0.4292767345905304, "learning_rate": 1.085208670065067e-06, "loss": 0.7246, "step": 18893 }, { "epoch": 0.7830411537983339, "grad_norm": 0.40560129284858704, "learning_rate": 1.0850014505366987e-06, "loss": 0.6714, "step": 18894 }, { "epoch": 0.7830825977040077, "grad_norm": 0.37918904423713684, "learning_rate": 1.0847942310083305e-06, "loss": 0.6311, "step": 18895 }, { "epoch": 0.7831240416096813, "grad_norm": 0.3928378224372864, "learning_rate": 1.084587011479962e-06, "loss": 0.6541, "step": 18896 }, { "epoch": 0.7831654855153549, "grad_norm": 0.4097030460834503, "learning_rate": 1.0843797919515937e-06, "loss": 0.7316, "step": 18897 }, { "epoch": 0.7832069294210287, "grad_norm": 0.4423655867576599, "learning_rate": 1.0841725724232253e-06, "loss": 0.6665, "step": 18898 }, { "epoch": 0.7832483733267023, "grad_norm": 0.4249981939792633, "learning_rate": 1.0839653528948569e-06, "loss": 0.6824, "step": 18899 }, { "epoch": 0.783289817232376, "grad_norm": 0.43631815910339355, "learning_rate": 1.0837581333664887e-06, "loss": 0.6525, "step": 18900 }, { "epoch": 0.7833312611380496, "grad_norm": 0.42427247762680054, "learning_rate": 1.0835509138381203e-06, "loss": 0.6039, "step": 18901 }, { "epoch": 0.7833727050437234, "grad_norm": 0.43400827050209045, "learning_rate": 1.0833436943097519e-06, "loss": 0.6584, "step": 18902 }, { "epoch": 0.783414148949397, "grad_norm": 0.4354948103427887, "learning_rate": 1.0831364747813835e-06, "loss": 0.6282, "step": 18903 }, { "epoch": 0.7834555928550707, "grad_norm": 0.3904609978199005, "learning_rate": 1.0829292552530153e-06, "loss": 0.6403, "step": 18904 }, { "epoch": 0.7834970367607443, 
"grad_norm": 0.42187222838401794, "learning_rate": 1.0827220357246469e-06, "loss": 0.6869, "step": 18905 }, { "epoch": 0.783538480666418, "grad_norm": 0.43158531188964844, "learning_rate": 1.0825148161962785e-06, "loss": 0.6466, "step": 18906 }, { "epoch": 0.7835799245720917, "grad_norm": 0.4186829924583435, "learning_rate": 1.08230759666791e-06, "loss": 0.6661, "step": 18907 }, { "epoch": 0.7836213684777653, "grad_norm": 0.4584585726261139, "learning_rate": 1.0821003771395419e-06, "loss": 0.6927, "step": 18908 }, { "epoch": 0.783662812383439, "grad_norm": 0.4755858778953552, "learning_rate": 1.0818931576111735e-06, "loss": 0.71, "step": 18909 }, { "epoch": 0.7837042562891127, "grad_norm": 0.3958633542060852, "learning_rate": 1.081685938082805e-06, "loss": 0.6328, "step": 18910 }, { "epoch": 0.7837457001947864, "grad_norm": 0.4167976677417755, "learning_rate": 1.0814787185544367e-06, "loss": 0.6659, "step": 18911 }, { "epoch": 0.78378714410046, "grad_norm": 0.3970787525177002, "learning_rate": 1.0812714990260685e-06, "loss": 0.6467, "step": 18912 }, { "epoch": 0.7838285880061338, "grad_norm": 0.41433587670326233, "learning_rate": 1.0810642794977e-06, "loss": 0.6832, "step": 18913 }, { "epoch": 0.7838700319118074, "grad_norm": 0.41205278038978577, "learning_rate": 1.0808570599693317e-06, "loss": 0.6411, "step": 18914 }, { "epoch": 0.783911475817481, "grad_norm": 0.43575945496559143, "learning_rate": 1.0806498404409633e-06, "loss": 0.6903, "step": 18915 }, { "epoch": 0.7839529197231547, "grad_norm": 0.40554696321487427, "learning_rate": 1.0804426209125949e-06, "loss": 0.6704, "step": 18916 }, { "epoch": 0.7839943636288284, "grad_norm": 0.4216777980327606, "learning_rate": 1.0802354013842267e-06, "loss": 0.6903, "step": 18917 }, { "epoch": 0.7840358075345021, "grad_norm": 0.3853289783000946, "learning_rate": 1.0800281818558583e-06, "loss": 0.6567, "step": 18918 }, { "epoch": 0.7840772514401757, "grad_norm": 0.3791319727897644, "learning_rate": 1.0798209623274899e-06, 
"loss": 0.6383, "step": 18919 }, { "epoch": 0.7841186953458494, "grad_norm": 0.39951223134994507, "learning_rate": 1.0796137427991215e-06, "loss": 0.6577, "step": 18920 }, { "epoch": 0.7841601392515231, "grad_norm": 0.4251583218574524, "learning_rate": 1.0794065232707533e-06, "loss": 0.6942, "step": 18921 }, { "epoch": 0.7842015831571967, "grad_norm": 0.39110326766967773, "learning_rate": 1.0791993037423849e-06, "loss": 0.6665, "step": 18922 }, { "epoch": 0.7842430270628704, "grad_norm": 0.4191063642501831, "learning_rate": 1.0789920842140165e-06, "loss": 0.6451, "step": 18923 }, { "epoch": 0.784284470968544, "grad_norm": 0.37308424711227417, "learning_rate": 1.078784864685648e-06, "loss": 0.6638, "step": 18924 }, { "epoch": 0.7843259148742178, "grad_norm": 0.4515168070793152, "learning_rate": 1.0785776451572799e-06, "loss": 0.6923, "step": 18925 }, { "epoch": 0.7843673587798914, "grad_norm": 0.4061039090156555, "learning_rate": 1.0783704256289115e-06, "loss": 0.6511, "step": 18926 }, { "epoch": 0.7844088026855651, "grad_norm": 0.4120289385318756, "learning_rate": 1.078163206100543e-06, "loss": 0.6798, "step": 18927 }, { "epoch": 0.7844502465912387, "grad_norm": 0.4173102378845215, "learning_rate": 1.0779559865721747e-06, "loss": 0.7458, "step": 18928 }, { "epoch": 0.7844916904969125, "grad_norm": 0.4414609968662262, "learning_rate": 1.0777487670438063e-06, "loss": 0.6855, "step": 18929 }, { "epoch": 0.7845331344025861, "grad_norm": 0.4301402270793915, "learning_rate": 1.077541547515438e-06, "loss": 0.6631, "step": 18930 }, { "epoch": 0.7845745783082597, "grad_norm": 0.4159611463546753, "learning_rate": 1.0773343279870697e-06, "loss": 0.6827, "step": 18931 }, { "epoch": 0.7846160222139335, "grad_norm": 0.39676064252853394, "learning_rate": 1.0771271084587013e-06, "loss": 0.647, "step": 18932 }, { "epoch": 0.7846574661196071, "grad_norm": 0.446014940738678, "learning_rate": 1.0769198889303329e-06, "loss": 0.6962, "step": 18933 }, { "epoch": 0.7846989100252808, 
"grad_norm": 0.44470736384391785, "learning_rate": 1.0767126694019647e-06, "loss": 0.7288, "step": 18934 }, { "epoch": 0.7847403539309544, "grad_norm": 0.46436241269111633, "learning_rate": 1.0765054498735963e-06, "loss": 0.6948, "step": 18935 }, { "epoch": 0.7847817978366282, "grad_norm": 0.4103720188140869, "learning_rate": 1.0762982303452279e-06, "loss": 0.667, "step": 18936 }, { "epoch": 0.7848232417423018, "grad_norm": 0.44861122965812683, "learning_rate": 1.0760910108168595e-06, "loss": 0.6787, "step": 18937 }, { "epoch": 0.7848646856479755, "grad_norm": 0.4206666946411133, "learning_rate": 1.075883791288491e-06, "loss": 0.7148, "step": 18938 }, { "epoch": 0.7849061295536491, "grad_norm": 0.41205519437789917, "learning_rate": 1.0756765717601229e-06, "loss": 0.7126, "step": 18939 }, { "epoch": 0.7849475734593228, "grad_norm": 0.42302823066711426, "learning_rate": 1.0754693522317545e-06, "loss": 0.7236, "step": 18940 }, { "epoch": 0.7849890173649965, "grad_norm": 0.4126865565776825, "learning_rate": 1.075262132703386e-06, "loss": 0.6769, "step": 18941 }, { "epoch": 0.7850304612706701, "grad_norm": 0.4498770833015442, "learning_rate": 1.0750549131750177e-06, "loss": 0.6456, "step": 18942 }, { "epoch": 0.7850719051763438, "grad_norm": 0.4283280074596405, "learning_rate": 1.0748476936466495e-06, "loss": 0.689, "step": 18943 }, { "epoch": 0.7851133490820175, "grad_norm": 0.4110310971736908, "learning_rate": 1.074640474118281e-06, "loss": 0.7446, "step": 18944 }, { "epoch": 0.7851547929876912, "grad_norm": 0.3797263503074646, "learning_rate": 1.0744332545899127e-06, "loss": 0.6619, "step": 18945 }, { "epoch": 0.7851962368933648, "grad_norm": 0.44340941309928894, "learning_rate": 1.0742260350615443e-06, "loss": 0.652, "step": 18946 }, { "epoch": 0.7852376807990386, "grad_norm": 0.4089241325855255, "learning_rate": 1.074018815533176e-06, "loss": 0.6622, "step": 18947 }, { "epoch": 0.7852791247047122, "grad_norm": 0.43524765968322754, "learning_rate": 
1.0738115960048077e-06, "loss": 0.7211, "step": 18948 }, { "epoch": 0.7853205686103858, "grad_norm": 0.3743706941604614, "learning_rate": 1.0736043764764393e-06, "loss": 0.653, "step": 18949 }, { "epoch": 0.7853620125160595, "grad_norm": 0.41213884949684143, "learning_rate": 1.0733971569480709e-06, "loss": 0.7084, "step": 18950 }, { "epoch": 0.7854034564217331, "grad_norm": 0.42825594544410706, "learning_rate": 1.0731899374197025e-06, "loss": 0.7109, "step": 18951 }, { "epoch": 0.7854449003274069, "grad_norm": 0.4252018332481384, "learning_rate": 1.0729827178913343e-06, "loss": 0.6841, "step": 18952 }, { "epoch": 0.7854863442330805, "grad_norm": 0.4036179482936859, "learning_rate": 1.0727754983629659e-06, "loss": 0.6082, "step": 18953 }, { "epoch": 0.7855277881387542, "grad_norm": 0.42404064536094666, "learning_rate": 1.0725682788345975e-06, "loss": 0.6345, "step": 18954 }, { "epoch": 0.7855692320444279, "grad_norm": 0.4236163794994354, "learning_rate": 1.072361059306229e-06, "loss": 0.6759, "step": 18955 }, { "epoch": 0.7856106759501016, "grad_norm": 0.38917067646980286, "learning_rate": 1.0721538397778609e-06, "loss": 0.6542, "step": 18956 }, { "epoch": 0.7856521198557752, "grad_norm": 0.4344775676727295, "learning_rate": 1.0719466202494925e-06, "loss": 0.6478, "step": 18957 }, { "epoch": 0.7856935637614488, "grad_norm": 0.441770076751709, "learning_rate": 1.071739400721124e-06, "loss": 0.6466, "step": 18958 }, { "epoch": 0.7857350076671226, "grad_norm": 0.4167284369468689, "learning_rate": 1.0715321811927557e-06, "loss": 0.6929, "step": 18959 }, { "epoch": 0.7857764515727962, "grad_norm": 0.43887925148010254, "learning_rate": 1.0713249616643875e-06, "loss": 0.6753, "step": 18960 }, { "epoch": 0.7858178954784699, "grad_norm": 0.4397631883621216, "learning_rate": 1.071117742136019e-06, "loss": 0.6851, "step": 18961 }, { "epoch": 0.7858593393841435, "grad_norm": 0.3701726496219635, "learning_rate": 1.0709105226076507e-06, "loss": 0.6942, "step": 18962 }, { "epoch": 
0.7859007832898173, "grad_norm": 0.47792327404022217, "learning_rate": 1.0707033030792823e-06, "loss": 0.6205, "step": 18963 }, { "epoch": 0.7859422271954909, "grad_norm": 0.45355769991874695, "learning_rate": 1.0704960835509139e-06, "loss": 0.7286, "step": 18964 }, { "epoch": 0.7859836711011646, "grad_norm": 0.4044123888015747, "learning_rate": 1.0702888640225457e-06, "loss": 0.6548, "step": 18965 }, { "epoch": 0.7860251150068382, "grad_norm": 0.4119459390640259, "learning_rate": 1.0700816444941773e-06, "loss": 0.6718, "step": 18966 }, { "epoch": 0.7860665589125119, "grad_norm": 0.4102928042411804, "learning_rate": 1.0698744249658089e-06, "loss": 0.6147, "step": 18967 }, { "epoch": 0.7861080028181856, "grad_norm": 0.38672274351119995, "learning_rate": 1.0696672054374405e-06, "loss": 0.6119, "step": 18968 }, { "epoch": 0.7861494467238592, "grad_norm": 0.3983534574508667, "learning_rate": 1.0694599859090723e-06, "loss": 0.6661, "step": 18969 }, { "epoch": 0.786190890629533, "grad_norm": 0.4630031883716583, "learning_rate": 1.0692527663807039e-06, "loss": 0.6514, "step": 18970 }, { "epoch": 0.7862323345352066, "grad_norm": 0.43525680899620056, "learning_rate": 1.0690455468523355e-06, "loss": 0.6543, "step": 18971 }, { "epoch": 0.7862737784408803, "grad_norm": 0.40419742465019226, "learning_rate": 1.068838327323967e-06, "loss": 0.6412, "step": 18972 }, { "epoch": 0.7863152223465539, "grad_norm": 0.43733981251716614, "learning_rate": 1.0686311077955989e-06, "loss": 0.7062, "step": 18973 }, { "epoch": 0.7863566662522277, "grad_norm": 0.4275733530521393, "learning_rate": 1.0684238882672305e-06, "loss": 0.6934, "step": 18974 }, { "epoch": 0.7863981101579013, "grad_norm": 0.42171716690063477, "learning_rate": 1.068216668738862e-06, "loss": 0.677, "step": 18975 }, { "epoch": 0.7864395540635749, "grad_norm": 0.4469958245754242, "learning_rate": 1.0680094492104937e-06, "loss": 0.681, "step": 18976 }, { "epoch": 0.7864809979692486, "grad_norm": 0.40395402908325195, 
"learning_rate": 1.0678022296821253e-06, "loss": 0.6603, "step": 18977 }, { "epoch": 0.7865224418749223, "grad_norm": 0.4354201853275299, "learning_rate": 1.067595010153757e-06, "loss": 0.7185, "step": 18978 }, { "epoch": 0.786563885780596, "grad_norm": 0.39695966243743896, "learning_rate": 1.0673877906253887e-06, "loss": 0.6674, "step": 18979 }, { "epoch": 0.7866053296862696, "grad_norm": 0.36842912435531616, "learning_rate": 1.0671805710970203e-06, "loss": 0.6396, "step": 18980 }, { "epoch": 0.7866467735919433, "grad_norm": 0.400668203830719, "learning_rate": 1.0669733515686519e-06, "loss": 0.6376, "step": 18981 }, { "epoch": 0.786688217497617, "grad_norm": 0.400261789560318, "learning_rate": 1.0667661320402837e-06, "loss": 0.6218, "step": 18982 }, { "epoch": 0.7867296614032906, "grad_norm": 0.43155601620674133, "learning_rate": 1.0665589125119153e-06, "loss": 0.7091, "step": 18983 }, { "epoch": 0.7867711053089643, "grad_norm": 0.46743783354759216, "learning_rate": 1.0663516929835469e-06, "loss": 0.6833, "step": 18984 }, { "epoch": 0.786812549214638, "grad_norm": 0.40582382678985596, "learning_rate": 1.0661444734551785e-06, "loss": 0.7268, "step": 18985 }, { "epoch": 0.7868539931203117, "grad_norm": 0.37013640999794006, "learning_rate": 1.0659372539268103e-06, "loss": 0.6492, "step": 18986 }, { "epoch": 0.7868954370259853, "grad_norm": 0.44557520747184753, "learning_rate": 1.0657300343984419e-06, "loss": 0.6757, "step": 18987 }, { "epoch": 0.786936880931659, "grad_norm": 0.38597506284713745, "learning_rate": 1.0655228148700735e-06, "loss": 0.6714, "step": 18988 }, { "epoch": 0.7869783248373327, "grad_norm": 0.41287940740585327, "learning_rate": 1.065315595341705e-06, "loss": 0.6725, "step": 18989 }, { "epoch": 0.7870197687430064, "grad_norm": 0.4253733456134796, "learning_rate": 1.0651083758133367e-06, "loss": 0.656, "step": 18990 }, { "epoch": 0.78706121264868, "grad_norm": 0.47208890318870544, "learning_rate": 1.0649011562849685e-06, "loss": 0.7074, "step": 
18991 }, { "epoch": 0.7871026565543536, "grad_norm": 0.41482940316200256, "learning_rate": 1.0646939367566e-06, "loss": 0.6758, "step": 18992 }, { "epoch": 0.7871441004600274, "grad_norm": 0.453336238861084, "learning_rate": 1.0644867172282317e-06, "loss": 0.7168, "step": 18993 }, { "epoch": 0.787185544365701, "grad_norm": 0.3993060886859894, "learning_rate": 1.0642794976998633e-06, "loss": 0.6528, "step": 18994 }, { "epoch": 0.7872269882713747, "grad_norm": 0.4056169390678406, "learning_rate": 1.064072278171495e-06, "loss": 0.6921, "step": 18995 }, { "epoch": 0.7872684321770483, "grad_norm": 0.4069829285144806, "learning_rate": 1.0638650586431267e-06, "loss": 0.6958, "step": 18996 }, { "epoch": 0.7873098760827221, "grad_norm": 0.42748552560806274, "learning_rate": 1.0636578391147583e-06, "loss": 0.6855, "step": 18997 }, { "epoch": 0.7873513199883957, "grad_norm": 0.39725416898727417, "learning_rate": 1.0634506195863899e-06, "loss": 0.7271, "step": 18998 }, { "epoch": 0.7873927638940694, "grad_norm": 0.40559178590774536, "learning_rate": 1.0632434000580217e-06, "loss": 0.7312, "step": 18999 }, { "epoch": 0.787434207799743, "grad_norm": 0.38320693373680115, "learning_rate": 1.0630361805296533e-06, "loss": 0.6447, "step": 19000 }, { "epoch": 0.7874756517054167, "grad_norm": 0.4029567837715149, "learning_rate": 1.0628289610012849e-06, "loss": 0.7061, "step": 19001 }, { "epoch": 0.7875170956110904, "grad_norm": 0.4195445775985718, "learning_rate": 1.0626217414729165e-06, "loss": 0.675, "step": 19002 }, { "epoch": 0.787558539516764, "grad_norm": 0.4085678160190582, "learning_rate": 1.062414521944548e-06, "loss": 0.6804, "step": 19003 }, { "epoch": 0.7875999834224378, "grad_norm": 0.4336659908294678, "learning_rate": 1.0622073024161799e-06, "loss": 0.6992, "step": 19004 }, { "epoch": 0.7876414273281114, "grad_norm": 0.39956793189048767, "learning_rate": 1.0620000828878115e-06, "loss": 0.7, "step": 19005 }, { "epoch": 0.7876828712337851, "grad_norm": 0.4146704375743866, 
"learning_rate": 1.061792863359443e-06, "loss": 0.6498, "step": 19006 }, { "epoch": 0.7877243151394587, "grad_norm": 0.4378402531147003, "learning_rate": 1.0615856438310747e-06, "loss": 0.7189, "step": 19007 }, { "epoch": 0.7877657590451325, "grad_norm": 0.4073712229728699, "learning_rate": 1.0613784243027065e-06, "loss": 0.6984, "step": 19008 }, { "epoch": 0.7878072029508061, "grad_norm": 0.42775028944015503, "learning_rate": 1.061171204774338e-06, "loss": 0.6864, "step": 19009 }, { "epoch": 0.7878486468564797, "grad_norm": 0.4420085847377777, "learning_rate": 1.0609639852459697e-06, "loss": 0.6365, "step": 19010 }, { "epoch": 0.7878900907621534, "grad_norm": 0.41885870695114136, "learning_rate": 1.0607567657176013e-06, "loss": 0.7188, "step": 19011 }, { "epoch": 0.7879315346678271, "grad_norm": 0.46431753039360046, "learning_rate": 1.0605495461892329e-06, "loss": 0.7429, "step": 19012 }, { "epoch": 0.7879729785735008, "grad_norm": 0.4021746516227722, "learning_rate": 1.0603423266608647e-06, "loss": 0.6177, "step": 19013 }, { "epoch": 0.7880144224791744, "grad_norm": 0.3859046995639801, "learning_rate": 1.0601351071324963e-06, "loss": 0.6497, "step": 19014 }, { "epoch": 0.7880558663848481, "grad_norm": 0.41980859637260437, "learning_rate": 1.0599278876041279e-06, "loss": 0.6519, "step": 19015 }, { "epoch": 0.7880973102905218, "grad_norm": 0.40654247999191284, "learning_rate": 1.0597206680757595e-06, "loss": 0.6792, "step": 19016 }, { "epoch": 0.7881387541961955, "grad_norm": 0.4137214124202728, "learning_rate": 1.0595134485473913e-06, "loss": 0.6475, "step": 19017 }, { "epoch": 0.7881801981018691, "grad_norm": 0.42744868993759155, "learning_rate": 1.0593062290190229e-06, "loss": 0.6864, "step": 19018 }, { "epoch": 0.7882216420075427, "grad_norm": 0.4114372432231903, "learning_rate": 1.0590990094906545e-06, "loss": 0.6589, "step": 19019 }, { "epoch": 0.7882630859132165, "grad_norm": 0.41254138946533203, "learning_rate": 1.058891789962286e-06, "loss": 0.6875, 
"step": 19020 }, { "epoch": 0.7883045298188901, "grad_norm": 0.4225682020187378, "learning_rate": 1.0586845704339179e-06, "loss": 0.6896, "step": 19021 }, { "epoch": 0.7883459737245638, "grad_norm": 0.39918065071105957, "learning_rate": 1.0584773509055495e-06, "loss": 0.6899, "step": 19022 }, { "epoch": 0.7883874176302375, "grad_norm": 0.43880385160446167, "learning_rate": 1.058270131377181e-06, "loss": 0.6654, "step": 19023 }, { "epoch": 0.7884288615359112, "grad_norm": 0.42066556215286255, "learning_rate": 1.0580629118488127e-06, "loss": 0.7278, "step": 19024 }, { "epoch": 0.7884703054415848, "grad_norm": 0.39099857211112976, "learning_rate": 1.0578556923204443e-06, "loss": 0.6403, "step": 19025 }, { "epoch": 0.7885117493472585, "grad_norm": 0.4247477650642395, "learning_rate": 1.057648472792076e-06, "loss": 0.6399, "step": 19026 }, { "epoch": 0.7885531932529322, "grad_norm": 0.38323912024497986, "learning_rate": 1.0574412532637077e-06, "loss": 0.6619, "step": 19027 }, { "epoch": 0.7885946371586058, "grad_norm": 0.4923589527606964, "learning_rate": 1.0572340337353393e-06, "loss": 0.6241, "step": 19028 }, { "epoch": 0.7886360810642795, "grad_norm": 0.43588075041770935, "learning_rate": 1.0570268142069709e-06, "loss": 0.7095, "step": 19029 }, { "epoch": 0.7886775249699531, "grad_norm": 0.4013383388519287, "learning_rate": 1.0568195946786027e-06, "loss": 0.6127, "step": 19030 }, { "epoch": 0.7887189688756269, "grad_norm": 0.3968726396560669, "learning_rate": 1.0566123751502343e-06, "loss": 0.6345, "step": 19031 }, { "epoch": 0.7887604127813005, "grad_norm": 0.42208540439605713, "learning_rate": 1.0564051556218659e-06, "loss": 0.6735, "step": 19032 }, { "epoch": 0.7888018566869742, "grad_norm": 0.4201124608516693, "learning_rate": 1.0561979360934975e-06, "loss": 0.6483, "step": 19033 }, { "epoch": 0.7888433005926478, "grad_norm": 0.4163497984409332, "learning_rate": 1.0559907165651293e-06, "loss": 0.6702, "step": 19034 }, { "epoch": 0.7888847444983216, "grad_norm": 
0.43796736001968384, "learning_rate": 1.0557834970367609e-06, "loss": 0.666, "step": 19035 }, { "epoch": 0.7889261884039952, "grad_norm": 0.4109412431716919, "learning_rate": 1.0555762775083925e-06, "loss": 0.6716, "step": 19036 }, { "epoch": 0.7889676323096688, "grad_norm": 0.39690208435058594, "learning_rate": 1.055369057980024e-06, "loss": 0.6512, "step": 19037 }, { "epoch": 0.7890090762153426, "grad_norm": 0.4165399968624115, "learning_rate": 1.0551618384516557e-06, "loss": 0.7332, "step": 19038 }, { "epoch": 0.7890505201210162, "grad_norm": 0.41597679257392883, "learning_rate": 1.0549546189232875e-06, "loss": 0.677, "step": 19039 }, { "epoch": 0.7890919640266899, "grad_norm": 0.406015008687973, "learning_rate": 1.054747399394919e-06, "loss": 0.6597, "step": 19040 }, { "epoch": 0.7891334079323635, "grad_norm": 0.40825799107551575, "learning_rate": 1.0545401798665507e-06, "loss": 0.611, "step": 19041 }, { "epoch": 0.7891748518380373, "grad_norm": 0.4134366810321808, "learning_rate": 1.0543329603381823e-06, "loss": 0.6941, "step": 19042 }, { "epoch": 0.7892162957437109, "grad_norm": 0.41800180077552795, "learning_rate": 1.054125740809814e-06, "loss": 0.7043, "step": 19043 }, { "epoch": 0.7892577396493845, "grad_norm": 0.38747695088386536, "learning_rate": 1.0539185212814457e-06, "loss": 0.6022, "step": 19044 }, { "epoch": 0.7892991835550582, "grad_norm": 0.42518559098243713, "learning_rate": 1.0537113017530773e-06, "loss": 0.6266, "step": 19045 }, { "epoch": 0.7893406274607319, "grad_norm": 0.4062229096889496, "learning_rate": 1.0535040822247089e-06, "loss": 0.6439, "step": 19046 }, { "epoch": 0.7893820713664056, "grad_norm": 0.43059051036834717, "learning_rate": 1.0532968626963407e-06, "loss": 0.6707, "step": 19047 }, { "epoch": 0.7894235152720792, "grad_norm": 0.4235536456108093, "learning_rate": 1.0530896431679723e-06, "loss": 0.6158, "step": 19048 }, { "epoch": 0.789464959177753, "grad_norm": 0.3986480236053467, "learning_rate": 1.0528824236396039e-06, 
"loss": 0.658, "step": 19049 }, { "epoch": 0.7895064030834266, "grad_norm": 0.4458043873310089, "learning_rate": 1.0526752041112355e-06, "loss": 0.675, "step": 19050 }, { "epoch": 0.7895478469891003, "grad_norm": 0.41404658555984497, "learning_rate": 1.052467984582867e-06, "loss": 0.7113, "step": 19051 }, { "epoch": 0.7895892908947739, "grad_norm": 0.4179061949253082, "learning_rate": 1.0522607650544989e-06, "loss": 0.6562, "step": 19052 }, { "epoch": 0.7896307348004475, "grad_norm": 0.4164784252643585, "learning_rate": 1.0520535455261305e-06, "loss": 0.6837, "step": 19053 }, { "epoch": 0.7896721787061213, "grad_norm": 0.40183213353157043, "learning_rate": 1.051846325997762e-06, "loss": 0.6362, "step": 19054 }, { "epoch": 0.7897136226117949, "grad_norm": 0.44516390562057495, "learning_rate": 1.0516391064693937e-06, "loss": 0.7229, "step": 19055 }, { "epoch": 0.7897550665174686, "grad_norm": 0.42284077405929565, "learning_rate": 1.0514318869410255e-06, "loss": 0.7131, "step": 19056 }, { "epoch": 0.7897965104231423, "grad_norm": 0.41007930040359497, "learning_rate": 1.051224667412657e-06, "loss": 0.635, "step": 19057 }, { "epoch": 0.789837954328816, "grad_norm": 0.45519980788230896, "learning_rate": 1.0510174478842887e-06, "loss": 0.7332, "step": 19058 }, { "epoch": 0.7898793982344896, "grad_norm": 0.4291892945766449, "learning_rate": 1.0508102283559203e-06, "loss": 0.6285, "step": 19059 }, { "epoch": 0.7899208421401633, "grad_norm": 0.3933717906475067, "learning_rate": 1.050603008827552e-06, "loss": 0.6249, "step": 19060 }, { "epoch": 0.789962286045837, "grad_norm": 0.4040065109729767, "learning_rate": 1.0503957892991837e-06, "loss": 0.6669, "step": 19061 }, { "epoch": 0.7900037299515106, "grad_norm": 0.42912599444389343, "learning_rate": 1.0501885697708153e-06, "loss": 0.6794, "step": 19062 }, { "epoch": 0.7900451738571843, "grad_norm": 0.46373358368873596, "learning_rate": 1.0499813502424469e-06, "loss": 0.7131, "step": 19063 }, { "epoch": 0.7900866177628579, 
"grad_norm": 0.44351908564567566, "learning_rate": 1.0497741307140785e-06, "loss": 0.6991, "step": 19064 }, { "epoch": 0.7901280616685317, "grad_norm": 0.48599907755851746, "learning_rate": 1.0495669111857103e-06, "loss": 0.7123, "step": 19065 }, { "epoch": 0.7901695055742053, "grad_norm": 0.41803210973739624, "learning_rate": 1.0493596916573419e-06, "loss": 0.6292, "step": 19066 }, { "epoch": 0.790210949479879, "grad_norm": 0.3954671621322632, "learning_rate": 1.0491524721289735e-06, "loss": 0.6187, "step": 19067 }, { "epoch": 0.7902523933855526, "grad_norm": 0.4047689139842987, "learning_rate": 1.048945252600605e-06, "loss": 0.6576, "step": 19068 }, { "epoch": 0.7902938372912264, "grad_norm": 0.42807894945144653, "learning_rate": 1.0487380330722369e-06, "loss": 0.6578, "step": 19069 }, { "epoch": 0.7903352811969, "grad_norm": 0.45076891779899597, "learning_rate": 1.0485308135438685e-06, "loss": 0.662, "step": 19070 }, { "epoch": 0.7903767251025736, "grad_norm": 0.407345712184906, "learning_rate": 1.0483235940155e-06, "loss": 0.6387, "step": 19071 }, { "epoch": 0.7904181690082474, "grad_norm": 0.38333529233932495, "learning_rate": 1.0481163744871317e-06, "loss": 0.6071, "step": 19072 }, { "epoch": 0.790459612913921, "grad_norm": 0.4433356523513794, "learning_rate": 1.0479091549587633e-06, "loss": 0.656, "step": 19073 }, { "epoch": 0.7905010568195947, "grad_norm": 0.4648955464363098, "learning_rate": 1.047701935430395e-06, "loss": 0.6985, "step": 19074 }, { "epoch": 0.7905425007252683, "grad_norm": 0.4331129193305969, "learning_rate": 1.0474947159020267e-06, "loss": 0.7375, "step": 19075 }, { "epoch": 0.7905839446309421, "grad_norm": 0.41944730281829834, "learning_rate": 1.0472874963736583e-06, "loss": 0.6921, "step": 19076 }, { "epoch": 0.7906253885366157, "grad_norm": 0.4042914807796478, "learning_rate": 1.0470802768452899e-06, "loss": 0.7076, "step": 19077 }, { "epoch": 0.7906668324422894, "grad_norm": 0.4790422320365906, "learning_rate": 1.0468730573169217e-06, 
"loss": 0.6522, "step": 19078 }, { "epoch": 0.790708276347963, "grad_norm": 0.400082528591156, "learning_rate": 1.0466658377885533e-06, "loss": 0.6116, "step": 19079 }, { "epoch": 0.7907497202536367, "grad_norm": 0.39321476221084595, "learning_rate": 1.0464586182601849e-06, "loss": 0.6589, "step": 19080 }, { "epoch": 0.7907911641593104, "grad_norm": 0.40992337465286255, "learning_rate": 1.0462513987318165e-06, "loss": 0.6509, "step": 19081 }, { "epoch": 0.790832608064984, "grad_norm": 0.4748751223087311, "learning_rate": 1.0460441792034483e-06, "loss": 0.7249, "step": 19082 }, { "epoch": 0.7908740519706577, "grad_norm": 0.4171788990497589, "learning_rate": 1.0458369596750799e-06, "loss": 0.6865, "step": 19083 }, { "epoch": 0.7909154958763314, "grad_norm": 0.3876623213291168, "learning_rate": 1.0456297401467115e-06, "loss": 0.6487, "step": 19084 }, { "epoch": 0.7909569397820051, "grad_norm": 0.4288181662559509, "learning_rate": 1.045422520618343e-06, "loss": 0.6594, "step": 19085 }, { "epoch": 0.7909983836876787, "grad_norm": 0.4108084440231323, "learning_rate": 1.0452153010899747e-06, "loss": 0.6753, "step": 19086 }, { "epoch": 0.7910398275933525, "grad_norm": 0.43524670600891113, "learning_rate": 1.0450080815616065e-06, "loss": 0.7097, "step": 19087 }, { "epoch": 0.7910812714990261, "grad_norm": 0.409869909286499, "learning_rate": 1.044800862033238e-06, "loss": 0.7131, "step": 19088 }, { "epoch": 0.7911227154046997, "grad_norm": 0.380825012922287, "learning_rate": 1.0445936425048697e-06, "loss": 0.6284, "step": 19089 }, { "epoch": 0.7911641593103734, "grad_norm": 0.4318774342536926, "learning_rate": 1.0443864229765013e-06, "loss": 0.704, "step": 19090 }, { "epoch": 0.791205603216047, "grad_norm": 0.43159759044647217, "learning_rate": 1.044179203448133e-06, "loss": 0.7534, "step": 19091 }, { "epoch": 0.7912470471217208, "grad_norm": 0.3986278474330902, "learning_rate": 1.0439719839197647e-06, "loss": 0.6329, "step": 19092 }, { "epoch": 0.7912884910273944, 
"grad_norm": 0.3839908838272095, "learning_rate": 1.0437647643913963e-06, "loss": 0.6395, "step": 19093 }, { "epoch": 0.7913299349330681, "grad_norm": 0.4027753472328186, "learning_rate": 1.0435575448630279e-06, "loss": 0.6786, "step": 19094 }, { "epoch": 0.7913713788387418, "grad_norm": 0.40776771306991577, "learning_rate": 1.0433503253346597e-06, "loss": 0.6345, "step": 19095 }, { "epoch": 0.7914128227444154, "grad_norm": 0.39865782856941223, "learning_rate": 1.0431431058062913e-06, "loss": 0.6886, "step": 19096 }, { "epoch": 0.7914542666500891, "grad_norm": 0.4060210585594177, "learning_rate": 1.0429358862779229e-06, "loss": 0.6309, "step": 19097 }, { "epoch": 0.7914957105557627, "grad_norm": 0.4109935462474823, "learning_rate": 1.0427286667495545e-06, "loss": 0.7019, "step": 19098 }, { "epoch": 0.7915371544614365, "grad_norm": 0.4249586760997772, "learning_rate": 1.042521447221186e-06, "loss": 0.7258, "step": 19099 }, { "epoch": 0.7915785983671101, "grad_norm": 0.4111277759075165, "learning_rate": 1.042314227692818e-06, "loss": 0.708, "step": 19100 }, { "epoch": 0.7916200422727838, "grad_norm": 0.4062563180923462, "learning_rate": 1.0421070081644495e-06, "loss": 0.7083, "step": 19101 }, { "epoch": 0.7916614861784574, "grad_norm": 0.4303516447544098, "learning_rate": 1.041899788636081e-06, "loss": 0.6631, "step": 19102 }, { "epoch": 0.7917029300841312, "grad_norm": 0.41718176007270813, "learning_rate": 1.0416925691077127e-06, "loss": 0.6572, "step": 19103 }, { "epoch": 0.7917443739898048, "grad_norm": 0.43008843064308167, "learning_rate": 1.0414853495793445e-06, "loss": 0.6655, "step": 19104 }, { "epoch": 0.7917858178954784, "grad_norm": 0.3673175871372223, "learning_rate": 1.041278130050976e-06, "loss": 0.679, "step": 19105 }, { "epoch": 0.7918272618011521, "grad_norm": 0.40681567788124084, "learning_rate": 1.0410709105226077e-06, "loss": 0.7119, "step": 19106 }, { "epoch": 0.7918687057068258, "grad_norm": 0.4077039062976837, "learning_rate": 
1.0408636909942393e-06, "loss": 0.6633, "step": 19107 }, { "epoch": 0.7919101496124995, "grad_norm": 0.4045580327510834, "learning_rate": 1.040656471465871e-06, "loss": 0.6392, "step": 19108 }, { "epoch": 0.7919515935181731, "grad_norm": 0.41078484058380127, "learning_rate": 1.0404492519375027e-06, "loss": 0.6687, "step": 19109 }, { "epoch": 0.7919930374238469, "grad_norm": 0.3805042803287506, "learning_rate": 1.0402420324091343e-06, "loss": 0.6573, "step": 19110 }, { "epoch": 0.7920344813295205, "grad_norm": 0.38384923338890076, "learning_rate": 1.0400348128807659e-06, "loss": 0.6379, "step": 19111 }, { "epoch": 0.7920759252351942, "grad_norm": 0.3904039263725281, "learning_rate": 1.0398275933523975e-06, "loss": 0.6085, "step": 19112 }, { "epoch": 0.7921173691408678, "grad_norm": 0.4050913453102112, "learning_rate": 1.0396203738240293e-06, "loss": 0.6792, "step": 19113 }, { "epoch": 0.7921588130465415, "grad_norm": 0.40852969884872437, "learning_rate": 1.039413154295661e-06, "loss": 0.6514, "step": 19114 }, { "epoch": 0.7922002569522152, "grad_norm": 0.3969424068927765, "learning_rate": 1.0392059347672925e-06, "loss": 0.7062, "step": 19115 }, { "epoch": 0.7922417008578888, "grad_norm": 0.421100378036499, "learning_rate": 1.038998715238924e-06, "loss": 0.7146, "step": 19116 }, { "epoch": 0.7922831447635625, "grad_norm": 0.4273785650730133, "learning_rate": 1.038791495710556e-06, "loss": 0.6427, "step": 19117 }, { "epoch": 0.7923245886692362, "grad_norm": 0.4265558421611786, "learning_rate": 1.0385842761821875e-06, "loss": 0.6348, "step": 19118 }, { "epoch": 0.7923660325749099, "grad_norm": 0.4083535671234131, "learning_rate": 1.038377056653819e-06, "loss": 0.6544, "step": 19119 }, { "epoch": 0.7924074764805835, "grad_norm": 0.4063396453857422, "learning_rate": 1.0381698371254507e-06, "loss": 0.6272, "step": 19120 }, { "epoch": 0.7924489203862572, "grad_norm": 0.4282853901386261, "learning_rate": 1.0379626175970825e-06, "loss": 0.7186, "step": 19121 }, { "epoch": 
0.7924903642919309, "grad_norm": 0.4389208257198334, "learning_rate": 1.037755398068714e-06, "loss": 0.7498, "step": 19122 }, { "epoch": 0.7925318081976045, "grad_norm": 0.39804983139038086, "learning_rate": 1.0375481785403457e-06, "loss": 0.687, "step": 19123 }, { "epoch": 0.7925732521032782, "grad_norm": 0.41383928060531616, "learning_rate": 1.0373409590119773e-06, "loss": 0.6573, "step": 19124 }, { "epoch": 0.7926146960089518, "grad_norm": 0.4161819815635681, "learning_rate": 1.0371337394836089e-06, "loss": 0.6837, "step": 19125 }, { "epoch": 0.7926561399146256, "grad_norm": 0.4032747149467468, "learning_rate": 1.0369265199552407e-06, "loss": 0.6675, "step": 19126 }, { "epoch": 0.7926975838202992, "grad_norm": 0.4000454843044281, "learning_rate": 1.0367193004268723e-06, "loss": 0.6792, "step": 19127 }, { "epoch": 0.7927390277259729, "grad_norm": 0.47749966382980347, "learning_rate": 1.0365120808985039e-06, "loss": 0.7383, "step": 19128 }, { "epoch": 0.7927804716316466, "grad_norm": 0.37558141350746155, "learning_rate": 1.0363048613701355e-06, "loss": 0.6378, "step": 19129 }, { "epoch": 0.7928219155373203, "grad_norm": 0.39896973967552185, "learning_rate": 1.0360976418417673e-06, "loss": 0.6128, "step": 19130 }, { "epoch": 0.7928633594429939, "grad_norm": 0.46355870366096497, "learning_rate": 1.035890422313399e-06, "loss": 0.6432, "step": 19131 }, { "epoch": 0.7929048033486675, "grad_norm": 0.3922251760959625, "learning_rate": 1.0356832027850305e-06, "loss": 0.6328, "step": 19132 }, { "epoch": 0.7929462472543413, "grad_norm": 0.4367353916168213, "learning_rate": 1.035475983256662e-06, "loss": 0.6707, "step": 19133 }, { "epoch": 0.7929876911600149, "grad_norm": 0.42394259572029114, "learning_rate": 1.035268763728294e-06, "loss": 0.6489, "step": 19134 }, { "epoch": 0.7930291350656886, "grad_norm": 0.4028220474720001, "learning_rate": 1.0350615441999255e-06, "loss": 0.6215, "step": 19135 }, { "epoch": 0.7930705789713622, "grad_norm": 0.4532501995563507, 
"learning_rate": 1.034854324671557e-06, "loss": 0.6732, "step": 19136 }, { "epoch": 0.793112022877036, "grad_norm": 0.40724194049835205, "learning_rate": 1.0346471051431887e-06, "loss": 0.6362, "step": 19137 }, { "epoch": 0.7931534667827096, "grad_norm": 0.43106380105018616, "learning_rate": 1.0344398856148203e-06, "loss": 0.6265, "step": 19138 }, { "epoch": 0.7931949106883833, "grad_norm": 0.4336501657962799, "learning_rate": 1.034232666086452e-06, "loss": 0.7258, "step": 19139 }, { "epoch": 0.793236354594057, "grad_norm": 0.4108738303184509, "learning_rate": 1.0340254465580837e-06, "loss": 0.6349, "step": 19140 }, { "epoch": 0.7932777984997306, "grad_norm": 0.3940304219722748, "learning_rate": 1.0338182270297153e-06, "loss": 0.6277, "step": 19141 }, { "epoch": 0.7933192424054043, "grad_norm": 0.45226559042930603, "learning_rate": 1.0336110075013469e-06, "loss": 0.6729, "step": 19142 }, { "epoch": 0.7933606863110779, "grad_norm": 0.42679503560066223, "learning_rate": 1.0334037879729787e-06, "loss": 0.6646, "step": 19143 }, { "epoch": 0.7934021302167517, "grad_norm": 0.41281846165657043, "learning_rate": 1.0331965684446103e-06, "loss": 0.6829, "step": 19144 }, { "epoch": 0.7934435741224253, "grad_norm": 0.4624081552028656, "learning_rate": 1.032989348916242e-06, "loss": 0.7023, "step": 19145 }, { "epoch": 0.793485018028099, "grad_norm": 0.42832711338996887, "learning_rate": 1.0327821293878735e-06, "loss": 0.6868, "step": 19146 }, { "epoch": 0.7935264619337726, "grad_norm": 0.3996882140636444, "learning_rate": 1.032574909859505e-06, "loss": 0.5905, "step": 19147 }, { "epoch": 0.7935679058394464, "grad_norm": 0.392010360956192, "learning_rate": 1.032367690331137e-06, "loss": 0.6793, "step": 19148 }, { "epoch": 0.79360934974512, "grad_norm": 0.4246884882450104, "learning_rate": 1.0321604708027685e-06, "loss": 0.6733, "step": 19149 }, { "epoch": 0.7936507936507936, "grad_norm": 0.4101029932498932, "learning_rate": 1.0319532512744e-06, "loss": 0.6541, "step": 19150 }, { 
"epoch": 0.7936922375564673, "grad_norm": 0.40647193789482117, "learning_rate": 1.0317460317460317e-06, "loss": 0.6979, "step": 19151 }, { "epoch": 0.793733681462141, "grad_norm": 0.3890632688999176, "learning_rate": 1.0315388122176635e-06, "loss": 0.6193, "step": 19152 }, { "epoch": 0.7937751253678147, "grad_norm": 0.4335087239742279, "learning_rate": 1.031331592689295e-06, "loss": 0.6156, "step": 19153 }, { "epoch": 0.7938165692734883, "grad_norm": 0.4121062755584717, "learning_rate": 1.0311243731609267e-06, "loss": 0.6553, "step": 19154 }, { "epoch": 0.793858013179162, "grad_norm": 0.4343561828136444, "learning_rate": 1.0309171536325583e-06, "loss": 0.6888, "step": 19155 }, { "epoch": 0.7938994570848357, "grad_norm": 0.394872784614563, "learning_rate": 1.03070993410419e-06, "loss": 0.6339, "step": 19156 }, { "epoch": 0.7939409009905093, "grad_norm": 0.418499618768692, "learning_rate": 1.0305027145758217e-06, "loss": 0.6666, "step": 19157 }, { "epoch": 0.793982344896183, "grad_norm": 0.39036089181900024, "learning_rate": 1.0302954950474533e-06, "loss": 0.6614, "step": 19158 }, { "epoch": 0.7940237888018566, "grad_norm": 0.4605160653591156, "learning_rate": 1.030088275519085e-06, "loss": 0.6687, "step": 19159 }, { "epoch": 0.7940652327075304, "grad_norm": 0.4489428699016571, "learning_rate": 1.0298810559907165e-06, "loss": 0.7183, "step": 19160 }, { "epoch": 0.794106676613204, "grad_norm": 0.4360344111919403, "learning_rate": 1.0296738364623483e-06, "loss": 0.6559, "step": 19161 }, { "epoch": 0.7941481205188777, "grad_norm": 0.3946727514266968, "learning_rate": 1.02946661693398e-06, "loss": 0.6885, "step": 19162 }, { "epoch": 0.7941895644245514, "grad_norm": 0.39983662962913513, "learning_rate": 1.0292593974056115e-06, "loss": 0.6736, "step": 19163 }, { "epoch": 0.7942310083302251, "grad_norm": 0.4200667142868042, "learning_rate": 1.029052177877243e-06, "loss": 0.6833, "step": 19164 }, { "epoch": 0.7942724522358987, "grad_norm": 0.4198237359523773, 
"learning_rate": 1.028844958348875e-06, "loss": 0.6477, "step": 19165 }, { "epoch": 0.7943138961415723, "grad_norm": 0.43376612663269043, "learning_rate": 1.0286377388205065e-06, "loss": 0.7078, "step": 19166 }, { "epoch": 0.7943553400472461, "grad_norm": 0.41372203826904297, "learning_rate": 1.028430519292138e-06, "loss": 0.6509, "step": 19167 }, { "epoch": 0.7943967839529197, "grad_norm": 0.4244420826435089, "learning_rate": 1.0282232997637697e-06, "loss": 0.6527, "step": 19168 }, { "epoch": 0.7944382278585934, "grad_norm": 0.40649351477622986, "learning_rate": 1.0280160802354015e-06, "loss": 0.7073, "step": 19169 }, { "epoch": 0.794479671764267, "grad_norm": 0.4477437436580658, "learning_rate": 1.027808860707033e-06, "loss": 0.6882, "step": 19170 }, { "epoch": 0.7945211156699408, "grad_norm": 0.39487090706825256, "learning_rate": 1.0276016411786647e-06, "loss": 0.6521, "step": 19171 }, { "epoch": 0.7945625595756144, "grad_norm": 0.4563996493816376, "learning_rate": 1.0273944216502963e-06, "loss": 0.6487, "step": 19172 }, { "epoch": 0.7946040034812881, "grad_norm": 0.42581963539123535, "learning_rate": 1.027187202121928e-06, "loss": 0.6924, "step": 19173 }, { "epoch": 0.7946454473869617, "grad_norm": 0.4447450041770935, "learning_rate": 1.0269799825935597e-06, "loss": 0.6821, "step": 19174 }, { "epoch": 0.7946868912926354, "grad_norm": 0.41711607575416565, "learning_rate": 1.0267727630651913e-06, "loss": 0.6477, "step": 19175 }, { "epoch": 0.7947283351983091, "grad_norm": 0.4189877510070801, "learning_rate": 1.026565543536823e-06, "loss": 0.7258, "step": 19176 }, { "epoch": 0.7947697791039827, "grad_norm": 0.3847160041332245, "learning_rate": 1.0263583240084545e-06, "loss": 0.6658, "step": 19177 }, { "epoch": 0.7948112230096565, "grad_norm": 0.45315617322921753, "learning_rate": 1.0261511044800863e-06, "loss": 0.7803, "step": 19178 }, { "epoch": 0.7948526669153301, "grad_norm": 0.4791221022605896, "learning_rate": 1.025943884951718e-06, "loss": 0.7, "step": 19179 
}, { "epoch": 0.7948941108210038, "grad_norm": 0.45094266533851624, "learning_rate": 1.0257366654233495e-06, "loss": 0.7161, "step": 19180 }, { "epoch": 0.7949355547266774, "grad_norm": 0.41903766989707947, "learning_rate": 1.025529445894981e-06, "loss": 0.6396, "step": 19181 }, { "epoch": 0.7949769986323512, "grad_norm": 0.42651352286338806, "learning_rate": 1.025322226366613e-06, "loss": 0.6431, "step": 19182 }, { "epoch": 0.7950184425380248, "grad_norm": 0.41960132122039795, "learning_rate": 1.0251150068382445e-06, "loss": 0.6855, "step": 19183 }, { "epoch": 0.7950598864436984, "grad_norm": 0.43986159563064575, "learning_rate": 1.024907787309876e-06, "loss": 0.7014, "step": 19184 }, { "epoch": 0.7951013303493721, "grad_norm": 0.39675453305244446, "learning_rate": 1.0247005677815077e-06, "loss": 0.618, "step": 19185 }, { "epoch": 0.7951427742550458, "grad_norm": 0.4377390444278717, "learning_rate": 1.0244933482531393e-06, "loss": 0.723, "step": 19186 }, { "epoch": 0.7951842181607195, "grad_norm": 0.4442036747932434, "learning_rate": 1.024286128724771e-06, "loss": 0.6952, "step": 19187 }, { "epoch": 0.7952256620663931, "grad_norm": 0.41307970881462097, "learning_rate": 1.0240789091964027e-06, "loss": 0.6589, "step": 19188 }, { "epoch": 0.7952671059720668, "grad_norm": 0.42951148748397827, "learning_rate": 1.0238716896680343e-06, "loss": 0.6949, "step": 19189 }, { "epoch": 0.7953085498777405, "grad_norm": 0.3905729055404663, "learning_rate": 1.023664470139666e-06, "loss": 0.6451, "step": 19190 }, { "epoch": 0.7953499937834142, "grad_norm": 0.4037232995033264, "learning_rate": 1.0234572506112977e-06, "loss": 0.7296, "step": 19191 }, { "epoch": 0.7953914376890878, "grad_norm": 0.43444111943244934, "learning_rate": 1.0232500310829293e-06, "loss": 0.6639, "step": 19192 }, { "epoch": 0.7954328815947614, "grad_norm": 0.3985172212123871, "learning_rate": 1.023042811554561e-06, "loss": 0.6648, "step": 19193 }, { "epoch": 0.7954743255004352, "grad_norm": 
0.46277618408203125, "learning_rate": 1.0228355920261925e-06, "loss": 0.7373, "step": 19194 }, { "epoch": 0.7955157694061088, "grad_norm": 0.3824600577354431, "learning_rate": 1.0226283724978243e-06, "loss": 0.624, "step": 19195 }, { "epoch": 0.7955572133117825, "grad_norm": 0.42047038674354553, "learning_rate": 1.022421152969456e-06, "loss": 0.6831, "step": 19196 }, { "epoch": 0.7955986572174562, "grad_norm": 0.4389949440956116, "learning_rate": 1.0222139334410875e-06, "loss": 0.6552, "step": 19197 }, { "epoch": 0.7956401011231299, "grad_norm": 0.41491270065307617, "learning_rate": 1.022006713912719e-06, "loss": 0.691, "step": 19198 }, { "epoch": 0.7956815450288035, "grad_norm": 0.42028236389160156, "learning_rate": 1.021799494384351e-06, "loss": 0.6403, "step": 19199 }, { "epoch": 0.7957229889344772, "grad_norm": 0.4287458062171936, "learning_rate": 1.0215922748559825e-06, "loss": 0.6713, "step": 19200 }, { "epoch": 0.7957644328401509, "grad_norm": 0.5021578073501587, "learning_rate": 1.021385055327614e-06, "loss": 0.7108, "step": 19201 }, { "epoch": 0.7958058767458245, "grad_norm": 0.36508241295814514, "learning_rate": 1.0211778357992457e-06, "loss": 0.6388, "step": 19202 }, { "epoch": 0.7958473206514982, "grad_norm": 0.4200110137462616, "learning_rate": 1.0209706162708773e-06, "loss": 0.7119, "step": 19203 }, { "epoch": 0.7958887645571718, "grad_norm": 0.3856765031814575, "learning_rate": 1.0207633967425091e-06, "loss": 0.6418, "step": 19204 }, { "epoch": 0.7959302084628456, "grad_norm": 0.39627358317375183, "learning_rate": 1.0205561772141407e-06, "loss": 0.7054, "step": 19205 }, { "epoch": 0.7959716523685192, "grad_norm": 0.40329477190971375, "learning_rate": 1.0203489576857723e-06, "loss": 0.6938, "step": 19206 }, { "epoch": 0.7960130962741929, "grad_norm": 0.4326789975166321, "learning_rate": 1.020141738157404e-06, "loss": 0.6495, "step": 19207 }, { "epoch": 0.7960545401798665, "grad_norm": 0.4115239977836609, "learning_rate": 1.0199345186290357e-06, 
"loss": 0.6654, "step": 19208 }, { "epoch": 0.7960959840855403, "grad_norm": 0.4198668599128723, "learning_rate": 1.0197272991006673e-06, "loss": 0.6797, "step": 19209 }, { "epoch": 0.7961374279912139, "grad_norm": 0.4563296437263489, "learning_rate": 1.019520079572299e-06, "loss": 0.6836, "step": 19210 }, { "epoch": 0.7961788718968875, "grad_norm": 0.3868393898010254, "learning_rate": 1.0193128600439305e-06, "loss": 0.7185, "step": 19211 }, { "epoch": 0.7962203158025613, "grad_norm": 0.3994968831539154, "learning_rate": 1.0191056405155623e-06, "loss": 0.6552, "step": 19212 }, { "epoch": 0.7962617597082349, "grad_norm": 0.41161084175109863, "learning_rate": 1.018898420987194e-06, "loss": 0.674, "step": 19213 }, { "epoch": 0.7963032036139086, "grad_norm": 0.38676247000694275, "learning_rate": 1.0186912014588255e-06, "loss": 0.6487, "step": 19214 }, { "epoch": 0.7963446475195822, "grad_norm": 0.38010066747665405, "learning_rate": 1.018483981930457e-06, "loss": 0.6394, "step": 19215 }, { "epoch": 0.796386091425256, "grad_norm": 0.4166615307331085, "learning_rate": 1.018276762402089e-06, "loss": 0.6436, "step": 19216 }, { "epoch": 0.7964275353309296, "grad_norm": 0.4359854459762573, "learning_rate": 1.0180695428737205e-06, "loss": 0.6665, "step": 19217 }, { "epoch": 0.7964689792366032, "grad_norm": 0.40236780047416687, "learning_rate": 1.0178623233453521e-06, "loss": 0.6796, "step": 19218 }, { "epoch": 0.7965104231422769, "grad_norm": 0.4294043779373169, "learning_rate": 1.0176551038169837e-06, "loss": 0.689, "step": 19219 }, { "epoch": 0.7965518670479506, "grad_norm": 0.4124505817890167, "learning_rate": 1.0174478842886153e-06, "loss": 0.7263, "step": 19220 }, { "epoch": 0.7965933109536243, "grad_norm": 0.4284639060497284, "learning_rate": 1.0172406647602471e-06, "loss": 0.6317, "step": 19221 }, { "epoch": 0.7966347548592979, "grad_norm": 0.38135308027267456, "learning_rate": 1.0170334452318787e-06, "loss": 0.5961, "step": 19222 }, { "epoch": 0.7966761987649716, 
"grad_norm": 0.40183383226394653, "learning_rate": 1.0168262257035103e-06, "loss": 0.6345, "step": 19223 }, { "epoch": 0.7967176426706453, "grad_norm": 0.3929379880428314, "learning_rate": 1.016619006175142e-06, "loss": 0.6358, "step": 19224 }, { "epoch": 0.796759086576319, "grad_norm": 0.4066028594970703, "learning_rate": 1.0164117866467737e-06, "loss": 0.6777, "step": 19225 }, { "epoch": 0.7968005304819926, "grad_norm": 0.4000471830368042, "learning_rate": 1.0162045671184053e-06, "loss": 0.656, "step": 19226 }, { "epoch": 0.7968419743876662, "grad_norm": 0.3860797882080078, "learning_rate": 1.015997347590037e-06, "loss": 0.6724, "step": 19227 }, { "epoch": 0.79688341829334, "grad_norm": 0.44066840410232544, "learning_rate": 1.0157901280616685e-06, "loss": 0.6477, "step": 19228 }, { "epoch": 0.7969248621990136, "grad_norm": 0.42083582282066345, "learning_rate": 1.0155829085333003e-06, "loss": 0.6974, "step": 19229 }, { "epoch": 0.7969663061046873, "grad_norm": 0.4336947798728943, "learning_rate": 1.015375689004932e-06, "loss": 0.6466, "step": 19230 }, { "epoch": 0.797007750010361, "grad_norm": 0.3749508857727051, "learning_rate": 1.0151684694765635e-06, "loss": 0.6532, "step": 19231 }, { "epoch": 0.7970491939160347, "grad_norm": 0.42087122797966003, "learning_rate": 1.0149612499481951e-06, "loss": 0.642, "step": 19232 }, { "epoch": 0.7970906378217083, "grad_norm": 0.41845813393592834, "learning_rate": 1.014754030419827e-06, "loss": 0.6338, "step": 19233 }, { "epoch": 0.797132081727382, "grad_norm": 0.43553638458251953, "learning_rate": 1.0145468108914585e-06, "loss": 0.7169, "step": 19234 }, { "epoch": 0.7971735256330557, "grad_norm": 0.5038114786148071, "learning_rate": 1.0143395913630901e-06, "loss": 0.6865, "step": 19235 }, { "epoch": 0.7972149695387293, "grad_norm": 0.40449094772338867, "learning_rate": 1.0141323718347217e-06, "loss": 0.6499, "step": 19236 }, { "epoch": 0.797256413444403, "grad_norm": 0.43818750977516174, "learning_rate": 
1.0139251523063533e-06, "loss": 0.6982, "step": 19237 }, { "epoch": 0.7972978573500766, "grad_norm": 0.4346558153629303, "learning_rate": 1.0137179327779851e-06, "loss": 0.6974, "step": 19238 }, { "epoch": 0.7973393012557504, "grad_norm": 0.48006772994995117, "learning_rate": 1.0135107132496167e-06, "loss": 0.7063, "step": 19239 }, { "epoch": 0.797380745161424, "grad_norm": 0.4236416518688202, "learning_rate": 1.0133034937212483e-06, "loss": 0.6307, "step": 19240 }, { "epoch": 0.7974221890670977, "grad_norm": 0.39990922808647156, "learning_rate": 1.01309627419288e-06, "loss": 0.6396, "step": 19241 }, { "epoch": 0.7974636329727713, "grad_norm": 0.4082353711128235, "learning_rate": 1.0128890546645117e-06, "loss": 0.7078, "step": 19242 }, { "epoch": 0.7975050768784451, "grad_norm": 0.4209856688976288, "learning_rate": 1.0126818351361433e-06, "loss": 0.646, "step": 19243 }, { "epoch": 0.7975465207841187, "grad_norm": 0.4066612422466278, "learning_rate": 1.012474615607775e-06, "loss": 0.6575, "step": 19244 }, { "epoch": 0.7975879646897923, "grad_norm": 0.43028101325035095, "learning_rate": 1.0122673960794065e-06, "loss": 0.656, "step": 19245 }, { "epoch": 0.797629408595466, "grad_norm": 0.4397296905517578, "learning_rate": 1.0120601765510383e-06, "loss": 0.6755, "step": 19246 }, { "epoch": 0.7976708525011397, "grad_norm": 0.4157484173774719, "learning_rate": 1.01185295702267e-06, "loss": 0.6113, "step": 19247 }, { "epoch": 0.7977122964068134, "grad_norm": 0.4109066426753998, "learning_rate": 1.0116457374943015e-06, "loss": 0.6416, "step": 19248 }, { "epoch": 0.797753740312487, "grad_norm": 0.42129120230674744, "learning_rate": 1.0114385179659331e-06, "loss": 0.627, "step": 19249 }, { "epoch": 0.7977951842181608, "grad_norm": 0.4447652995586395, "learning_rate": 1.011231298437565e-06, "loss": 0.689, "step": 19250 }, { "epoch": 0.7978366281238344, "grad_norm": 0.4349232614040375, "learning_rate": 1.0110240789091965e-06, "loss": 0.6863, "step": 19251 }, { "epoch": 
0.7978780720295081, "grad_norm": 0.42941880226135254, "learning_rate": 1.0108168593808281e-06, "loss": 0.6669, "step": 19252 }, { "epoch": 0.7979195159351817, "grad_norm": 0.3948589563369751, "learning_rate": 1.0106096398524597e-06, "loss": 0.6521, "step": 19253 }, { "epoch": 0.7979609598408554, "grad_norm": 0.42189639806747437, "learning_rate": 1.0104024203240915e-06, "loss": 0.6588, "step": 19254 }, { "epoch": 0.7980024037465291, "grad_norm": 0.4036520719528198, "learning_rate": 1.0101952007957231e-06, "loss": 0.6676, "step": 19255 }, { "epoch": 0.7980438476522027, "grad_norm": 0.40319085121154785, "learning_rate": 1.0099879812673547e-06, "loss": 0.6213, "step": 19256 }, { "epoch": 0.7980852915578764, "grad_norm": 0.4165392518043518, "learning_rate": 1.0097807617389863e-06, "loss": 0.6755, "step": 19257 }, { "epoch": 0.7981267354635501, "grad_norm": 0.40580490231513977, "learning_rate": 1.009573542210618e-06, "loss": 0.6582, "step": 19258 }, { "epoch": 0.7981681793692238, "grad_norm": 0.42870891094207764, "learning_rate": 1.0093663226822497e-06, "loss": 0.7094, "step": 19259 }, { "epoch": 0.7982096232748974, "grad_norm": 0.4271981120109558, "learning_rate": 1.0091591031538813e-06, "loss": 0.6838, "step": 19260 }, { "epoch": 0.7982510671805711, "grad_norm": 0.4052793085575104, "learning_rate": 1.008951883625513e-06, "loss": 0.6425, "step": 19261 }, { "epoch": 0.7982925110862448, "grad_norm": 0.4063340723514557, "learning_rate": 1.0087446640971445e-06, "loss": 0.6262, "step": 19262 }, { "epoch": 0.7983339549919184, "grad_norm": 0.4111064672470093, "learning_rate": 1.0085374445687763e-06, "loss": 0.7012, "step": 19263 }, { "epoch": 0.7983753988975921, "grad_norm": 0.4201134145259857, "learning_rate": 1.008330225040408e-06, "loss": 0.6301, "step": 19264 }, { "epoch": 0.7984168428032657, "grad_norm": 0.4240688979625702, "learning_rate": 1.0081230055120395e-06, "loss": 0.6682, "step": 19265 }, { "epoch": 0.7984582867089395, "grad_norm": 0.4157269597053528, 
"learning_rate": 1.0079157859836711e-06, "loss": 0.6768, "step": 19266 }, { "epoch": 0.7984997306146131, "grad_norm": 0.42575252056121826, "learning_rate": 1.007708566455303e-06, "loss": 0.7024, "step": 19267 }, { "epoch": 0.7985411745202868, "grad_norm": 0.4296768009662628, "learning_rate": 1.0075013469269345e-06, "loss": 0.7058, "step": 19268 }, { "epoch": 0.7985826184259605, "grad_norm": 0.3999042212963104, "learning_rate": 1.0072941273985661e-06, "loss": 0.6736, "step": 19269 }, { "epoch": 0.7986240623316342, "grad_norm": 0.465154767036438, "learning_rate": 1.0070869078701977e-06, "loss": 0.7075, "step": 19270 }, { "epoch": 0.7986655062373078, "grad_norm": 0.41514769196510315, "learning_rate": 1.0068796883418295e-06, "loss": 0.6445, "step": 19271 }, { "epoch": 0.7987069501429814, "grad_norm": 0.4592706859111786, "learning_rate": 1.0066724688134611e-06, "loss": 0.7712, "step": 19272 }, { "epoch": 0.7987483940486552, "grad_norm": 0.43338578939437866, "learning_rate": 1.0064652492850927e-06, "loss": 0.6334, "step": 19273 }, { "epoch": 0.7987898379543288, "grad_norm": 0.43260589241981506, "learning_rate": 1.0062580297567243e-06, "loss": 0.6985, "step": 19274 }, { "epoch": 0.7988312818600025, "grad_norm": 0.40319791436195374, "learning_rate": 1.006050810228356e-06, "loss": 0.6418, "step": 19275 }, { "epoch": 0.7988727257656761, "grad_norm": 0.417646586894989, "learning_rate": 1.0058435906999877e-06, "loss": 0.7026, "step": 19276 }, { "epoch": 0.7989141696713499, "grad_norm": 0.42771264910697937, "learning_rate": 1.0056363711716193e-06, "loss": 0.652, "step": 19277 }, { "epoch": 0.7989556135770235, "grad_norm": 0.4110005795955658, "learning_rate": 1.005429151643251e-06, "loss": 0.6628, "step": 19278 }, { "epoch": 0.7989970574826971, "grad_norm": 0.4150239825248718, "learning_rate": 1.0052219321148825e-06, "loss": 0.658, "step": 19279 }, { "epoch": 0.7990385013883708, "grad_norm": 0.3892100751399994, "learning_rate": 1.0050147125865143e-06, "loss": 0.6283, "step": 
19280 }, { "epoch": 0.7990799452940445, "grad_norm": 0.46774381399154663, "learning_rate": 1.004807493058146e-06, "loss": 0.6985, "step": 19281 }, { "epoch": 0.7991213891997182, "grad_norm": 0.4484656751155853, "learning_rate": 1.0046002735297775e-06, "loss": 0.6995, "step": 19282 }, { "epoch": 0.7991628331053918, "grad_norm": 0.3673012852668762, "learning_rate": 1.0043930540014091e-06, "loss": 0.5863, "step": 19283 }, { "epoch": 0.7992042770110656, "grad_norm": 0.42089372873306274, "learning_rate": 1.004185834473041e-06, "loss": 0.7371, "step": 19284 }, { "epoch": 0.7992457209167392, "grad_norm": 0.5051882863044739, "learning_rate": 1.0039786149446725e-06, "loss": 0.771, "step": 19285 }, { "epoch": 0.7992871648224129, "grad_norm": 0.477998822927475, "learning_rate": 1.0037713954163041e-06, "loss": 0.705, "step": 19286 }, { "epoch": 0.7993286087280865, "grad_norm": 0.6044408679008484, "learning_rate": 1.0035641758879357e-06, "loss": 0.7295, "step": 19287 }, { "epoch": 0.7993700526337602, "grad_norm": 0.4190250635147095, "learning_rate": 1.0033569563595675e-06, "loss": 0.6799, "step": 19288 }, { "epoch": 0.7994114965394339, "grad_norm": 0.46985289454460144, "learning_rate": 1.0031497368311991e-06, "loss": 0.7078, "step": 19289 }, { "epoch": 0.7994529404451075, "grad_norm": 0.3941118121147156, "learning_rate": 1.0029425173028307e-06, "loss": 0.6658, "step": 19290 }, { "epoch": 0.7994943843507812, "grad_norm": 0.3669600188732147, "learning_rate": 1.0027352977744623e-06, "loss": 0.6312, "step": 19291 }, { "epoch": 0.7995358282564549, "grad_norm": 0.39518335461616516, "learning_rate": 1.0025280782460941e-06, "loss": 0.6454, "step": 19292 }, { "epoch": 0.7995772721621286, "grad_norm": 0.41699251532554626, "learning_rate": 1.0023208587177257e-06, "loss": 0.6641, "step": 19293 }, { "epoch": 0.7996187160678022, "grad_norm": 0.4234275817871094, "learning_rate": 1.0021136391893573e-06, "loss": 0.651, "step": 19294 }, { "epoch": 0.799660159973476, "grad_norm": 
0.4194095730781555, "learning_rate": 1.001906419660989e-06, "loss": 0.6624, "step": 19295 }, { "epoch": 0.7997016038791496, "grad_norm": 0.3878420889377594, "learning_rate": 1.0016992001326205e-06, "loss": 0.6987, "step": 19296 }, { "epoch": 0.7997430477848232, "grad_norm": 0.42241519689559937, "learning_rate": 1.0014919806042523e-06, "loss": 0.6925, "step": 19297 }, { "epoch": 0.7997844916904969, "grad_norm": 0.3853236436843872, "learning_rate": 1.001284761075884e-06, "loss": 0.6838, "step": 19298 }, { "epoch": 0.7998259355961705, "grad_norm": 0.3837362229824066, "learning_rate": 1.0010775415475155e-06, "loss": 0.6135, "step": 19299 }, { "epoch": 0.7998673795018443, "grad_norm": 0.4171057343482971, "learning_rate": 1.0008703220191471e-06, "loss": 0.6453, "step": 19300 }, { "epoch": 0.7999088234075179, "grad_norm": 0.4310872554779053, "learning_rate": 1.000663102490779e-06, "loss": 0.6536, "step": 19301 }, { "epoch": 0.7999502673131916, "grad_norm": 0.41878220438957214, "learning_rate": 1.0004558829624105e-06, "loss": 0.7063, "step": 19302 }, { "epoch": 0.7999917112188653, "grad_norm": 0.42537620663642883, "learning_rate": 1.0002486634340421e-06, "loss": 0.6919, "step": 19303 }, { "epoch": 0.800033155124539, "grad_norm": 0.3814378082752228, "learning_rate": 1.0000414439056737e-06, "loss": 0.6536, "step": 19304 }, { "epoch": 0.8000745990302126, "grad_norm": 0.40808889269828796, "learning_rate": 9.998342243773055e-07, "loss": 0.6558, "step": 19305 }, { "epoch": 0.8001160429358862, "grad_norm": 0.3830588459968567, "learning_rate": 9.996270048489371e-07, "loss": 0.6656, "step": 19306 }, { "epoch": 0.80015748684156, "grad_norm": 0.4424755573272705, "learning_rate": 9.994197853205687e-07, "loss": 0.6902, "step": 19307 }, { "epoch": 0.8001989307472336, "grad_norm": 0.3978821039199829, "learning_rate": 9.992125657922003e-07, "loss": 0.6287, "step": 19308 }, { "epoch": 0.8002403746529073, "grad_norm": 0.4131268858909607, "learning_rate": 9.990053462638321e-07, "loss": 
0.6476, "step": 19309 }, { "epoch": 0.8002818185585809, "grad_norm": 0.43022796511650085, "learning_rate": 9.987981267354637e-07, "loss": 0.6949, "step": 19310 }, { "epoch": 0.8003232624642547, "grad_norm": 0.38939306139945984, "learning_rate": 9.985909072070953e-07, "loss": 0.6432, "step": 19311 }, { "epoch": 0.8003647063699283, "grad_norm": 0.43666860461235046, "learning_rate": 9.98383687678727e-07, "loss": 0.6705, "step": 19312 }, { "epoch": 0.800406150275602, "grad_norm": 0.39608246088027954, "learning_rate": 9.981764681503585e-07, "loss": 0.6694, "step": 19313 }, { "epoch": 0.8004475941812756, "grad_norm": 0.40720611810684204, "learning_rate": 9.979692486219903e-07, "loss": 0.6625, "step": 19314 }, { "epoch": 0.8004890380869493, "grad_norm": 0.40317606925964355, "learning_rate": 9.97762029093622e-07, "loss": 0.6455, "step": 19315 }, { "epoch": 0.800530481992623, "grad_norm": 0.4622092545032501, "learning_rate": 9.975548095652535e-07, "loss": 0.7468, "step": 19316 }, { "epoch": 0.8005719258982966, "grad_norm": 0.4301154911518097, "learning_rate": 9.973475900368851e-07, "loss": 0.655, "step": 19317 }, { "epoch": 0.8006133698039704, "grad_norm": 0.3981773853302002, "learning_rate": 9.97140370508517e-07, "loss": 0.6176, "step": 19318 }, { "epoch": 0.800654813709644, "grad_norm": 0.4526747465133667, "learning_rate": 9.969331509801485e-07, "loss": 0.7174, "step": 19319 }, { "epoch": 0.8006962576153177, "grad_norm": 0.42840272188186646, "learning_rate": 9.967259314517801e-07, "loss": 0.679, "step": 19320 }, { "epoch": 0.8007377015209913, "grad_norm": 0.4032116234302521, "learning_rate": 9.965187119234117e-07, "loss": 0.6471, "step": 19321 }, { "epoch": 0.8007791454266651, "grad_norm": 0.436919242143631, "learning_rate": 9.963114923950435e-07, "loss": 0.6798, "step": 19322 }, { "epoch": 0.8008205893323387, "grad_norm": 0.4440711736679077, "learning_rate": 9.961042728666751e-07, "loss": 0.6509, "step": 19323 }, { "epoch": 0.8008620332380123, "grad_norm": 
0.41983041167259216, "learning_rate": 9.958970533383067e-07, "loss": 0.6814, "step": 19324 }, { "epoch": 0.800903477143686, "grad_norm": 0.4386483430862427, "learning_rate": 9.956898338099383e-07, "loss": 0.6748, "step": 19325 }, { "epoch": 0.8009449210493597, "grad_norm": 0.4092095196247101, "learning_rate": 9.954826142815701e-07, "loss": 0.6581, "step": 19326 }, { "epoch": 0.8009863649550334, "grad_norm": 0.3955235779285431, "learning_rate": 9.952753947532017e-07, "loss": 0.6136, "step": 19327 }, { "epoch": 0.801027808860707, "grad_norm": 0.4116622507572174, "learning_rate": 9.950681752248333e-07, "loss": 0.7034, "step": 19328 }, { "epoch": 0.8010692527663807, "grad_norm": 0.4314057528972626, "learning_rate": 9.94860955696465e-07, "loss": 0.6418, "step": 19329 }, { "epoch": 0.8011106966720544, "grad_norm": 0.4565576910972595, "learning_rate": 9.946537361680965e-07, "loss": 0.6771, "step": 19330 }, { "epoch": 0.8011521405777281, "grad_norm": 0.3949267864227295, "learning_rate": 9.944465166397283e-07, "loss": 0.6364, "step": 19331 }, { "epoch": 0.8011935844834017, "grad_norm": 0.4094487130641937, "learning_rate": 9.9423929711136e-07, "loss": 0.6448, "step": 19332 }, { "epoch": 0.8012350283890753, "grad_norm": 0.45284906029701233, "learning_rate": 9.940320775829915e-07, "loss": 0.6914, "step": 19333 }, { "epoch": 0.8012764722947491, "grad_norm": 0.45013436675071716, "learning_rate": 9.938248580546231e-07, "loss": 0.6696, "step": 19334 }, { "epoch": 0.8013179162004227, "grad_norm": 0.47605466842651367, "learning_rate": 9.93617638526255e-07, "loss": 0.6858, "step": 19335 }, { "epoch": 0.8013593601060964, "grad_norm": 0.4308841824531555, "learning_rate": 9.934104189978865e-07, "loss": 0.6726, "step": 19336 }, { "epoch": 0.80140080401177, "grad_norm": 0.4405074417591095, "learning_rate": 9.932031994695181e-07, "loss": 0.6694, "step": 19337 }, { "epoch": 0.8014422479174438, "grad_norm": 0.4123457372188568, "learning_rate": 9.929959799411497e-07, "loss": 0.6497, "step": 
19338 }, { "epoch": 0.8014836918231174, "grad_norm": 0.3954439163208008, "learning_rate": 9.927887604127815e-07, "loss": 0.6716, "step": 19339 }, { "epoch": 0.801525135728791, "grad_norm": 0.4239901900291443, "learning_rate": 9.925815408844131e-07, "loss": 0.6782, "step": 19340 }, { "epoch": 0.8015665796344648, "grad_norm": 0.39566484093666077, "learning_rate": 9.923743213560447e-07, "loss": 0.6477, "step": 19341 }, { "epoch": 0.8016080235401384, "grad_norm": 0.40866681933403015, "learning_rate": 9.921671018276763e-07, "loss": 0.632, "step": 19342 }, { "epoch": 0.8016494674458121, "grad_norm": 0.40841877460479736, "learning_rate": 9.919598822993081e-07, "loss": 0.6263, "step": 19343 }, { "epoch": 0.8016909113514857, "grad_norm": 0.4263053834438324, "learning_rate": 9.917526627709397e-07, "loss": 0.7036, "step": 19344 }, { "epoch": 0.8017323552571595, "grad_norm": 0.40292561054229736, "learning_rate": 9.915454432425713e-07, "loss": 0.6628, "step": 19345 }, { "epoch": 0.8017737991628331, "grad_norm": 0.4185163974761963, "learning_rate": 9.91338223714203e-07, "loss": 0.6853, "step": 19346 }, { "epoch": 0.8018152430685068, "grad_norm": 0.39670273661613464, "learning_rate": 9.911310041858345e-07, "loss": 0.6697, "step": 19347 }, { "epoch": 0.8018566869741804, "grad_norm": 0.3813093304634094, "learning_rate": 9.909237846574663e-07, "loss": 0.6354, "step": 19348 }, { "epoch": 0.8018981308798541, "grad_norm": 0.4442175328731537, "learning_rate": 9.90716565129098e-07, "loss": 0.6532, "step": 19349 }, { "epoch": 0.8019395747855278, "grad_norm": 0.4058321714401245, "learning_rate": 9.905093456007295e-07, "loss": 0.679, "step": 19350 }, { "epoch": 0.8019810186912014, "grad_norm": 0.4311847984790802, "learning_rate": 9.903021260723611e-07, "loss": 0.6803, "step": 19351 }, { "epoch": 0.8020224625968752, "grad_norm": 0.43009698390960693, "learning_rate": 9.90094906543993e-07, "loss": 0.6642, "step": 19352 }, { "epoch": 0.8020639065025488, "grad_norm": 0.4170438349246979, 
"learning_rate": 9.898876870156245e-07, "loss": 0.6372, "step": 19353 }, { "epoch": 0.8021053504082225, "grad_norm": 0.4326097369194031, "learning_rate": 9.896804674872561e-07, "loss": 0.6906, "step": 19354 }, { "epoch": 0.8021467943138961, "grad_norm": 0.41405466198921204, "learning_rate": 9.894732479588877e-07, "loss": 0.6782, "step": 19355 }, { "epoch": 0.8021882382195699, "grad_norm": 0.4072588086128235, "learning_rate": 9.892660284305193e-07, "loss": 0.6083, "step": 19356 }, { "epoch": 0.8022296821252435, "grad_norm": 0.45250436663627625, "learning_rate": 9.890588089021511e-07, "loss": 0.6698, "step": 19357 }, { "epoch": 0.8022711260309171, "grad_norm": 0.4154640734195709, "learning_rate": 9.888515893737827e-07, "loss": 0.6534, "step": 19358 }, { "epoch": 0.8023125699365908, "grad_norm": 0.4051082134246826, "learning_rate": 9.886443698454143e-07, "loss": 0.6555, "step": 19359 }, { "epoch": 0.8023540138422645, "grad_norm": 0.4125066101551056, "learning_rate": 9.88437150317046e-07, "loss": 0.6592, "step": 19360 }, { "epoch": 0.8023954577479382, "grad_norm": 0.42021554708480835, "learning_rate": 9.882299307886777e-07, "loss": 0.6765, "step": 19361 }, { "epoch": 0.8024369016536118, "grad_norm": 0.4037286937236786, "learning_rate": 9.880227112603093e-07, "loss": 0.6582, "step": 19362 }, { "epoch": 0.8024783455592855, "grad_norm": 0.435926616191864, "learning_rate": 9.87815491731941e-07, "loss": 0.6335, "step": 19363 }, { "epoch": 0.8025197894649592, "grad_norm": 0.39977800846099854, "learning_rate": 9.876082722035725e-07, "loss": 0.6516, "step": 19364 }, { "epoch": 0.8025612333706329, "grad_norm": 0.4473310708999634, "learning_rate": 9.874010526752043e-07, "loss": 0.6975, "step": 19365 }, { "epoch": 0.8026026772763065, "grad_norm": 0.38879403471946716, "learning_rate": 9.87193833146836e-07, "loss": 0.6317, "step": 19366 }, { "epoch": 0.8026441211819801, "grad_norm": 0.3910500407218933, "learning_rate": 9.869866136184675e-07, "loss": 0.6501, "step": 19367 }, { 
"epoch": 0.8026855650876539, "grad_norm": 0.46971505880355835, "learning_rate": 9.867793940900991e-07, "loss": 0.6902, "step": 19368 }, { "epoch": 0.8027270089933275, "grad_norm": 0.4669254422187805, "learning_rate": 9.865721745617307e-07, "loss": 0.6746, "step": 19369 }, { "epoch": 0.8027684528990012, "grad_norm": 0.41468217968940735, "learning_rate": 9.863649550333625e-07, "loss": 0.6532, "step": 19370 }, { "epoch": 0.8028098968046748, "grad_norm": 0.45452365279197693, "learning_rate": 9.861577355049941e-07, "loss": 0.7517, "step": 19371 }, { "epoch": 0.8028513407103486, "grad_norm": 0.44085124135017395, "learning_rate": 9.859505159766257e-07, "loss": 0.6741, "step": 19372 }, { "epoch": 0.8028927846160222, "grad_norm": 0.4711313247680664, "learning_rate": 9.857432964482573e-07, "loss": 0.6639, "step": 19373 }, { "epoch": 0.8029342285216959, "grad_norm": 0.4268048405647278, "learning_rate": 9.855360769198891e-07, "loss": 0.7173, "step": 19374 }, { "epoch": 0.8029756724273696, "grad_norm": 0.4161265790462494, "learning_rate": 9.853288573915207e-07, "loss": 0.6438, "step": 19375 }, { "epoch": 0.8030171163330432, "grad_norm": 0.44771715998649597, "learning_rate": 9.851216378631523e-07, "loss": 0.7151, "step": 19376 }, { "epoch": 0.8030585602387169, "grad_norm": 0.4126644730567932, "learning_rate": 9.84914418334784e-07, "loss": 0.6736, "step": 19377 }, { "epoch": 0.8031000041443905, "grad_norm": 0.44197171926498413, "learning_rate": 9.847071988064157e-07, "loss": 0.7146, "step": 19378 }, { "epoch": 0.8031414480500643, "grad_norm": 0.4149807393550873, "learning_rate": 9.844999792780473e-07, "loss": 0.6614, "step": 19379 }, { "epoch": 0.8031828919557379, "grad_norm": 0.45384860038757324, "learning_rate": 9.84292759749679e-07, "loss": 0.6643, "step": 19380 }, { "epoch": 0.8032243358614116, "grad_norm": 0.40754759311676025, "learning_rate": 9.840855402213105e-07, "loss": 0.6157, "step": 19381 }, { "epoch": 0.8032657797670852, "grad_norm": 0.413920134305954, 
"learning_rate": 9.838783206929421e-07, "loss": 0.6414, "step": 19382 }, { "epoch": 0.803307223672759, "grad_norm": 0.3815504014492035, "learning_rate": 9.83671101164574e-07, "loss": 0.666, "step": 19383 }, { "epoch": 0.8033486675784326, "grad_norm": 0.43437597155570984, "learning_rate": 9.834638816362055e-07, "loss": 0.7161, "step": 19384 }, { "epoch": 0.8033901114841062, "grad_norm": 0.4221706986427307, "learning_rate": 9.832566621078371e-07, "loss": 0.6672, "step": 19385 }, { "epoch": 0.80343155538978, "grad_norm": 0.4401795268058777, "learning_rate": 9.830494425794687e-07, "loss": 0.6906, "step": 19386 }, { "epoch": 0.8034729992954536, "grad_norm": 0.39666101336479187, "learning_rate": 9.828422230511005e-07, "loss": 0.6844, "step": 19387 }, { "epoch": 0.8035144432011273, "grad_norm": 0.38539233803749084, "learning_rate": 9.826350035227321e-07, "loss": 0.6394, "step": 19388 }, { "epoch": 0.8035558871068009, "grad_norm": 0.39911550283432007, "learning_rate": 9.824277839943637e-07, "loss": 0.646, "step": 19389 }, { "epoch": 0.8035973310124747, "grad_norm": 0.43002066016197205, "learning_rate": 9.822205644659953e-07, "loss": 0.6443, "step": 19390 }, { "epoch": 0.8036387749181483, "grad_norm": 0.3984774947166443, "learning_rate": 9.820133449376271e-07, "loss": 0.6462, "step": 19391 }, { "epoch": 0.803680218823822, "grad_norm": 0.4080101251602173, "learning_rate": 9.818061254092587e-07, "loss": 0.6895, "step": 19392 }, { "epoch": 0.8037216627294956, "grad_norm": 0.4028632640838623, "learning_rate": 9.815989058808903e-07, "loss": 0.6991, "step": 19393 }, { "epoch": 0.8037631066351693, "grad_norm": 0.4477446973323822, "learning_rate": 9.81391686352522e-07, "loss": 0.651, "step": 19394 }, { "epoch": 0.803804550540843, "grad_norm": 0.43847987055778503, "learning_rate": 9.811844668241535e-07, "loss": 0.6578, "step": 19395 }, { "epoch": 0.8038459944465166, "grad_norm": 0.39322584867477417, "learning_rate": 9.809772472957853e-07, "loss": 0.6725, "step": 19396 }, { "epoch": 
0.8038874383521903, "grad_norm": 0.4845471680164337, "learning_rate": 9.80770027767417e-07, "loss": 0.7529, "step": 19397 }, { "epoch": 0.803928882257864, "grad_norm": 0.3780086636543274, "learning_rate": 9.805628082390485e-07, "loss": 0.6129, "step": 19398 }, { "epoch": 0.8039703261635377, "grad_norm": 0.4300594925880432, "learning_rate": 9.803555887106801e-07, "loss": 0.6809, "step": 19399 }, { "epoch": 0.8040117700692113, "grad_norm": 0.41659486293792725, "learning_rate": 9.80148369182312e-07, "loss": 0.6488, "step": 19400 }, { "epoch": 0.8040532139748849, "grad_norm": 0.4114384055137634, "learning_rate": 9.799411496539435e-07, "loss": 0.707, "step": 19401 }, { "epoch": 0.8040946578805587, "grad_norm": 0.44136449694633484, "learning_rate": 9.797339301255751e-07, "loss": 0.7041, "step": 19402 }, { "epoch": 0.8041361017862323, "grad_norm": 0.39955055713653564, "learning_rate": 9.795267105972067e-07, "loss": 0.6431, "step": 19403 }, { "epoch": 0.804177545691906, "grad_norm": 0.4023345708847046, "learning_rate": 9.793194910688385e-07, "loss": 0.6381, "step": 19404 }, { "epoch": 0.8042189895975796, "grad_norm": 0.40706032514572144, "learning_rate": 9.791122715404701e-07, "loss": 0.6621, "step": 19405 }, { "epoch": 0.8042604335032534, "grad_norm": 0.38676050305366516, "learning_rate": 9.789050520121017e-07, "loss": 0.6912, "step": 19406 }, { "epoch": 0.804301877408927, "grad_norm": 0.42181622982025146, "learning_rate": 9.786978324837333e-07, "loss": 0.6753, "step": 19407 }, { "epoch": 0.8043433213146007, "grad_norm": 0.42874929308891296, "learning_rate": 9.78490612955365e-07, "loss": 0.6904, "step": 19408 }, { "epoch": 0.8043847652202744, "grad_norm": 0.38372957706451416, "learning_rate": 9.782833934269967e-07, "loss": 0.6646, "step": 19409 }, { "epoch": 0.804426209125948, "grad_norm": 0.384045273065567, "learning_rate": 9.780761738986283e-07, "loss": 0.6046, "step": 19410 }, { "epoch": 0.8044676530316217, "grad_norm": 0.45876365900039673, "learning_rate": 
9.7786895437026e-07, "loss": 0.6864, "step": 19411 }, { "epoch": 0.8045090969372953, "grad_norm": 0.39014485478401184, "learning_rate": 9.776617348418915e-07, "loss": 0.657, "step": 19412 }, { "epoch": 0.8045505408429691, "grad_norm": 0.4047342836856842, "learning_rate": 9.774545153135233e-07, "loss": 0.7008, "step": 19413 }, { "epoch": 0.8045919847486427, "grad_norm": 0.3980944752693176, "learning_rate": 9.77247295785155e-07, "loss": 0.6489, "step": 19414 }, { "epoch": 0.8046334286543164, "grad_norm": 0.5027517080307007, "learning_rate": 9.770400762567865e-07, "loss": 0.6914, "step": 19415 }, { "epoch": 0.80467487255999, "grad_norm": 0.43754228949546814, "learning_rate": 9.768328567284181e-07, "loss": 0.6543, "step": 19416 }, { "epoch": 0.8047163164656638, "grad_norm": 0.405769020318985, "learning_rate": 9.7662563720005e-07, "loss": 0.6836, "step": 19417 }, { "epoch": 0.8047577603713374, "grad_norm": 0.41399651765823364, "learning_rate": 9.764184176716815e-07, "loss": 0.6846, "step": 19418 }, { "epoch": 0.804799204277011, "grad_norm": 0.39244070649147034, "learning_rate": 9.762111981433131e-07, "loss": 0.6724, "step": 19419 }, { "epoch": 0.8048406481826847, "grad_norm": 0.4376327693462372, "learning_rate": 9.760039786149447e-07, "loss": 0.6703, "step": 19420 }, { "epoch": 0.8048820920883584, "grad_norm": 0.4239356815814972, "learning_rate": 9.757967590865763e-07, "loss": 0.6726, "step": 19421 }, { "epoch": 0.8049235359940321, "grad_norm": 0.40637609362602234, "learning_rate": 9.755895395582081e-07, "loss": 0.6954, "step": 19422 }, { "epoch": 0.8049649798997057, "grad_norm": 0.4169522821903229, "learning_rate": 9.753823200298397e-07, "loss": 0.6901, "step": 19423 }, { "epoch": 0.8050064238053795, "grad_norm": 0.4658155143260956, "learning_rate": 9.751751005014713e-07, "loss": 0.6987, "step": 19424 }, { "epoch": 0.8050478677110531, "grad_norm": 0.43301939964294434, "learning_rate": 9.74967880973103e-07, "loss": 0.6217, "step": 19425 }, { "epoch": 0.8050893116167268, 
"grad_norm": 0.43847575783729553, "learning_rate": 9.747606614447347e-07, "loss": 0.6384, "step": 19426 }, { "epoch": 0.8051307555224004, "grad_norm": 0.4266357719898224, "learning_rate": 9.745534419163663e-07, "loss": 0.6544, "step": 19427 }, { "epoch": 0.805172199428074, "grad_norm": 0.3924982249736786, "learning_rate": 9.74346222387998e-07, "loss": 0.686, "step": 19428 }, { "epoch": 0.8052136433337478, "grad_norm": 0.45344647765159607, "learning_rate": 9.741390028596295e-07, "loss": 0.7053, "step": 19429 }, { "epoch": 0.8052550872394214, "grad_norm": 0.39786022901535034, "learning_rate": 9.739317833312611e-07, "loss": 0.634, "step": 19430 }, { "epoch": 0.8052965311450951, "grad_norm": 0.400820791721344, "learning_rate": 9.73724563802893e-07, "loss": 0.665, "step": 19431 }, { "epoch": 0.8053379750507688, "grad_norm": 0.42308247089385986, "learning_rate": 9.735173442745245e-07, "loss": 0.6752, "step": 19432 }, { "epoch": 0.8053794189564425, "grad_norm": 0.4272763431072235, "learning_rate": 9.733101247461561e-07, "loss": 0.7058, "step": 19433 }, { "epoch": 0.8054208628621161, "grad_norm": 0.4212509095668793, "learning_rate": 9.731029052177877e-07, "loss": 0.6118, "step": 19434 }, { "epoch": 0.8054623067677898, "grad_norm": 0.3884056806564331, "learning_rate": 9.728956856894195e-07, "loss": 0.6007, "step": 19435 }, { "epoch": 0.8055037506734635, "grad_norm": 0.46689823269844055, "learning_rate": 9.726884661610511e-07, "loss": 0.6829, "step": 19436 }, { "epoch": 0.8055451945791371, "grad_norm": 0.4108678996562958, "learning_rate": 9.724812466326827e-07, "loss": 0.5872, "step": 19437 }, { "epoch": 0.8055866384848108, "grad_norm": 0.43207234144210815, "learning_rate": 9.722740271043143e-07, "loss": 0.6777, "step": 19438 }, { "epoch": 0.8056280823904844, "grad_norm": 0.4216160774230957, "learning_rate": 9.720668075759461e-07, "loss": 0.6327, "step": 19439 }, { "epoch": 0.8056695262961582, "grad_norm": 0.410572350025177, "learning_rate": 9.718595880475777e-07, "loss": 
0.6543, "step": 19440 }, { "epoch": 0.8057109702018318, "grad_norm": 0.3959107995033264, "learning_rate": 9.716523685192093e-07, "loss": 0.6434, "step": 19441 }, { "epoch": 0.8057524141075055, "grad_norm": 0.4042576253414154, "learning_rate": 9.71445148990841e-07, "loss": 0.6266, "step": 19442 }, { "epoch": 0.8057938580131792, "grad_norm": 0.42086338996887207, "learning_rate": 9.712379294624725e-07, "loss": 0.6213, "step": 19443 }, { "epoch": 0.8058353019188529, "grad_norm": 0.43096107244491577, "learning_rate": 9.710307099341043e-07, "loss": 0.6206, "step": 19444 }, { "epoch": 0.8058767458245265, "grad_norm": 0.4263373017311096, "learning_rate": 9.70823490405736e-07, "loss": 0.6843, "step": 19445 }, { "epoch": 0.8059181897302001, "grad_norm": 0.3968210518360138, "learning_rate": 9.706162708773675e-07, "loss": 0.6324, "step": 19446 }, { "epoch": 0.8059596336358739, "grad_norm": 0.46057218313217163, "learning_rate": 9.704090513489991e-07, "loss": 0.7241, "step": 19447 }, { "epoch": 0.8060010775415475, "grad_norm": 0.45166391134262085, "learning_rate": 9.70201831820631e-07, "loss": 0.6716, "step": 19448 }, { "epoch": 0.8060425214472212, "grad_norm": 0.40858060121536255, "learning_rate": 9.699946122922625e-07, "loss": 0.661, "step": 19449 }, { "epoch": 0.8060839653528948, "grad_norm": 0.39239588379859924, "learning_rate": 9.697873927638941e-07, "loss": 0.5837, "step": 19450 }, { "epoch": 0.8061254092585686, "grad_norm": 0.40284907817840576, "learning_rate": 9.695801732355257e-07, "loss": 0.6699, "step": 19451 }, { "epoch": 0.8061668531642422, "grad_norm": 0.380706250667572, "learning_rate": 9.693729537071575e-07, "loss": 0.7168, "step": 19452 }, { "epoch": 0.8062082970699158, "grad_norm": 0.41635850071907043, "learning_rate": 9.691657341787891e-07, "loss": 0.6764, "step": 19453 }, { "epoch": 0.8062497409755895, "grad_norm": 0.40801382064819336, "learning_rate": 9.689585146504207e-07, "loss": 0.7042, "step": 19454 }, { "epoch": 0.8062911848812632, "grad_norm": 
0.4157634973526001, "learning_rate": 9.687512951220523e-07, "loss": 0.6982, "step": 19455 }, { "epoch": 0.8063326287869369, "grad_norm": 0.3871079087257385, "learning_rate": 9.68544075593684e-07, "loss": 0.6608, "step": 19456 }, { "epoch": 0.8063740726926105, "grad_norm": 0.4332897663116455, "learning_rate": 9.683368560653157e-07, "loss": 0.7198, "step": 19457 }, { "epoch": 0.8064155165982843, "grad_norm": 0.4432371258735657, "learning_rate": 9.681296365369473e-07, "loss": 0.7292, "step": 19458 }, { "epoch": 0.8064569605039579, "grad_norm": 0.41420260071754456, "learning_rate": 9.67922417008579e-07, "loss": 0.6802, "step": 19459 }, { "epoch": 0.8064984044096316, "grad_norm": 0.3928356468677521, "learning_rate": 9.677151974802105e-07, "loss": 0.6592, "step": 19460 }, { "epoch": 0.8065398483153052, "grad_norm": 0.4358169436454773, "learning_rate": 9.675079779518423e-07, "loss": 0.676, "step": 19461 }, { "epoch": 0.8065812922209789, "grad_norm": 0.3987569510936737, "learning_rate": 9.67300758423474e-07, "loss": 0.6584, "step": 19462 }, { "epoch": 0.8066227361266526, "grad_norm": 0.4270404577255249, "learning_rate": 9.670935388951055e-07, "loss": 0.6782, "step": 19463 }, { "epoch": 0.8066641800323262, "grad_norm": 0.38269197940826416, "learning_rate": 9.668863193667371e-07, "loss": 0.6333, "step": 19464 }, { "epoch": 0.8067056239379999, "grad_norm": 0.4105305075645447, "learning_rate": 9.66679099838369e-07, "loss": 0.6708, "step": 19465 }, { "epoch": 0.8067470678436736, "grad_norm": 0.4493926763534546, "learning_rate": 9.664718803100005e-07, "loss": 0.6888, "step": 19466 }, { "epoch": 0.8067885117493473, "grad_norm": 0.44324979186058044, "learning_rate": 9.662646607816321e-07, "loss": 0.6615, "step": 19467 }, { "epoch": 0.8068299556550209, "grad_norm": 0.4452129304409027, "learning_rate": 9.660574412532637e-07, "loss": 0.7203, "step": 19468 }, { "epoch": 0.8068713995606946, "grad_norm": 0.42313480377197266, "learning_rate": 9.658502217248953e-07, "loss": 0.6858, 
"step": 19469 }, { "epoch": 0.8069128434663683, "grad_norm": 0.42199939489364624, "learning_rate": 9.656430021965271e-07, "loss": 0.6122, "step": 19470 }, { "epoch": 0.8069542873720419, "grad_norm": 0.40722450613975525, "learning_rate": 9.654357826681587e-07, "loss": 0.6372, "step": 19471 }, { "epoch": 0.8069957312777156, "grad_norm": 0.4141182005405426, "learning_rate": 9.652285631397903e-07, "loss": 0.6913, "step": 19472 }, { "epoch": 0.8070371751833892, "grad_norm": 0.39499810338020325, "learning_rate": 9.65021343611422e-07, "loss": 0.6057, "step": 19473 }, { "epoch": 0.807078619089063, "grad_norm": 0.3915005326271057, "learning_rate": 9.648141240830538e-07, "loss": 0.6123, "step": 19474 }, { "epoch": 0.8071200629947366, "grad_norm": 0.40600264072418213, "learning_rate": 9.646069045546853e-07, "loss": 0.6948, "step": 19475 }, { "epoch": 0.8071615069004103, "grad_norm": 0.4054807126522064, "learning_rate": 9.64399685026317e-07, "loss": 0.6506, "step": 19476 }, { "epoch": 0.807202950806084, "grad_norm": 0.3943275511264801, "learning_rate": 9.641924654979485e-07, "loss": 0.6519, "step": 19477 }, { "epoch": 0.8072443947117577, "grad_norm": 0.43208932876586914, "learning_rate": 9.639852459695804e-07, "loss": 0.6998, "step": 19478 }, { "epoch": 0.8072858386174313, "grad_norm": 0.4362471103668213, "learning_rate": 9.63778026441212e-07, "loss": 0.6509, "step": 19479 }, { "epoch": 0.8073272825231049, "grad_norm": 0.40001848340034485, "learning_rate": 9.635708069128435e-07, "loss": 0.6833, "step": 19480 }, { "epoch": 0.8073687264287787, "grad_norm": 0.42091280221939087, "learning_rate": 9.633635873844751e-07, "loss": 0.6797, "step": 19481 }, { "epoch": 0.8074101703344523, "grad_norm": 0.4162572920322418, "learning_rate": 9.631563678561067e-07, "loss": 0.6705, "step": 19482 }, { "epoch": 0.807451614240126, "grad_norm": 0.41161155700683594, "learning_rate": 9.629491483277386e-07, "loss": 0.6556, "step": 19483 }, { "epoch": 0.8074930581457996, "grad_norm": 
0.42415064573287964, "learning_rate": 9.627419287993701e-07, "loss": 0.6864, "step": 19484 }, { "epoch": 0.8075345020514734, "grad_norm": 0.4010603725910187, "learning_rate": 9.625347092710017e-07, "loss": 0.6445, "step": 19485 }, { "epoch": 0.807575945957147, "grad_norm": 0.425437331199646, "learning_rate": 9.623274897426333e-07, "loss": 0.6953, "step": 19486 }, { "epoch": 0.8076173898628207, "grad_norm": 0.4074363708496094, "learning_rate": 9.621202702142652e-07, "loss": 0.6986, "step": 19487 }, { "epoch": 0.8076588337684943, "grad_norm": 0.3950572907924652, "learning_rate": 9.619130506858967e-07, "loss": 0.6141, "step": 19488 }, { "epoch": 0.807700277674168, "grad_norm": 0.42007943987846375, "learning_rate": 9.617058311575283e-07, "loss": 0.6766, "step": 19489 }, { "epoch": 0.8077417215798417, "grad_norm": 0.39537885785102844, "learning_rate": 9.6149861162916e-07, "loss": 0.6675, "step": 19490 }, { "epoch": 0.8077831654855153, "grad_norm": 0.43586283922195435, "learning_rate": 9.612913921007918e-07, "loss": 0.641, "step": 19491 }, { "epoch": 0.807824609391189, "grad_norm": 0.42393407225608826, "learning_rate": 9.610841725724234e-07, "loss": 0.6841, "step": 19492 }, { "epoch": 0.8078660532968627, "grad_norm": 0.3910629451274872, "learning_rate": 9.60876953044055e-07, "loss": 0.6633, "step": 19493 }, { "epoch": 0.8079074972025364, "grad_norm": 0.41703030467033386, "learning_rate": 9.606697335156865e-07, "loss": 0.6989, "step": 19494 }, { "epoch": 0.80794894110821, "grad_norm": 0.40369051694869995, "learning_rate": 9.604625139873181e-07, "loss": 0.6644, "step": 19495 }, { "epoch": 0.8079903850138838, "grad_norm": 0.408910870552063, "learning_rate": 9.6025529445895e-07, "loss": 0.6299, "step": 19496 }, { "epoch": 0.8080318289195574, "grad_norm": 0.42264577746391296, "learning_rate": 9.600480749305815e-07, "loss": 0.6222, "step": 19497 }, { "epoch": 0.808073272825231, "grad_norm": 0.455897718667984, "learning_rate": 9.598408554022131e-07, "loss": 0.7235, "step": 
19498 }, { "epoch": 0.8081147167309047, "grad_norm": 0.40088674426078796, "learning_rate": 9.596336358738447e-07, "loss": 0.6772, "step": 19499 }, { "epoch": 0.8081561606365784, "grad_norm": 0.38747960329055786, "learning_rate": 9.594264163454766e-07, "loss": 0.6309, "step": 19500 }, { "epoch": 0.8081976045422521, "grad_norm": 0.4007011651992798, "learning_rate": 9.592191968171082e-07, "loss": 0.6134, "step": 19501 }, { "epoch": 0.8082390484479257, "grad_norm": 0.4221867322921753, "learning_rate": 9.590119772887397e-07, "loss": 0.7185, "step": 19502 }, { "epoch": 0.8082804923535994, "grad_norm": 0.3902588486671448, "learning_rate": 9.588047577603713e-07, "loss": 0.6569, "step": 19503 }, { "epoch": 0.8083219362592731, "grad_norm": 0.39849603176116943, "learning_rate": 9.58597538232003e-07, "loss": 0.6853, "step": 19504 }, { "epoch": 0.8083633801649468, "grad_norm": 0.40166008472442627, "learning_rate": 9.583903187036348e-07, "loss": 0.6406, "step": 19505 }, { "epoch": 0.8084048240706204, "grad_norm": 0.39391639828681946, "learning_rate": 9.581830991752663e-07, "loss": 0.6743, "step": 19506 }, { "epoch": 0.808446267976294, "grad_norm": 0.4134604334831238, "learning_rate": 9.57975879646898e-07, "loss": 0.6606, "step": 19507 }, { "epoch": 0.8084877118819678, "grad_norm": 0.42527467012405396, "learning_rate": 9.577686601185295e-07, "loss": 0.657, "step": 19508 }, { "epoch": 0.8085291557876414, "grad_norm": 0.4068295955657959, "learning_rate": 9.575614405901614e-07, "loss": 0.6273, "step": 19509 }, { "epoch": 0.8085705996933151, "grad_norm": 0.48065459728240967, "learning_rate": 9.57354221061793e-07, "loss": 0.7065, "step": 19510 }, { "epoch": 0.8086120435989888, "grad_norm": 0.4314187467098236, "learning_rate": 9.571470015334245e-07, "loss": 0.6752, "step": 19511 }, { "epoch": 0.8086534875046625, "grad_norm": 0.44857847690582275, "learning_rate": 9.569397820050561e-07, "loss": 0.7059, "step": 19512 }, { "epoch": 0.8086949314103361, "grad_norm": 0.4418477714061737, 
"learning_rate": 9.56732562476688e-07, "loss": 0.7152, "step": 19513 }, { "epoch": 0.8087363753160097, "grad_norm": 0.390018105506897, "learning_rate": 9.565253429483196e-07, "loss": 0.6215, "step": 19514 }, { "epoch": 0.8087778192216835, "grad_norm": 0.42457568645477295, "learning_rate": 9.563181234199512e-07, "loss": 0.6974, "step": 19515 }, { "epoch": 0.8088192631273571, "grad_norm": 0.40534886717796326, "learning_rate": 9.561109038915827e-07, "loss": 0.6145, "step": 19516 }, { "epoch": 0.8088607070330308, "grad_norm": 0.4359142780303955, "learning_rate": 9.559036843632143e-07, "loss": 0.6687, "step": 19517 }, { "epoch": 0.8089021509387044, "grad_norm": 0.4252561330795288, "learning_rate": 9.556964648348462e-07, "loss": 0.7236, "step": 19518 }, { "epoch": 0.8089435948443782, "grad_norm": 0.4606792628765106, "learning_rate": 9.554892453064778e-07, "loss": 0.7036, "step": 19519 }, { "epoch": 0.8089850387500518, "grad_norm": 0.403179407119751, "learning_rate": 9.552820257781093e-07, "loss": 0.6648, "step": 19520 }, { "epoch": 0.8090264826557255, "grad_norm": 0.39424610137939453, "learning_rate": 9.55074806249741e-07, "loss": 0.6053, "step": 19521 }, { "epoch": 0.8090679265613991, "grad_norm": 0.35912024974823, "learning_rate": 9.548675867213728e-07, "loss": 0.6118, "step": 19522 }, { "epoch": 0.8091093704670728, "grad_norm": 0.44607606530189514, "learning_rate": 9.546603671930044e-07, "loss": 0.6936, "step": 19523 }, { "epoch": 0.8091508143727465, "grad_norm": 0.41180214285850525, "learning_rate": 9.54453147664636e-07, "loss": 0.6327, "step": 19524 }, { "epoch": 0.8091922582784201, "grad_norm": 0.4313219487667084, "learning_rate": 9.542459281362675e-07, "loss": 0.6495, "step": 19525 }, { "epoch": 0.8092337021840939, "grad_norm": 0.43710458278656006, "learning_rate": 9.540387086078994e-07, "loss": 0.6724, "step": 19526 }, { "epoch": 0.8092751460897675, "grad_norm": 0.4097225069999695, "learning_rate": 9.53831489079531e-07, "loss": 0.6671, "step": 19527 }, { "epoch": 
0.8093165899954412, "grad_norm": 0.43732449412345886, "learning_rate": 9.536242695511626e-07, "loss": 0.7074, "step": 19528 }, { "epoch": 0.8093580339011148, "grad_norm": 0.4084685444831848, "learning_rate": 9.534170500227941e-07, "loss": 0.6787, "step": 19529 }, { "epoch": 0.8093994778067886, "grad_norm": 0.44383955001831055, "learning_rate": 9.532098304944259e-07, "loss": 0.6936, "step": 19530 }, { "epoch": 0.8094409217124622, "grad_norm": 0.37607231736183167, "learning_rate": 9.530026109660576e-07, "loss": 0.5717, "step": 19531 }, { "epoch": 0.8094823656181358, "grad_norm": 0.4168631136417389, "learning_rate": 9.527953914376892e-07, "loss": 0.6262, "step": 19532 }, { "epoch": 0.8095238095238095, "grad_norm": 0.3866446912288666, "learning_rate": 9.525881719093208e-07, "loss": 0.5846, "step": 19533 }, { "epoch": 0.8095652534294832, "grad_norm": 0.44628483057022095, "learning_rate": 9.523809523809525e-07, "loss": 0.6749, "step": 19534 }, { "epoch": 0.8096066973351569, "grad_norm": 0.38900160789489746, "learning_rate": 9.521737328525842e-07, "loss": 0.632, "step": 19535 }, { "epoch": 0.8096481412408305, "grad_norm": 0.45896750688552856, "learning_rate": 9.519665133242158e-07, "loss": 0.6853, "step": 19536 }, { "epoch": 0.8096895851465042, "grad_norm": 0.44777947664260864, "learning_rate": 9.517592937958474e-07, "loss": 0.6479, "step": 19537 }, { "epoch": 0.8097310290521779, "grad_norm": 0.4207313358783722, "learning_rate": 9.515520742674791e-07, "loss": 0.6604, "step": 19538 }, { "epoch": 0.8097724729578516, "grad_norm": 0.42841997742652893, "learning_rate": 9.513448547391108e-07, "loss": 0.6726, "step": 19539 }, { "epoch": 0.8098139168635252, "grad_norm": 0.40120217204093933, "learning_rate": 9.511376352107424e-07, "loss": 0.6519, "step": 19540 }, { "epoch": 0.8098553607691988, "grad_norm": 0.4038342535495758, "learning_rate": 9.50930415682374e-07, "loss": 0.6503, "step": 19541 }, { "epoch": 0.8098968046748726, "grad_norm": 0.4423257112503052, "learning_rate": 
9.507231961540056e-07, "loss": 0.72, "step": 19542 }, { "epoch": 0.8099382485805462, "grad_norm": 0.4006195068359375, "learning_rate": 9.505159766256373e-07, "loss": 0.6469, "step": 19543 }, { "epoch": 0.8099796924862199, "grad_norm": 0.4442633390426636, "learning_rate": 9.50308757097269e-07, "loss": 0.6663, "step": 19544 }, { "epoch": 0.8100211363918935, "grad_norm": 0.425112247467041, "learning_rate": 9.501015375689006e-07, "loss": 0.7031, "step": 19545 }, { "epoch": 0.8100625802975673, "grad_norm": 0.3796014189720154, "learning_rate": 9.498943180405322e-07, "loss": 0.668, "step": 19546 }, { "epoch": 0.8101040242032409, "grad_norm": 0.4757033586502075, "learning_rate": 9.496870985121639e-07, "loss": 0.748, "step": 19547 }, { "epoch": 0.8101454681089146, "grad_norm": 0.4946349263191223, "learning_rate": 9.494798789837956e-07, "loss": 0.6772, "step": 19548 }, { "epoch": 0.8101869120145883, "grad_norm": 0.4156789779663086, "learning_rate": 9.492726594554272e-07, "loss": 0.6943, "step": 19549 }, { "epoch": 0.8102283559202619, "grad_norm": 0.48004481196403503, "learning_rate": 9.490654399270588e-07, "loss": 0.73, "step": 19550 }, { "epoch": 0.8102697998259356, "grad_norm": 0.4179478883743286, "learning_rate": 9.488582203986905e-07, "loss": 0.6398, "step": 19551 }, { "epoch": 0.8103112437316092, "grad_norm": 0.4231070578098297, "learning_rate": 9.486510008703222e-07, "loss": 0.6439, "step": 19552 }, { "epoch": 0.810352687637283, "grad_norm": 0.440898060798645, "learning_rate": 9.484437813419538e-07, "loss": 0.6995, "step": 19553 }, { "epoch": 0.8103941315429566, "grad_norm": 0.39904752373695374, "learning_rate": 9.482365618135854e-07, "loss": 0.5946, "step": 19554 }, { "epoch": 0.8104355754486303, "grad_norm": 0.4150044620037079, "learning_rate": 9.480293422852171e-07, "loss": 0.6434, "step": 19555 }, { "epoch": 0.8104770193543039, "grad_norm": 0.41956767439842224, "learning_rate": 9.478221227568487e-07, "loss": 0.6498, "step": 19556 }, { "epoch": 0.8105184632599777, 
"grad_norm": 0.4612277150154114, "learning_rate": 9.476149032284804e-07, "loss": 0.6674, "step": 19557 }, { "epoch": 0.8105599071656513, "grad_norm": 0.4166176915168762, "learning_rate": 9.47407683700112e-07, "loss": 0.6538, "step": 19558 }, { "epoch": 0.8106013510713249, "grad_norm": 0.37055960297584534, "learning_rate": 9.472004641717436e-07, "loss": 0.5989, "step": 19559 }, { "epoch": 0.8106427949769986, "grad_norm": 0.3949413001537323, "learning_rate": 9.469932446433753e-07, "loss": 0.6338, "step": 19560 }, { "epoch": 0.8106842388826723, "grad_norm": 0.446988046169281, "learning_rate": 9.46786025115007e-07, "loss": 0.741, "step": 19561 }, { "epoch": 0.810725682788346, "grad_norm": 0.4303412437438965, "learning_rate": 9.465788055866386e-07, "loss": 0.6472, "step": 19562 }, { "epoch": 0.8107671266940196, "grad_norm": 0.41025108098983765, "learning_rate": 9.463715860582702e-07, "loss": 0.6406, "step": 19563 }, { "epoch": 0.8108085705996934, "grad_norm": 0.3790895342826843, "learning_rate": 9.461643665299019e-07, "loss": 0.6412, "step": 19564 }, { "epoch": 0.810850014505367, "grad_norm": 0.3985369801521301, "learning_rate": 9.459571470015336e-07, "loss": 0.6382, "step": 19565 }, { "epoch": 0.8108914584110407, "grad_norm": 0.44402050971984863, "learning_rate": 9.457499274731652e-07, "loss": 0.7021, "step": 19566 }, { "epoch": 0.8109329023167143, "grad_norm": 0.40355411171913147, "learning_rate": 9.455427079447968e-07, "loss": 0.6852, "step": 19567 }, { "epoch": 0.810974346222388, "grad_norm": 0.4874855577945709, "learning_rate": 9.453354884164285e-07, "loss": 0.6508, "step": 19568 }, { "epoch": 0.8110157901280617, "grad_norm": 0.41878223419189453, "learning_rate": 9.451282688880601e-07, "loss": 0.6375, "step": 19569 }, { "epoch": 0.8110572340337353, "grad_norm": 0.4299320578575134, "learning_rate": 9.449210493596918e-07, "loss": 0.679, "step": 19570 }, { "epoch": 0.811098677939409, "grad_norm": 0.3985569477081299, "learning_rate": 9.447138298313234e-07, "loss": 
0.6492, "step": 19571 }, { "epoch": 0.8111401218450827, "grad_norm": 0.4231337010860443, "learning_rate": 9.445066103029551e-07, "loss": 0.6569, "step": 19572 }, { "epoch": 0.8111815657507564, "grad_norm": 0.4230033755302429, "learning_rate": 9.442993907745867e-07, "loss": 0.6664, "step": 19573 }, { "epoch": 0.81122300965643, "grad_norm": 0.40938642621040344, "learning_rate": 9.440921712462184e-07, "loss": 0.6774, "step": 19574 }, { "epoch": 0.8112644535621036, "grad_norm": 0.4231196641921997, "learning_rate": 9.4388495171785e-07, "loss": 0.6945, "step": 19575 }, { "epoch": 0.8113058974677774, "grad_norm": 0.4148619472980499, "learning_rate": 9.436777321894817e-07, "loss": 0.6716, "step": 19576 }, { "epoch": 0.811347341373451, "grad_norm": 0.43137112259864807, "learning_rate": 9.434705126611133e-07, "loss": 0.6578, "step": 19577 }, { "epoch": 0.8113887852791247, "grad_norm": 0.3822075426578522, "learning_rate": 9.432632931327449e-07, "loss": 0.6266, "step": 19578 }, { "epoch": 0.8114302291847983, "grad_norm": 0.40512531995773315, "learning_rate": 9.430560736043766e-07, "loss": 0.6707, "step": 19579 }, { "epoch": 0.8114716730904721, "grad_norm": 0.4204866290092468, "learning_rate": 9.428488540760082e-07, "loss": 0.6171, "step": 19580 }, { "epoch": 0.8115131169961457, "grad_norm": 0.4203627407550812, "learning_rate": 9.426416345476399e-07, "loss": 0.666, "step": 19581 }, { "epoch": 0.8115545609018194, "grad_norm": 0.40756741166114807, "learning_rate": 9.424344150192715e-07, "loss": 0.6772, "step": 19582 }, { "epoch": 0.811596004807493, "grad_norm": 0.3999824523925781, "learning_rate": 9.422271954909032e-07, "loss": 0.5991, "step": 19583 }, { "epoch": 0.8116374487131667, "grad_norm": 0.3941100239753723, "learning_rate": 9.420199759625348e-07, "loss": 0.6417, "step": 19584 }, { "epoch": 0.8116788926188404, "grad_norm": 0.4361357092857361, "learning_rate": 9.418127564341665e-07, "loss": 0.6719, "step": 19585 }, { "epoch": 0.811720336524514, "grad_norm": 
0.4035375714302063, "learning_rate": 9.416055369057981e-07, "loss": 0.6096, "step": 19586 }, { "epoch": 0.8117617804301878, "grad_norm": 0.4277745187282562, "learning_rate": 9.413983173774298e-07, "loss": 0.7708, "step": 19587 }, { "epoch": 0.8118032243358614, "grad_norm": 0.40764865279197693, "learning_rate": 9.411910978490614e-07, "loss": 0.7007, "step": 19588 }, { "epoch": 0.8118446682415351, "grad_norm": 0.40538203716278076, "learning_rate": 9.409838783206931e-07, "loss": 0.6682, "step": 19589 }, { "epoch": 0.8118861121472087, "grad_norm": 0.4215800166130066, "learning_rate": 9.407766587923247e-07, "loss": 0.6781, "step": 19590 }, { "epoch": 0.8119275560528825, "grad_norm": 0.4068776071071625, "learning_rate": 9.405694392639563e-07, "loss": 0.7058, "step": 19591 }, { "epoch": 0.8119689999585561, "grad_norm": 0.39818522334098816, "learning_rate": 9.40362219735588e-07, "loss": 0.6674, "step": 19592 }, { "epoch": 0.8120104438642297, "grad_norm": 0.4001738429069519, "learning_rate": 9.401550002072197e-07, "loss": 0.6249, "step": 19593 }, { "epoch": 0.8120518877699034, "grad_norm": 0.4007297158241272, "learning_rate": 9.399477806788513e-07, "loss": 0.675, "step": 19594 }, { "epoch": 0.8120933316755771, "grad_norm": 0.38033580780029297, "learning_rate": 9.397405611504829e-07, "loss": 0.665, "step": 19595 }, { "epoch": 0.8121347755812508, "grad_norm": 0.4213559031486511, "learning_rate": 9.395333416221146e-07, "loss": 0.7086, "step": 19596 }, { "epoch": 0.8121762194869244, "grad_norm": 0.5391089916229248, "learning_rate": 9.393261220937462e-07, "loss": 0.7217, "step": 19597 }, { "epoch": 0.8122176633925982, "grad_norm": 0.3782764971256256, "learning_rate": 9.391189025653779e-07, "loss": 0.6614, "step": 19598 }, { "epoch": 0.8122591072982718, "grad_norm": 0.4088730216026306, "learning_rate": 9.389116830370095e-07, "loss": 0.6853, "step": 19599 }, { "epoch": 0.8123005512039455, "grad_norm": 0.42250147461891174, "learning_rate": 9.387044635086412e-07, "loss": 0.6583, 
"step": 19600 }, { "epoch": 0.8123419951096191, "grad_norm": 0.40646958351135254, "learning_rate": 9.384972439802728e-07, "loss": 0.6567, "step": 19601 }, { "epoch": 0.8123834390152928, "grad_norm": 0.40952181816101074, "learning_rate": 9.382900244519045e-07, "loss": 0.6521, "step": 19602 }, { "epoch": 0.8124248829209665, "grad_norm": 0.40526333451271057, "learning_rate": 9.380828049235361e-07, "loss": 0.6427, "step": 19603 }, { "epoch": 0.8124663268266401, "grad_norm": 0.41660037636756897, "learning_rate": 9.378755853951677e-07, "loss": 0.6273, "step": 19604 }, { "epoch": 0.8125077707323138, "grad_norm": 0.4488762319087982, "learning_rate": 9.376683658667994e-07, "loss": 0.6887, "step": 19605 }, { "epoch": 0.8125492146379875, "grad_norm": 0.39120998978614807, "learning_rate": 9.374611463384311e-07, "loss": 0.6421, "step": 19606 }, { "epoch": 0.8125906585436612, "grad_norm": 0.4163181781768799, "learning_rate": 9.372539268100627e-07, "loss": 0.639, "step": 19607 }, { "epoch": 0.8126321024493348, "grad_norm": 0.38665008544921875, "learning_rate": 9.370467072816943e-07, "loss": 0.6661, "step": 19608 }, { "epoch": 0.8126735463550085, "grad_norm": 0.4687749743461609, "learning_rate": 9.36839487753326e-07, "loss": 0.6779, "step": 19609 }, { "epoch": 0.8127149902606822, "grad_norm": 0.39361050724983215, "learning_rate": 9.366322682249577e-07, "loss": 0.6696, "step": 19610 }, { "epoch": 0.8127564341663558, "grad_norm": 0.4061894118785858, "learning_rate": 9.364250486965893e-07, "loss": 0.6671, "step": 19611 }, { "epoch": 0.8127978780720295, "grad_norm": 0.4217144250869751, "learning_rate": 9.362178291682209e-07, "loss": 0.7, "step": 19612 }, { "epoch": 0.8128393219777031, "grad_norm": 0.42487600445747375, "learning_rate": 9.360106096398526e-07, "loss": 0.6506, "step": 19613 }, { "epoch": 0.8128807658833769, "grad_norm": 0.38957899808883667, "learning_rate": 9.358033901114842e-07, "loss": 0.5918, "step": 19614 }, { "epoch": 0.8129222097890505, "grad_norm": 
0.4197501838207245, "learning_rate": 9.355961705831159e-07, "loss": 0.6816, "step": 19615 }, { "epoch": 0.8129636536947242, "grad_norm": 0.42846062779426575, "learning_rate": 9.353889510547475e-07, "loss": 0.6448, "step": 19616 }, { "epoch": 0.8130050976003979, "grad_norm": 0.4134286642074585, "learning_rate": 9.351817315263791e-07, "loss": 0.6396, "step": 19617 }, { "epoch": 0.8130465415060716, "grad_norm": 0.43286341428756714, "learning_rate": 9.349745119980108e-07, "loss": 0.7164, "step": 19618 }, { "epoch": 0.8130879854117452, "grad_norm": 0.38709139823913574, "learning_rate": 9.347672924696425e-07, "loss": 0.6277, "step": 19619 }, { "epoch": 0.8131294293174188, "grad_norm": 0.41580572724342346, "learning_rate": 9.345600729412741e-07, "loss": 0.6628, "step": 19620 }, { "epoch": 0.8131708732230926, "grad_norm": 0.4190933406352997, "learning_rate": 9.343528534129057e-07, "loss": 0.6914, "step": 19621 }, { "epoch": 0.8132123171287662, "grad_norm": 0.416583776473999, "learning_rate": 9.341456338845374e-07, "loss": 0.6589, "step": 19622 }, { "epoch": 0.8132537610344399, "grad_norm": 0.47658082842826843, "learning_rate": 9.339384143561691e-07, "loss": 0.6763, "step": 19623 }, { "epoch": 0.8132952049401135, "grad_norm": 0.4048510491847992, "learning_rate": 9.337311948278007e-07, "loss": 0.7461, "step": 19624 }, { "epoch": 0.8133366488457873, "grad_norm": 0.40592893958091736, "learning_rate": 9.335239752994323e-07, "loss": 0.6689, "step": 19625 }, { "epoch": 0.8133780927514609, "grad_norm": 0.443429559469223, "learning_rate": 9.33316755771064e-07, "loss": 0.6906, "step": 19626 }, { "epoch": 0.8134195366571346, "grad_norm": 0.41236671805381775, "learning_rate": 9.331095362426957e-07, "loss": 0.6802, "step": 19627 }, { "epoch": 0.8134609805628082, "grad_norm": 0.4343055784702301, "learning_rate": 9.329023167143273e-07, "loss": 0.6372, "step": 19628 }, { "epoch": 0.8135024244684819, "grad_norm": 0.4300934374332428, "learning_rate": 9.326950971859589e-07, "loss": 0.7224, 
"step": 19629 }, { "epoch": 0.8135438683741556, "grad_norm": 0.42721492052078247, "learning_rate": 9.324878776575905e-07, "loss": 0.6458, "step": 19630 }, { "epoch": 0.8135853122798292, "grad_norm": 0.410971462726593, "learning_rate": 9.322806581292223e-07, "loss": 0.677, "step": 19631 }, { "epoch": 0.813626756185503, "grad_norm": 0.40581756830215454, "learning_rate": 9.320734386008539e-07, "loss": 0.6501, "step": 19632 }, { "epoch": 0.8136682000911766, "grad_norm": 0.41823723912239075, "learning_rate": 9.318662190724855e-07, "loss": 0.692, "step": 19633 }, { "epoch": 0.8137096439968503, "grad_norm": 0.39229342341423035, "learning_rate": 9.316589995441171e-07, "loss": 0.639, "step": 19634 }, { "epoch": 0.8137510879025239, "grad_norm": 0.4431338608264923, "learning_rate": 9.314517800157488e-07, "loss": 0.6932, "step": 19635 }, { "epoch": 0.8137925318081976, "grad_norm": 0.4214947819709778, "learning_rate": 9.312445604873805e-07, "loss": 0.6934, "step": 19636 }, { "epoch": 0.8138339757138713, "grad_norm": 0.42291656136512756, "learning_rate": 9.310373409590121e-07, "loss": 0.6956, "step": 19637 }, { "epoch": 0.8138754196195449, "grad_norm": 0.44587475061416626, "learning_rate": 9.308301214306437e-07, "loss": 0.7249, "step": 19638 }, { "epoch": 0.8139168635252186, "grad_norm": 0.40491124987602234, "learning_rate": 9.306229019022753e-07, "loss": 0.6628, "step": 19639 }, { "epoch": 0.8139583074308923, "grad_norm": 0.40952345728874207, "learning_rate": 9.304156823739071e-07, "loss": 0.6674, "step": 19640 }, { "epoch": 0.813999751336566, "grad_norm": 0.43701407313346863, "learning_rate": 9.302084628455387e-07, "loss": 0.6532, "step": 19641 }, { "epoch": 0.8140411952422396, "grad_norm": 0.3995969295501709, "learning_rate": 9.300012433171703e-07, "loss": 0.6801, "step": 19642 }, { "epoch": 0.8140826391479133, "grad_norm": 0.40720489621162415, "learning_rate": 9.297940237888019e-07, "loss": 0.6689, "step": 19643 }, { "epoch": 0.814124083053587, "grad_norm": 
0.40332961082458496, "learning_rate": 9.295868042604337e-07, "loss": 0.6865, "step": 19644 }, { "epoch": 0.8141655269592606, "grad_norm": 0.3916777968406677, "learning_rate": 9.293795847320653e-07, "loss": 0.6775, "step": 19645 }, { "epoch": 0.8142069708649343, "grad_norm": 0.40694382786750793, "learning_rate": 9.291723652036969e-07, "loss": 0.6725, "step": 19646 }, { "epoch": 0.8142484147706079, "grad_norm": 0.42133644223213196, "learning_rate": 9.289651456753285e-07, "loss": 0.7185, "step": 19647 }, { "epoch": 0.8142898586762817, "grad_norm": 0.4047718346118927, "learning_rate": 9.287579261469603e-07, "loss": 0.6542, "step": 19648 }, { "epoch": 0.8143313025819553, "grad_norm": 0.4477708637714386, "learning_rate": 9.285507066185919e-07, "loss": 0.6804, "step": 19649 }, { "epoch": 0.814372746487629, "grad_norm": 0.4244292676448822, "learning_rate": 9.283434870902235e-07, "loss": 0.6427, "step": 19650 }, { "epoch": 0.8144141903933027, "grad_norm": 0.4160427749156952, "learning_rate": 9.281362675618551e-07, "loss": 0.7073, "step": 19651 }, { "epoch": 0.8144556342989764, "grad_norm": 0.4110974967479706, "learning_rate": 9.279290480334867e-07, "loss": 0.6566, "step": 19652 }, { "epoch": 0.81449707820465, "grad_norm": 0.4017619788646698, "learning_rate": 9.277218285051185e-07, "loss": 0.6842, "step": 19653 }, { "epoch": 0.8145385221103236, "grad_norm": 0.41221749782562256, "learning_rate": 9.275146089767501e-07, "loss": 0.5894, "step": 19654 }, { "epoch": 0.8145799660159974, "grad_norm": 0.4414932429790497, "learning_rate": 9.273073894483817e-07, "loss": 0.7131, "step": 19655 }, { "epoch": 0.814621409921671, "grad_norm": 0.41789546608924866, "learning_rate": 9.271001699200133e-07, "loss": 0.6713, "step": 19656 }, { "epoch": 0.8146628538273447, "grad_norm": 0.4307880103588104, "learning_rate": 9.268929503916451e-07, "loss": 0.6843, "step": 19657 }, { "epoch": 0.8147042977330183, "grad_norm": 0.42938876152038574, "learning_rate": 9.266857308632767e-07, "loss": 0.7581, 
"step": 19658 }, { "epoch": 0.8147457416386921, "grad_norm": 0.4760993421077728, "learning_rate": 9.264785113349083e-07, "loss": 0.7705, "step": 19659 }, { "epoch": 0.8147871855443657, "grad_norm": 0.40461939573287964, "learning_rate": 9.262712918065399e-07, "loss": 0.6443, "step": 19660 }, { "epoch": 0.8148286294500394, "grad_norm": 0.394463449716568, "learning_rate": 9.260640722781717e-07, "loss": 0.6337, "step": 19661 }, { "epoch": 0.814870073355713, "grad_norm": 0.41641008853912354, "learning_rate": 9.258568527498033e-07, "loss": 0.6821, "step": 19662 }, { "epoch": 0.8149115172613867, "grad_norm": 0.4241864085197449, "learning_rate": 9.256496332214349e-07, "loss": 0.6776, "step": 19663 }, { "epoch": 0.8149529611670604, "grad_norm": 0.404107004404068, "learning_rate": 9.254424136930665e-07, "loss": 0.6675, "step": 19664 }, { "epoch": 0.814994405072734, "grad_norm": 0.45130255818367004, "learning_rate": 9.252351941646981e-07, "loss": 0.7126, "step": 19665 }, { "epoch": 0.8150358489784078, "grad_norm": 0.43848392367362976, "learning_rate": 9.250279746363299e-07, "loss": 0.7046, "step": 19666 }, { "epoch": 0.8150772928840814, "grad_norm": 0.41398677229881287, "learning_rate": 9.248207551079615e-07, "loss": 0.6672, "step": 19667 }, { "epoch": 0.8151187367897551, "grad_norm": 0.43072229623794556, "learning_rate": 9.246135355795931e-07, "loss": 0.6445, "step": 19668 }, { "epoch": 0.8151601806954287, "grad_norm": 0.4125159978866577, "learning_rate": 9.244063160512247e-07, "loss": 0.5852, "step": 19669 }, { "epoch": 0.8152016246011025, "grad_norm": 0.42327719926834106, "learning_rate": 9.241990965228565e-07, "loss": 0.6549, "step": 19670 }, { "epoch": 0.8152430685067761, "grad_norm": 0.4482043981552124, "learning_rate": 9.239918769944881e-07, "loss": 0.6667, "step": 19671 }, { "epoch": 0.8152845124124497, "grad_norm": 0.41444191336631775, "learning_rate": 9.237846574661197e-07, "loss": 0.6251, "step": 19672 }, { "epoch": 0.8153259563181234, "grad_norm": 
0.39755353331565857, "learning_rate": 9.235774379377513e-07, "loss": 0.6127, "step": 19673 }, { "epoch": 0.8153674002237971, "grad_norm": 0.43398159742355347, "learning_rate": 9.233702184093831e-07, "loss": 0.6379, "step": 19674 }, { "epoch": 0.8154088441294708, "grad_norm": 0.4340681731700897, "learning_rate": 9.231629988810147e-07, "loss": 0.6741, "step": 19675 }, { "epoch": 0.8154502880351444, "grad_norm": 0.42247021198272705, "learning_rate": 9.229557793526463e-07, "loss": 0.7024, "step": 19676 }, { "epoch": 0.8154917319408181, "grad_norm": 0.44464629888534546, "learning_rate": 9.227485598242779e-07, "loss": 0.6719, "step": 19677 }, { "epoch": 0.8155331758464918, "grad_norm": 0.408925861120224, "learning_rate": 9.225413402959095e-07, "loss": 0.6926, "step": 19678 }, { "epoch": 0.8155746197521655, "grad_norm": 0.40343138575553894, "learning_rate": 9.223341207675413e-07, "loss": 0.6951, "step": 19679 }, { "epoch": 0.8156160636578391, "grad_norm": 0.42007914185523987, "learning_rate": 9.221269012391729e-07, "loss": 0.7026, "step": 19680 }, { "epoch": 0.8156575075635127, "grad_norm": 0.39540037512779236, "learning_rate": 9.219196817108045e-07, "loss": 0.6033, "step": 19681 }, { "epoch": 0.8156989514691865, "grad_norm": 0.4160844683647156, "learning_rate": 9.217124621824361e-07, "loss": 0.6077, "step": 19682 }, { "epoch": 0.8157403953748601, "grad_norm": 0.39774826169013977, "learning_rate": 9.215052426540679e-07, "loss": 0.6387, "step": 19683 }, { "epoch": 0.8157818392805338, "grad_norm": 0.41695356369018555, "learning_rate": 9.212980231256995e-07, "loss": 0.7037, "step": 19684 }, { "epoch": 0.8158232831862074, "grad_norm": 0.4222720265388489, "learning_rate": 9.210908035973311e-07, "loss": 0.6635, "step": 19685 }, { "epoch": 0.8158647270918812, "grad_norm": 0.41020119190216064, "learning_rate": 9.208835840689627e-07, "loss": 0.6782, "step": 19686 }, { "epoch": 0.8159061709975548, "grad_norm": 0.4394177496433258, "learning_rate": 9.206763645405945e-07, "loss": 
0.6615, "step": 19687 }, { "epoch": 0.8159476149032285, "grad_norm": 0.4219087064266205, "learning_rate": 9.204691450122261e-07, "loss": 0.6642, "step": 19688 }, { "epoch": 0.8159890588089022, "grad_norm": 0.3978648781776428, "learning_rate": 9.202619254838577e-07, "loss": 0.6373, "step": 19689 }, { "epoch": 0.8160305027145758, "grad_norm": 0.4171396493911743, "learning_rate": 9.200547059554893e-07, "loss": 0.6674, "step": 19690 }, { "epoch": 0.8160719466202495, "grad_norm": 0.4340713918209076, "learning_rate": 9.198474864271209e-07, "loss": 0.7319, "step": 19691 }, { "epoch": 0.8161133905259231, "grad_norm": 0.38078805804252625, "learning_rate": 9.196402668987527e-07, "loss": 0.6572, "step": 19692 }, { "epoch": 0.8161548344315969, "grad_norm": 0.3820040822029114, "learning_rate": 9.194330473703843e-07, "loss": 0.6208, "step": 19693 }, { "epoch": 0.8161962783372705, "grad_norm": 0.39858493208885193, "learning_rate": 9.192258278420159e-07, "loss": 0.6191, "step": 19694 }, { "epoch": 0.8162377222429442, "grad_norm": 0.4075811207294464, "learning_rate": 9.190186083136475e-07, "loss": 0.6375, "step": 19695 }, { "epoch": 0.8162791661486178, "grad_norm": 0.38667964935302734, "learning_rate": 9.188113887852793e-07, "loss": 0.6506, "step": 19696 }, { "epoch": 0.8163206100542915, "grad_norm": 0.4880538284778595, "learning_rate": 9.186041692569109e-07, "loss": 0.6898, "step": 19697 }, { "epoch": 0.8163620539599652, "grad_norm": 0.3847583830356598, "learning_rate": 9.183969497285425e-07, "loss": 0.6809, "step": 19698 }, { "epoch": 0.8164034978656388, "grad_norm": 0.4298837184906006, "learning_rate": 9.181897302001741e-07, "loss": 0.7065, "step": 19699 }, { "epoch": 0.8164449417713125, "grad_norm": 0.427444189786911, "learning_rate": 9.179825106718059e-07, "loss": 0.7097, "step": 19700 }, { "epoch": 0.8164863856769862, "grad_norm": 0.3810659646987915, "learning_rate": 9.177752911434375e-07, "loss": 0.6403, "step": 19701 }, { "epoch": 0.8165278295826599, "grad_norm": 
0.38859644532203674, "learning_rate": 9.175680716150691e-07, "loss": 0.6388, "step": 19702 }, { "epoch": 0.8165692734883335, "grad_norm": 0.43492475152015686, "learning_rate": 9.173608520867007e-07, "loss": 0.6991, "step": 19703 }, { "epoch": 0.8166107173940073, "grad_norm": 0.3958321511745453, "learning_rate": 9.171536325583323e-07, "loss": 0.6471, "step": 19704 }, { "epoch": 0.8166521612996809, "grad_norm": 0.4117456078529358, "learning_rate": 9.169464130299641e-07, "loss": 0.6183, "step": 19705 }, { "epoch": 0.8166936052053545, "grad_norm": 0.425936758518219, "learning_rate": 9.167391935015957e-07, "loss": 0.6605, "step": 19706 }, { "epoch": 0.8167350491110282, "grad_norm": 0.4100085496902466, "learning_rate": 9.165319739732273e-07, "loss": 0.6136, "step": 19707 }, { "epoch": 0.8167764930167019, "grad_norm": 0.404861718416214, "learning_rate": 9.163247544448589e-07, "loss": 0.667, "step": 19708 }, { "epoch": 0.8168179369223756, "grad_norm": 0.41336488723754883, "learning_rate": 9.161175349164907e-07, "loss": 0.6726, "step": 19709 }, { "epoch": 0.8168593808280492, "grad_norm": 0.4177948832511902, "learning_rate": 9.159103153881223e-07, "loss": 0.6788, "step": 19710 }, { "epoch": 0.8169008247337229, "grad_norm": 0.4212389588356018, "learning_rate": 9.157030958597539e-07, "loss": 0.6902, "step": 19711 }, { "epoch": 0.8169422686393966, "grad_norm": 0.42884549498558044, "learning_rate": 9.154958763313855e-07, "loss": 0.675, "step": 19712 }, { "epoch": 0.8169837125450703, "grad_norm": 0.4176728427410126, "learning_rate": 9.152886568030171e-07, "loss": 0.6534, "step": 19713 }, { "epoch": 0.8170251564507439, "grad_norm": 0.42101776599884033, "learning_rate": 9.150814372746489e-07, "loss": 0.7009, "step": 19714 }, { "epoch": 0.8170666003564175, "grad_norm": 0.4365403652191162, "learning_rate": 9.148742177462805e-07, "loss": 0.694, "step": 19715 }, { "epoch": 0.8171080442620913, "grad_norm": 0.423768550157547, "learning_rate": 9.146669982179121e-07, "loss": 0.6965, 
"step": 19716 }, { "epoch": 0.8171494881677649, "grad_norm": 0.44997668266296387, "learning_rate": 9.144597786895437e-07, "loss": 0.6615, "step": 19717 }, { "epoch": 0.8171909320734386, "grad_norm": 0.4319949150085449, "learning_rate": 9.142525591611755e-07, "loss": 0.7166, "step": 19718 }, { "epoch": 0.8172323759791122, "grad_norm": 0.40280041098594666, "learning_rate": 9.140453396328071e-07, "loss": 0.6584, "step": 19719 }, { "epoch": 0.817273819884786, "grad_norm": 0.40576761960983276, "learning_rate": 9.138381201044387e-07, "loss": 0.693, "step": 19720 }, { "epoch": 0.8173152637904596, "grad_norm": 0.3668539524078369, "learning_rate": 9.136309005760703e-07, "loss": 0.6354, "step": 19721 }, { "epoch": 0.8173567076961333, "grad_norm": 0.4559946358203888, "learning_rate": 9.134236810477021e-07, "loss": 0.7173, "step": 19722 }, { "epoch": 0.817398151601807, "grad_norm": 0.44028905034065247, "learning_rate": 9.132164615193337e-07, "loss": 0.6758, "step": 19723 }, { "epoch": 0.8174395955074806, "grad_norm": 0.4746832549571991, "learning_rate": 9.130092419909653e-07, "loss": 0.718, "step": 19724 }, { "epoch": 0.8174810394131543, "grad_norm": 0.42604297399520874, "learning_rate": 9.128020224625969e-07, "loss": 0.6847, "step": 19725 }, { "epoch": 0.8175224833188279, "grad_norm": 0.4185559153556824, "learning_rate": 9.125948029342286e-07, "loss": 0.7095, "step": 19726 }, { "epoch": 0.8175639272245017, "grad_norm": 0.4090959429740906, "learning_rate": 9.123875834058603e-07, "loss": 0.6874, "step": 19727 }, { "epoch": 0.8176053711301753, "grad_norm": 0.3999204635620117, "learning_rate": 9.121803638774919e-07, "loss": 0.6525, "step": 19728 }, { "epoch": 0.817646815035849, "grad_norm": 0.4440370202064514, "learning_rate": 9.119731443491235e-07, "loss": 0.6997, "step": 19729 }, { "epoch": 0.8176882589415226, "grad_norm": 0.41823819279670715, "learning_rate": 9.117659248207551e-07, "loss": 0.6654, "step": 19730 }, { "epoch": 0.8177297028471964, "grad_norm": 
0.45928406715393066, "learning_rate": 9.115587052923869e-07, "loss": 0.6968, "step": 19731 }, { "epoch": 0.81777114675287, "grad_norm": 0.41114240884780884, "learning_rate": 9.113514857640185e-07, "loss": 0.6782, "step": 19732 }, { "epoch": 0.8178125906585436, "grad_norm": 0.38109442591667175, "learning_rate": 9.111442662356501e-07, "loss": 0.6187, "step": 19733 }, { "epoch": 0.8178540345642173, "grad_norm": 0.4392819106578827, "learning_rate": 9.109370467072817e-07, "loss": 0.6345, "step": 19734 }, { "epoch": 0.817895478469891, "grad_norm": 0.40617629885673523, "learning_rate": 9.107298271789135e-07, "loss": 0.662, "step": 19735 }, { "epoch": 0.8179369223755647, "grad_norm": 0.3965899348258972, "learning_rate": 9.105226076505451e-07, "loss": 0.6698, "step": 19736 }, { "epoch": 0.8179783662812383, "grad_norm": 0.4302690029144287, "learning_rate": 9.103153881221767e-07, "loss": 0.6599, "step": 19737 }, { "epoch": 0.818019810186912, "grad_norm": 0.39805907011032104, "learning_rate": 9.101081685938083e-07, "loss": 0.6229, "step": 19738 }, { "epoch": 0.8180612540925857, "grad_norm": 0.42825451493263245, "learning_rate": 9.0990094906544e-07, "loss": 0.6709, "step": 19739 }, { "epoch": 0.8181026979982594, "grad_norm": 0.4311811327934265, "learning_rate": 9.096937295370717e-07, "loss": 0.635, "step": 19740 }, { "epoch": 0.818144141903933, "grad_norm": 0.38889995217323303, "learning_rate": 9.094865100087033e-07, "loss": 0.6764, "step": 19741 }, { "epoch": 0.8181855858096067, "grad_norm": 0.4110477864742279, "learning_rate": 9.092792904803349e-07, "loss": 0.6724, "step": 19742 }, { "epoch": 0.8182270297152804, "grad_norm": 0.40320104360580444, "learning_rate": 9.090720709519666e-07, "loss": 0.7156, "step": 19743 }, { "epoch": 0.818268473620954, "grad_norm": 0.41246506571769714, "learning_rate": 9.088648514235983e-07, "loss": 0.6665, "step": 19744 }, { "epoch": 0.8183099175266277, "grad_norm": 0.4248059391975403, "learning_rate": 9.086576318952299e-07, "loss": 0.6622, 
"step": 19745 }, { "epoch": 0.8183513614323014, "grad_norm": 0.3999119997024536, "learning_rate": 9.084504123668615e-07, "loss": 0.6411, "step": 19746 }, { "epoch": 0.8183928053379751, "grad_norm": 0.42367807030677795, "learning_rate": 9.082431928384932e-07, "loss": 0.7061, "step": 19747 }, { "epoch": 0.8184342492436487, "grad_norm": 0.4492710828781128, "learning_rate": 9.080359733101249e-07, "loss": 0.6753, "step": 19748 }, { "epoch": 0.8184756931493224, "grad_norm": 0.4066287577152252, "learning_rate": 9.078287537817565e-07, "loss": 0.6536, "step": 19749 }, { "epoch": 0.8185171370549961, "grad_norm": 0.4213227331638336, "learning_rate": 9.076215342533881e-07, "loss": 0.6842, "step": 19750 }, { "epoch": 0.8185585809606697, "grad_norm": 0.4452371895313263, "learning_rate": 9.074143147250197e-07, "loss": 0.6733, "step": 19751 }, { "epoch": 0.8186000248663434, "grad_norm": 0.4622397720813751, "learning_rate": 9.072070951966514e-07, "loss": 0.6978, "step": 19752 }, { "epoch": 0.818641468772017, "grad_norm": 0.3998209834098816, "learning_rate": 9.069998756682831e-07, "loss": 0.6603, "step": 19753 }, { "epoch": 0.8186829126776908, "grad_norm": 0.4282058775424957, "learning_rate": 9.067926561399147e-07, "loss": 0.6646, "step": 19754 }, { "epoch": 0.8187243565833644, "grad_norm": 0.4603007733821869, "learning_rate": 9.065854366115463e-07, "loss": 0.6516, "step": 19755 }, { "epoch": 0.8187658004890381, "grad_norm": 0.43246859312057495, "learning_rate": 9.06378217083178e-07, "loss": 0.6626, "step": 19756 }, { "epoch": 0.8188072443947118, "grad_norm": 0.43439385294914246, "learning_rate": 9.061709975548097e-07, "loss": 0.6919, "step": 19757 }, { "epoch": 0.8188486883003854, "grad_norm": 0.4170182943344116, "learning_rate": 9.059637780264413e-07, "loss": 0.6849, "step": 19758 }, { "epoch": 0.8188901322060591, "grad_norm": 0.4048578441143036, "learning_rate": 9.057565584980729e-07, "loss": 0.6179, "step": 19759 }, { "epoch": 0.8189315761117327, "grad_norm": 0.4051901400089264, 
"learning_rate": 9.055493389697046e-07, "loss": 0.7023, "step": 19760 }, { "epoch": 0.8189730200174065, "grad_norm": 0.39366015791893005, "learning_rate": 9.053421194413363e-07, "loss": 0.6213, "step": 19761 }, { "epoch": 0.8190144639230801, "grad_norm": 0.44939351081848145, "learning_rate": 9.051348999129679e-07, "loss": 0.6541, "step": 19762 }, { "epoch": 0.8190559078287538, "grad_norm": 0.3908756673336029, "learning_rate": 9.049276803845995e-07, "loss": 0.667, "step": 19763 }, { "epoch": 0.8190973517344274, "grad_norm": 0.43734195828437805, "learning_rate": 9.047204608562312e-07, "loss": 0.6611, "step": 19764 }, { "epoch": 0.8191387956401012, "grad_norm": 0.42125099897384644, "learning_rate": 9.045132413278628e-07, "loss": 0.658, "step": 19765 }, { "epoch": 0.8191802395457748, "grad_norm": 0.43547943234443665, "learning_rate": 9.043060217994945e-07, "loss": 0.6921, "step": 19766 }, { "epoch": 0.8192216834514484, "grad_norm": 0.39512085914611816, "learning_rate": 9.040988022711261e-07, "loss": 0.6859, "step": 19767 }, { "epoch": 0.8192631273571221, "grad_norm": 0.4024547040462494, "learning_rate": 9.038915827427577e-07, "loss": 0.6753, "step": 19768 }, { "epoch": 0.8193045712627958, "grad_norm": 0.43935248255729675, "learning_rate": 9.036843632143894e-07, "loss": 0.679, "step": 19769 }, { "epoch": 0.8193460151684695, "grad_norm": 0.4244343042373657, "learning_rate": 9.034771436860211e-07, "loss": 0.6273, "step": 19770 }, { "epoch": 0.8193874590741431, "grad_norm": 0.41510331630706787, "learning_rate": 9.032699241576527e-07, "loss": 0.6539, "step": 19771 }, { "epoch": 0.8194289029798169, "grad_norm": 0.41384971141815186, "learning_rate": 9.030627046292843e-07, "loss": 0.6639, "step": 19772 }, { "epoch": 0.8194703468854905, "grad_norm": 0.4073028564453125, "learning_rate": 9.02855485100916e-07, "loss": 0.6466, "step": 19773 }, { "epoch": 0.8195117907911642, "grad_norm": 0.4025697708129883, "learning_rate": 9.026482655725477e-07, "loss": 0.6642, "step": 19774 }, { 
"epoch": 0.8195532346968378, "grad_norm": 0.42282360792160034, "learning_rate": 9.024410460441793e-07, "loss": 0.6531, "step": 19775 }, { "epoch": 0.8195946786025115, "grad_norm": 0.3846050500869751, "learning_rate": 9.022338265158109e-07, "loss": 0.6298, "step": 19776 }, { "epoch": 0.8196361225081852, "grad_norm": 0.41263172030448914, "learning_rate": 9.020266069874426e-07, "loss": 0.6655, "step": 19777 }, { "epoch": 0.8196775664138588, "grad_norm": 0.3998473584651947, "learning_rate": 9.018193874590742e-07, "loss": 0.6897, "step": 19778 }, { "epoch": 0.8197190103195325, "grad_norm": 0.38915055990219116, "learning_rate": 9.016121679307059e-07, "loss": 0.6772, "step": 19779 }, { "epoch": 0.8197604542252062, "grad_norm": 0.41776904463768005, "learning_rate": 9.014049484023375e-07, "loss": 0.7057, "step": 19780 }, { "epoch": 0.8198018981308799, "grad_norm": 0.40689817070961, "learning_rate": 9.011977288739692e-07, "loss": 0.6879, "step": 19781 }, { "epoch": 0.8198433420365535, "grad_norm": 0.4390327036380768, "learning_rate": 9.009905093456008e-07, "loss": 0.6588, "step": 19782 }, { "epoch": 0.8198847859422272, "grad_norm": 0.39893630146980286, "learning_rate": 9.007832898172325e-07, "loss": 0.6384, "step": 19783 }, { "epoch": 0.8199262298479009, "grad_norm": 0.4407680928707123, "learning_rate": 9.005760702888641e-07, "loss": 0.6329, "step": 19784 }, { "epoch": 0.8199676737535745, "grad_norm": 0.4602377712726593, "learning_rate": 9.003688507604957e-07, "loss": 0.6866, "step": 19785 }, { "epoch": 0.8200091176592482, "grad_norm": 0.4353601336479187, "learning_rate": 9.001616312321274e-07, "loss": 0.7, "step": 19786 }, { "epoch": 0.8200505615649218, "grad_norm": 0.4277232885360718, "learning_rate": 8.99954411703759e-07, "loss": 0.7246, "step": 19787 }, { "epoch": 0.8200920054705956, "grad_norm": 0.3654617667198181, "learning_rate": 8.997471921753907e-07, "loss": 0.6267, "step": 19788 }, { "epoch": 0.8201334493762692, "grad_norm": 0.39362895488739014, "learning_rate": 
8.995399726470223e-07, "loss": 0.6421, "step": 19789 }, { "epoch": 0.8201748932819429, "grad_norm": 0.4053902328014374, "learning_rate": 8.99332753118654e-07, "loss": 0.6904, "step": 19790 }, { "epoch": 0.8202163371876166, "grad_norm": 0.3964919149875641, "learning_rate": 8.991255335902856e-07, "loss": 0.6353, "step": 19791 }, { "epoch": 0.8202577810932903, "grad_norm": 0.452632874250412, "learning_rate": 8.989183140619173e-07, "loss": 0.7083, "step": 19792 }, { "epoch": 0.8202992249989639, "grad_norm": 0.4176661968231201, "learning_rate": 8.987110945335489e-07, "loss": 0.6471, "step": 19793 }, { "epoch": 0.8203406689046375, "grad_norm": 0.4097972810268402, "learning_rate": 8.985038750051806e-07, "loss": 0.6752, "step": 19794 }, { "epoch": 0.8203821128103113, "grad_norm": 0.38780200481414795, "learning_rate": 8.982966554768122e-07, "loss": 0.5657, "step": 19795 }, { "epoch": 0.8204235567159849, "grad_norm": 0.4135057032108307, "learning_rate": 8.980894359484439e-07, "loss": 0.6648, "step": 19796 }, { "epoch": 0.8204650006216586, "grad_norm": 0.5055879354476929, "learning_rate": 8.978822164200755e-07, "loss": 0.7061, "step": 19797 }, { "epoch": 0.8205064445273322, "grad_norm": 0.43080827593803406, "learning_rate": 8.976749968917072e-07, "loss": 0.6742, "step": 19798 }, { "epoch": 0.820547888433006, "grad_norm": 0.4254550635814667, "learning_rate": 8.974677773633388e-07, "loss": 0.6514, "step": 19799 }, { "epoch": 0.8205893323386796, "grad_norm": 0.39122870564460754, "learning_rate": 8.972605578349704e-07, "loss": 0.6213, "step": 19800 }, { "epoch": 0.8206307762443533, "grad_norm": 0.41951683163642883, "learning_rate": 8.970533383066021e-07, "loss": 0.6545, "step": 19801 }, { "epoch": 0.8206722201500269, "grad_norm": 0.42110535502433777, "learning_rate": 8.968461187782338e-07, "loss": 0.6892, "step": 19802 }, { "epoch": 0.8207136640557006, "grad_norm": 0.45328107476234436, "learning_rate": 8.966388992498654e-07, "loss": 0.6964, "step": 19803 }, { "epoch": 
0.8207551079613743, "grad_norm": 0.3902305066585541, "learning_rate": 8.96431679721497e-07, "loss": 0.6481, "step": 19804 }, { "epoch": 0.8207965518670479, "grad_norm": 0.3752572536468506, "learning_rate": 8.962244601931287e-07, "loss": 0.657, "step": 19805 }, { "epoch": 0.8208379957727217, "grad_norm": 0.4239276945590973, "learning_rate": 8.960172406647603e-07, "loss": 0.7094, "step": 19806 }, { "epoch": 0.8208794396783953, "grad_norm": 0.4271276593208313, "learning_rate": 8.95810021136392e-07, "loss": 0.6378, "step": 19807 }, { "epoch": 0.820920883584069, "grad_norm": 0.3974558115005493, "learning_rate": 8.956028016080236e-07, "loss": 0.6118, "step": 19808 }, { "epoch": 0.8209623274897426, "grad_norm": 0.4485943913459778, "learning_rate": 8.953955820796553e-07, "loss": 0.6829, "step": 19809 }, { "epoch": 0.8210037713954162, "grad_norm": 0.514271080493927, "learning_rate": 8.951883625512869e-07, "loss": 0.7303, "step": 19810 }, { "epoch": 0.82104521530109, "grad_norm": 0.43207061290740967, "learning_rate": 8.949811430229186e-07, "loss": 0.6813, "step": 19811 }, { "epoch": 0.8210866592067636, "grad_norm": 0.4120883345603943, "learning_rate": 8.947739234945502e-07, "loss": 0.6613, "step": 19812 }, { "epoch": 0.8211281031124373, "grad_norm": 0.40813973546028137, "learning_rate": 8.945667039661818e-07, "loss": 0.5936, "step": 19813 }, { "epoch": 0.821169547018111, "grad_norm": 0.4333094358444214, "learning_rate": 8.943594844378135e-07, "loss": 0.6364, "step": 19814 }, { "epoch": 0.8212109909237847, "grad_norm": 0.4044613838195801, "learning_rate": 8.941522649094452e-07, "loss": 0.6963, "step": 19815 }, { "epoch": 0.8212524348294583, "grad_norm": 0.3731793463230133, "learning_rate": 8.939450453810768e-07, "loss": 0.6252, "step": 19816 }, { "epoch": 0.821293878735132, "grad_norm": 0.4420129358768463, "learning_rate": 8.937378258527084e-07, "loss": 0.718, "step": 19817 }, { "epoch": 0.8213353226408057, "grad_norm": 0.3989412784576416, "learning_rate": 
8.935306063243401e-07, "loss": 0.6726, "step": 19818 }, { "epoch": 0.8213767665464793, "grad_norm": 0.413006454706192, "learning_rate": 8.933233867959718e-07, "loss": 0.6393, "step": 19819 }, { "epoch": 0.821418210452153, "grad_norm": 0.4077928960323334, "learning_rate": 8.931161672676034e-07, "loss": 0.6157, "step": 19820 }, { "epoch": 0.8214596543578266, "grad_norm": 0.42799046635627747, "learning_rate": 8.92908947739235e-07, "loss": 0.6765, "step": 19821 }, { "epoch": 0.8215010982635004, "grad_norm": 0.47029295563697815, "learning_rate": 8.927017282108667e-07, "loss": 0.7362, "step": 19822 }, { "epoch": 0.821542542169174, "grad_norm": 0.4753129780292511, "learning_rate": 8.924945086824983e-07, "loss": 0.7144, "step": 19823 }, { "epoch": 0.8215839860748477, "grad_norm": 0.4120253622531891, "learning_rate": 8.9228728915413e-07, "loss": 0.644, "step": 19824 }, { "epoch": 0.8216254299805213, "grad_norm": 0.41009610891342163, "learning_rate": 8.920800696257616e-07, "loss": 0.6392, "step": 19825 }, { "epoch": 0.8216668738861951, "grad_norm": 0.44124719500541687, "learning_rate": 8.918728500973932e-07, "loss": 0.676, "step": 19826 }, { "epoch": 0.8217083177918687, "grad_norm": 0.4321514368057251, "learning_rate": 8.916656305690249e-07, "loss": 0.6738, "step": 19827 }, { "epoch": 0.8217497616975423, "grad_norm": 0.4038153886795044, "learning_rate": 8.914584110406566e-07, "loss": 0.6721, "step": 19828 }, { "epoch": 0.8217912056032161, "grad_norm": 0.4090587794780731, "learning_rate": 8.912511915122882e-07, "loss": 0.6368, "step": 19829 }, { "epoch": 0.8218326495088897, "grad_norm": 0.4300379753112793, "learning_rate": 8.910439719839198e-07, "loss": 0.6693, "step": 19830 }, { "epoch": 0.8218740934145634, "grad_norm": 0.385168194770813, "learning_rate": 8.908367524555515e-07, "loss": 0.6685, "step": 19831 }, { "epoch": 0.821915537320237, "grad_norm": 0.4192860424518585, "learning_rate": 8.906295329271832e-07, "loss": 0.6349, "step": 19832 }, { "epoch": 0.8219569812259108, 
"grad_norm": 0.48932138085365295, "learning_rate": 8.904223133988148e-07, "loss": 0.7098, "step": 19833 }, { "epoch": 0.8219984251315844, "grad_norm": 0.4394828677177429, "learning_rate": 8.902150938704464e-07, "loss": 0.6969, "step": 19834 }, { "epoch": 0.8220398690372581, "grad_norm": 0.3691657781600952, "learning_rate": 8.900078743420781e-07, "loss": 0.5822, "step": 19835 }, { "epoch": 0.8220813129429317, "grad_norm": 0.40473607182502747, "learning_rate": 8.898006548137098e-07, "loss": 0.7527, "step": 19836 }, { "epoch": 0.8221227568486054, "grad_norm": 0.3698449730873108, "learning_rate": 8.895934352853414e-07, "loss": 0.6306, "step": 19837 }, { "epoch": 0.8221642007542791, "grad_norm": 0.41561150550842285, "learning_rate": 8.89386215756973e-07, "loss": 0.6312, "step": 19838 }, { "epoch": 0.8222056446599527, "grad_norm": 0.3987681269645691, "learning_rate": 8.891789962286046e-07, "loss": 0.6278, "step": 19839 }, { "epoch": 0.8222470885656264, "grad_norm": 0.41775357723236084, "learning_rate": 8.889717767002363e-07, "loss": 0.683, "step": 19840 }, { "epoch": 0.8222885324713001, "grad_norm": 0.42423057556152344, "learning_rate": 8.88764557171868e-07, "loss": 0.655, "step": 19841 }, { "epoch": 0.8223299763769738, "grad_norm": 0.428254097700119, "learning_rate": 8.885573376434996e-07, "loss": 0.6832, "step": 19842 }, { "epoch": 0.8223714202826474, "grad_norm": 0.4318723678588867, "learning_rate": 8.883501181151312e-07, "loss": 0.6799, "step": 19843 }, { "epoch": 0.8224128641883212, "grad_norm": 0.44461116194725037, "learning_rate": 8.881428985867629e-07, "loss": 0.6168, "step": 19844 }, { "epoch": 0.8224543080939948, "grad_norm": 0.42376846075057983, "learning_rate": 8.879356790583946e-07, "loss": 0.6681, "step": 19845 }, { "epoch": 0.8224957519996684, "grad_norm": 0.41388991475105286, "learning_rate": 8.877284595300262e-07, "loss": 0.6298, "step": 19846 }, { "epoch": 0.8225371959053421, "grad_norm": 0.4312250316143036, "learning_rate": 8.875212400016578e-07, 
"loss": 0.6539, "step": 19847 }, { "epoch": 0.8225786398110158, "grad_norm": 0.4134850800037384, "learning_rate": 8.873140204732895e-07, "loss": 0.6824, "step": 19848 }, { "epoch": 0.8226200837166895, "grad_norm": 0.43058791756629944, "learning_rate": 8.871068009449212e-07, "loss": 0.6647, "step": 19849 }, { "epoch": 0.8226615276223631, "grad_norm": 0.4457002878189087, "learning_rate": 8.868995814165528e-07, "loss": 0.6907, "step": 19850 }, { "epoch": 0.8227029715280368, "grad_norm": 0.44663041830062866, "learning_rate": 8.866923618881844e-07, "loss": 0.6697, "step": 19851 }, { "epoch": 0.8227444154337105, "grad_norm": 0.4230356216430664, "learning_rate": 8.86485142359816e-07, "loss": 0.6938, "step": 19852 }, { "epoch": 0.8227858593393842, "grad_norm": 0.3900780975818634, "learning_rate": 8.862779228314478e-07, "loss": 0.642, "step": 19853 }, { "epoch": 0.8228273032450578, "grad_norm": 0.38914573192596436, "learning_rate": 8.860707033030794e-07, "loss": 0.6621, "step": 19854 }, { "epoch": 0.8228687471507314, "grad_norm": 0.3960849940776825, "learning_rate": 8.85863483774711e-07, "loss": 0.6505, "step": 19855 }, { "epoch": 0.8229101910564052, "grad_norm": 0.4598386585712433, "learning_rate": 8.856562642463426e-07, "loss": 0.7212, "step": 19856 }, { "epoch": 0.8229516349620788, "grad_norm": 0.3934946060180664, "learning_rate": 8.854490447179744e-07, "loss": 0.6353, "step": 19857 }, { "epoch": 0.8229930788677525, "grad_norm": 0.4103161692619324, "learning_rate": 8.85241825189606e-07, "loss": 0.6143, "step": 19858 }, { "epoch": 0.8230345227734261, "grad_norm": 0.4113180637359619, "learning_rate": 8.850346056612376e-07, "loss": 0.635, "step": 19859 }, { "epoch": 0.8230759666790999, "grad_norm": 0.4891010522842407, "learning_rate": 8.848273861328692e-07, "loss": 0.6869, "step": 19860 }, { "epoch": 0.8231174105847735, "grad_norm": 0.40585508942604065, "learning_rate": 8.846201666045008e-07, "loss": 0.6776, "step": 19861 }, { "epoch": 0.8231588544904472, "grad_norm": 
0.46703702211380005, "learning_rate": 8.844129470761326e-07, "loss": 0.6719, "step": 19862 }, { "epoch": 0.8232002983961209, "grad_norm": 0.4452812671661377, "learning_rate": 8.842057275477642e-07, "loss": 0.7026, "step": 19863 }, { "epoch": 0.8232417423017945, "grad_norm": 0.4191691279411316, "learning_rate": 8.839985080193958e-07, "loss": 0.6758, "step": 19864 }, { "epoch": 0.8232831862074682, "grad_norm": 0.40663719177246094, "learning_rate": 8.837912884910274e-07, "loss": 0.6904, "step": 19865 }, { "epoch": 0.8233246301131418, "grad_norm": 0.43943801522254944, "learning_rate": 8.835840689626592e-07, "loss": 0.7029, "step": 19866 }, { "epoch": 0.8233660740188156, "grad_norm": 0.40461650490760803, "learning_rate": 8.833768494342908e-07, "loss": 0.66, "step": 19867 }, { "epoch": 0.8234075179244892, "grad_norm": 0.40240442752838135, "learning_rate": 8.831696299059224e-07, "loss": 0.6465, "step": 19868 }, { "epoch": 0.8234489618301629, "grad_norm": 0.4185279309749603, "learning_rate": 8.82962410377554e-07, "loss": 0.6704, "step": 19869 }, { "epoch": 0.8234904057358365, "grad_norm": 0.4145281910896301, "learning_rate": 8.827551908491858e-07, "loss": 0.6223, "step": 19870 }, { "epoch": 0.8235318496415102, "grad_norm": 0.38802003860473633, "learning_rate": 8.825479713208174e-07, "loss": 0.6953, "step": 19871 }, { "epoch": 0.8235732935471839, "grad_norm": 0.39619219303131104, "learning_rate": 8.82340751792449e-07, "loss": 0.6381, "step": 19872 }, { "epoch": 0.8236147374528575, "grad_norm": 0.4229048490524292, "learning_rate": 8.821335322640806e-07, "loss": 0.66, "step": 19873 }, { "epoch": 0.8236561813585312, "grad_norm": 0.43544185161590576, "learning_rate": 8.819263127357122e-07, "loss": 0.6211, "step": 19874 }, { "epoch": 0.8236976252642049, "grad_norm": 0.4113274812698364, "learning_rate": 8.81719093207344e-07, "loss": 0.6707, "step": 19875 }, { "epoch": 0.8237390691698786, "grad_norm": 0.43609076738357544, "learning_rate": 8.815118736789756e-07, "loss": 0.6874, 
"step": 19876 }, { "epoch": 0.8237805130755522, "grad_norm": 0.42663663625717163, "learning_rate": 8.813046541506072e-07, "loss": 0.6426, "step": 19877 }, { "epoch": 0.823821956981226, "grad_norm": 0.3920465409755707, "learning_rate": 8.810974346222388e-07, "loss": 0.657, "step": 19878 }, { "epoch": 0.8238634008868996, "grad_norm": 0.4364449977874756, "learning_rate": 8.808902150938706e-07, "loss": 0.6501, "step": 19879 }, { "epoch": 0.8239048447925732, "grad_norm": 0.42272433638572693, "learning_rate": 8.806829955655022e-07, "loss": 0.7186, "step": 19880 }, { "epoch": 0.8239462886982469, "grad_norm": 0.42049479484558105, "learning_rate": 8.804757760371338e-07, "loss": 0.6589, "step": 19881 }, { "epoch": 0.8239877326039206, "grad_norm": 0.3955175578594208, "learning_rate": 8.802685565087654e-07, "loss": 0.6554, "step": 19882 }, { "epoch": 0.8240291765095943, "grad_norm": 0.3816300332546234, "learning_rate": 8.800613369803972e-07, "loss": 0.6506, "step": 19883 }, { "epoch": 0.8240706204152679, "grad_norm": 0.3778861463069916, "learning_rate": 8.798541174520288e-07, "loss": 0.5952, "step": 19884 }, { "epoch": 0.8241120643209416, "grad_norm": 0.3931732177734375, "learning_rate": 8.796468979236604e-07, "loss": 0.647, "step": 19885 }, { "epoch": 0.8241535082266153, "grad_norm": 0.4139452874660492, "learning_rate": 8.79439678395292e-07, "loss": 0.6715, "step": 19886 }, { "epoch": 0.824194952132289, "grad_norm": 0.4181923270225525, "learning_rate": 8.792324588669236e-07, "loss": 0.6465, "step": 19887 }, { "epoch": 0.8242363960379626, "grad_norm": 0.4303172826766968, "learning_rate": 8.790252393385554e-07, "loss": 0.7083, "step": 19888 }, { "epoch": 0.8242778399436362, "grad_norm": 0.40997904539108276, "learning_rate": 8.78818019810187e-07, "loss": 0.652, "step": 19889 }, { "epoch": 0.82431928384931, "grad_norm": 0.40727031230926514, "learning_rate": 8.786108002818186e-07, "loss": 0.6842, "step": 19890 }, { "epoch": 0.8243607277549836, "grad_norm": 0.39041846990585327, 
"learning_rate": 8.784035807534502e-07, "loss": 0.6141, "step": 19891 }, { "epoch": 0.8244021716606573, "grad_norm": 0.40774205327033997, "learning_rate": 8.78196361225082e-07, "loss": 0.6523, "step": 19892 }, { "epoch": 0.824443615566331, "grad_norm": 0.4005863666534424, "learning_rate": 8.779891416967136e-07, "loss": 0.6283, "step": 19893 }, { "epoch": 0.8244850594720047, "grad_norm": 0.5196129083633423, "learning_rate": 8.777819221683452e-07, "loss": 0.7163, "step": 19894 }, { "epoch": 0.8245265033776783, "grad_norm": 0.39395537972450256, "learning_rate": 8.775747026399768e-07, "loss": 0.6682, "step": 19895 }, { "epoch": 0.824567947283352, "grad_norm": 0.43335792422294617, "learning_rate": 8.773674831116086e-07, "loss": 0.6846, "step": 19896 }, { "epoch": 0.8246093911890257, "grad_norm": 0.4048047959804535, "learning_rate": 8.771602635832402e-07, "loss": 0.6267, "step": 19897 }, { "epoch": 0.8246508350946993, "grad_norm": 0.42945295572280884, "learning_rate": 8.769530440548718e-07, "loss": 0.6975, "step": 19898 }, { "epoch": 0.824692279000373, "grad_norm": 0.42068254947662354, "learning_rate": 8.767458245265034e-07, "loss": 0.6473, "step": 19899 }, { "epoch": 0.8247337229060466, "grad_norm": 0.40256649255752563, "learning_rate": 8.76538604998135e-07, "loss": 0.6798, "step": 19900 }, { "epoch": 0.8247751668117204, "grad_norm": 0.4270101487636566, "learning_rate": 8.763313854697668e-07, "loss": 0.6373, "step": 19901 }, { "epoch": 0.824816610717394, "grad_norm": 0.41601482033729553, "learning_rate": 8.761241659413984e-07, "loss": 0.65, "step": 19902 }, { "epoch": 0.8248580546230677, "grad_norm": 0.4278353452682495, "learning_rate": 8.7591694641303e-07, "loss": 0.6721, "step": 19903 }, { "epoch": 0.8248994985287413, "grad_norm": 0.45707967877388, "learning_rate": 8.757097268846616e-07, "loss": 0.684, "step": 19904 }, { "epoch": 0.8249409424344151, "grad_norm": 0.44562390446662903, "learning_rate": 8.755025073562934e-07, "loss": 0.6765, "step": 19905 }, { "epoch": 
0.8249823863400887, "grad_norm": 0.39573732018470764, "learning_rate": 8.75295287827925e-07, "loss": 0.6519, "step": 19906 }, { "epoch": 0.8250238302457623, "grad_norm": 0.36985552310943604, "learning_rate": 8.750880682995566e-07, "loss": 0.6005, "step": 19907 }, { "epoch": 0.825065274151436, "grad_norm": 0.4180866777896881, "learning_rate": 8.748808487711882e-07, "loss": 0.6593, "step": 19908 }, { "epoch": 0.8251067180571097, "grad_norm": 0.3873366713523865, "learning_rate": 8.7467362924282e-07, "loss": 0.6841, "step": 19909 }, { "epoch": 0.8251481619627834, "grad_norm": 0.41564396023750305, "learning_rate": 8.744664097144516e-07, "loss": 0.6482, "step": 19910 }, { "epoch": 0.825189605868457, "grad_norm": 0.41485926508903503, "learning_rate": 8.742591901860832e-07, "loss": 0.6584, "step": 19911 }, { "epoch": 0.8252310497741308, "grad_norm": 0.40630820393562317, "learning_rate": 8.740519706577148e-07, "loss": 0.6506, "step": 19912 }, { "epoch": 0.8252724936798044, "grad_norm": 0.43116316199302673, "learning_rate": 8.738447511293464e-07, "loss": 0.6876, "step": 19913 }, { "epoch": 0.8253139375854781, "grad_norm": 0.4070904850959778, "learning_rate": 8.736375316009782e-07, "loss": 0.6719, "step": 19914 }, { "epoch": 0.8253553814911517, "grad_norm": 0.41669800877571106, "learning_rate": 8.734303120726098e-07, "loss": 0.6545, "step": 19915 }, { "epoch": 0.8253968253968254, "grad_norm": 0.4123237133026123, "learning_rate": 8.732230925442414e-07, "loss": 0.736, "step": 19916 }, { "epoch": 0.8254382693024991, "grad_norm": 0.4123421609401703, "learning_rate": 8.73015873015873e-07, "loss": 0.6926, "step": 19917 }, { "epoch": 0.8254797132081727, "grad_norm": 0.42681917548179626, "learning_rate": 8.728086534875048e-07, "loss": 0.6792, "step": 19918 }, { "epoch": 0.8255211571138464, "grad_norm": 0.41366487741470337, "learning_rate": 8.726014339591364e-07, "loss": 0.677, "step": 19919 }, { "epoch": 0.8255626010195201, "grad_norm": 0.40823736786842346, "learning_rate": 
8.72394214430768e-07, "loss": 0.6447, "step": 19920 }, { "epoch": 0.8256040449251938, "grad_norm": 0.43181225657463074, "learning_rate": 8.721869949023996e-07, "loss": 0.6567, "step": 19921 }, { "epoch": 0.8256454888308674, "grad_norm": 0.4298512637615204, "learning_rate": 8.719797753740312e-07, "loss": 0.6351, "step": 19922 }, { "epoch": 0.8256869327365411, "grad_norm": 0.4529671370983124, "learning_rate": 8.71772555845663e-07, "loss": 0.6858, "step": 19923 }, { "epoch": 0.8257283766422148, "grad_norm": 0.42633047699928284, "learning_rate": 8.715653363172946e-07, "loss": 0.653, "step": 19924 }, { "epoch": 0.8257698205478884, "grad_norm": 0.40717771649360657, "learning_rate": 8.713581167889262e-07, "loss": 0.6741, "step": 19925 }, { "epoch": 0.8258112644535621, "grad_norm": 0.3932841122150421, "learning_rate": 8.711508972605578e-07, "loss": 0.6353, "step": 19926 }, { "epoch": 0.8258527083592357, "grad_norm": 0.39596128463745117, "learning_rate": 8.709436777321896e-07, "loss": 0.6693, "step": 19927 }, { "epoch": 0.8258941522649095, "grad_norm": 0.47697025537490845, "learning_rate": 8.707364582038212e-07, "loss": 0.7051, "step": 19928 }, { "epoch": 0.8259355961705831, "grad_norm": 0.38695260882377625, "learning_rate": 8.705292386754528e-07, "loss": 0.6315, "step": 19929 }, { "epoch": 0.8259770400762568, "grad_norm": 0.4535088539123535, "learning_rate": 8.703220191470844e-07, "loss": 0.6687, "step": 19930 }, { "epoch": 0.8260184839819305, "grad_norm": 0.4227275550365448, "learning_rate": 8.701147996187162e-07, "loss": 0.6876, "step": 19931 }, { "epoch": 0.8260599278876041, "grad_norm": 0.448953777551651, "learning_rate": 8.699075800903478e-07, "loss": 0.6597, "step": 19932 }, { "epoch": 0.8261013717932778, "grad_norm": 0.47158339619636536, "learning_rate": 8.697003605619794e-07, "loss": 0.6965, "step": 19933 }, { "epoch": 0.8261428156989514, "grad_norm": 0.4061207175254822, "learning_rate": 8.69493141033611e-07, "loss": 0.6285, "step": 19934 }, { "epoch": 
0.8261842596046252, "grad_norm": 0.41902783513069153, "learning_rate": 8.692859215052427e-07, "loss": 0.6229, "step": 19935 }, { "epoch": 0.8262257035102988, "grad_norm": 0.3826889097690582, "learning_rate": 8.690787019768744e-07, "loss": 0.6694, "step": 19936 }, { "epoch": 0.8262671474159725, "grad_norm": 0.4034103453159332, "learning_rate": 8.68871482448506e-07, "loss": 0.6615, "step": 19937 }, { "epoch": 0.8263085913216461, "grad_norm": 0.4457993805408478, "learning_rate": 8.686642629201376e-07, "loss": 0.6865, "step": 19938 }, { "epoch": 0.8263500352273199, "grad_norm": 0.40942978858947754, "learning_rate": 8.684570433917692e-07, "loss": 0.6691, "step": 19939 }, { "epoch": 0.8263914791329935, "grad_norm": 0.43380528688430786, "learning_rate": 8.68249823863401e-07, "loss": 0.7126, "step": 19940 }, { "epoch": 0.8264329230386671, "grad_norm": 0.38526079058647156, "learning_rate": 8.680426043350326e-07, "loss": 0.6371, "step": 19941 }, { "epoch": 0.8264743669443408, "grad_norm": 0.40194422006607056, "learning_rate": 8.678353848066642e-07, "loss": 0.6554, "step": 19942 }, { "epoch": 0.8265158108500145, "grad_norm": 0.41399288177490234, "learning_rate": 8.676281652782958e-07, "loss": 0.7267, "step": 19943 }, { "epoch": 0.8265572547556882, "grad_norm": 0.395340234041214, "learning_rate": 8.674209457499276e-07, "loss": 0.6581, "step": 19944 }, { "epoch": 0.8265986986613618, "grad_norm": 0.3893130123615265, "learning_rate": 8.672137262215592e-07, "loss": 0.6973, "step": 19945 }, { "epoch": 0.8266401425670356, "grad_norm": 0.40726932883262634, "learning_rate": 8.670065066931908e-07, "loss": 0.6013, "step": 19946 }, { "epoch": 0.8266815864727092, "grad_norm": 0.4310166835784912, "learning_rate": 8.667992871648224e-07, "loss": 0.6962, "step": 19947 }, { "epoch": 0.8267230303783829, "grad_norm": 0.42941218614578247, "learning_rate": 8.665920676364541e-07, "loss": 0.6595, "step": 19948 }, { "epoch": 0.8267644742840565, "grad_norm": 0.3880023658275604, "learning_rate": 
8.663848481080858e-07, "loss": 0.6118, "step": 19949 }, { "epoch": 0.8268059181897301, "grad_norm": 0.3884003460407257, "learning_rate": 8.661776285797174e-07, "loss": 0.6316, "step": 19950 }, { "epoch": 0.8268473620954039, "grad_norm": 0.4087783992290497, "learning_rate": 8.65970409051349e-07, "loss": 0.6067, "step": 19951 }, { "epoch": 0.8268888060010775, "grad_norm": 0.41053497791290283, "learning_rate": 8.657631895229807e-07, "loss": 0.7322, "step": 19952 }, { "epoch": 0.8269302499067512, "grad_norm": 0.4300461709499359, "learning_rate": 8.655559699946124e-07, "loss": 0.6383, "step": 19953 }, { "epoch": 0.8269716938124249, "grad_norm": 0.4125569760799408, "learning_rate": 8.65348750466244e-07, "loss": 0.6433, "step": 19954 }, { "epoch": 0.8270131377180986, "grad_norm": 0.4206247627735138, "learning_rate": 8.651415309378756e-07, "loss": 0.6488, "step": 19955 }, { "epoch": 0.8270545816237722, "grad_norm": 0.44500041007995605, "learning_rate": 8.649343114095072e-07, "loss": 0.6992, "step": 19956 }, { "epoch": 0.8270960255294459, "grad_norm": 0.4274558424949646, "learning_rate": 8.64727091881139e-07, "loss": 0.6648, "step": 19957 }, { "epoch": 0.8271374694351196, "grad_norm": 0.43079593777656555, "learning_rate": 8.645198723527706e-07, "loss": 0.71, "step": 19958 }, { "epoch": 0.8271789133407932, "grad_norm": 0.42514410614967346, "learning_rate": 8.643126528244022e-07, "loss": 0.6909, "step": 19959 }, { "epoch": 0.8272203572464669, "grad_norm": 0.3957414925098419, "learning_rate": 8.641054332960338e-07, "loss": 0.6268, "step": 19960 }, { "epoch": 0.8272618011521405, "grad_norm": 0.4250696301460266, "learning_rate": 8.638982137676655e-07, "loss": 0.6252, "step": 19961 }, { "epoch": 0.8273032450578143, "grad_norm": 0.4406365752220154, "learning_rate": 8.636909942392972e-07, "loss": 0.6565, "step": 19962 }, { "epoch": 0.8273446889634879, "grad_norm": 0.41153931617736816, "learning_rate": 8.634837747109288e-07, "loss": 0.6818, "step": 19963 }, { "epoch": 
0.8273861328691616, "grad_norm": 0.4214142858982086, "learning_rate": 8.632765551825604e-07, "loss": 0.6743, "step": 19964 }, { "epoch": 0.8274275767748352, "grad_norm": 0.40621864795684814, "learning_rate": 8.630693356541921e-07, "loss": 0.6912, "step": 19965 }, { "epoch": 0.827469020680509, "grad_norm": 0.42468762397766113, "learning_rate": 8.628621161258238e-07, "loss": 0.7075, "step": 19966 }, { "epoch": 0.8275104645861826, "grad_norm": 0.39489686489105225, "learning_rate": 8.626548965974554e-07, "loss": 0.6631, "step": 19967 }, { "epoch": 0.8275519084918562, "grad_norm": 0.425238698720932, "learning_rate": 8.62447677069087e-07, "loss": 0.6444, "step": 19968 }, { "epoch": 0.82759335239753, "grad_norm": 0.4191061556339264, "learning_rate": 8.622404575407187e-07, "loss": 0.6437, "step": 19969 }, { "epoch": 0.8276347963032036, "grad_norm": 0.4298592209815979, "learning_rate": 8.620332380123504e-07, "loss": 0.6494, "step": 19970 }, { "epoch": 0.8276762402088773, "grad_norm": 0.40879446268081665, "learning_rate": 8.61826018483982e-07, "loss": 0.6229, "step": 19971 }, { "epoch": 0.8277176841145509, "grad_norm": 0.39823323488235474, "learning_rate": 8.616187989556136e-07, "loss": 0.6279, "step": 19972 }, { "epoch": 0.8277591280202247, "grad_norm": 0.3998973071575165, "learning_rate": 8.614115794272453e-07, "loss": 0.6783, "step": 19973 }, { "epoch": 0.8278005719258983, "grad_norm": 0.4314455986022949, "learning_rate": 8.612043598988769e-07, "loss": 0.6841, "step": 19974 }, { "epoch": 0.827842015831572, "grad_norm": 0.4018285572528839, "learning_rate": 8.609971403705086e-07, "loss": 0.6371, "step": 19975 }, { "epoch": 0.8278834597372456, "grad_norm": 0.44595712423324585, "learning_rate": 8.607899208421402e-07, "loss": 0.7227, "step": 19976 }, { "epoch": 0.8279249036429193, "grad_norm": 0.3940489590167999, "learning_rate": 8.605827013137718e-07, "loss": 0.655, "step": 19977 }, { "epoch": 0.827966347548593, "grad_norm": 0.46020129323005676, "learning_rate": 
8.603754817854035e-07, "loss": 0.7246, "step": 19978 }, { "epoch": 0.8280077914542666, "grad_norm": 0.42290931940078735, "learning_rate": 8.601682622570352e-07, "loss": 0.6593, "step": 19979 }, { "epoch": 0.8280492353599403, "grad_norm": 0.4424780607223511, "learning_rate": 8.599610427286668e-07, "loss": 0.7007, "step": 19980 }, { "epoch": 0.828090679265614, "grad_norm": 0.4345730245113373, "learning_rate": 8.597538232002984e-07, "loss": 0.6843, "step": 19981 }, { "epoch": 0.8281321231712877, "grad_norm": 0.39915740489959717, "learning_rate": 8.595466036719301e-07, "loss": 0.6595, "step": 19982 }, { "epoch": 0.8281735670769613, "grad_norm": 0.43529045581817627, "learning_rate": 8.593393841435618e-07, "loss": 0.6647, "step": 19983 }, { "epoch": 0.8282150109826351, "grad_norm": 0.3980282247066498, "learning_rate": 8.591321646151934e-07, "loss": 0.6272, "step": 19984 }, { "epoch": 0.8282564548883087, "grad_norm": 0.425192266702652, "learning_rate": 8.58924945086825e-07, "loss": 0.6722, "step": 19985 }, { "epoch": 0.8282978987939823, "grad_norm": 0.39500176906585693, "learning_rate": 8.587177255584567e-07, "loss": 0.6597, "step": 19986 }, { "epoch": 0.828339342699656, "grad_norm": 0.4205699563026428, "learning_rate": 8.585105060300883e-07, "loss": 0.6759, "step": 19987 }, { "epoch": 0.8283807866053297, "grad_norm": 0.43331825733184814, "learning_rate": 8.5830328650172e-07, "loss": 0.6914, "step": 19988 }, { "epoch": 0.8284222305110034, "grad_norm": 0.46068274974823, "learning_rate": 8.580960669733516e-07, "loss": 0.673, "step": 19989 }, { "epoch": 0.828463674416677, "grad_norm": 0.4112700819969177, "learning_rate": 8.578888474449833e-07, "loss": 0.6748, "step": 19990 }, { "epoch": 0.8285051183223507, "grad_norm": 0.3983365297317505, "learning_rate": 8.576816279166149e-07, "loss": 0.6262, "step": 19991 }, { "epoch": 0.8285465622280244, "grad_norm": 0.4470222294330597, "learning_rate": 8.574744083882466e-07, "loss": 0.6844, "step": 19992 }, { "epoch": 0.828588006133698, 
"grad_norm": 0.42061251401901245, "learning_rate": 8.572671888598782e-07, "loss": 0.6584, "step": 19993 }, { "epoch": 0.8286294500393717, "grad_norm": 0.390026718378067, "learning_rate": 8.570599693315098e-07, "loss": 0.6715, "step": 19994 }, { "epoch": 0.8286708939450453, "grad_norm": 0.42781898379325867, "learning_rate": 8.568527498031415e-07, "loss": 0.692, "step": 19995 }, { "epoch": 0.8287123378507191, "grad_norm": 0.4022115170955658, "learning_rate": 8.566455302747731e-07, "loss": 0.663, "step": 19996 }, { "epoch": 0.8287537817563927, "grad_norm": 0.4376991093158722, "learning_rate": 8.564383107464048e-07, "loss": 0.6875, "step": 19997 }, { "epoch": 0.8287952256620664, "grad_norm": 0.4662567973136902, "learning_rate": 8.562310912180364e-07, "loss": 0.6503, "step": 19998 }, { "epoch": 0.82883666956774, "grad_norm": 0.3976411819458008, "learning_rate": 8.560238716896681e-07, "loss": 0.6711, "step": 19999 }, { "epoch": 0.8288781134734138, "grad_norm": 0.3967292904853821, "learning_rate": 8.558166521612997e-07, "loss": 0.6533, "step": 20000 }, { "epoch": 0.8289195573790874, "grad_norm": 0.4101928770542145, "learning_rate": 8.556094326329314e-07, "loss": 0.6526, "step": 20001 }, { "epoch": 0.828961001284761, "grad_norm": 0.41288354992866516, "learning_rate": 8.55402213104563e-07, "loss": 0.6406, "step": 20002 }, { "epoch": 0.8290024451904348, "grad_norm": 0.4233168661594391, "learning_rate": 8.551949935761947e-07, "loss": 0.7458, "step": 20003 }, { "epoch": 0.8290438890961084, "grad_norm": 0.4096802771091461, "learning_rate": 8.549877740478263e-07, "loss": 0.6472, "step": 20004 }, { "epoch": 0.8290853330017821, "grad_norm": 0.41254425048828125, "learning_rate": 8.54780554519458e-07, "loss": 0.6595, "step": 20005 }, { "epoch": 0.8291267769074557, "grad_norm": 0.4419281482696533, "learning_rate": 8.545733349910896e-07, "loss": 0.6667, "step": 20006 }, { "epoch": 0.8291682208131295, "grad_norm": 0.39667999744415283, "learning_rate": 8.543661154627213e-07, "loss": 
0.641, "step": 20007 }, { "epoch": 0.8292096647188031, "grad_norm": 0.43128979206085205, "learning_rate": 8.541588959343529e-07, "loss": 0.671, "step": 20008 }, { "epoch": 0.8292511086244768, "grad_norm": 0.4288334846496582, "learning_rate": 8.539516764059845e-07, "loss": 0.7034, "step": 20009 }, { "epoch": 0.8292925525301504, "grad_norm": 0.4451741874217987, "learning_rate": 8.537444568776162e-07, "loss": 0.6748, "step": 20010 }, { "epoch": 0.8293339964358241, "grad_norm": 0.44680196046829224, "learning_rate": 8.535372373492478e-07, "loss": 0.6995, "step": 20011 }, { "epoch": 0.8293754403414978, "grad_norm": 0.41273558139801025, "learning_rate": 8.533300178208795e-07, "loss": 0.6692, "step": 20012 }, { "epoch": 0.8294168842471714, "grad_norm": 0.4450312554836273, "learning_rate": 8.531227982925111e-07, "loss": 0.7041, "step": 20013 }, { "epoch": 0.8294583281528451, "grad_norm": 0.41574862599372864, "learning_rate": 8.529155787641428e-07, "loss": 0.6361, "step": 20014 }, { "epoch": 0.8294997720585188, "grad_norm": 0.45015352964401245, "learning_rate": 8.527083592357744e-07, "loss": 0.677, "step": 20015 }, { "epoch": 0.8295412159641925, "grad_norm": 0.37761011719703674, "learning_rate": 8.525011397074061e-07, "loss": 0.666, "step": 20016 }, { "epoch": 0.8295826598698661, "grad_norm": 0.42317500710487366, "learning_rate": 8.522939201790377e-07, "loss": 0.7002, "step": 20017 }, { "epoch": 0.8296241037755399, "grad_norm": 0.40746697783470154, "learning_rate": 8.520867006506694e-07, "loss": 0.6836, "step": 20018 }, { "epoch": 0.8296655476812135, "grad_norm": 0.4092734456062317, "learning_rate": 8.51879481122301e-07, "loss": 0.6521, "step": 20019 }, { "epoch": 0.8297069915868871, "grad_norm": 0.41898098587989807, "learning_rate": 8.516722615939327e-07, "loss": 0.6643, "step": 20020 }, { "epoch": 0.8297484354925608, "grad_norm": 0.48607155680656433, "learning_rate": 8.514650420655643e-07, "loss": 0.6827, "step": 20021 }, { "epoch": 0.8297898793982345, "grad_norm": 
0.3992387652397156, "learning_rate": 8.512578225371959e-07, "loss": 0.6567, "step": 20022 }, { "epoch": 0.8298313233039082, "grad_norm": 0.3944805860519409, "learning_rate": 8.510506030088276e-07, "loss": 0.6401, "step": 20023 }, { "epoch": 0.8298727672095818, "grad_norm": 0.39521360397338867, "learning_rate": 8.508433834804593e-07, "loss": 0.6459, "step": 20024 }, { "epoch": 0.8299142111152555, "grad_norm": 0.4150036871433258, "learning_rate": 8.506361639520909e-07, "loss": 0.6624, "step": 20025 }, { "epoch": 0.8299556550209292, "grad_norm": 0.4565248489379883, "learning_rate": 8.504289444237225e-07, "loss": 0.6752, "step": 20026 }, { "epoch": 0.8299970989266029, "grad_norm": 0.39699459075927734, "learning_rate": 8.502217248953542e-07, "loss": 0.7241, "step": 20027 }, { "epoch": 0.8300385428322765, "grad_norm": 0.3757595717906952, "learning_rate": 8.500145053669859e-07, "loss": 0.5988, "step": 20028 }, { "epoch": 0.8300799867379501, "grad_norm": 0.4404752552509308, "learning_rate": 8.498072858386175e-07, "loss": 0.661, "step": 20029 }, { "epoch": 0.8301214306436239, "grad_norm": 0.4492279291152954, "learning_rate": 8.496000663102491e-07, "loss": 0.6672, "step": 20030 }, { "epoch": 0.8301628745492975, "grad_norm": 0.44739478826522827, "learning_rate": 8.493928467818808e-07, "loss": 0.6711, "step": 20031 }, { "epoch": 0.8302043184549712, "grad_norm": 0.3968082070350647, "learning_rate": 8.491856272535124e-07, "loss": 0.6556, "step": 20032 }, { "epoch": 0.8302457623606448, "grad_norm": 0.41891148686408997, "learning_rate": 8.489784077251441e-07, "loss": 0.662, "step": 20033 }, { "epoch": 0.8302872062663186, "grad_norm": 0.38743114471435547, "learning_rate": 8.487711881967757e-07, "loss": 0.6343, "step": 20034 }, { "epoch": 0.8303286501719922, "grad_norm": 0.4422621428966522, "learning_rate": 8.485639686684073e-07, "loss": 0.7025, "step": 20035 }, { "epoch": 0.8303700940776659, "grad_norm": 0.45171454548835754, "learning_rate": 8.48356749140039e-07, "loss": 0.6604, 
"step": 20036 }, { "epoch": 0.8304115379833396, "grad_norm": 0.45018911361694336, "learning_rate": 8.481495296116707e-07, "loss": 0.7222, "step": 20037 }, { "epoch": 0.8304529818890132, "grad_norm": 0.3977010250091553, "learning_rate": 8.479423100833023e-07, "loss": 0.6531, "step": 20038 }, { "epoch": 0.8304944257946869, "grad_norm": 0.49777066707611084, "learning_rate": 8.477350905549339e-07, "loss": 0.6611, "step": 20039 }, { "epoch": 0.8305358697003605, "grad_norm": 0.4100944697856903, "learning_rate": 8.475278710265656e-07, "loss": 0.6552, "step": 20040 }, { "epoch": 0.8305773136060343, "grad_norm": 0.3915710151195526, "learning_rate": 8.473206514981973e-07, "loss": 0.6438, "step": 20041 }, { "epoch": 0.8306187575117079, "grad_norm": 0.38771945238113403, "learning_rate": 8.471134319698289e-07, "loss": 0.6447, "step": 20042 }, { "epoch": 0.8306602014173816, "grad_norm": 0.4356440603733063, "learning_rate": 8.469062124414605e-07, "loss": 0.6483, "step": 20043 }, { "epoch": 0.8307016453230552, "grad_norm": 0.4159722328186035, "learning_rate": 8.466989929130922e-07, "loss": 0.6364, "step": 20044 }, { "epoch": 0.830743089228729, "grad_norm": 0.38659438490867615, "learning_rate": 8.464917733847239e-07, "loss": 0.5859, "step": 20045 }, { "epoch": 0.8307845331344026, "grad_norm": 0.45010748505592346, "learning_rate": 8.462845538563555e-07, "loss": 0.6377, "step": 20046 }, { "epoch": 0.8308259770400762, "grad_norm": 0.41263681650161743, "learning_rate": 8.460773343279871e-07, "loss": 0.6489, "step": 20047 }, { "epoch": 0.83086742094575, "grad_norm": 0.44679760932922363, "learning_rate": 8.458701147996187e-07, "loss": 0.7117, "step": 20048 }, { "epoch": 0.8309088648514236, "grad_norm": 0.41662296652793884, "learning_rate": 8.456628952712504e-07, "loss": 0.6719, "step": 20049 }, { "epoch": 0.8309503087570973, "grad_norm": 0.4050852656364441, "learning_rate": 8.454556757428821e-07, "loss": 0.6472, "step": 20050 }, { "epoch": 0.8309917526627709, "grad_norm": 
0.42484140396118164, "learning_rate": 8.452484562145137e-07, "loss": 0.7167, "step": 20051 }, { "epoch": 0.8310331965684447, "grad_norm": 0.41925716400146484, "learning_rate": 8.450412366861453e-07, "loss": 0.6787, "step": 20052 }, { "epoch": 0.8310746404741183, "grad_norm": 0.41142305731773376, "learning_rate": 8.44834017157777e-07, "loss": 0.6399, "step": 20053 }, { "epoch": 0.8311160843797919, "grad_norm": 0.42942026257514954, "learning_rate": 8.446267976294087e-07, "loss": 0.6451, "step": 20054 }, { "epoch": 0.8311575282854656, "grad_norm": 0.3716887831687927, "learning_rate": 8.444195781010403e-07, "loss": 0.6289, "step": 20055 }, { "epoch": 0.8311989721911393, "grad_norm": 0.42751744389533997, "learning_rate": 8.442123585726719e-07, "loss": 0.6757, "step": 20056 }, { "epoch": 0.831240416096813, "grad_norm": 0.3912983536720276, "learning_rate": 8.440051390443036e-07, "loss": 0.6493, "step": 20057 }, { "epoch": 0.8312818600024866, "grad_norm": 0.3984086215496063, "learning_rate": 8.437979195159353e-07, "loss": 0.6641, "step": 20058 }, { "epoch": 0.8313233039081603, "grad_norm": 0.44905218482017517, "learning_rate": 8.435906999875669e-07, "loss": 0.6716, "step": 20059 }, { "epoch": 0.831364747813834, "grad_norm": 0.4295237958431244, "learning_rate": 8.433834804591985e-07, "loss": 0.7192, "step": 20060 }, { "epoch": 0.8314061917195077, "grad_norm": 0.41920343041419983, "learning_rate": 8.431762609308301e-07, "loss": 0.6492, "step": 20061 }, { "epoch": 0.8314476356251813, "grad_norm": 0.3962598741054535, "learning_rate": 8.429690414024619e-07, "loss": 0.7192, "step": 20062 }, { "epoch": 0.8314890795308549, "grad_norm": 0.4192592203617096, "learning_rate": 8.427618218740935e-07, "loss": 0.6656, "step": 20063 }, { "epoch": 0.8315305234365287, "grad_norm": 0.4547889530658722, "learning_rate": 8.425546023457251e-07, "loss": 0.6742, "step": 20064 }, { "epoch": 0.8315719673422023, "grad_norm": 0.4208383858203888, "learning_rate": 8.423473828173567e-07, "loss": 0.6531, 
"step": 20065 }, { "epoch": 0.831613411247876, "grad_norm": 0.41206905245780945, "learning_rate": 8.421401632889885e-07, "loss": 0.6592, "step": 20066 }, { "epoch": 0.8316548551535496, "grad_norm": 0.45992785692214966, "learning_rate": 8.419329437606201e-07, "loss": 0.6538, "step": 20067 }, { "epoch": 0.8316962990592234, "grad_norm": 0.4289086163043976, "learning_rate": 8.417257242322517e-07, "loss": 0.6842, "step": 20068 }, { "epoch": 0.831737742964897, "grad_norm": 0.43026667833328247, "learning_rate": 8.415185047038833e-07, "loss": 0.6027, "step": 20069 }, { "epoch": 0.8317791868705707, "grad_norm": 0.44514214992523193, "learning_rate": 8.413112851755149e-07, "loss": 0.6627, "step": 20070 }, { "epoch": 0.8318206307762444, "grad_norm": 0.37040069699287415, "learning_rate": 8.411040656471467e-07, "loss": 0.6367, "step": 20071 }, { "epoch": 0.831862074681918, "grad_norm": 0.39829742908477783, "learning_rate": 8.408968461187783e-07, "loss": 0.6499, "step": 20072 }, { "epoch": 0.8319035185875917, "grad_norm": 0.430103600025177, "learning_rate": 8.406896265904099e-07, "loss": 0.686, "step": 20073 }, { "epoch": 0.8319449624932653, "grad_norm": 0.40057259798049927, "learning_rate": 8.404824070620415e-07, "loss": 0.6348, "step": 20074 }, { "epoch": 0.8319864063989391, "grad_norm": 0.4294160306453705, "learning_rate": 8.402751875336733e-07, "loss": 0.6461, "step": 20075 }, { "epoch": 0.8320278503046127, "grad_norm": 0.4689786434173584, "learning_rate": 8.400679680053049e-07, "loss": 0.7097, "step": 20076 }, { "epoch": 0.8320692942102864, "grad_norm": 0.45120856165885925, "learning_rate": 8.398607484769365e-07, "loss": 0.7344, "step": 20077 }, { "epoch": 0.83211073811596, "grad_norm": 0.3935043215751648, "learning_rate": 8.396535289485681e-07, "loss": 0.7222, "step": 20078 }, { "epoch": 0.8321521820216338, "grad_norm": 0.3996037542819977, "learning_rate": 8.394463094201999e-07, "loss": 0.6846, "step": 20079 }, { "epoch": 0.8321936259273074, "grad_norm": 0.4432406425476074, 
"learning_rate": 8.392390898918315e-07, "loss": 0.6857, "step": 20080 }, { "epoch": 0.832235069832981, "grad_norm": 0.40930941700935364, "learning_rate": 8.390318703634631e-07, "loss": 0.6685, "step": 20081 }, { "epoch": 0.8322765137386547, "grad_norm": 0.4179249107837677, "learning_rate": 8.388246508350947e-07, "loss": 0.6803, "step": 20082 }, { "epoch": 0.8323179576443284, "grad_norm": 0.42251846194267273, "learning_rate": 8.386174313067263e-07, "loss": 0.6898, "step": 20083 }, { "epoch": 0.8323594015500021, "grad_norm": 0.4750705659389496, "learning_rate": 8.384102117783581e-07, "loss": 0.6885, "step": 20084 }, { "epoch": 0.8324008454556757, "grad_norm": 0.4333284795284271, "learning_rate": 8.382029922499897e-07, "loss": 0.6851, "step": 20085 }, { "epoch": 0.8324422893613495, "grad_norm": 0.44305646419525146, "learning_rate": 8.379957727216213e-07, "loss": 0.6731, "step": 20086 }, { "epoch": 0.8324837332670231, "grad_norm": 0.41136986017227173, "learning_rate": 8.377885531932529e-07, "loss": 0.6903, "step": 20087 }, { "epoch": 0.8325251771726968, "grad_norm": 0.4479343891143799, "learning_rate": 8.375813336648847e-07, "loss": 0.6873, "step": 20088 }, { "epoch": 0.8325666210783704, "grad_norm": 0.44717758893966675, "learning_rate": 8.373741141365163e-07, "loss": 0.7314, "step": 20089 }, { "epoch": 0.832608064984044, "grad_norm": 0.42782285809516907, "learning_rate": 8.371668946081479e-07, "loss": 0.7343, "step": 20090 }, { "epoch": 0.8326495088897178, "grad_norm": 0.44053348898887634, "learning_rate": 8.369596750797795e-07, "loss": 0.7009, "step": 20091 }, { "epoch": 0.8326909527953914, "grad_norm": 0.45652642846107483, "learning_rate": 8.367524555514113e-07, "loss": 0.6924, "step": 20092 }, { "epoch": 0.8327323967010651, "grad_norm": 0.4076385498046875, "learning_rate": 8.365452360230429e-07, "loss": 0.6257, "step": 20093 }, { "epoch": 0.8327738406067388, "grad_norm": 0.39857017993927, "learning_rate": 8.363380164946745e-07, "loss": 0.6624, "step": 20094 }, { 
"epoch": 0.8328152845124125, "grad_norm": 0.4346845746040344, "learning_rate": 8.361307969663061e-07, "loss": 0.673, "step": 20095 }, { "epoch": 0.8328567284180861, "grad_norm": 0.4635133743286133, "learning_rate": 8.359235774379377e-07, "loss": 0.7104, "step": 20096 }, { "epoch": 0.8328981723237598, "grad_norm": 0.4417278468608856, "learning_rate": 8.357163579095695e-07, "loss": 0.6729, "step": 20097 }, { "epoch": 0.8329396162294335, "grad_norm": 0.4420102536678314, "learning_rate": 8.355091383812011e-07, "loss": 0.6832, "step": 20098 }, { "epoch": 0.8329810601351071, "grad_norm": 0.41820478439331055, "learning_rate": 8.353019188528327e-07, "loss": 0.6626, "step": 20099 }, { "epoch": 0.8330225040407808, "grad_norm": 0.45711323618888855, "learning_rate": 8.350946993244643e-07, "loss": 0.7045, "step": 20100 }, { "epoch": 0.8330639479464544, "grad_norm": 0.42562970519065857, "learning_rate": 8.348874797960961e-07, "loss": 0.6833, "step": 20101 }, { "epoch": 0.8331053918521282, "grad_norm": 0.43641841411590576, "learning_rate": 8.346802602677277e-07, "loss": 0.6875, "step": 20102 }, { "epoch": 0.8331468357578018, "grad_norm": 0.43513673543930054, "learning_rate": 8.344730407393593e-07, "loss": 0.6655, "step": 20103 }, { "epoch": 0.8331882796634755, "grad_norm": 0.4346787929534912, "learning_rate": 8.342658212109909e-07, "loss": 0.6555, "step": 20104 }, { "epoch": 0.8332297235691491, "grad_norm": 0.4621865153312683, "learning_rate": 8.340586016826227e-07, "loss": 0.6824, "step": 20105 }, { "epoch": 0.8332711674748229, "grad_norm": 0.42973873019218445, "learning_rate": 8.338513821542543e-07, "loss": 0.6549, "step": 20106 }, { "epoch": 0.8333126113804965, "grad_norm": 0.42873603105545044, "learning_rate": 8.336441626258859e-07, "loss": 0.6488, "step": 20107 }, { "epoch": 0.8333540552861701, "grad_norm": 0.449428915977478, "learning_rate": 8.334369430975175e-07, "loss": 0.6703, "step": 20108 }, { "epoch": 0.8333954991918439, "grad_norm": 0.4479316771030426, 
"learning_rate": 8.332297235691491e-07, "loss": 0.7274, "step": 20109 }, { "epoch": 0.8334369430975175, "grad_norm": 0.4759034216403961, "learning_rate": 8.330225040407809e-07, "loss": 0.7117, "step": 20110 }, { "epoch": 0.8334783870031912, "grad_norm": 0.424565851688385, "learning_rate": 8.328152845124125e-07, "loss": 0.6406, "step": 20111 }, { "epoch": 0.8335198309088648, "grad_norm": 0.4320058822631836, "learning_rate": 8.326080649840441e-07, "loss": 0.7358, "step": 20112 }, { "epoch": 0.8335612748145386, "grad_norm": 0.40586286783218384, "learning_rate": 8.324008454556757e-07, "loss": 0.62, "step": 20113 }, { "epoch": 0.8336027187202122, "grad_norm": 0.4325121343135834, "learning_rate": 8.321936259273075e-07, "loss": 0.6591, "step": 20114 }, { "epoch": 0.8336441626258858, "grad_norm": 0.4304608404636383, "learning_rate": 8.319864063989391e-07, "loss": 0.7225, "step": 20115 }, { "epoch": 0.8336856065315595, "grad_norm": 0.4105498492717743, "learning_rate": 8.317791868705707e-07, "loss": 0.6417, "step": 20116 }, { "epoch": 0.8337270504372332, "grad_norm": 0.45178529620170593, "learning_rate": 8.315719673422023e-07, "loss": 0.6707, "step": 20117 }, { "epoch": 0.8337684943429069, "grad_norm": 0.3989909887313843, "learning_rate": 8.313647478138341e-07, "loss": 0.66, "step": 20118 }, { "epoch": 0.8338099382485805, "grad_norm": 0.40666308999061584, "learning_rate": 8.311575282854657e-07, "loss": 0.6432, "step": 20119 }, { "epoch": 0.8338513821542543, "grad_norm": 0.4271012842655182, "learning_rate": 8.309503087570973e-07, "loss": 0.6152, "step": 20120 }, { "epoch": 0.8338928260599279, "grad_norm": 0.3968338668346405, "learning_rate": 8.307430892287289e-07, "loss": 0.644, "step": 20121 }, { "epoch": 0.8339342699656016, "grad_norm": 0.4402945935726166, "learning_rate": 8.305358697003605e-07, "loss": 0.7002, "step": 20122 }, { "epoch": 0.8339757138712752, "grad_norm": 0.43367862701416016, "learning_rate": 8.303286501719923e-07, "loss": 0.6829, "step": 20123 }, { "epoch": 
0.8340171577769488, "grad_norm": 0.41570237278938293, "learning_rate": 8.301214306436239e-07, "loss": 0.6472, "step": 20124 }, { "epoch": 0.8340586016826226, "grad_norm": 0.4177684187889099, "learning_rate": 8.299142111152555e-07, "loss": 0.6934, "step": 20125 }, { "epoch": 0.8341000455882962, "grad_norm": 0.4139949381351471, "learning_rate": 8.297069915868871e-07, "loss": 0.6995, "step": 20126 }, { "epoch": 0.8341414894939699, "grad_norm": 0.4132566452026367, "learning_rate": 8.294997720585189e-07, "loss": 0.72, "step": 20127 }, { "epoch": 0.8341829333996436, "grad_norm": 0.42942988872528076, "learning_rate": 8.292925525301505e-07, "loss": 0.6899, "step": 20128 }, { "epoch": 0.8342243773053173, "grad_norm": 0.44176384806632996, "learning_rate": 8.290853330017821e-07, "loss": 0.6641, "step": 20129 }, { "epoch": 0.8342658212109909, "grad_norm": 0.40067216753959656, "learning_rate": 8.288781134734137e-07, "loss": 0.6798, "step": 20130 }, { "epoch": 0.8343072651166646, "grad_norm": 0.4535911977291107, "learning_rate": 8.286708939450455e-07, "loss": 0.6865, "step": 20131 }, { "epoch": 0.8343487090223383, "grad_norm": 0.40589749813079834, "learning_rate": 8.284636744166771e-07, "loss": 0.6534, "step": 20132 }, { "epoch": 0.8343901529280119, "grad_norm": 0.44679269194602966, "learning_rate": 8.282564548883087e-07, "loss": 0.6257, "step": 20133 }, { "epoch": 0.8344315968336856, "grad_norm": 0.45142728090286255, "learning_rate": 8.280492353599403e-07, "loss": 0.6528, "step": 20134 }, { "epoch": 0.8344730407393592, "grad_norm": 0.4098823368549347, "learning_rate": 8.278420158315719e-07, "loss": 0.6643, "step": 20135 }, { "epoch": 0.834514484645033, "grad_norm": 0.437261164188385, "learning_rate": 8.276347963032037e-07, "loss": 0.666, "step": 20136 }, { "epoch": 0.8345559285507066, "grad_norm": 0.41778817772865295, "learning_rate": 8.274275767748353e-07, "loss": 0.6316, "step": 20137 }, { "epoch": 0.8345973724563803, "grad_norm": 0.4401358962059021, "learning_rate": 
8.272203572464669e-07, "loss": 0.7249, "step": 20138 }, { "epoch": 0.834638816362054, "grad_norm": 0.4213072657585144, "learning_rate": 8.270131377180985e-07, "loss": 0.6653, "step": 20139 }, { "epoch": 0.8346802602677277, "grad_norm": 0.387548565864563, "learning_rate": 8.268059181897303e-07, "loss": 0.6355, "step": 20140 }, { "epoch": 0.8347217041734013, "grad_norm": 0.42852258682250977, "learning_rate": 8.265986986613619e-07, "loss": 0.6786, "step": 20141 }, { "epoch": 0.8347631480790749, "grad_norm": 0.3876879811286926, "learning_rate": 8.263914791329935e-07, "loss": 0.6693, "step": 20142 }, { "epoch": 0.8348045919847487, "grad_norm": 0.42805978655815125, "learning_rate": 8.261842596046251e-07, "loss": 0.6583, "step": 20143 }, { "epoch": 0.8348460358904223, "grad_norm": 0.46535688638687134, "learning_rate": 8.259770400762568e-07, "loss": 0.6812, "step": 20144 }, { "epoch": 0.834887479796096, "grad_norm": 0.4829835891723633, "learning_rate": 8.257698205478885e-07, "loss": 0.6481, "step": 20145 }, { "epoch": 0.8349289237017696, "grad_norm": 0.4245811998844147, "learning_rate": 8.255626010195201e-07, "loss": 0.7096, "step": 20146 }, { "epoch": 0.8349703676074434, "grad_norm": 0.4103786051273346, "learning_rate": 8.253553814911517e-07, "loss": 0.634, "step": 20147 }, { "epoch": 0.835011811513117, "grad_norm": 0.40655213594436646, "learning_rate": 8.251481619627833e-07, "loss": 0.6833, "step": 20148 }, { "epoch": 0.8350532554187907, "grad_norm": 0.4138886034488678, "learning_rate": 8.249409424344151e-07, "loss": 0.6106, "step": 20149 }, { "epoch": 0.8350946993244643, "grad_norm": 0.4149421751499176, "learning_rate": 8.247337229060467e-07, "loss": 0.6593, "step": 20150 }, { "epoch": 0.835136143230138, "grad_norm": 0.3820747435092926, "learning_rate": 8.245265033776783e-07, "loss": 0.6034, "step": 20151 }, { "epoch": 0.8351775871358117, "grad_norm": 0.3988364338874817, "learning_rate": 8.243192838493099e-07, "loss": 0.6565, "step": 20152 }, { "epoch": 
0.8352190310414853, "grad_norm": 0.4235917925834656, "learning_rate": 8.241120643209417e-07, "loss": 0.6433, "step": 20153 }, { "epoch": 0.835260474947159, "grad_norm": 0.4080042839050293, "learning_rate": 8.239048447925733e-07, "loss": 0.6646, "step": 20154 }, { "epoch": 0.8353019188528327, "grad_norm": 0.42446550726890564, "learning_rate": 8.236976252642049e-07, "loss": 0.6821, "step": 20155 }, { "epoch": 0.8353433627585064, "grad_norm": 0.49676021933555603, "learning_rate": 8.234904057358365e-07, "loss": 0.7761, "step": 20156 }, { "epoch": 0.83538480666418, "grad_norm": 0.4220278561115265, "learning_rate": 8.232831862074682e-07, "loss": 0.6039, "step": 20157 }, { "epoch": 0.8354262505698538, "grad_norm": 0.43039804697036743, "learning_rate": 8.230759666790999e-07, "loss": 0.6738, "step": 20158 }, { "epoch": 0.8354676944755274, "grad_norm": 0.4140724241733551, "learning_rate": 8.228687471507315e-07, "loss": 0.6416, "step": 20159 }, { "epoch": 0.835509138381201, "grad_norm": 0.4029494822025299, "learning_rate": 8.226615276223631e-07, "loss": 0.6484, "step": 20160 }, { "epoch": 0.8355505822868747, "grad_norm": 0.4469091594219208, "learning_rate": 8.224543080939948e-07, "loss": 0.7079, "step": 20161 }, { "epoch": 0.8355920261925484, "grad_norm": 0.46666160225868225, "learning_rate": 8.222470885656265e-07, "loss": 0.6572, "step": 20162 }, { "epoch": 0.8356334700982221, "grad_norm": 0.4139482378959656, "learning_rate": 8.220398690372581e-07, "loss": 0.6355, "step": 20163 }, { "epoch": 0.8356749140038957, "grad_norm": 0.40282413363456726, "learning_rate": 8.218326495088897e-07, "loss": 0.6281, "step": 20164 }, { "epoch": 0.8357163579095694, "grad_norm": 0.43178045749664307, "learning_rate": 8.216254299805213e-07, "loss": 0.6562, "step": 20165 }, { "epoch": 0.8357578018152431, "grad_norm": 0.41251131892204285, "learning_rate": 8.214182104521531e-07, "loss": 0.6219, "step": 20166 }, { "epoch": 0.8357992457209168, "grad_norm": 0.40567490458488464, "learning_rate": 
8.212109909237847e-07, "loss": 0.6703, "step": 20167 }, { "epoch": 0.8358406896265904, "grad_norm": 0.4112817347049713, "learning_rate": 8.210037713954163e-07, "loss": 0.6617, "step": 20168 }, { "epoch": 0.835882133532264, "grad_norm": 0.3748422861099243, "learning_rate": 8.207965518670479e-07, "loss": 0.6201, "step": 20169 }, { "epoch": 0.8359235774379378, "grad_norm": 0.4309268295764923, "learning_rate": 8.205893323386796e-07, "loss": 0.6644, "step": 20170 }, { "epoch": 0.8359650213436114, "grad_norm": 0.4265573024749756, "learning_rate": 8.203821128103113e-07, "loss": 0.6501, "step": 20171 }, { "epoch": 0.8360064652492851, "grad_norm": 0.4016571044921875, "learning_rate": 8.201748932819429e-07, "loss": 0.6802, "step": 20172 }, { "epoch": 0.8360479091549587, "grad_norm": 0.43196430802345276, "learning_rate": 8.199676737535745e-07, "loss": 0.6606, "step": 20173 }, { "epoch": 0.8360893530606325, "grad_norm": 0.41298431158065796, "learning_rate": 8.197604542252062e-07, "loss": 0.6646, "step": 20174 }, { "epoch": 0.8361307969663061, "grad_norm": 0.4271107614040375, "learning_rate": 8.195532346968379e-07, "loss": 0.6676, "step": 20175 }, { "epoch": 0.8361722408719797, "grad_norm": 0.43281686305999756, "learning_rate": 8.193460151684695e-07, "loss": 0.6465, "step": 20176 }, { "epoch": 0.8362136847776535, "grad_norm": 0.4030953645706177, "learning_rate": 8.191387956401011e-07, "loss": 0.6869, "step": 20177 }, { "epoch": 0.8362551286833271, "grad_norm": 0.46221786737442017, "learning_rate": 8.189315761117328e-07, "loss": 0.6913, "step": 20178 }, { "epoch": 0.8362965725890008, "grad_norm": 0.45367565751075745, "learning_rate": 8.187243565833645e-07, "loss": 0.699, "step": 20179 }, { "epoch": 0.8363380164946744, "grad_norm": 0.4214944839477539, "learning_rate": 8.185171370549961e-07, "loss": 0.639, "step": 20180 }, { "epoch": 0.8363794604003482, "grad_norm": 0.4334227442741394, "learning_rate": 8.183099175266277e-07, "loss": 0.6277, "step": 20181 }, { "epoch": 
0.8364209043060218, "grad_norm": 0.3942674994468689, "learning_rate": 8.181026979982593e-07, "loss": 0.6659, "step": 20182 }, { "epoch": 0.8364623482116955, "grad_norm": 0.4073808789253235, "learning_rate": 8.17895478469891e-07, "loss": 0.6912, "step": 20183 }, { "epoch": 0.8365037921173691, "grad_norm": 0.40669405460357666, "learning_rate": 8.176882589415227e-07, "loss": 0.6094, "step": 20184 }, { "epoch": 0.8365452360230428, "grad_norm": 0.44962188601493835, "learning_rate": 8.174810394131543e-07, "loss": 0.6646, "step": 20185 }, { "epoch": 0.8365866799287165, "grad_norm": 0.4204114079475403, "learning_rate": 8.172738198847859e-07, "loss": 0.6572, "step": 20186 }, { "epoch": 0.8366281238343901, "grad_norm": 0.4026535153388977, "learning_rate": 8.170666003564176e-07, "loss": 0.639, "step": 20187 }, { "epoch": 0.8366695677400638, "grad_norm": 0.4280976355075836, "learning_rate": 8.168593808280493e-07, "loss": 0.6508, "step": 20188 }, { "epoch": 0.8367110116457375, "grad_norm": 0.427510142326355, "learning_rate": 8.166521612996809e-07, "loss": 0.6372, "step": 20189 }, { "epoch": 0.8367524555514112, "grad_norm": 0.39416950941085815, "learning_rate": 8.164449417713125e-07, "loss": 0.6451, "step": 20190 }, { "epoch": 0.8367938994570848, "grad_norm": 0.38611263036727905, "learning_rate": 8.162377222429442e-07, "loss": 0.6241, "step": 20191 }, { "epoch": 0.8368353433627586, "grad_norm": 0.4174487292766571, "learning_rate": 8.16030502714576e-07, "loss": 0.6375, "step": 20192 }, { "epoch": 0.8368767872684322, "grad_norm": 0.4115034341812134, "learning_rate": 8.158232831862075e-07, "loss": 0.6537, "step": 20193 }, { "epoch": 0.8369182311741058, "grad_norm": 0.43263453245162964, "learning_rate": 8.156160636578391e-07, "loss": 0.6384, "step": 20194 }, { "epoch": 0.8369596750797795, "grad_norm": 0.4312496483325958, "learning_rate": 8.154088441294708e-07, "loss": 0.6571, "step": 20195 }, { "epoch": 0.8370011189854532, "grad_norm": 0.4276101887226105, "learning_rate": 
8.152016246011024e-07, "loss": 0.6874, "step": 20196 }, { "epoch": 0.8370425628911269, "grad_norm": 0.4064944088459015, "learning_rate": 8.149944050727341e-07, "loss": 0.6412, "step": 20197 }, { "epoch": 0.8370840067968005, "grad_norm": 0.42428916692733765, "learning_rate": 8.147871855443657e-07, "loss": 0.6586, "step": 20198 }, { "epoch": 0.8371254507024742, "grad_norm": 0.41180431842803955, "learning_rate": 8.145799660159974e-07, "loss": 0.6438, "step": 20199 }, { "epoch": 0.8371668946081479, "grad_norm": 0.4133630692958832, "learning_rate": 8.14372746487629e-07, "loss": 0.6755, "step": 20200 }, { "epoch": 0.8372083385138216, "grad_norm": 0.4471549391746521, "learning_rate": 8.141655269592608e-07, "loss": 0.7253, "step": 20201 }, { "epoch": 0.8372497824194952, "grad_norm": 0.4245521128177643, "learning_rate": 8.139583074308923e-07, "loss": 0.6846, "step": 20202 }, { "epoch": 0.8372912263251688, "grad_norm": 0.4138719141483307, "learning_rate": 8.137510879025239e-07, "loss": 0.6318, "step": 20203 }, { "epoch": 0.8373326702308426, "grad_norm": 0.389619916677475, "learning_rate": 8.135438683741556e-07, "loss": 0.6403, "step": 20204 }, { "epoch": 0.8373741141365162, "grad_norm": 0.4178262948989868, "learning_rate": 8.133366488457872e-07, "loss": 0.6936, "step": 20205 }, { "epoch": 0.8374155580421899, "grad_norm": 0.4229121208190918, "learning_rate": 8.13129429317419e-07, "loss": 0.6919, "step": 20206 }, { "epoch": 0.8374570019478635, "grad_norm": 0.46110671758651733, "learning_rate": 8.129222097890505e-07, "loss": 0.6426, "step": 20207 }, { "epoch": 0.8374984458535373, "grad_norm": 0.4340898096561432, "learning_rate": 8.127149902606822e-07, "loss": 0.6816, "step": 20208 }, { "epoch": 0.8375398897592109, "grad_norm": 0.46029648184776306, "learning_rate": 8.125077707323138e-07, "loss": 0.7317, "step": 20209 }, { "epoch": 0.8375813336648846, "grad_norm": 0.4133777320384979, "learning_rate": 8.123005512039456e-07, "loss": 0.6763, "step": 20210 }, { "epoch": 
0.8376227775705583, "grad_norm": 0.39068976044654846, "learning_rate": 8.120933316755771e-07, "loss": 0.6766, "step": 20211 }, { "epoch": 0.8376642214762319, "grad_norm": 0.4317997694015503, "learning_rate": 8.118861121472089e-07, "loss": 0.6823, "step": 20212 }, { "epoch": 0.8377056653819056, "grad_norm": 0.4342253506183624, "learning_rate": 8.116788926188404e-07, "loss": 0.6946, "step": 20213 }, { "epoch": 0.8377471092875792, "grad_norm": 0.45126211643218994, "learning_rate": 8.114716730904722e-07, "loss": 0.686, "step": 20214 }, { "epoch": 0.837788553193253, "grad_norm": 0.44056689739227295, "learning_rate": 8.112644535621037e-07, "loss": 0.6978, "step": 20215 }, { "epoch": 0.8378299970989266, "grad_norm": 0.38186904788017273, "learning_rate": 8.110572340337355e-07, "loss": 0.6311, "step": 20216 }, { "epoch": 0.8378714410046003, "grad_norm": 0.4326772093772888, "learning_rate": 8.10850014505367e-07, "loss": 0.703, "step": 20217 }, { "epoch": 0.8379128849102739, "grad_norm": 0.48534396290779114, "learning_rate": 8.106427949769986e-07, "loss": 0.6505, "step": 20218 }, { "epoch": 0.8379543288159477, "grad_norm": 0.4277802109718323, "learning_rate": 8.104355754486304e-07, "loss": 0.6829, "step": 20219 }, { "epoch": 0.8379957727216213, "grad_norm": 0.4214552938938141, "learning_rate": 8.10228355920262e-07, "loss": 0.6577, "step": 20220 }, { "epoch": 0.8380372166272949, "grad_norm": 0.4311408996582031, "learning_rate": 8.100211363918937e-07, "loss": 0.6984, "step": 20221 }, { "epoch": 0.8380786605329686, "grad_norm": 0.3997269570827484, "learning_rate": 8.098139168635252e-07, "loss": 0.6812, "step": 20222 }, { "epoch": 0.8381201044386423, "grad_norm": 0.40064096450805664, "learning_rate": 8.09606697335157e-07, "loss": 0.6853, "step": 20223 }, { "epoch": 0.838161548344316, "grad_norm": 0.40908172726631165, "learning_rate": 8.093994778067885e-07, "loss": 0.6423, "step": 20224 }, { "epoch": 0.8382029922499896, "grad_norm": 0.425047367811203, "learning_rate": 
8.091922582784203e-07, "loss": 0.6827, "step": 20225 }, { "epoch": 0.8382444361556634, "grad_norm": 0.4360681176185608, "learning_rate": 8.089850387500519e-07, "loss": 0.6587, "step": 20226 }, { "epoch": 0.838285880061337, "grad_norm": 0.43311211466789246, "learning_rate": 8.087778192216836e-07, "loss": 0.6858, "step": 20227 }, { "epoch": 0.8383273239670106, "grad_norm": 0.415152907371521, "learning_rate": 8.085705996933152e-07, "loss": 0.6395, "step": 20228 }, { "epoch": 0.8383687678726843, "grad_norm": 0.39357760548591614, "learning_rate": 8.083633801649469e-07, "loss": 0.6826, "step": 20229 }, { "epoch": 0.838410211778358, "grad_norm": 0.4715930223464966, "learning_rate": 8.081561606365785e-07, "loss": 0.7057, "step": 20230 }, { "epoch": 0.8384516556840317, "grad_norm": 0.4347377419471741, "learning_rate": 8.0794894110821e-07, "loss": 0.6274, "step": 20231 }, { "epoch": 0.8384930995897053, "grad_norm": 0.3943631052970886, "learning_rate": 8.077417215798418e-07, "loss": 0.6631, "step": 20232 }, { "epoch": 0.838534543495379, "grad_norm": 0.474553644657135, "learning_rate": 8.075345020514735e-07, "loss": 0.6803, "step": 20233 }, { "epoch": 0.8385759874010527, "grad_norm": 0.436998188495636, "learning_rate": 8.073272825231051e-07, "loss": 0.6812, "step": 20234 }, { "epoch": 0.8386174313067264, "grad_norm": 0.4583664536476135, "learning_rate": 8.071200629947367e-07, "loss": 0.6865, "step": 20235 }, { "epoch": 0.8386588752124, "grad_norm": 0.43711233139038086, "learning_rate": 8.069128434663684e-07, "loss": 0.6471, "step": 20236 }, { "epoch": 0.8387003191180736, "grad_norm": 0.4086623191833496, "learning_rate": 8.067056239380001e-07, "loss": 0.6317, "step": 20237 }, { "epoch": 0.8387417630237474, "grad_norm": 0.43112263083457947, "learning_rate": 8.064984044096317e-07, "loss": 0.676, "step": 20238 }, { "epoch": 0.838783206929421, "grad_norm": 0.4171840250492096, "learning_rate": 8.062911848812633e-07, "loss": 0.661, "step": 20239 }, { "epoch": 0.8388246508350947, 
"grad_norm": 0.4216444194316864, "learning_rate": 8.06083965352895e-07, "loss": 0.6517, "step": 20240 }, { "epoch": 0.8388660947407683, "grad_norm": 0.4085068702697754, "learning_rate": 8.058767458245266e-07, "loss": 0.678, "step": 20241 }, { "epoch": 0.8389075386464421, "grad_norm": 0.4424096345901489, "learning_rate": 8.056695262961583e-07, "loss": 0.7021, "step": 20242 }, { "epoch": 0.8389489825521157, "grad_norm": 0.4305326044559479, "learning_rate": 8.054623067677899e-07, "loss": 0.6682, "step": 20243 }, { "epoch": 0.8389904264577894, "grad_norm": 0.4179757833480835, "learning_rate": 8.052550872394215e-07, "loss": 0.7206, "step": 20244 }, { "epoch": 0.839031870363463, "grad_norm": 0.4429771900177002, "learning_rate": 8.050478677110532e-07, "loss": 0.6875, "step": 20245 }, { "epoch": 0.8390733142691367, "grad_norm": 0.40724819898605347, "learning_rate": 8.048406481826849e-07, "loss": 0.6658, "step": 20246 }, { "epoch": 0.8391147581748104, "grad_norm": 0.40812182426452637, "learning_rate": 8.046334286543165e-07, "loss": 0.6858, "step": 20247 }, { "epoch": 0.839156202080484, "grad_norm": 0.4145753085613251, "learning_rate": 8.04426209125948e-07, "loss": 0.6987, "step": 20248 }, { "epoch": 0.8391976459861578, "grad_norm": 0.4020088315010071, "learning_rate": 8.042189895975798e-07, "loss": 0.6532, "step": 20249 }, { "epoch": 0.8392390898918314, "grad_norm": 0.4070190489292145, "learning_rate": 8.040117700692115e-07, "loss": 0.6599, "step": 20250 }, { "epoch": 0.8392805337975051, "grad_norm": 0.40536993741989136, "learning_rate": 8.038045505408431e-07, "loss": 0.6868, "step": 20251 }, { "epoch": 0.8393219777031787, "grad_norm": 0.45047906041145325, "learning_rate": 8.035973310124747e-07, "loss": 0.7283, "step": 20252 }, { "epoch": 0.8393634216088525, "grad_norm": 0.4204121530056, "learning_rate": 8.033901114841064e-07, "loss": 0.6484, "step": 20253 }, { "epoch": 0.8394048655145261, "grad_norm": 0.39317265152931213, "learning_rate": 8.031828919557381e-07, "loss": 
0.626, "step": 20254 }, { "epoch": 0.8394463094201997, "grad_norm": 0.3943878412246704, "learning_rate": 8.029756724273697e-07, "loss": 0.5958, "step": 20255 }, { "epoch": 0.8394877533258734, "grad_norm": 0.4100732207298279, "learning_rate": 8.027684528990013e-07, "loss": 0.6348, "step": 20256 }, { "epoch": 0.8395291972315471, "grad_norm": 0.438375860452652, "learning_rate": 8.025612333706329e-07, "loss": 0.7015, "step": 20257 }, { "epoch": 0.8395706411372208, "grad_norm": 0.4063936471939087, "learning_rate": 8.023540138422646e-07, "loss": 0.645, "step": 20258 }, { "epoch": 0.8396120850428944, "grad_norm": 0.40298527479171753, "learning_rate": 8.021467943138963e-07, "loss": 0.6281, "step": 20259 }, { "epoch": 0.8396535289485682, "grad_norm": 0.4144369661808014, "learning_rate": 8.019395747855279e-07, "loss": 0.6605, "step": 20260 }, { "epoch": 0.8396949728542418, "grad_norm": 0.44305869936943054, "learning_rate": 8.017323552571595e-07, "loss": 0.6591, "step": 20261 }, { "epoch": 0.8397364167599155, "grad_norm": 0.40938323736190796, "learning_rate": 8.015251357287912e-07, "loss": 0.6613, "step": 20262 }, { "epoch": 0.8397778606655891, "grad_norm": 0.3989787697792053, "learning_rate": 8.013179162004229e-07, "loss": 0.6501, "step": 20263 }, { "epoch": 0.8398193045712627, "grad_norm": 0.3831969201564789, "learning_rate": 8.011106966720545e-07, "loss": 0.6719, "step": 20264 }, { "epoch": 0.8398607484769365, "grad_norm": 0.4422278106212616, "learning_rate": 8.009034771436861e-07, "loss": 0.6819, "step": 20265 }, { "epoch": 0.8399021923826101, "grad_norm": 0.41532623767852783, "learning_rate": 8.006962576153178e-07, "loss": 0.6451, "step": 20266 }, { "epoch": 0.8399436362882838, "grad_norm": 0.40918755531311035, "learning_rate": 8.004890380869495e-07, "loss": 0.699, "step": 20267 }, { "epoch": 0.8399850801939575, "grad_norm": 0.3815593421459198, "learning_rate": 8.002818185585811e-07, "loss": 0.7112, "step": 20268 }, { "epoch": 0.8400265240996312, "grad_norm": 
0.44708389043807983, "learning_rate": 8.000745990302127e-07, "loss": 0.7285, "step": 20269 }, { "epoch": 0.8400679680053048, "grad_norm": 0.41335397958755493, "learning_rate": 7.998673795018443e-07, "loss": 0.6503, "step": 20270 }, { "epoch": 0.8401094119109785, "grad_norm": 0.3990667164325714, "learning_rate": 7.996601599734761e-07, "loss": 0.6567, "step": 20271 }, { "epoch": 0.8401508558166522, "grad_norm": 0.4109889566898346, "learning_rate": 7.994529404451077e-07, "loss": 0.6465, "step": 20272 }, { "epoch": 0.8401922997223258, "grad_norm": 0.41938143968582153, "learning_rate": 7.992457209167393e-07, "loss": 0.6226, "step": 20273 }, { "epoch": 0.8402337436279995, "grad_norm": 0.38808903098106384, "learning_rate": 7.990385013883709e-07, "loss": 0.604, "step": 20274 }, { "epoch": 0.8402751875336731, "grad_norm": 0.4040178954601288, "learning_rate": 7.988312818600026e-07, "loss": 0.6128, "step": 20275 }, { "epoch": 0.8403166314393469, "grad_norm": 0.44610920548439026, "learning_rate": 7.986240623316343e-07, "loss": 0.6892, "step": 20276 }, { "epoch": 0.8403580753450205, "grad_norm": 0.4109758138656616, "learning_rate": 7.984168428032659e-07, "loss": 0.6754, "step": 20277 }, { "epoch": 0.8403995192506942, "grad_norm": 0.4390300214290619, "learning_rate": 7.982096232748975e-07, "loss": 0.7388, "step": 20278 }, { "epoch": 0.8404409631563678, "grad_norm": 0.42315617203712463, "learning_rate": 7.980024037465291e-07, "loss": 0.6821, "step": 20279 }, { "epoch": 0.8404824070620416, "grad_norm": 0.45524272322654724, "learning_rate": 7.977951842181609e-07, "loss": 0.6749, "step": 20280 }, { "epoch": 0.8405238509677152, "grad_norm": 0.43759286403656006, "learning_rate": 7.975879646897925e-07, "loss": 0.751, "step": 20281 }, { "epoch": 0.8405652948733888, "grad_norm": 0.4256024956703186, "learning_rate": 7.973807451614241e-07, "loss": 0.6589, "step": 20282 }, { "epoch": 0.8406067387790626, "grad_norm": 0.4395647943019867, "learning_rate": 7.971735256330557e-07, "loss": 0.6945, 
"step": 20283 }, { "epoch": 0.8406481826847362, "grad_norm": 0.4106292128562927, "learning_rate": 7.969663061046875e-07, "loss": 0.6586, "step": 20284 }, { "epoch": 0.8406896265904099, "grad_norm": 0.3862418830394745, "learning_rate": 7.967590865763191e-07, "loss": 0.6754, "step": 20285 }, { "epoch": 0.8407310704960835, "grad_norm": 0.4498670697212219, "learning_rate": 7.965518670479507e-07, "loss": 0.6997, "step": 20286 }, { "epoch": 0.8407725144017573, "grad_norm": 0.4008539915084839, "learning_rate": 7.963446475195823e-07, "loss": 0.6748, "step": 20287 }, { "epoch": 0.8408139583074309, "grad_norm": 0.4114227294921875, "learning_rate": 7.961374279912141e-07, "loss": 0.6827, "step": 20288 }, { "epoch": 0.8408554022131045, "grad_norm": 0.4190865457057953, "learning_rate": 7.959302084628457e-07, "loss": 0.652, "step": 20289 }, { "epoch": 0.8408968461187782, "grad_norm": 0.469209760427475, "learning_rate": 7.957229889344773e-07, "loss": 0.72, "step": 20290 }, { "epoch": 0.8409382900244519, "grad_norm": 0.40231853723526, "learning_rate": 7.955157694061089e-07, "loss": 0.6794, "step": 20291 }, { "epoch": 0.8409797339301256, "grad_norm": 0.4166019856929779, "learning_rate": 7.953085498777405e-07, "loss": 0.6809, "step": 20292 }, { "epoch": 0.8410211778357992, "grad_norm": 0.3955361843109131, "learning_rate": 7.951013303493723e-07, "loss": 0.6364, "step": 20293 }, { "epoch": 0.841062621741473, "grad_norm": 0.44638872146606445, "learning_rate": 7.948941108210039e-07, "loss": 0.687, "step": 20294 }, { "epoch": 0.8411040656471466, "grad_norm": 0.4024045169353485, "learning_rate": 7.946868912926355e-07, "loss": 0.6301, "step": 20295 }, { "epoch": 0.8411455095528203, "grad_norm": 0.42070427536964417, "learning_rate": 7.944796717642671e-07, "loss": 0.6482, "step": 20296 }, { "epoch": 0.8411869534584939, "grad_norm": 0.39480817317962646, "learning_rate": 7.942724522358989e-07, "loss": 0.6609, "step": 20297 }, { "epoch": 0.8412283973641675, "grad_norm": 0.4168970286846161, 
"learning_rate": 7.940652327075305e-07, "loss": 0.6888, "step": 20298 }, { "epoch": 0.8412698412698413, "grad_norm": 0.43357062339782715, "learning_rate": 7.938580131791621e-07, "loss": 0.6753, "step": 20299 }, { "epoch": 0.8413112851755149, "grad_norm": 0.4303136169910431, "learning_rate": 7.936507936507937e-07, "loss": 0.678, "step": 20300 }, { "epoch": 0.8413527290811886, "grad_norm": 0.4187089204788208, "learning_rate": 7.934435741224255e-07, "loss": 0.6819, "step": 20301 }, { "epoch": 0.8413941729868623, "grad_norm": 0.46501705050468445, "learning_rate": 7.932363545940571e-07, "loss": 0.6971, "step": 20302 }, { "epoch": 0.841435616892536, "grad_norm": 0.4112091660499573, "learning_rate": 7.930291350656887e-07, "loss": 0.6692, "step": 20303 }, { "epoch": 0.8414770607982096, "grad_norm": 0.41882097721099854, "learning_rate": 7.928219155373203e-07, "loss": 0.6632, "step": 20304 }, { "epoch": 0.8415185047038833, "grad_norm": 0.40139561891555786, "learning_rate": 7.926146960089519e-07, "loss": 0.6851, "step": 20305 }, { "epoch": 0.841559948609557, "grad_norm": 0.41318365931510925, "learning_rate": 7.924074764805837e-07, "loss": 0.6484, "step": 20306 }, { "epoch": 0.8416013925152306, "grad_norm": 0.42632460594177246, "learning_rate": 7.922002569522153e-07, "loss": 0.6602, "step": 20307 }, { "epoch": 0.8416428364209043, "grad_norm": 0.44999897480010986, "learning_rate": 7.919930374238469e-07, "loss": 0.6691, "step": 20308 }, { "epoch": 0.8416842803265779, "grad_norm": 0.42027410864830017, "learning_rate": 7.917858178954785e-07, "loss": 0.6995, "step": 20309 }, { "epoch": 0.8417257242322517, "grad_norm": 0.46995192766189575, "learning_rate": 7.915785983671103e-07, "loss": 0.6521, "step": 20310 }, { "epoch": 0.8417671681379253, "grad_norm": 0.42636585235595703, "learning_rate": 7.913713788387419e-07, "loss": 0.7013, "step": 20311 }, { "epoch": 0.841808612043599, "grad_norm": 0.41778379678726196, "learning_rate": 7.911641593103735e-07, "loss": 0.7163, "step": 20312 }, { 
"epoch": 0.8418500559492726, "grad_norm": 0.44311270117759705, "learning_rate": 7.909569397820051e-07, "loss": 0.6772, "step": 20313 }, { "epoch": 0.8418914998549464, "grad_norm": 0.3899264931678772, "learning_rate": 7.907497202536369e-07, "loss": 0.623, "step": 20314 }, { "epoch": 0.84193294376062, "grad_norm": 0.41782352328300476, "learning_rate": 7.905425007252685e-07, "loss": 0.651, "step": 20315 }, { "epoch": 0.8419743876662936, "grad_norm": 0.37422803044319153, "learning_rate": 7.903352811969001e-07, "loss": 0.6643, "step": 20316 }, { "epoch": 0.8420158315719674, "grad_norm": 0.427614688873291, "learning_rate": 7.901280616685317e-07, "loss": 0.7292, "step": 20317 }, { "epoch": 0.842057275477641, "grad_norm": 0.4152832627296448, "learning_rate": 7.899208421401633e-07, "loss": 0.593, "step": 20318 }, { "epoch": 0.8420987193833147, "grad_norm": 0.39070409536361694, "learning_rate": 7.897136226117951e-07, "loss": 0.6697, "step": 20319 }, { "epoch": 0.8421401632889883, "grad_norm": 0.4044613838195801, "learning_rate": 7.895064030834267e-07, "loss": 0.6848, "step": 20320 }, { "epoch": 0.8421816071946621, "grad_norm": 0.4143247902393341, "learning_rate": 7.892991835550583e-07, "loss": 0.6975, "step": 20321 }, { "epoch": 0.8422230511003357, "grad_norm": 0.42672422528266907, "learning_rate": 7.890919640266899e-07, "loss": 0.6698, "step": 20322 }, { "epoch": 0.8422644950060094, "grad_norm": 0.3664911985397339, "learning_rate": 7.888847444983217e-07, "loss": 0.6427, "step": 20323 }, { "epoch": 0.842305938911683, "grad_norm": 0.4231851398944855, "learning_rate": 7.886775249699533e-07, "loss": 0.6643, "step": 20324 }, { "epoch": 0.8423473828173567, "grad_norm": 0.43241921067237854, "learning_rate": 7.884703054415849e-07, "loss": 0.6375, "step": 20325 }, { "epoch": 0.8423888267230304, "grad_norm": 0.38926368951797485, "learning_rate": 7.882630859132165e-07, "loss": 0.6272, "step": 20326 }, { "epoch": 0.842430270628704, "grad_norm": 0.38392889499664307, "learning_rate": 
7.880558663848483e-07, "loss": 0.6116, "step": 20327 }, { "epoch": 0.8424717145343777, "grad_norm": 0.43238773941993713, "learning_rate": 7.878486468564799e-07, "loss": 0.6637, "step": 20328 }, { "epoch": 0.8425131584400514, "grad_norm": 0.39051949977874756, "learning_rate": 7.876414273281115e-07, "loss": 0.642, "step": 20329 }, { "epoch": 0.8425546023457251, "grad_norm": 0.4083532691001892, "learning_rate": 7.874342077997431e-07, "loss": 0.6875, "step": 20330 }, { "epoch": 0.8425960462513987, "grad_norm": 0.4511617422103882, "learning_rate": 7.872269882713747e-07, "loss": 0.6663, "step": 20331 }, { "epoch": 0.8426374901570725, "grad_norm": 0.429328590631485, "learning_rate": 7.870197687430065e-07, "loss": 0.6685, "step": 20332 }, { "epoch": 0.8426789340627461, "grad_norm": 0.4301462173461914, "learning_rate": 7.868125492146381e-07, "loss": 0.6873, "step": 20333 }, { "epoch": 0.8427203779684197, "grad_norm": 0.4104442894458771, "learning_rate": 7.866053296862697e-07, "loss": 0.6134, "step": 20334 }, { "epoch": 0.8427618218740934, "grad_norm": 0.3949783444404602, "learning_rate": 7.863981101579013e-07, "loss": 0.658, "step": 20335 }, { "epoch": 0.842803265779767, "grad_norm": 0.3894331157207489, "learning_rate": 7.861908906295331e-07, "loss": 0.6697, "step": 20336 }, { "epoch": 0.8428447096854408, "grad_norm": 0.4031327962875366, "learning_rate": 7.859836711011647e-07, "loss": 0.6454, "step": 20337 }, { "epoch": 0.8428861535911144, "grad_norm": 0.3981502950191498, "learning_rate": 7.857764515727963e-07, "loss": 0.6475, "step": 20338 }, { "epoch": 0.8429275974967881, "grad_norm": 0.4274056553840637, "learning_rate": 7.855692320444279e-07, "loss": 0.6909, "step": 20339 }, { "epoch": 0.8429690414024618, "grad_norm": 0.38113051652908325, "learning_rate": 7.853620125160597e-07, "loss": 0.6355, "step": 20340 }, { "epoch": 0.8430104853081355, "grad_norm": 0.41596877574920654, "learning_rate": 7.851547929876913e-07, "loss": 0.689, "step": 20341 }, { "epoch": 
0.8430519292138091, "grad_norm": 0.451772004365921, "learning_rate": 7.849475734593229e-07, "loss": 0.6687, "step": 20342 }, { "epoch": 0.8430933731194827, "grad_norm": 0.41261178255081177, "learning_rate": 7.847403539309545e-07, "loss": 0.6761, "step": 20343 }, { "epoch": 0.8431348170251565, "grad_norm": 0.43507614731788635, "learning_rate": 7.845331344025861e-07, "loss": 0.6543, "step": 20344 }, { "epoch": 0.8431762609308301, "grad_norm": 0.36950087547302246, "learning_rate": 7.843259148742179e-07, "loss": 0.6219, "step": 20345 }, { "epoch": 0.8432177048365038, "grad_norm": 0.3994353711605072, "learning_rate": 7.841186953458495e-07, "loss": 0.6865, "step": 20346 }, { "epoch": 0.8432591487421774, "grad_norm": 0.41059935092926025, "learning_rate": 7.839114758174811e-07, "loss": 0.6384, "step": 20347 }, { "epoch": 0.8433005926478512, "grad_norm": 0.4057645797729492, "learning_rate": 7.837042562891127e-07, "loss": 0.6328, "step": 20348 }, { "epoch": 0.8433420365535248, "grad_norm": 0.434075266122818, "learning_rate": 7.834970367607445e-07, "loss": 0.6697, "step": 20349 }, { "epoch": 0.8433834804591984, "grad_norm": 0.374485582113266, "learning_rate": 7.832898172323761e-07, "loss": 0.6626, "step": 20350 }, { "epoch": 0.8434249243648722, "grad_norm": 0.45293405652046204, "learning_rate": 7.830825977040077e-07, "loss": 0.7603, "step": 20351 }, { "epoch": 0.8434663682705458, "grad_norm": 0.4895859956741333, "learning_rate": 7.828753781756393e-07, "loss": 0.7407, "step": 20352 }, { "epoch": 0.8435078121762195, "grad_norm": 0.4310372769832611, "learning_rate": 7.826681586472709e-07, "loss": 0.6704, "step": 20353 }, { "epoch": 0.8435492560818931, "grad_norm": 0.4351061284542084, "learning_rate": 7.824609391189027e-07, "loss": 0.6759, "step": 20354 }, { "epoch": 0.8435906999875669, "grad_norm": 0.42802488803863525, "learning_rate": 7.822537195905343e-07, "loss": 0.692, "step": 20355 }, { "epoch": 0.8436321438932405, "grad_norm": 0.4239374101161957, "learning_rate": 
7.820465000621659e-07, "loss": 0.6714, "step": 20356 }, { "epoch": 0.8436735877989142, "grad_norm": 0.4451308846473694, "learning_rate": 7.818392805337975e-07, "loss": 0.7197, "step": 20357 }, { "epoch": 0.8437150317045878, "grad_norm": 0.427103191614151, "learning_rate": 7.816320610054293e-07, "loss": 0.6799, "step": 20358 }, { "epoch": 0.8437564756102615, "grad_norm": 0.42711034417152405, "learning_rate": 7.814248414770609e-07, "loss": 0.6897, "step": 20359 }, { "epoch": 0.8437979195159352, "grad_norm": 0.42329227924346924, "learning_rate": 7.812176219486925e-07, "loss": 0.6793, "step": 20360 }, { "epoch": 0.8438393634216088, "grad_norm": 0.3749352693557739, "learning_rate": 7.810104024203241e-07, "loss": 0.5995, "step": 20361 }, { "epoch": 0.8438808073272825, "grad_norm": 0.395255446434021, "learning_rate": 7.808031828919559e-07, "loss": 0.6316, "step": 20362 }, { "epoch": 0.8439222512329562, "grad_norm": 0.41739657521247864, "learning_rate": 7.805959633635875e-07, "loss": 0.6149, "step": 20363 }, { "epoch": 0.8439636951386299, "grad_norm": 0.416291207075119, "learning_rate": 7.803887438352191e-07, "loss": 0.698, "step": 20364 }, { "epoch": 0.8440051390443035, "grad_norm": 0.42187097668647766, "learning_rate": 7.801815243068507e-07, "loss": 0.6829, "step": 20365 }, { "epoch": 0.8440465829499773, "grad_norm": 0.4220981001853943, "learning_rate": 7.799743047784824e-07, "loss": 0.6226, "step": 20366 }, { "epoch": 0.8440880268556509, "grad_norm": 0.4218301773071289, "learning_rate": 7.797670852501141e-07, "loss": 0.6285, "step": 20367 }, { "epoch": 0.8441294707613245, "grad_norm": 0.43591782450675964, "learning_rate": 7.795598657217457e-07, "loss": 0.6646, "step": 20368 }, { "epoch": 0.8441709146669982, "grad_norm": 0.4099085330963135, "learning_rate": 7.793526461933773e-07, "loss": 0.6685, "step": 20369 }, { "epoch": 0.8442123585726719, "grad_norm": 0.43688157200813293, "learning_rate": 7.79145426665009e-07, "loss": 0.6534, "step": 20370 }, { "epoch": 
0.8442538024783456, "grad_norm": 0.4327491521835327, "learning_rate": 7.789382071366407e-07, "loss": 0.6997, "step": 20371 }, { "epoch": 0.8442952463840192, "grad_norm": 0.4056679606437683, "learning_rate": 7.787309876082723e-07, "loss": 0.6401, "step": 20372 }, { "epoch": 0.8443366902896929, "grad_norm": 0.4049903452396393, "learning_rate": 7.785237680799039e-07, "loss": 0.6381, "step": 20373 }, { "epoch": 0.8443781341953666, "grad_norm": 0.4465595483779907, "learning_rate": 7.783165485515355e-07, "loss": 0.6967, "step": 20374 }, { "epoch": 0.8444195781010403, "grad_norm": 0.4046452045440674, "learning_rate": 7.781093290231673e-07, "loss": 0.7046, "step": 20375 }, { "epoch": 0.8444610220067139, "grad_norm": 0.4019264280796051, "learning_rate": 7.779021094947989e-07, "loss": 0.6641, "step": 20376 }, { "epoch": 0.8445024659123875, "grad_norm": 0.3953589200973511, "learning_rate": 7.776948899664305e-07, "loss": 0.6449, "step": 20377 }, { "epoch": 0.8445439098180613, "grad_norm": 0.3843584656715393, "learning_rate": 7.774876704380621e-07, "loss": 0.61, "step": 20378 }, { "epoch": 0.8445853537237349, "grad_norm": 0.3851132392883301, "learning_rate": 7.772804509096938e-07, "loss": 0.6539, "step": 20379 }, { "epoch": 0.8446267976294086, "grad_norm": 0.43895745277404785, "learning_rate": 7.770732313813255e-07, "loss": 0.6938, "step": 20380 }, { "epoch": 0.8446682415350822, "grad_norm": 0.42420393228530884, "learning_rate": 7.768660118529571e-07, "loss": 0.6774, "step": 20381 }, { "epoch": 0.844709685440756, "grad_norm": 0.4183800220489502, "learning_rate": 7.766587923245887e-07, "loss": 0.7079, "step": 20382 }, { "epoch": 0.8447511293464296, "grad_norm": 0.41839006543159485, "learning_rate": 7.764515727962204e-07, "loss": 0.7114, "step": 20383 }, { "epoch": 0.8447925732521033, "grad_norm": 0.4254937767982483, "learning_rate": 7.762443532678521e-07, "loss": 0.678, "step": 20384 }, { "epoch": 0.844834017157777, "grad_norm": 0.41902899742126465, "learning_rate": 
7.760371337394837e-07, "loss": 0.7141, "step": 20385 }, { "epoch": 0.8448754610634506, "grad_norm": 0.4079195559024811, "learning_rate": 7.758299142111153e-07, "loss": 0.6603, "step": 20386 }, { "epoch": 0.8449169049691243, "grad_norm": 0.42082467675209045, "learning_rate": 7.75622694682747e-07, "loss": 0.6396, "step": 20387 }, { "epoch": 0.8449583488747979, "grad_norm": 0.3983731269836426, "learning_rate": 7.754154751543787e-07, "loss": 0.6548, "step": 20388 }, { "epoch": 0.8449997927804717, "grad_norm": 0.43963998556137085, "learning_rate": 7.752082556260103e-07, "loss": 0.7043, "step": 20389 }, { "epoch": 0.8450412366861453, "grad_norm": 0.40820637345314026, "learning_rate": 7.750010360976419e-07, "loss": 0.6436, "step": 20390 }, { "epoch": 0.845082680591819, "grad_norm": 0.4260731637477875, "learning_rate": 7.747938165692735e-07, "loss": 0.707, "step": 20391 }, { "epoch": 0.8451241244974926, "grad_norm": 0.41070759296417236, "learning_rate": 7.745865970409052e-07, "loss": 0.656, "step": 20392 }, { "epoch": 0.8451655684031664, "grad_norm": 0.3845791816711426, "learning_rate": 7.743793775125369e-07, "loss": 0.7061, "step": 20393 }, { "epoch": 0.84520701230884, "grad_norm": 0.37620067596435547, "learning_rate": 7.741721579841685e-07, "loss": 0.6254, "step": 20394 }, { "epoch": 0.8452484562145136, "grad_norm": 0.4091775119304657, "learning_rate": 7.739649384558001e-07, "loss": 0.6632, "step": 20395 }, { "epoch": 0.8452899001201873, "grad_norm": 0.4363259971141815, "learning_rate": 7.737577189274318e-07, "loss": 0.6471, "step": 20396 }, { "epoch": 0.845331344025861, "grad_norm": 0.4026367962360382, "learning_rate": 7.735504993990635e-07, "loss": 0.6597, "step": 20397 }, { "epoch": 0.8453727879315347, "grad_norm": 0.39174115657806396, "learning_rate": 7.733432798706951e-07, "loss": 0.6532, "step": 20398 }, { "epoch": 0.8454142318372083, "grad_norm": 0.39346399903297424, "learning_rate": 7.731360603423267e-07, "loss": 0.6852, "step": 20399 }, { "epoch": 
0.845455675742882, "grad_norm": 0.41150641441345215, "learning_rate": 7.729288408139584e-07, "loss": 0.6597, "step": 20400 }, { "epoch": 0.8454971196485557, "grad_norm": 0.3971468210220337, "learning_rate": 7.727216212855901e-07, "loss": 0.6105, "step": 20401 }, { "epoch": 0.8455385635542294, "grad_norm": 0.4269232749938965, "learning_rate": 7.725144017572217e-07, "loss": 0.6918, "step": 20402 }, { "epoch": 0.845580007459903, "grad_norm": 0.44439801573753357, "learning_rate": 7.723071822288533e-07, "loss": 0.7056, "step": 20403 }, { "epoch": 0.8456214513655766, "grad_norm": 0.4329550266265869, "learning_rate": 7.72099962700485e-07, "loss": 0.6469, "step": 20404 }, { "epoch": 0.8456628952712504, "grad_norm": 0.37064599990844727, "learning_rate": 7.718927431721166e-07, "loss": 0.6388, "step": 20405 }, { "epoch": 0.845704339176924, "grad_norm": 0.5832260251045227, "learning_rate": 7.716855236437483e-07, "loss": 0.6921, "step": 20406 }, { "epoch": 0.8457457830825977, "grad_norm": 0.38717830181121826, "learning_rate": 7.714783041153799e-07, "loss": 0.6429, "step": 20407 }, { "epoch": 0.8457872269882714, "grad_norm": 0.38445159792900085, "learning_rate": 7.712710845870115e-07, "loss": 0.6371, "step": 20408 }, { "epoch": 0.8458286708939451, "grad_norm": 0.4126291871070862, "learning_rate": 7.710638650586432e-07, "loss": 0.6235, "step": 20409 }, { "epoch": 0.8458701147996187, "grad_norm": 0.4096199572086334, "learning_rate": 7.708566455302749e-07, "loss": 0.6708, "step": 20410 }, { "epoch": 0.8459115587052923, "grad_norm": 0.40687939524650574, "learning_rate": 7.706494260019065e-07, "loss": 0.687, "step": 20411 }, { "epoch": 0.8459530026109661, "grad_norm": 0.39303112030029297, "learning_rate": 7.704422064735381e-07, "loss": 0.623, "step": 20412 }, { "epoch": 0.8459944465166397, "grad_norm": 0.4197443127632141, "learning_rate": 7.702349869451698e-07, "loss": 0.6383, "step": 20413 }, { "epoch": 0.8460358904223134, "grad_norm": 0.4085114002227783, "learning_rate": 
7.700277674168015e-07, "loss": 0.6508, "step": 20414 }, { "epoch": 0.846077334327987, "grad_norm": 0.4611682891845703, "learning_rate": 7.698205478884331e-07, "loss": 0.7354, "step": 20415 }, { "epoch": 0.8461187782336608, "grad_norm": 0.4064315855503082, "learning_rate": 7.696133283600647e-07, "loss": 0.6476, "step": 20416 }, { "epoch": 0.8461602221393344, "grad_norm": 0.4356051981449127, "learning_rate": 7.694061088316964e-07, "loss": 0.6798, "step": 20417 }, { "epoch": 0.8462016660450081, "grad_norm": 0.4189104437828064, "learning_rate": 7.69198889303328e-07, "loss": 0.6749, "step": 20418 }, { "epoch": 0.8462431099506817, "grad_norm": 0.5172034502029419, "learning_rate": 7.689916697749597e-07, "loss": 0.7185, "step": 20419 }, { "epoch": 0.8462845538563554, "grad_norm": 0.41513264179229736, "learning_rate": 7.687844502465913e-07, "loss": 0.6552, "step": 20420 }, { "epoch": 0.8463259977620291, "grad_norm": 0.4175315201282501, "learning_rate": 7.68577230718223e-07, "loss": 0.6852, "step": 20421 }, { "epoch": 0.8463674416677027, "grad_norm": 0.3819960355758667, "learning_rate": 7.683700111898546e-07, "loss": 0.6356, "step": 20422 }, { "epoch": 0.8464088855733765, "grad_norm": 0.4077717065811157, "learning_rate": 7.681627916614863e-07, "loss": 0.6478, "step": 20423 }, { "epoch": 0.8464503294790501, "grad_norm": 0.4417567849159241, "learning_rate": 7.679555721331179e-07, "loss": 0.6694, "step": 20424 }, { "epoch": 0.8464917733847238, "grad_norm": 0.3939686119556427, "learning_rate": 7.677483526047496e-07, "loss": 0.6677, "step": 20425 }, { "epoch": 0.8465332172903974, "grad_norm": 0.4271582365036011, "learning_rate": 7.675411330763812e-07, "loss": 0.6604, "step": 20426 }, { "epoch": 0.8465746611960712, "grad_norm": 0.4056811034679413, "learning_rate": 7.673339135480128e-07, "loss": 0.6514, "step": 20427 }, { "epoch": 0.8466161051017448, "grad_norm": 0.43984273076057434, "learning_rate": 7.671266940196445e-07, "loss": 0.6816, "step": 20428 }, { "epoch": 
0.8466575490074184, "grad_norm": 0.4196353852748871, "learning_rate": 7.669194744912761e-07, "loss": 0.6813, "step": 20429 }, { "epoch": 0.8466989929130921, "grad_norm": 0.3771660625934601, "learning_rate": 7.667122549629078e-07, "loss": 0.6367, "step": 20430 }, { "epoch": 0.8467404368187658, "grad_norm": 0.4578901529312134, "learning_rate": 7.665050354345394e-07, "loss": 0.6997, "step": 20431 }, { "epoch": 0.8467818807244395, "grad_norm": 0.40670812129974365, "learning_rate": 7.662978159061711e-07, "loss": 0.6743, "step": 20432 }, { "epoch": 0.8468233246301131, "grad_norm": 0.3928362727165222, "learning_rate": 7.660905963778027e-07, "loss": 0.6326, "step": 20433 }, { "epoch": 0.8468647685357868, "grad_norm": 0.4217066168785095, "learning_rate": 7.658833768494344e-07, "loss": 0.7036, "step": 20434 }, { "epoch": 0.8469062124414605, "grad_norm": 0.39548835158348083, "learning_rate": 7.65676157321066e-07, "loss": 0.631, "step": 20435 }, { "epoch": 0.8469476563471342, "grad_norm": 0.4086071848869324, "learning_rate": 7.654689377926977e-07, "loss": 0.6477, "step": 20436 }, { "epoch": 0.8469891002528078, "grad_norm": 0.43001681566238403, "learning_rate": 7.652617182643293e-07, "loss": 0.6821, "step": 20437 }, { "epoch": 0.8470305441584814, "grad_norm": 0.43386632204055786, "learning_rate": 7.65054498735961e-07, "loss": 0.6713, "step": 20438 }, { "epoch": 0.8470719880641552, "grad_norm": 0.4168662130832672, "learning_rate": 7.648472792075926e-07, "loss": 0.7065, "step": 20439 }, { "epoch": 0.8471134319698288, "grad_norm": 0.39915046095848083, "learning_rate": 7.646400596792242e-07, "loss": 0.6168, "step": 20440 }, { "epoch": 0.8471548758755025, "grad_norm": 0.3733215928077698, "learning_rate": 7.644328401508559e-07, "loss": 0.6295, "step": 20441 }, { "epoch": 0.8471963197811762, "grad_norm": 0.4335360527038574, "learning_rate": 7.642256206224876e-07, "loss": 0.6675, "step": 20442 }, { "epoch": 0.8472377636868499, "grad_norm": 0.4043378233909607, "learning_rate": 
7.640184010941192e-07, "loss": 0.6692, "step": 20443 }, { "epoch": 0.8472792075925235, "grad_norm": 0.4419574439525604, "learning_rate": 7.638111815657508e-07, "loss": 0.6327, "step": 20444 }, { "epoch": 0.8473206514981972, "grad_norm": 0.43890947103500366, "learning_rate": 7.636039620373825e-07, "loss": 0.6483, "step": 20445 }, { "epoch": 0.8473620954038709, "grad_norm": 0.4200500547885895, "learning_rate": 7.633967425090141e-07, "loss": 0.6548, "step": 20446 }, { "epoch": 0.8474035393095445, "grad_norm": 0.4187223017215729, "learning_rate": 7.631895229806458e-07, "loss": 0.7084, "step": 20447 }, { "epoch": 0.8474449832152182, "grad_norm": 0.42845991253852844, "learning_rate": 7.629823034522774e-07, "loss": 0.6686, "step": 20448 }, { "epoch": 0.8474864271208918, "grad_norm": 0.4146058261394501, "learning_rate": 7.627750839239091e-07, "loss": 0.6583, "step": 20449 }, { "epoch": 0.8475278710265656, "grad_norm": 0.4317057728767395, "learning_rate": 7.625678643955407e-07, "loss": 0.6403, "step": 20450 }, { "epoch": 0.8475693149322392, "grad_norm": 0.4178686738014221, "learning_rate": 7.623606448671724e-07, "loss": 0.6412, "step": 20451 }, { "epoch": 0.8476107588379129, "grad_norm": 0.3794616162776947, "learning_rate": 7.62153425338804e-07, "loss": 0.6102, "step": 20452 }, { "epoch": 0.8476522027435865, "grad_norm": 0.43796563148498535, "learning_rate": 7.619462058104356e-07, "loss": 0.6555, "step": 20453 }, { "epoch": 0.8476936466492603, "grad_norm": 0.4346521496772766, "learning_rate": 7.617389862820673e-07, "loss": 0.6963, "step": 20454 }, { "epoch": 0.8477350905549339, "grad_norm": 0.426464319229126, "learning_rate": 7.61531766753699e-07, "loss": 0.6826, "step": 20455 }, { "epoch": 0.8477765344606075, "grad_norm": 0.4010998606681824, "learning_rate": 7.613245472253306e-07, "loss": 0.6482, "step": 20456 }, { "epoch": 0.8478179783662813, "grad_norm": 0.41508328914642334, "learning_rate": 7.611173276969622e-07, "loss": 0.6873, "step": 20457 }, { "epoch": 
0.8478594222719549, "grad_norm": 0.4321563243865967, "learning_rate": 7.609101081685939e-07, "loss": 0.6653, "step": 20458 }, { "epoch": 0.8479008661776286, "grad_norm": 0.39460548758506775, "learning_rate": 7.607028886402256e-07, "loss": 0.5974, "step": 20459 }, { "epoch": 0.8479423100833022, "grad_norm": 0.43614527583122253, "learning_rate": 7.604956691118572e-07, "loss": 0.6874, "step": 20460 }, { "epoch": 0.847983753988976, "grad_norm": 0.40487203001976013, "learning_rate": 7.602884495834888e-07, "loss": 0.6412, "step": 20461 }, { "epoch": 0.8480251978946496, "grad_norm": 0.43353933095932007, "learning_rate": 7.600812300551205e-07, "loss": 0.6316, "step": 20462 }, { "epoch": 0.8480666418003233, "grad_norm": 0.44699952006340027, "learning_rate": 7.598740105267522e-07, "loss": 0.6508, "step": 20463 }, { "epoch": 0.8481080857059969, "grad_norm": 0.4085722863674164, "learning_rate": 7.596667909983838e-07, "loss": 0.7041, "step": 20464 }, { "epoch": 0.8481495296116706, "grad_norm": 0.40354153513908386, "learning_rate": 7.594595714700154e-07, "loss": 0.6476, "step": 20465 }, { "epoch": 0.8481909735173443, "grad_norm": 0.4219331443309784, "learning_rate": 7.59252351941647e-07, "loss": 0.6973, "step": 20466 }, { "epoch": 0.8482324174230179, "grad_norm": 0.43789976835250854, "learning_rate": 7.590451324132787e-07, "loss": 0.6921, "step": 20467 }, { "epoch": 0.8482738613286916, "grad_norm": 0.40335455536842346, "learning_rate": 7.588379128849104e-07, "loss": 0.6266, "step": 20468 }, { "epoch": 0.8483153052343653, "grad_norm": 0.39297932386398315, "learning_rate": 7.58630693356542e-07, "loss": 0.6288, "step": 20469 }, { "epoch": 0.848356749140039, "grad_norm": 0.3709549307823181, "learning_rate": 7.584234738281736e-07, "loss": 0.6178, "step": 20470 }, { "epoch": 0.8483981930457126, "grad_norm": 0.3981325626373291, "learning_rate": 7.582162542998053e-07, "loss": 0.6548, "step": 20471 }, { "epoch": 0.8484396369513862, "grad_norm": 0.41424915194511414, "learning_rate": 
7.58009034771437e-07, "loss": 0.6589, "step": 20472 }, { "epoch": 0.84848108085706, "grad_norm": 0.38770031929016113, "learning_rate": 7.578018152430686e-07, "loss": 0.6606, "step": 20473 }, { "epoch": 0.8485225247627336, "grad_norm": 0.46688422560691833, "learning_rate": 7.575945957147002e-07, "loss": 0.7151, "step": 20474 }, { "epoch": 0.8485639686684073, "grad_norm": 0.40260228514671326, "learning_rate": 7.573873761863319e-07, "loss": 0.6848, "step": 20475 }, { "epoch": 0.848605412574081, "grad_norm": 0.3820686638355255, "learning_rate": 7.571801566579636e-07, "loss": 0.6425, "step": 20476 }, { "epoch": 0.8486468564797547, "grad_norm": 0.4266296923160553, "learning_rate": 7.569729371295952e-07, "loss": 0.6761, "step": 20477 }, { "epoch": 0.8486883003854283, "grad_norm": 0.39699041843414307, "learning_rate": 7.567657176012268e-07, "loss": 0.6775, "step": 20478 }, { "epoch": 0.848729744291102, "grad_norm": 0.3877445161342621, "learning_rate": 7.565584980728584e-07, "loss": 0.6154, "step": 20479 }, { "epoch": 0.8487711881967757, "grad_norm": 0.42113596200942993, "learning_rate": 7.563512785444902e-07, "loss": 0.6038, "step": 20480 }, { "epoch": 0.8488126321024493, "grad_norm": 0.4289214611053467, "learning_rate": 7.561440590161218e-07, "loss": 0.6652, "step": 20481 }, { "epoch": 0.848854076008123, "grad_norm": 0.4578174352645874, "learning_rate": 7.559368394877534e-07, "loss": 0.6746, "step": 20482 }, { "epoch": 0.8488955199137966, "grad_norm": 0.4689340591430664, "learning_rate": 7.55729619959385e-07, "loss": 0.7081, "step": 20483 }, { "epoch": 0.8489369638194704, "grad_norm": 0.41705599427223206, "learning_rate": 7.555224004310167e-07, "loss": 0.6865, "step": 20484 }, { "epoch": 0.848978407725144, "grad_norm": 0.4212873876094818, "learning_rate": 7.553151809026484e-07, "loss": 0.6375, "step": 20485 }, { "epoch": 0.8490198516308177, "grad_norm": 0.44557350873947144, "learning_rate": 7.5510796137428e-07, "loss": 0.6527, "step": 20486 }, { "epoch": 
0.8490612955364913, "grad_norm": 0.40809524059295654, "learning_rate": 7.549007418459116e-07, "loss": 0.6798, "step": 20487 }, { "epoch": 0.8491027394421651, "grad_norm": 0.3946968615055084, "learning_rate": 7.546935223175432e-07, "loss": 0.6733, "step": 20488 }, { "epoch": 0.8491441833478387, "grad_norm": 0.4350440502166748, "learning_rate": 7.54486302789175e-07, "loss": 0.6661, "step": 20489 }, { "epoch": 0.8491856272535123, "grad_norm": 0.4276566505432129, "learning_rate": 7.542790832608066e-07, "loss": 0.6877, "step": 20490 }, { "epoch": 0.849227071159186, "grad_norm": 0.4717964828014374, "learning_rate": 7.540718637324382e-07, "loss": 0.6794, "step": 20491 }, { "epoch": 0.8492685150648597, "grad_norm": 0.4250555634498596, "learning_rate": 7.538646442040698e-07, "loss": 0.6914, "step": 20492 }, { "epoch": 0.8493099589705334, "grad_norm": 0.415860652923584, "learning_rate": 7.536574246757016e-07, "loss": 0.7136, "step": 20493 }, { "epoch": 0.849351402876207, "grad_norm": 0.4054071009159088, "learning_rate": 7.534502051473332e-07, "loss": 0.6812, "step": 20494 }, { "epoch": 0.8493928467818808, "grad_norm": 0.3962019085884094, "learning_rate": 7.532429856189648e-07, "loss": 0.6167, "step": 20495 }, { "epoch": 0.8494342906875544, "grad_norm": 0.426248699426651, "learning_rate": 7.530357660905964e-07, "loss": 0.6975, "step": 20496 }, { "epoch": 0.8494757345932281, "grad_norm": 0.4153566062450409, "learning_rate": 7.528285465622282e-07, "loss": 0.6223, "step": 20497 }, { "epoch": 0.8495171784989017, "grad_norm": 0.45274245738983154, "learning_rate": 7.526213270338598e-07, "loss": 0.6283, "step": 20498 }, { "epoch": 0.8495586224045754, "grad_norm": 0.4107130765914917, "learning_rate": 7.524141075054914e-07, "loss": 0.666, "step": 20499 }, { "epoch": 0.8496000663102491, "grad_norm": 0.40169525146484375, "learning_rate": 7.52206887977123e-07, "loss": 0.6267, "step": 20500 }, { "epoch": 0.8496415102159227, "grad_norm": 0.41305768489837646, "learning_rate": 
7.519996684487546e-07, "loss": 0.7397, "step": 20501 }, { "epoch": 0.8496829541215964, "grad_norm": 0.4273608326911926, "learning_rate": 7.517924489203864e-07, "loss": 0.7434, "step": 20502 }, { "epoch": 0.8497243980272701, "grad_norm": 0.4160315692424774, "learning_rate": 7.51585229392018e-07, "loss": 0.738, "step": 20503 }, { "epoch": 0.8497658419329438, "grad_norm": 0.4425608515739441, "learning_rate": 7.513780098636496e-07, "loss": 0.6426, "step": 20504 }, { "epoch": 0.8498072858386174, "grad_norm": 0.40396633744239807, "learning_rate": 7.511707903352812e-07, "loss": 0.6733, "step": 20505 }, { "epoch": 0.8498487297442912, "grad_norm": 0.42526212334632874, "learning_rate": 7.50963570806913e-07, "loss": 0.7002, "step": 20506 }, { "epoch": 0.8498901736499648, "grad_norm": 0.4493483006954193, "learning_rate": 7.507563512785446e-07, "loss": 0.6263, "step": 20507 }, { "epoch": 0.8499316175556384, "grad_norm": 0.4093776047229767, "learning_rate": 7.505491317501762e-07, "loss": 0.7063, "step": 20508 }, { "epoch": 0.8499730614613121, "grad_norm": 0.4123353064060211, "learning_rate": 7.503419122218078e-07, "loss": 0.6359, "step": 20509 }, { "epoch": 0.8500145053669858, "grad_norm": 0.41335126757621765, "learning_rate": 7.501346926934396e-07, "loss": 0.6841, "step": 20510 }, { "epoch": 0.8500559492726595, "grad_norm": 0.41959109902381897, "learning_rate": 7.499274731650712e-07, "loss": 0.6683, "step": 20511 }, { "epoch": 0.8500973931783331, "grad_norm": 0.40418002009391785, "learning_rate": 7.497202536367028e-07, "loss": 0.651, "step": 20512 }, { "epoch": 0.8501388370840068, "grad_norm": 0.45339900255203247, "learning_rate": 7.495130341083344e-07, "loss": 0.7083, "step": 20513 }, { "epoch": 0.8501802809896805, "grad_norm": 0.46708256006240845, "learning_rate": 7.49305814579966e-07, "loss": 0.6401, "step": 20514 }, { "epoch": 0.8502217248953542, "grad_norm": 0.4338332712650299, "learning_rate": 7.490985950515978e-07, "loss": 0.7207, "step": 20515 }, { "epoch": 
0.8502631688010278, "grad_norm": 0.414694607257843, "learning_rate": 7.488913755232294e-07, "loss": 0.6696, "step": 20516 }, { "epoch": 0.8503046127067014, "grad_norm": 0.44499385356903076, "learning_rate": 7.48684155994861e-07, "loss": 0.6465, "step": 20517 }, { "epoch": 0.8503460566123752, "grad_norm": 0.3877165615558624, "learning_rate": 7.484769364664926e-07, "loss": 0.6407, "step": 20518 }, { "epoch": 0.8503875005180488, "grad_norm": 0.37963423132896423, "learning_rate": 7.482697169381244e-07, "loss": 0.594, "step": 20519 }, { "epoch": 0.8504289444237225, "grad_norm": 0.3967238664627075, "learning_rate": 7.48062497409756e-07, "loss": 0.6405, "step": 20520 }, { "epoch": 0.8504703883293961, "grad_norm": 0.4245978891849518, "learning_rate": 7.478552778813876e-07, "loss": 0.6625, "step": 20521 }, { "epoch": 0.8505118322350699, "grad_norm": 0.39641013741493225, "learning_rate": 7.476480583530192e-07, "loss": 0.6599, "step": 20522 }, { "epoch": 0.8505532761407435, "grad_norm": 0.4119395911693573, "learning_rate": 7.47440838824651e-07, "loss": 0.6602, "step": 20523 }, { "epoch": 0.8505947200464172, "grad_norm": 0.42134690284729004, "learning_rate": 7.472336192962826e-07, "loss": 0.631, "step": 20524 }, { "epoch": 0.8506361639520909, "grad_norm": 0.4474722146987915, "learning_rate": 7.470263997679142e-07, "loss": 0.6774, "step": 20525 }, { "epoch": 0.8506776078577645, "grad_norm": 0.37624427676200867, "learning_rate": 7.468191802395458e-07, "loss": 0.6644, "step": 20526 }, { "epoch": 0.8507190517634382, "grad_norm": 0.4044206440448761, "learning_rate": 7.466119607111774e-07, "loss": 0.7095, "step": 20527 }, { "epoch": 0.8507604956691118, "grad_norm": 0.40896427631378174, "learning_rate": 7.464047411828092e-07, "loss": 0.6942, "step": 20528 }, { "epoch": 0.8508019395747856, "grad_norm": 0.39833012223243713, "learning_rate": 7.461975216544408e-07, "loss": 0.6276, "step": 20529 }, { "epoch": 0.8508433834804592, "grad_norm": 0.39790770411491394, "learning_rate": 
7.459903021260724e-07, "loss": 0.6538, "step": 20530 }, { "epoch": 0.8508848273861329, "grad_norm": 0.4233314096927643, "learning_rate": 7.45783082597704e-07, "loss": 0.6996, "step": 20531 }, { "epoch": 0.8509262712918065, "grad_norm": 0.3879458010196686, "learning_rate": 7.455758630693358e-07, "loss": 0.6735, "step": 20532 }, { "epoch": 0.8509677151974802, "grad_norm": 0.4220264256000519, "learning_rate": 7.453686435409674e-07, "loss": 0.6913, "step": 20533 }, { "epoch": 0.8510091591031539, "grad_norm": 0.4294629693031311, "learning_rate": 7.45161424012599e-07, "loss": 0.7162, "step": 20534 }, { "epoch": 0.8510506030088275, "grad_norm": 0.3961077034473419, "learning_rate": 7.449542044842306e-07, "loss": 0.6479, "step": 20535 }, { "epoch": 0.8510920469145012, "grad_norm": 0.42062968015670776, "learning_rate": 7.447469849558624e-07, "loss": 0.6447, "step": 20536 }, { "epoch": 0.8511334908201749, "grad_norm": 0.40641701221466064, "learning_rate": 7.44539765427494e-07, "loss": 0.6285, "step": 20537 }, { "epoch": 0.8511749347258486, "grad_norm": 0.3852499723434448, "learning_rate": 7.443325458991256e-07, "loss": 0.6493, "step": 20538 }, { "epoch": 0.8512163786315222, "grad_norm": 0.40276870131492615, "learning_rate": 7.441253263707572e-07, "loss": 0.6063, "step": 20539 }, { "epoch": 0.851257822537196, "grad_norm": 0.4118403196334839, "learning_rate": 7.439181068423888e-07, "loss": 0.5885, "step": 20540 }, { "epoch": 0.8512992664428696, "grad_norm": 0.41731491684913635, "learning_rate": 7.437108873140206e-07, "loss": 0.6685, "step": 20541 }, { "epoch": 0.8513407103485432, "grad_norm": 0.42045459151268005, "learning_rate": 7.435036677856522e-07, "loss": 0.6855, "step": 20542 }, { "epoch": 0.8513821542542169, "grad_norm": 0.4106985330581665, "learning_rate": 7.432964482572838e-07, "loss": 0.6433, "step": 20543 }, { "epoch": 0.8514235981598905, "grad_norm": 0.4170352518558502, "learning_rate": 7.430892287289154e-07, "loss": 0.666, "step": 20544 }, { "epoch": 
0.8514650420655643, "grad_norm": 0.3775574266910553, "learning_rate": 7.428820092005472e-07, "loss": 0.679, "step": 20545 }, { "epoch": 0.8515064859712379, "grad_norm": 0.438890665769577, "learning_rate": 7.426747896721788e-07, "loss": 0.677, "step": 20546 }, { "epoch": 0.8515479298769116, "grad_norm": 0.44147396087646484, "learning_rate": 7.424675701438104e-07, "loss": 0.7334, "step": 20547 }, { "epoch": 0.8515893737825853, "grad_norm": 0.4002898931503296, "learning_rate": 7.42260350615442e-07, "loss": 0.6495, "step": 20548 }, { "epoch": 0.851630817688259, "grad_norm": 0.4059344232082367, "learning_rate": 7.420531310870738e-07, "loss": 0.7013, "step": 20549 }, { "epoch": 0.8516722615939326, "grad_norm": 0.456600159406662, "learning_rate": 7.418459115587054e-07, "loss": 0.6855, "step": 20550 }, { "epoch": 0.8517137054996062, "grad_norm": 0.42365702986717224, "learning_rate": 7.41638692030337e-07, "loss": 0.6334, "step": 20551 }, { "epoch": 0.85175514940528, "grad_norm": 0.396771639585495, "learning_rate": 7.414314725019686e-07, "loss": 0.6213, "step": 20552 }, { "epoch": 0.8517965933109536, "grad_norm": 0.4176287353038788, "learning_rate": 7.412242529736002e-07, "loss": 0.6459, "step": 20553 }, { "epoch": 0.8518380372166273, "grad_norm": 0.4199005663394928, "learning_rate": 7.41017033445232e-07, "loss": 0.6429, "step": 20554 }, { "epoch": 0.8518794811223009, "grad_norm": 0.4248897433280945, "learning_rate": 7.408098139168636e-07, "loss": 0.6558, "step": 20555 }, { "epoch": 0.8519209250279747, "grad_norm": 0.4089105725288391, "learning_rate": 7.406025943884952e-07, "loss": 0.697, "step": 20556 }, { "epoch": 0.8519623689336483, "grad_norm": 0.4246751368045807, "learning_rate": 7.403953748601268e-07, "loss": 0.5947, "step": 20557 }, { "epoch": 0.852003812839322, "grad_norm": 0.41488566994667053, "learning_rate": 7.401881553317586e-07, "loss": 0.7073, "step": 20558 }, { "epoch": 0.8520452567449956, "grad_norm": 0.4013586640357971, "learning_rate": 
7.399809358033902e-07, "loss": 0.6832, "step": 20559 }, { "epoch": 0.8520867006506693, "grad_norm": 0.44106525182724, "learning_rate": 7.397737162750218e-07, "loss": 0.7209, "step": 20560 }, { "epoch": 0.852128144556343, "grad_norm": 0.4139322340488434, "learning_rate": 7.395664967466534e-07, "loss": 0.6907, "step": 20561 }, { "epoch": 0.8521695884620166, "grad_norm": 0.4122198820114136, "learning_rate": 7.39359277218285e-07, "loss": 0.6549, "step": 20562 }, { "epoch": 0.8522110323676904, "grad_norm": 0.43688124418258667, "learning_rate": 7.391520576899168e-07, "loss": 0.6855, "step": 20563 }, { "epoch": 0.852252476273364, "grad_norm": 0.4417873024940491, "learning_rate": 7.389448381615484e-07, "loss": 0.6907, "step": 20564 }, { "epoch": 0.8522939201790377, "grad_norm": 0.42622631788253784, "learning_rate": 7.3873761863318e-07, "loss": 0.6353, "step": 20565 }, { "epoch": 0.8523353640847113, "grad_norm": 0.3992529809474945, "learning_rate": 7.385303991048116e-07, "loss": 0.6819, "step": 20566 }, { "epoch": 0.8523768079903851, "grad_norm": 0.39934906363487244, "learning_rate": 7.383231795764434e-07, "loss": 0.6281, "step": 20567 }, { "epoch": 0.8524182518960587, "grad_norm": 0.3900816738605499, "learning_rate": 7.38115960048075e-07, "loss": 0.647, "step": 20568 }, { "epoch": 0.8524596958017323, "grad_norm": 0.4243934452533722, "learning_rate": 7.379087405197066e-07, "loss": 0.6453, "step": 20569 }, { "epoch": 0.852501139707406, "grad_norm": 0.41517138481140137, "learning_rate": 7.377015209913382e-07, "loss": 0.6384, "step": 20570 }, { "epoch": 0.8525425836130797, "grad_norm": 0.42522910237312317, "learning_rate": 7.3749430146297e-07, "loss": 0.657, "step": 20571 }, { "epoch": 0.8525840275187534, "grad_norm": 0.42327556014060974, "learning_rate": 7.372870819346016e-07, "loss": 0.718, "step": 20572 }, { "epoch": 0.852625471424427, "grad_norm": 0.4190184772014618, "learning_rate": 7.370798624062332e-07, "loss": 0.7026, "step": 20573 }, { "epoch": 0.8526669153301007, 
"grad_norm": 0.4596066474914551, "learning_rate": 7.368726428778648e-07, "loss": 0.729, "step": 20574 }, { "epoch": 0.8527083592357744, "grad_norm": 0.4370293617248535, "learning_rate": 7.366654233494965e-07, "loss": 0.6577, "step": 20575 }, { "epoch": 0.8527498031414481, "grad_norm": 0.47592470049858093, "learning_rate": 7.364582038211282e-07, "loss": 0.6472, "step": 20576 }, { "epoch": 0.8527912470471217, "grad_norm": 0.4237995147705078, "learning_rate": 7.362509842927598e-07, "loss": 0.6587, "step": 20577 }, { "epoch": 0.8528326909527953, "grad_norm": 0.4135417938232422, "learning_rate": 7.360437647643914e-07, "loss": 0.6089, "step": 20578 }, { "epoch": 0.8528741348584691, "grad_norm": 0.42195311188697815, "learning_rate": 7.35836545236023e-07, "loss": 0.677, "step": 20579 }, { "epoch": 0.8529155787641427, "grad_norm": 0.4275600016117096, "learning_rate": 7.356293257076548e-07, "loss": 0.6542, "step": 20580 }, { "epoch": 0.8529570226698164, "grad_norm": 0.4209749400615692, "learning_rate": 7.354221061792864e-07, "loss": 0.6752, "step": 20581 }, { "epoch": 0.85299846657549, "grad_norm": 0.4078131318092346, "learning_rate": 7.35214886650918e-07, "loss": 0.6881, "step": 20582 }, { "epoch": 0.8530399104811638, "grad_norm": 0.41180264949798584, "learning_rate": 7.350076671225496e-07, "loss": 0.688, "step": 20583 }, { "epoch": 0.8530813543868374, "grad_norm": 0.3994300365447998, "learning_rate": 7.348004475941814e-07, "loss": 0.639, "step": 20584 }, { "epoch": 0.853122798292511, "grad_norm": 0.41855666041374207, "learning_rate": 7.34593228065813e-07, "loss": 0.6479, "step": 20585 }, { "epoch": 0.8531642421981848, "grad_norm": 0.4313127398490906, "learning_rate": 7.343860085374446e-07, "loss": 0.6887, "step": 20586 }, { "epoch": 0.8532056861038584, "grad_norm": 0.40297380089759827, "learning_rate": 7.341787890090762e-07, "loss": 0.7358, "step": 20587 }, { "epoch": 0.8532471300095321, "grad_norm": 0.4103645384311676, "learning_rate": 7.339715694807079e-07, "loss": 
0.6616, "step": 20588 }, { "epoch": 0.8532885739152057, "grad_norm": 0.42178410291671753, "learning_rate": 7.337643499523396e-07, "loss": 0.6399, "step": 20589 }, { "epoch": 0.8533300178208795, "grad_norm": 0.43834972381591797, "learning_rate": 7.335571304239712e-07, "loss": 0.6866, "step": 20590 }, { "epoch": 0.8533714617265531, "grad_norm": 0.40926608443260193, "learning_rate": 7.333499108956028e-07, "loss": 0.623, "step": 20591 }, { "epoch": 0.8534129056322268, "grad_norm": 0.39626017212867737, "learning_rate": 7.331426913672345e-07, "loss": 0.6733, "step": 20592 }, { "epoch": 0.8534543495379004, "grad_norm": 0.4075760245323181, "learning_rate": 7.329354718388662e-07, "loss": 0.6459, "step": 20593 }, { "epoch": 0.8534957934435741, "grad_norm": 0.4105537533760071, "learning_rate": 7.327282523104978e-07, "loss": 0.6898, "step": 20594 }, { "epoch": 0.8535372373492478, "grad_norm": 0.4362832307815552, "learning_rate": 7.325210327821294e-07, "loss": 0.6792, "step": 20595 }, { "epoch": 0.8535786812549214, "grad_norm": 0.4502408504486084, "learning_rate": 7.323138132537611e-07, "loss": 0.7025, "step": 20596 }, { "epoch": 0.8536201251605952, "grad_norm": 0.385731965303421, "learning_rate": 7.321065937253928e-07, "loss": 0.6691, "step": 20597 }, { "epoch": 0.8536615690662688, "grad_norm": 0.4145203232765198, "learning_rate": 7.318993741970244e-07, "loss": 0.6448, "step": 20598 }, { "epoch": 0.8537030129719425, "grad_norm": 0.38093337416648865, "learning_rate": 7.31692154668656e-07, "loss": 0.6366, "step": 20599 }, { "epoch": 0.8537444568776161, "grad_norm": 0.4369021952152252, "learning_rate": 7.314849351402876e-07, "loss": 0.6249, "step": 20600 }, { "epoch": 0.8537859007832899, "grad_norm": 0.4268071949481964, "learning_rate": 7.312777156119193e-07, "loss": 0.6581, "step": 20601 }, { "epoch": 0.8538273446889635, "grad_norm": 0.4084811806678772, "learning_rate": 7.31070496083551e-07, "loss": 0.6379, "step": 20602 }, { "epoch": 0.8538687885946371, "grad_norm": 
0.439690500497818, "learning_rate": 7.308632765551826e-07, "loss": 0.7015, "step": 20603 }, { "epoch": 0.8539102325003108, "grad_norm": 0.44381242990493774, "learning_rate": 7.306560570268142e-07, "loss": 0.6343, "step": 20604 }, { "epoch": 0.8539516764059845, "grad_norm": 0.46103769540786743, "learning_rate": 7.304488374984459e-07, "loss": 0.6775, "step": 20605 }, { "epoch": 0.8539931203116582, "grad_norm": 0.4114983081817627, "learning_rate": 7.302416179700776e-07, "loss": 0.6346, "step": 20606 }, { "epoch": 0.8540345642173318, "grad_norm": 0.43805304169654846, "learning_rate": 7.300343984417092e-07, "loss": 0.6522, "step": 20607 }, { "epoch": 0.8540760081230055, "grad_norm": 0.4048362076282501, "learning_rate": 7.298271789133408e-07, "loss": 0.6511, "step": 20608 }, { "epoch": 0.8541174520286792, "grad_norm": 0.4062684178352356, "learning_rate": 7.296199593849725e-07, "loss": 0.6721, "step": 20609 }, { "epoch": 0.8541588959343529, "grad_norm": 0.4015860855579376, "learning_rate": 7.294127398566042e-07, "loss": 0.6643, "step": 20610 }, { "epoch": 0.8542003398400265, "grad_norm": 0.42164140939712524, "learning_rate": 7.292055203282358e-07, "loss": 0.6163, "step": 20611 }, { "epoch": 0.8542417837457001, "grad_norm": 0.40147626399993896, "learning_rate": 7.289983007998674e-07, "loss": 0.6226, "step": 20612 }, { "epoch": 0.8542832276513739, "grad_norm": 0.38960352540016174, "learning_rate": 7.287910812714991e-07, "loss": 0.6548, "step": 20613 }, { "epoch": 0.8543246715570475, "grad_norm": 0.43109095096588135, "learning_rate": 7.285838617431307e-07, "loss": 0.6254, "step": 20614 }, { "epoch": 0.8543661154627212, "grad_norm": 0.4323986768722534, "learning_rate": 7.283766422147624e-07, "loss": 0.7076, "step": 20615 }, { "epoch": 0.8544075593683949, "grad_norm": 0.37228113412857056, "learning_rate": 7.28169422686394e-07, "loss": 0.6283, "step": 20616 }, { "epoch": 0.8544490032740686, "grad_norm": 0.4250946342945099, "learning_rate": 7.279622031580256e-07, "loss": 0.6672, 
"step": 20617 }, { "epoch": 0.8544904471797422, "grad_norm": 0.42265087366104126, "learning_rate": 7.277549836296573e-07, "loss": 0.704, "step": 20618 }, { "epoch": 0.8545318910854159, "grad_norm": 0.3783937394618988, "learning_rate": 7.27547764101289e-07, "loss": 0.6168, "step": 20619 }, { "epoch": 0.8545733349910896, "grad_norm": 0.42652684450149536, "learning_rate": 7.273405445729206e-07, "loss": 0.6614, "step": 20620 }, { "epoch": 0.8546147788967632, "grad_norm": 0.4222947359085083, "learning_rate": 7.271333250445522e-07, "loss": 0.6978, "step": 20621 }, { "epoch": 0.8546562228024369, "grad_norm": 0.43025317788124084, "learning_rate": 7.269261055161839e-07, "loss": 0.6201, "step": 20622 }, { "epoch": 0.8546976667081105, "grad_norm": 0.4381261169910431, "learning_rate": 7.267188859878156e-07, "loss": 0.6934, "step": 20623 }, { "epoch": 0.8547391106137843, "grad_norm": 0.669762134552002, "learning_rate": 7.265116664594472e-07, "loss": 0.7092, "step": 20624 }, { "epoch": 0.8547805545194579, "grad_norm": 0.39167869091033936, "learning_rate": 7.263044469310788e-07, "loss": 0.6626, "step": 20625 }, { "epoch": 0.8548219984251316, "grad_norm": 0.378739595413208, "learning_rate": 7.260972274027105e-07, "loss": 0.6135, "step": 20626 }, { "epoch": 0.8548634423308052, "grad_norm": 0.4129163324832916, "learning_rate": 7.258900078743421e-07, "loss": 0.6935, "step": 20627 }, { "epoch": 0.854904886236479, "grad_norm": 0.4739900231361389, "learning_rate": 7.256827883459738e-07, "loss": 0.7242, "step": 20628 }, { "epoch": 0.8549463301421526, "grad_norm": 0.44078561663627625, "learning_rate": 7.254755688176054e-07, "loss": 0.7068, "step": 20629 }, { "epoch": 0.8549877740478262, "grad_norm": 0.40549206733703613, "learning_rate": 7.252683492892371e-07, "loss": 0.697, "step": 20630 }, { "epoch": 0.8550292179535, "grad_norm": 0.42405807971954346, "learning_rate": 7.250611297608687e-07, "loss": 0.6987, "step": 20631 }, { "epoch": 0.8550706618591736, "grad_norm": 0.4120432734489441, 
"learning_rate": 7.248539102325004e-07, "loss": 0.6793, "step": 20632 }, { "epoch": 0.8551121057648473, "grad_norm": 0.4435417056083679, "learning_rate": 7.24646690704132e-07, "loss": 0.6608, "step": 20633 }, { "epoch": 0.8551535496705209, "grad_norm": 0.4202413260936737, "learning_rate": 7.244394711757637e-07, "loss": 0.6394, "step": 20634 }, { "epoch": 0.8551949935761947, "grad_norm": 0.37648656964302063, "learning_rate": 7.242322516473953e-07, "loss": 0.6343, "step": 20635 }, { "epoch": 0.8552364374818683, "grad_norm": 0.43615004420280457, "learning_rate": 7.240250321190269e-07, "loss": 0.7203, "step": 20636 }, { "epoch": 0.855277881387542, "grad_norm": 0.4045298099517822, "learning_rate": 7.238178125906586e-07, "loss": 0.6909, "step": 20637 }, { "epoch": 0.8553193252932156, "grad_norm": 0.41749417781829834, "learning_rate": 7.236105930622902e-07, "loss": 0.6331, "step": 20638 }, { "epoch": 0.8553607691988893, "grad_norm": 0.4086287319660187, "learning_rate": 7.234033735339219e-07, "loss": 0.6853, "step": 20639 }, { "epoch": 0.855402213104563, "grad_norm": 0.39390724897384644, "learning_rate": 7.231961540055535e-07, "loss": 0.6178, "step": 20640 }, { "epoch": 0.8554436570102366, "grad_norm": 0.4174099266529083, "learning_rate": 7.229889344771852e-07, "loss": 0.6641, "step": 20641 }, { "epoch": 0.8554851009159103, "grad_norm": 0.4187566339969635, "learning_rate": 7.227817149488168e-07, "loss": 0.688, "step": 20642 }, { "epoch": 0.855526544821584, "grad_norm": 0.43028339743614197, "learning_rate": 7.225744954204485e-07, "loss": 0.7334, "step": 20643 }, { "epoch": 0.8555679887272577, "grad_norm": 0.42927947640419006, "learning_rate": 7.223672758920801e-07, "loss": 0.6547, "step": 20644 }, { "epoch": 0.8556094326329313, "grad_norm": 0.4395941197872162, "learning_rate": 7.221600563637118e-07, "loss": 0.6674, "step": 20645 }, { "epoch": 0.8556508765386049, "grad_norm": 0.4393553137779236, "learning_rate": 7.219528368353434e-07, "loss": 0.6797, "step": 20646 }, { 
"epoch": 0.8556923204442787, "grad_norm": 0.3965934216976166, "learning_rate": 7.217456173069751e-07, "loss": 0.6257, "step": 20647 }, { "epoch": 0.8557337643499523, "grad_norm": 0.4146082401275635, "learning_rate": 7.215383977786067e-07, "loss": 0.6667, "step": 20648 }, { "epoch": 0.855775208255626, "grad_norm": 0.44024360179901123, "learning_rate": 7.213311782502383e-07, "loss": 0.7244, "step": 20649 }, { "epoch": 0.8558166521612997, "grad_norm": 0.45884424448013306, "learning_rate": 7.2112395872187e-07, "loss": 0.6631, "step": 20650 }, { "epoch": 0.8558580960669734, "grad_norm": 0.4345189929008484, "learning_rate": 7.209167391935017e-07, "loss": 0.6469, "step": 20651 }, { "epoch": 0.855899539972647, "grad_norm": 0.3816482126712799, "learning_rate": 7.207095196651333e-07, "loss": 0.673, "step": 20652 }, { "epoch": 0.8559409838783207, "grad_norm": 0.4629276394844055, "learning_rate": 7.205023001367649e-07, "loss": 0.6785, "step": 20653 }, { "epoch": 0.8559824277839944, "grad_norm": 0.43444257974624634, "learning_rate": 7.202950806083966e-07, "loss": 0.6747, "step": 20654 }, { "epoch": 0.856023871689668, "grad_norm": 0.4620898365974426, "learning_rate": 7.200878610800282e-07, "loss": 0.6879, "step": 20655 }, { "epoch": 0.8560653155953417, "grad_norm": 0.39860424399375916, "learning_rate": 7.198806415516599e-07, "loss": 0.6031, "step": 20656 }, { "epoch": 0.8561067595010153, "grad_norm": 0.37967753410339355, "learning_rate": 7.196734220232915e-07, "loss": 0.6772, "step": 20657 }, { "epoch": 0.8561482034066891, "grad_norm": 0.44143545627593994, "learning_rate": 7.194662024949232e-07, "loss": 0.6477, "step": 20658 }, { "epoch": 0.8561896473123627, "grad_norm": 0.4243814945220947, "learning_rate": 7.192589829665548e-07, "loss": 0.6567, "step": 20659 }, { "epoch": 0.8562310912180364, "grad_norm": 0.4085336923599243, "learning_rate": 7.190517634381865e-07, "loss": 0.6567, "step": 20660 }, { "epoch": 0.85627253512371, "grad_norm": 0.40385058522224426, "learning_rate": 
7.188445439098181e-07, "loss": 0.6958, "step": 20661 }, { "epoch": 0.8563139790293838, "grad_norm": 0.45761188864707947, "learning_rate": 7.186373243814497e-07, "loss": 0.6851, "step": 20662 }, { "epoch": 0.8563554229350574, "grad_norm": 0.4179224371910095, "learning_rate": 7.184301048530814e-07, "loss": 0.7062, "step": 20663 }, { "epoch": 0.856396866840731, "grad_norm": 0.3983958959579468, "learning_rate": 7.182228853247131e-07, "loss": 0.6429, "step": 20664 }, { "epoch": 0.8564383107464048, "grad_norm": 0.3968222141265869, "learning_rate": 7.180156657963447e-07, "loss": 0.6581, "step": 20665 }, { "epoch": 0.8564797546520784, "grad_norm": 0.40999695658683777, "learning_rate": 7.178084462679763e-07, "loss": 0.6382, "step": 20666 }, { "epoch": 0.8565211985577521, "grad_norm": 0.41727912425994873, "learning_rate": 7.17601226739608e-07, "loss": 0.6472, "step": 20667 }, { "epoch": 0.8565626424634257, "grad_norm": 0.3964017331600189, "learning_rate": 7.173940072112397e-07, "loss": 0.6272, "step": 20668 }, { "epoch": 0.8566040863690995, "grad_norm": 0.4236403703689575, "learning_rate": 7.171867876828713e-07, "loss": 0.6945, "step": 20669 }, { "epoch": 0.8566455302747731, "grad_norm": 0.41873735189437866, "learning_rate": 7.169795681545029e-07, "loss": 0.6702, "step": 20670 }, { "epoch": 0.8566869741804468, "grad_norm": 0.40993115305900574, "learning_rate": 7.167723486261346e-07, "loss": 0.677, "step": 20671 }, { "epoch": 0.8567284180861204, "grad_norm": 0.43196114897727966, "learning_rate": 7.165651290977662e-07, "loss": 0.6758, "step": 20672 }, { "epoch": 0.8567698619917941, "grad_norm": 0.4199584424495697, "learning_rate": 7.163579095693979e-07, "loss": 0.6819, "step": 20673 }, { "epoch": 0.8568113058974678, "grad_norm": 0.43562430143356323, "learning_rate": 7.161506900410295e-07, "loss": 0.6495, "step": 20674 }, { "epoch": 0.8568527498031414, "grad_norm": 0.46757742762565613, "learning_rate": 7.159434705126611e-07, "loss": 0.7274, "step": 20675 }, { "epoch": 
0.8568941937088151, "grad_norm": 0.4230787456035614, "learning_rate": 7.157362509842928e-07, "loss": 0.646, "step": 20676 }, { "epoch": 0.8569356376144888, "grad_norm": 0.39319083094596863, "learning_rate": 7.155290314559245e-07, "loss": 0.6136, "step": 20677 }, { "epoch": 0.8569770815201625, "grad_norm": 0.39663931727409363, "learning_rate": 7.153218119275561e-07, "loss": 0.636, "step": 20678 }, { "epoch": 0.8570185254258361, "grad_norm": 0.38029035925865173, "learning_rate": 7.151145923991877e-07, "loss": 0.6768, "step": 20679 }, { "epoch": 0.8570599693315099, "grad_norm": 0.4543338418006897, "learning_rate": 7.149073728708194e-07, "loss": 0.6786, "step": 20680 }, { "epoch": 0.8571014132371835, "grad_norm": 0.4540020823478699, "learning_rate": 7.147001533424511e-07, "loss": 0.7065, "step": 20681 }, { "epoch": 0.8571428571428571, "grad_norm": 0.42281222343444824, "learning_rate": 7.144929338140827e-07, "loss": 0.7029, "step": 20682 }, { "epoch": 0.8571843010485308, "grad_norm": 0.42932766675949097, "learning_rate": 7.142857142857143e-07, "loss": 0.6892, "step": 20683 }, { "epoch": 0.8572257449542044, "grad_norm": 0.36511629819869995, "learning_rate": 7.14078494757346e-07, "loss": 0.6238, "step": 20684 }, { "epoch": 0.8572671888598782, "grad_norm": 0.47147801518440247, "learning_rate": 7.138712752289777e-07, "loss": 0.7489, "step": 20685 }, { "epoch": 0.8573086327655518, "grad_norm": 0.40835800766944885, "learning_rate": 7.136640557006093e-07, "loss": 0.6415, "step": 20686 }, { "epoch": 0.8573500766712255, "grad_norm": 0.40998151898384094, "learning_rate": 7.134568361722409e-07, "loss": 0.6782, "step": 20687 }, { "epoch": 0.8573915205768992, "grad_norm": 0.4171992838382721, "learning_rate": 7.132496166438725e-07, "loss": 0.6511, "step": 20688 }, { "epoch": 0.8574329644825729, "grad_norm": 0.435227632522583, "learning_rate": 7.130423971155043e-07, "loss": 0.6809, "step": 20689 }, { "epoch": 0.8574744083882465, "grad_norm": 0.38382989168167114, "learning_rate": 
7.128351775871359e-07, "loss": 0.6067, "step": 20690 }, { "epoch": 0.8575158522939201, "grad_norm": 0.3736913800239563, "learning_rate": 7.126279580587675e-07, "loss": 0.6385, "step": 20691 }, { "epoch": 0.8575572961995939, "grad_norm": 0.3784366548061371, "learning_rate": 7.124207385303991e-07, "loss": 0.6263, "step": 20692 }, { "epoch": 0.8575987401052675, "grad_norm": 0.4555272161960602, "learning_rate": 7.122135190020308e-07, "loss": 0.653, "step": 20693 }, { "epoch": 0.8576401840109412, "grad_norm": 0.4482711851596832, "learning_rate": 7.120062994736625e-07, "loss": 0.7026, "step": 20694 }, { "epoch": 0.8576816279166148, "grad_norm": 0.40066763758659363, "learning_rate": 7.117990799452941e-07, "loss": 0.6487, "step": 20695 }, { "epoch": 0.8577230718222886, "grad_norm": 0.41767817735671997, "learning_rate": 7.115918604169257e-07, "loss": 0.6357, "step": 20696 }, { "epoch": 0.8577645157279622, "grad_norm": 0.42136138677597046, "learning_rate": 7.113846408885574e-07, "loss": 0.6824, "step": 20697 }, { "epoch": 0.8578059596336359, "grad_norm": 0.41795065999031067, "learning_rate": 7.111774213601891e-07, "loss": 0.6262, "step": 20698 }, { "epoch": 0.8578474035393095, "grad_norm": 0.42105284333229065, "learning_rate": 7.109702018318207e-07, "loss": 0.6638, "step": 20699 }, { "epoch": 0.8578888474449832, "grad_norm": 0.3958751857280731, "learning_rate": 7.107629823034523e-07, "loss": 0.6282, "step": 20700 }, { "epoch": 0.8579302913506569, "grad_norm": 0.42144879698753357, "learning_rate": 7.105557627750839e-07, "loss": 0.6101, "step": 20701 }, { "epoch": 0.8579717352563305, "grad_norm": 0.40834423899650574, "learning_rate": 7.103485432467157e-07, "loss": 0.7122, "step": 20702 }, { "epoch": 0.8580131791620043, "grad_norm": 0.4218199551105499, "learning_rate": 7.101413237183473e-07, "loss": 0.6676, "step": 20703 }, { "epoch": 0.8580546230676779, "grad_norm": 0.44073861837387085, "learning_rate": 7.099341041899789e-07, "loss": 0.6641, "step": 20704 }, { "epoch": 
0.8580960669733516, "grad_norm": 0.4332108199596405, "learning_rate": 7.097268846616105e-07, "loss": 0.7095, "step": 20705 }, { "epoch": 0.8581375108790252, "grad_norm": 0.4333076477050781, "learning_rate": 7.095196651332423e-07, "loss": 0.6887, "step": 20706 }, { "epoch": 0.8581789547846989, "grad_norm": 0.4310159981250763, "learning_rate": 7.093124456048739e-07, "loss": 0.6835, "step": 20707 }, { "epoch": 0.8582203986903726, "grad_norm": 0.39767298102378845, "learning_rate": 7.091052260765055e-07, "loss": 0.6073, "step": 20708 }, { "epoch": 0.8582618425960462, "grad_norm": 0.4283129572868347, "learning_rate": 7.088980065481371e-07, "loss": 0.6294, "step": 20709 }, { "epoch": 0.8583032865017199, "grad_norm": 0.42040449380874634, "learning_rate": 7.086907870197687e-07, "loss": 0.6626, "step": 20710 }, { "epoch": 0.8583447304073936, "grad_norm": 0.443744033575058, "learning_rate": 7.084835674914005e-07, "loss": 0.7006, "step": 20711 }, { "epoch": 0.8583861743130673, "grad_norm": 0.4153057932853699, "learning_rate": 7.082763479630321e-07, "loss": 0.7144, "step": 20712 }, { "epoch": 0.8584276182187409, "grad_norm": 0.4464622139930725, "learning_rate": 7.080691284346637e-07, "loss": 0.6514, "step": 20713 }, { "epoch": 0.8584690621244147, "grad_norm": 0.4239831864833832, "learning_rate": 7.078619089062953e-07, "loss": 0.6605, "step": 20714 }, { "epoch": 0.8585105060300883, "grad_norm": 0.416917622089386, "learning_rate": 7.076546893779271e-07, "loss": 0.6705, "step": 20715 }, { "epoch": 0.8585519499357619, "grad_norm": 0.43651047348976135, "learning_rate": 7.074474698495587e-07, "loss": 0.6677, "step": 20716 }, { "epoch": 0.8585933938414356, "grad_norm": 0.41689062118530273, "learning_rate": 7.072402503211903e-07, "loss": 0.653, "step": 20717 }, { "epoch": 0.8586348377471092, "grad_norm": 0.4162941575050354, "learning_rate": 7.070330307928219e-07, "loss": 0.6329, "step": 20718 }, { "epoch": 0.858676281652783, "grad_norm": 0.4778016209602356, "learning_rate": 
7.068258112644537e-07, "loss": 0.7329, "step": 20719 }, { "epoch": 0.8587177255584566, "grad_norm": 0.4138352870941162, "learning_rate": 7.066185917360853e-07, "loss": 0.6691, "step": 20720 }, { "epoch": 0.8587591694641303, "grad_norm": 0.43397819995880127, "learning_rate": 7.064113722077169e-07, "loss": 0.675, "step": 20721 }, { "epoch": 0.858800613369804, "grad_norm": 0.44667157530784607, "learning_rate": 7.062041526793485e-07, "loss": 0.6838, "step": 20722 }, { "epoch": 0.8588420572754777, "grad_norm": 0.4274214506149292, "learning_rate": 7.059969331509801e-07, "loss": 0.6847, "step": 20723 }, { "epoch": 0.8588835011811513, "grad_norm": 0.4447210729122162, "learning_rate": 7.057897136226119e-07, "loss": 0.6104, "step": 20724 }, { "epoch": 0.8589249450868249, "grad_norm": 0.440625935792923, "learning_rate": 7.055824940942435e-07, "loss": 0.7159, "step": 20725 }, { "epoch": 0.8589663889924987, "grad_norm": 0.4345286190509796, "learning_rate": 7.053752745658751e-07, "loss": 0.6782, "step": 20726 }, { "epoch": 0.8590078328981723, "grad_norm": 0.44737088680267334, "learning_rate": 7.051680550375067e-07, "loss": 0.7064, "step": 20727 }, { "epoch": 0.859049276803846, "grad_norm": 0.40545275807380676, "learning_rate": 7.049608355091385e-07, "loss": 0.6648, "step": 20728 }, { "epoch": 0.8590907207095196, "grad_norm": 0.43794143199920654, "learning_rate": 7.047536159807701e-07, "loss": 0.688, "step": 20729 }, { "epoch": 0.8591321646151934, "grad_norm": 0.4279376268386841, "learning_rate": 7.045463964524017e-07, "loss": 0.6183, "step": 20730 }, { "epoch": 0.859173608520867, "grad_norm": 0.4055424928665161, "learning_rate": 7.043391769240333e-07, "loss": 0.6619, "step": 20731 }, { "epoch": 0.8592150524265407, "grad_norm": 0.4232601523399353, "learning_rate": 7.041319573956651e-07, "loss": 0.6921, "step": 20732 }, { "epoch": 0.8592564963322143, "grad_norm": 0.3670955002307892, "learning_rate": 7.039247378672967e-07, "loss": 0.5907, "step": 20733 }, { "epoch": 
0.859297940237888, "grad_norm": 0.41845718026161194, "learning_rate": 7.037175183389283e-07, "loss": 0.6978, "step": 20734 }, { "epoch": 0.8593393841435617, "grad_norm": 0.4219863712787628, "learning_rate": 7.035102988105599e-07, "loss": 0.616, "step": 20735 }, { "epoch": 0.8593808280492353, "grad_norm": 0.41098758578300476, "learning_rate": 7.033030792821915e-07, "loss": 0.6733, "step": 20736 }, { "epoch": 0.8594222719549091, "grad_norm": 0.4137190878391266, "learning_rate": 7.030958597538233e-07, "loss": 0.6747, "step": 20737 }, { "epoch": 0.8594637158605827, "grad_norm": 0.38438576459884644, "learning_rate": 7.028886402254549e-07, "loss": 0.6543, "step": 20738 }, { "epoch": 0.8595051597662564, "grad_norm": 0.3769966959953308, "learning_rate": 7.026814206970865e-07, "loss": 0.6523, "step": 20739 }, { "epoch": 0.85954660367193, "grad_norm": 0.42673033475875854, "learning_rate": 7.024742011687181e-07, "loss": 0.7256, "step": 20740 }, { "epoch": 0.8595880475776038, "grad_norm": 0.44293245673179626, "learning_rate": 7.022669816403499e-07, "loss": 0.6707, "step": 20741 }, { "epoch": 0.8596294914832774, "grad_norm": 0.4170716404914856, "learning_rate": 7.020597621119815e-07, "loss": 0.7061, "step": 20742 }, { "epoch": 0.859670935388951, "grad_norm": 0.4361152648925781, "learning_rate": 7.018525425836131e-07, "loss": 0.6967, "step": 20743 }, { "epoch": 0.8597123792946247, "grad_norm": 0.4424999952316284, "learning_rate": 7.016453230552447e-07, "loss": 0.6709, "step": 20744 }, { "epoch": 0.8597538232002984, "grad_norm": 0.41426846385002136, "learning_rate": 7.014381035268765e-07, "loss": 0.6814, "step": 20745 }, { "epoch": 0.8597952671059721, "grad_norm": 0.43202832341194153, "learning_rate": 7.012308839985081e-07, "loss": 0.6311, "step": 20746 }, { "epoch": 0.8598367110116457, "grad_norm": 0.435259610414505, "learning_rate": 7.010236644701397e-07, "loss": 0.7069, "step": 20747 }, { "epoch": 0.8598781549173194, "grad_norm": 0.4061526358127594, "learning_rate": 
7.008164449417713e-07, "loss": 0.6169, "step": 20748 }, { "epoch": 0.8599195988229931, "grad_norm": 0.37928295135498047, "learning_rate": 7.006092254134029e-07, "loss": 0.6378, "step": 20749 }, { "epoch": 0.8599610427286668, "grad_norm": 0.4216063320636749, "learning_rate": 7.004020058850347e-07, "loss": 0.696, "step": 20750 }, { "epoch": 0.8600024866343404, "grad_norm": 0.4561203122138977, "learning_rate": 7.001947863566663e-07, "loss": 0.6469, "step": 20751 }, { "epoch": 0.860043930540014, "grad_norm": 0.4967528283596039, "learning_rate": 6.999875668282979e-07, "loss": 0.7694, "step": 20752 }, { "epoch": 0.8600853744456878, "grad_norm": 0.4147067964076996, "learning_rate": 6.997803472999295e-07, "loss": 0.6323, "step": 20753 }, { "epoch": 0.8601268183513614, "grad_norm": 0.4385703504085541, "learning_rate": 6.995731277715613e-07, "loss": 0.7288, "step": 20754 }, { "epoch": 0.8601682622570351, "grad_norm": 0.4238215386867523, "learning_rate": 6.993659082431929e-07, "loss": 0.645, "step": 20755 }, { "epoch": 0.8602097061627088, "grad_norm": 0.48376473784446716, "learning_rate": 6.991586887148245e-07, "loss": 0.6849, "step": 20756 }, { "epoch": 0.8602511500683825, "grad_norm": 0.42255550622940063, "learning_rate": 6.989514691864561e-07, "loss": 0.6047, "step": 20757 }, { "epoch": 0.8602925939740561, "grad_norm": 0.41737762093544006, "learning_rate": 6.987442496580879e-07, "loss": 0.6841, "step": 20758 }, { "epoch": 0.8603340378797298, "grad_norm": 0.3802061080932617, "learning_rate": 6.985370301297195e-07, "loss": 0.5896, "step": 20759 }, { "epoch": 0.8603754817854035, "grad_norm": 0.4072643518447876, "learning_rate": 6.983298106013511e-07, "loss": 0.6243, "step": 20760 }, { "epoch": 0.8604169256910771, "grad_norm": 0.4043499231338501, "learning_rate": 6.981225910729827e-07, "loss": 0.6992, "step": 20761 }, { "epoch": 0.8604583695967508, "grad_norm": 0.4156593978404999, "learning_rate": 6.979153715446143e-07, "loss": 0.6743, "step": 20762 }, { "epoch": 
0.8604998135024244, "grad_norm": 0.4169057309627533, "learning_rate": 6.977081520162461e-07, "loss": 0.642, "step": 20763 }, { "epoch": 0.8605412574080982, "grad_norm": 0.461443156003952, "learning_rate": 6.975009324878777e-07, "loss": 0.647, "step": 20764 }, { "epoch": 0.8605827013137718, "grad_norm": 0.4311562180519104, "learning_rate": 6.972937129595093e-07, "loss": 0.6526, "step": 20765 }, { "epoch": 0.8606241452194455, "grad_norm": 0.3944648206233978, "learning_rate": 6.970864934311409e-07, "loss": 0.6416, "step": 20766 }, { "epoch": 0.8606655891251191, "grad_norm": 0.4345790147781372, "learning_rate": 6.968792739027727e-07, "loss": 0.6672, "step": 20767 }, { "epoch": 0.8607070330307928, "grad_norm": 0.4515756368637085, "learning_rate": 6.966720543744043e-07, "loss": 0.7393, "step": 20768 }, { "epoch": 0.8607484769364665, "grad_norm": 0.45680031180381775, "learning_rate": 6.964648348460359e-07, "loss": 0.6946, "step": 20769 }, { "epoch": 0.8607899208421401, "grad_norm": 0.42079228162765503, "learning_rate": 6.962576153176675e-07, "loss": 0.6348, "step": 20770 }, { "epoch": 0.8608313647478139, "grad_norm": 0.3976905047893524, "learning_rate": 6.960503957892991e-07, "loss": 0.6373, "step": 20771 }, { "epoch": 0.8608728086534875, "grad_norm": 0.4169802665710449, "learning_rate": 6.958431762609309e-07, "loss": 0.6427, "step": 20772 }, { "epoch": 0.8609142525591612, "grad_norm": 0.4299662709236145, "learning_rate": 6.956359567325625e-07, "loss": 0.606, "step": 20773 }, { "epoch": 0.8609556964648348, "grad_norm": 0.4135642647743225, "learning_rate": 6.954287372041941e-07, "loss": 0.7047, "step": 20774 }, { "epoch": 0.8609971403705086, "grad_norm": 0.4326569139957428, "learning_rate": 6.952215176758257e-07, "loss": 0.6757, "step": 20775 }, { "epoch": 0.8610385842761822, "grad_norm": 0.4333491623401642, "learning_rate": 6.950142981474575e-07, "loss": 0.6245, "step": 20776 }, { "epoch": 0.8610800281818558, "grad_norm": 0.4208334982395172, "learning_rate": 
6.948070786190891e-07, "loss": 0.6798, "step": 20777 }, { "epoch": 0.8611214720875295, "grad_norm": 0.4477040767669678, "learning_rate": 6.945998590907207e-07, "loss": 0.6674, "step": 20778 }, { "epoch": 0.8611629159932032, "grad_norm": 0.3821337819099426, "learning_rate": 6.943926395623523e-07, "loss": 0.6428, "step": 20779 }, { "epoch": 0.8612043598988769, "grad_norm": 0.4158639907836914, "learning_rate": 6.941854200339841e-07, "loss": 0.6562, "step": 20780 }, { "epoch": 0.8612458038045505, "grad_norm": 0.39476126432418823, "learning_rate": 6.939782005056157e-07, "loss": 0.6078, "step": 20781 }, { "epoch": 0.8612872477102242, "grad_norm": 0.3789387345314026, "learning_rate": 6.937709809772473e-07, "loss": 0.6025, "step": 20782 }, { "epoch": 0.8613286916158979, "grad_norm": 0.4036412239074707, "learning_rate": 6.935637614488789e-07, "loss": 0.6697, "step": 20783 }, { "epoch": 0.8613701355215716, "grad_norm": 0.40359246730804443, "learning_rate": 6.933565419205106e-07, "loss": 0.6848, "step": 20784 }, { "epoch": 0.8614115794272452, "grad_norm": 0.40351003408432007, "learning_rate": 6.931493223921423e-07, "loss": 0.6931, "step": 20785 }, { "epoch": 0.8614530233329188, "grad_norm": 0.4472194015979767, "learning_rate": 6.929421028637739e-07, "loss": 0.7092, "step": 20786 }, { "epoch": 0.8614944672385926, "grad_norm": 0.43898746371269226, "learning_rate": 6.927348833354055e-07, "loss": 0.7178, "step": 20787 }, { "epoch": 0.8615359111442662, "grad_norm": 0.42746561765670776, "learning_rate": 6.925276638070371e-07, "loss": 0.6306, "step": 20788 }, { "epoch": 0.8615773550499399, "grad_norm": 0.41697919368743896, "learning_rate": 6.923204442786689e-07, "loss": 0.7349, "step": 20789 }, { "epoch": 0.8616187989556136, "grad_norm": 0.4274982213973999, "learning_rate": 6.921132247503005e-07, "loss": 0.6534, "step": 20790 }, { "epoch": 0.8616602428612873, "grad_norm": 0.38506534695625305, "learning_rate": 6.919060052219321e-07, "loss": 0.6274, "step": 20791 }, { "epoch": 
0.8617016867669609, "grad_norm": 0.41367805004119873, "learning_rate": 6.916987856935637e-07, "loss": 0.6333, "step": 20792 }, { "epoch": 0.8617431306726346, "grad_norm": 0.4192332923412323, "learning_rate": 6.914915661651955e-07, "loss": 0.6528, "step": 20793 }, { "epoch": 0.8617845745783083, "grad_norm": 0.4149060845375061, "learning_rate": 6.912843466368271e-07, "loss": 0.6738, "step": 20794 }, { "epoch": 0.8618260184839819, "grad_norm": 0.39049437642097473, "learning_rate": 6.910771271084587e-07, "loss": 0.6201, "step": 20795 }, { "epoch": 0.8618674623896556, "grad_norm": 0.40260255336761475, "learning_rate": 6.908699075800903e-07, "loss": 0.6678, "step": 20796 }, { "epoch": 0.8619089062953292, "grad_norm": 0.4282781481742859, "learning_rate": 6.90662688051722e-07, "loss": 0.6361, "step": 20797 }, { "epoch": 0.861950350201003, "grad_norm": 0.3925972282886505, "learning_rate": 6.904554685233537e-07, "loss": 0.6587, "step": 20798 }, { "epoch": 0.8619917941066766, "grad_norm": 0.46792784333229065, "learning_rate": 6.902482489949853e-07, "loss": 0.7544, "step": 20799 }, { "epoch": 0.8620332380123503, "grad_norm": 0.3964206576347351, "learning_rate": 6.900410294666169e-07, "loss": 0.6342, "step": 20800 }, { "epoch": 0.8620746819180239, "grad_norm": 0.42005443572998047, "learning_rate": 6.898338099382486e-07, "loss": 0.7211, "step": 20801 }, { "epoch": 0.8621161258236977, "grad_norm": 0.4279838800430298, "learning_rate": 6.896265904098803e-07, "loss": 0.6705, "step": 20802 }, { "epoch": 0.8621575697293713, "grad_norm": 0.44034284353256226, "learning_rate": 6.894193708815119e-07, "loss": 0.6841, "step": 20803 }, { "epoch": 0.8621990136350449, "grad_norm": 0.4010227620601654, "learning_rate": 6.892121513531435e-07, "loss": 0.644, "step": 20804 }, { "epoch": 0.8622404575407187, "grad_norm": 0.44678133726119995, "learning_rate": 6.890049318247752e-07, "loss": 0.6697, "step": 20805 }, { "epoch": 0.8622819014463923, "grad_norm": 0.40810662508010864, "learning_rate": 
6.887977122964069e-07, "loss": 0.6929, "step": 20806 }, { "epoch": 0.862323345352066, "grad_norm": 0.42817357182502747, "learning_rate": 6.885904927680385e-07, "loss": 0.7024, "step": 20807 }, { "epoch": 0.8623647892577396, "grad_norm": 0.4252474308013916, "learning_rate": 6.883832732396701e-07, "loss": 0.6604, "step": 20808 }, { "epoch": 0.8624062331634134, "grad_norm": 0.4117342531681061, "learning_rate": 6.881760537113017e-07, "loss": 0.7184, "step": 20809 }, { "epoch": 0.862447677069087, "grad_norm": 0.41226378083229065, "learning_rate": 6.879688341829334e-07, "loss": 0.645, "step": 20810 }, { "epoch": 0.8624891209747607, "grad_norm": 0.4464651346206665, "learning_rate": 6.877616146545651e-07, "loss": 0.6675, "step": 20811 }, { "epoch": 0.8625305648804343, "grad_norm": 0.38623860478401184, "learning_rate": 6.875543951261967e-07, "loss": 0.6044, "step": 20812 }, { "epoch": 0.862572008786108, "grad_norm": 0.42095017433166504, "learning_rate": 6.873471755978283e-07, "loss": 0.7186, "step": 20813 }, { "epoch": 0.8626134526917817, "grad_norm": 0.42698630690574646, "learning_rate": 6.8713995606946e-07, "loss": 0.7003, "step": 20814 }, { "epoch": 0.8626548965974553, "grad_norm": 0.40745851397514343, "learning_rate": 6.869327365410917e-07, "loss": 0.6466, "step": 20815 }, { "epoch": 0.862696340503129, "grad_norm": 0.4222664535045624, "learning_rate": 6.867255170127233e-07, "loss": 0.6536, "step": 20816 }, { "epoch": 0.8627377844088027, "grad_norm": 0.4130344092845917, "learning_rate": 6.865182974843549e-07, "loss": 0.6838, "step": 20817 }, { "epoch": 0.8627792283144764, "grad_norm": 0.42726221680641174, "learning_rate": 6.863110779559866e-07, "loss": 0.6301, "step": 20818 }, { "epoch": 0.86282067222015, "grad_norm": 0.4120759963989258, "learning_rate": 6.861038584276183e-07, "loss": 0.6868, "step": 20819 }, { "epoch": 0.8628621161258238, "grad_norm": 0.49347439408302307, "learning_rate": 6.858966388992499e-07, "loss": 0.7289, "step": 20820 }, { "epoch": 
0.8629035600314974, "grad_norm": 0.4265531897544861, "learning_rate": 6.856894193708815e-07, "loss": 0.6593, "step": 20821 }, { "epoch": 0.862945003937171, "grad_norm": 0.4273729622364044, "learning_rate": 6.854821998425132e-07, "loss": 0.7057, "step": 20822 }, { "epoch": 0.8629864478428447, "grad_norm": 0.4245201349258423, "learning_rate": 6.852749803141448e-07, "loss": 0.6505, "step": 20823 }, { "epoch": 0.8630278917485183, "grad_norm": 0.4108963906764984, "learning_rate": 6.850677607857765e-07, "loss": 0.6798, "step": 20824 }, { "epoch": 0.8630693356541921, "grad_norm": 0.4078311324119568, "learning_rate": 6.848605412574081e-07, "loss": 0.6597, "step": 20825 }, { "epoch": 0.8631107795598657, "grad_norm": 0.4702009856700897, "learning_rate": 6.846533217290397e-07, "loss": 0.7085, "step": 20826 }, { "epoch": 0.8631522234655394, "grad_norm": 0.40734416246414185, "learning_rate": 6.844461022006714e-07, "loss": 0.6641, "step": 20827 }, { "epoch": 0.8631936673712131, "grad_norm": 0.4306730329990387, "learning_rate": 6.842388826723031e-07, "loss": 0.634, "step": 20828 }, { "epoch": 0.8632351112768867, "grad_norm": 0.43333733081817627, "learning_rate": 6.840316631439347e-07, "loss": 0.6533, "step": 20829 }, { "epoch": 0.8632765551825604, "grad_norm": 0.4167692959308624, "learning_rate": 6.838244436155663e-07, "loss": 0.6749, "step": 20830 }, { "epoch": 0.863317999088234, "grad_norm": 0.4385184049606323, "learning_rate": 6.83617224087198e-07, "loss": 0.7112, "step": 20831 }, { "epoch": 0.8633594429939078, "grad_norm": 0.45269450545310974, "learning_rate": 6.834100045588297e-07, "loss": 0.7019, "step": 20832 }, { "epoch": 0.8634008868995814, "grad_norm": 0.4097804129123688, "learning_rate": 6.832027850304613e-07, "loss": 0.6222, "step": 20833 }, { "epoch": 0.8634423308052551, "grad_norm": 0.4247879981994629, "learning_rate": 6.829955655020929e-07, "loss": 0.6796, "step": 20834 }, { "epoch": 0.8634837747109287, "grad_norm": 0.4304753541946411, "learning_rate": 
6.827883459737246e-07, "loss": 0.6914, "step": 20835 }, { "epoch": 0.8635252186166025, "grad_norm": 0.3951328992843628, "learning_rate": 6.825811264453562e-07, "loss": 0.5956, "step": 20836 }, { "epoch": 0.8635666625222761, "grad_norm": 0.42578229308128357, "learning_rate": 6.823739069169879e-07, "loss": 0.6713, "step": 20837 }, { "epoch": 0.8636081064279497, "grad_norm": 0.4293753206729889, "learning_rate": 6.821666873886195e-07, "loss": 0.6216, "step": 20838 }, { "epoch": 0.8636495503336235, "grad_norm": 0.4271979331970215, "learning_rate": 6.819594678602512e-07, "loss": 0.7095, "step": 20839 }, { "epoch": 0.8636909942392971, "grad_norm": 0.44095200300216675, "learning_rate": 6.817522483318828e-07, "loss": 0.6804, "step": 20840 }, { "epoch": 0.8637324381449708, "grad_norm": 0.42021045088768005, "learning_rate": 6.815450288035145e-07, "loss": 0.6577, "step": 20841 }, { "epoch": 0.8637738820506444, "grad_norm": 0.4489968419075012, "learning_rate": 6.813378092751461e-07, "loss": 0.6763, "step": 20842 }, { "epoch": 0.8638153259563182, "grad_norm": 0.4154908359050751, "learning_rate": 6.811305897467777e-07, "loss": 0.6234, "step": 20843 }, { "epoch": 0.8638567698619918, "grad_norm": 0.39254358410835266, "learning_rate": 6.809233702184094e-07, "loss": 0.6383, "step": 20844 }, { "epoch": 0.8638982137676655, "grad_norm": 0.36391326785087585, "learning_rate": 6.80716150690041e-07, "loss": 0.6, "step": 20845 }, { "epoch": 0.8639396576733391, "grad_norm": 0.41372087597846985, "learning_rate": 6.805089311616727e-07, "loss": 0.6748, "step": 20846 }, { "epoch": 0.8639811015790128, "grad_norm": 0.42433053255081177, "learning_rate": 6.803017116333043e-07, "loss": 0.6451, "step": 20847 }, { "epoch": 0.8640225454846865, "grad_norm": 0.4240246117115021, "learning_rate": 6.80094492104936e-07, "loss": 0.6644, "step": 20848 }, { "epoch": 0.8640639893903601, "grad_norm": 0.4566548764705658, "learning_rate": 6.798872725765676e-07, "loss": 0.6868, "step": 20849 }, { "epoch": 
0.8641054332960338, "grad_norm": 0.39275214076042175, "learning_rate": 6.796800530481993e-07, "loss": 0.652, "step": 20850 }, { "epoch": 0.8641468772017075, "grad_norm": 0.40007081627845764, "learning_rate": 6.794728335198309e-07, "loss": 0.6448, "step": 20851 }, { "epoch": 0.8641883211073812, "grad_norm": 0.42563021183013916, "learning_rate": 6.792656139914626e-07, "loss": 0.6583, "step": 20852 }, { "epoch": 0.8642297650130548, "grad_norm": 0.4261209964752197, "learning_rate": 6.790583944630942e-07, "loss": 0.655, "step": 20853 }, { "epoch": 0.8642712089187286, "grad_norm": 0.4443355202674866, "learning_rate": 6.788511749347259e-07, "loss": 0.7251, "step": 20854 }, { "epoch": 0.8643126528244022, "grad_norm": 0.4288378953933716, "learning_rate": 6.786439554063575e-07, "loss": 0.6267, "step": 20855 }, { "epoch": 0.8643540967300758, "grad_norm": 0.40441545844078064, "learning_rate": 6.784367358779892e-07, "loss": 0.657, "step": 20856 }, { "epoch": 0.8643955406357495, "grad_norm": 0.4161124527454376, "learning_rate": 6.782295163496208e-07, "loss": 0.6987, "step": 20857 }, { "epoch": 0.8644369845414231, "grad_norm": 0.42267096042633057, "learning_rate": 6.780222968212524e-07, "loss": 0.6572, "step": 20858 }, { "epoch": 0.8644784284470969, "grad_norm": 0.42510706186294556, "learning_rate": 6.778150772928841e-07, "loss": 0.6505, "step": 20859 }, { "epoch": 0.8645198723527705, "grad_norm": 0.3682912290096283, "learning_rate": 6.776078577645158e-07, "loss": 0.7031, "step": 20860 }, { "epoch": 0.8645613162584442, "grad_norm": 0.4171193838119507, "learning_rate": 6.774006382361474e-07, "loss": 0.7207, "step": 20861 }, { "epoch": 0.8646027601641179, "grad_norm": 0.4350731372833252, "learning_rate": 6.77193418707779e-07, "loss": 0.6917, "step": 20862 }, { "epoch": 0.8646442040697916, "grad_norm": 0.4271436035633087, "learning_rate": 6.769861991794107e-07, "loss": 0.6476, "step": 20863 }, { "epoch": 0.8646856479754652, "grad_norm": 0.45376431941986084, "learning_rate": 
6.767789796510423e-07, "loss": 0.6948, "step": 20864 }, { "epoch": 0.8647270918811388, "grad_norm": 0.4329504370689392, "learning_rate": 6.76571760122674e-07, "loss": 0.7001, "step": 20865 }, { "epoch": 0.8647685357868126, "grad_norm": 0.400450736284256, "learning_rate": 6.763645405943056e-07, "loss": 0.6722, "step": 20866 }, { "epoch": 0.8648099796924862, "grad_norm": 0.40707460045814514, "learning_rate": 6.761573210659373e-07, "loss": 0.6364, "step": 20867 }, { "epoch": 0.8648514235981599, "grad_norm": 0.42245185375213623, "learning_rate": 6.759501015375689e-07, "loss": 0.6653, "step": 20868 }, { "epoch": 0.8648928675038335, "grad_norm": 0.4060738682746887, "learning_rate": 6.757428820092006e-07, "loss": 0.6433, "step": 20869 }, { "epoch": 0.8649343114095073, "grad_norm": 0.40571385622024536, "learning_rate": 6.755356624808322e-07, "loss": 0.6207, "step": 20870 }, { "epoch": 0.8649757553151809, "grad_norm": 0.48484402894973755, "learning_rate": 6.753284429524638e-07, "loss": 0.703, "step": 20871 }, { "epoch": 0.8650171992208546, "grad_norm": 0.430320143699646, "learning_rate": 6.751212234240955e-07, "loss": 0.7029, "step": 20872 }, { "epoch": 0.8650586431265282, "grad_norm": 0.4147166311740875, "learning_rate": 6.749140038957272e-07, "loss": 0.6689, "step": 20873 }, { "epoch": 0.8651000870322019, "grad_norm": 0.4049677550792694, "learning_rate": 6.747067843673588e-07, "loss": 0.6838, "step": 20874 }, { "epoch": 0.8651415309378756, "grad_norm": 0.4344314932823181, "learning_rate": 6.744995648389904e-07, "loss": 0.7153, "step": 20875 }, { "epoch": 0.8651829748435492, "grad_norm": 0.4016934931278229, "learning_rate": 6.742923453106221e-07, "loss": 0.6401, "step": 20876 }, { "epoch": 0.865224418749223, "grad_norm": 0.4326065182685852, "learning_rate": 6.740851257822538e-07, "loss": 0.6298, "step": 20877 }, { "epoch": 0.8652658626548966, "grad_norm": 0.40421611070632935, "learning_rate": 6.738779062538854e-07, "loss": 0.6266, "step": 20878 }, { "epoch": 
0.8653073065605703, "grad_norm": 0.43931135535240173, "learning_rate": 6.73670686725517e-07, "loss": 0.6692, "step": 20879 }, { "epoch": 0.8653487504662439, "grad_norm": 0.44456881284713745, "learning_rate": 6.734634671971487e-07, "loss": 0.7, "step": 20880 }, { "epoch": 0.8653901943719177, "grad_norm": 0.4109469950199127, "learning_rate": 6.732562476687803e-07, "loss": 0.6843, "step": 20881 }, { "epoch": 0.8654316382775913, "grad_norm": 0.4496559500694275, "learning_rate": 6.73049028140412e-07, "loss": 0.6367, "step": 20882 }, { "epoch": 0.8654730821832649, "grad_norm": 0.4600655436515808, "learning_rate": 6.728418086120436e-07, "loss": 0.7131, "step": 20883 }, { "epoch": 0.8655145260889386, "grad_norm": 0.432919979095459, "learning_rate": 6.726345890836752e-07, "loss": 0.6924, "step": 20884 }, { "epoch": 0.8655559699946123, "grad_norm": 0.4070434272289276, "learning_rate": 6.724273695553069e-07, "loss": 0.62, "step": 20885 }, { "epoch": 0.865597413900286, "grad_norm": 0.4597512185573578, "learning_rate": 6.722201500269386e-07, "loss": 0.6699, "step": 20886 }, { "epoch": 0.8656388578059596, "grad_norm": 0.44829005002975464, "learning_rate": 6.720129304985702e-07, "loss": 0.6836, "step": 20887 }, { "epoch": 0.8656803017116333, "grad_norm": 0.41864582896232605, "learning_rate": 6.718057109702018e-07, "loss": 0.6965, "step": 20888 }, { "epoch": 0.865721745617307, "grad_norm": 0.4184340238571167, "learning_rate": 6.715984914418335e-07, "loss": 0.6475, "step": 20889 }, { "epoch": 0.8657631895229806, "grad_norm": 0.4089394211769104, "learning_rate": 6.713912719134652e-07, "loss": 0.7126, "step": 20890 }, { "epoch": 0.8658046334286543, "grad_norm": 0.43311527371406555, "learning_rate": 6.711840523850968e-07, "loss": 0.6779, "step": 20891 }, { "epoch": 0.865846077334328, "grad_norm": 0.4505446255207062, "learning_rate": 6.709768328567284e-07, "loss": 0.6689, "step": 20892 }, { "epoch": 0.8658875212400017, "grad_norm": 0.43238765001296997, "learning_rate": 
6.707696133283601e-07, "loss": 0.7117, "step": 20893 }, { "epoch": 0.8659289651456753, "grad_norm": 0.37029340863227844, "learning_rate": 6.705623937999918e-07, "loss": 0.6482, "step": 20894 }, { "epoch": 0.865970409051349, "grad_norm": 0.45924702286720276, "learning_rate": 6.703551742716234e-07, "loss": 0.7045, "step": 20895 }, { "epoch": 0.8660118529570227, "grad_norm": 0.4316651523113251, "learning_rate": 6.70147954743255e-07, "loss": 0.6835, "step": 20896 }, { "epoch": 0.8660532968626964, "grad_norm": 0.4028855264186859, "learning_rate": 6.699407352148866e-07, "loss": 0.6827, "step": 20897 }, { "epoch": 0.86609474076837, "grad_norm": 0.41253799200057983, "learning_rate": 6.697335156865185e-07, "loss": 0.676, "step": 20898 }, { "epoch": 0.8661361846740436, "grad_norm": 0.40884727239608765, "learning_rate": 6.6952629615815e-07, "loss": 0.6208, "step": 20899 }, { "epoch": 0.8661776285797174, "grad_norm": 0.39559206366539, "learning_rate": 6.693190766297816e-07, "loss": 0.653, "step": 20900 }, { "epoch": 0.866219072485391, "grad_norm": 0.4069955348968506, "learning_rate": 6.691118571014132e-07, "loss": 0.6355, "step": 20901 }, { "epoch": 0.8662605163910647, "grad_norm": 0.41326963901519775, "learning_rate": 6.689046375730449e-07, "loss": 0.6558, "step": 20902 }, { "epoch": 0.8663019602967383, "grad_norm": 0.4666498303413391, "learning_rate": 6.686974180446767e-07, "loss": 0.7168, "step": 20903 }, { "epoch": 0.8663434042024121, "grad_norm": 0.3905469477176666, "learning_rate": 6.684901985163082e-07, "loss": 0.6877, "step": 20904 }, { "epoch": 0.8663848481080857, "grad_norm": 0.430497407913208, "learning_rate": 6.682829789879398e-07, "loss": 0.6978, "step": 20905 }, { "epoch": 0.8664262920137594, "grad_norm": 0.40694501996040344, "learning_rate": 6.680757594595715e-07, "loss": 0.649, "step": 20906 }, { "epoch": 0.866467735919433, "grad_norm": 0.4109271466732025, "learning_rate": 6.678685399312033e-07, "loss": 0.6963, "step": 20907 }, { "epoch": 0.8665091798251067, 
"grad_norm": 0.4173610508441925, "learning_rate": 6.676613204028348e-07, "loss": 0.6855, "step": 20908 }, { "epoch": 0.8665506237307804, "grad_norm": 0.425766259431839, "learning_rate": 6.674541008744664e-07, "loss": 0.6655, "step": 20909 }, { "epoch": 0.866592067636454, "grad_norm": 0.4009172320365906, "learning_rate": 6.67246881346098e-07, "loss": 0.6799, "step": 20910 }, { "epoch": 0.8666335115421278, "grad_norm": 0.3937987983226776, "learning_rate": 6.670396618177299e-07, "loss": 0.6449, "step": 20911 }, { "epoch": 0.8666749554478014, "grad_norm": 0.38850513100624084, "learning_rate": 6.668324422893615e-07, "loss": 0.6431, "step": 20912 }, { "epoch": 0.8667163993534751, "grad_norm": 0.4118667244911194, "learning_rate": 6.66625222760993e-07, "loss": 0.7085, "step": 20913 }, { "epoch": 0.8667578432591487, "grad_norm": 0.38834768533706665, "learning_rate": 6.664180032326246e-07, "loss": 0.6594, "step": 20914 }, { "epoch": 0.8667992871648225, "grad_norm": 0.3838385343551636, "learning_rate": 6.662107837042565e-07, "loss": 0.6642, "step": 20915 }, { "epoch": 0.8668407310704961, "grad_norm": 0.38794219493865967, "learning_rate": 6.66003564175888e-07, "loss": 0.6074, "step": 20916 }, { "epoch": 0.8668821749761697, "grad_norm": 0.3803756833076477, "learning_rate": 6.657963446475196e-07, "loss": 0.6777, "step": 20917 }, { "epoch": 0.8669236188818434, "grad_norm": 0.4118775427341461, "learning_rate": 6.655891251191512e-07, "loss": 0.6586, "step": 20918 }, { "epoch": 0.8669650627875171, "grad_norm": 0.4366982877254486, "learning_rate": 6.653819055907828e-07, "loss": 0.6821, "step": 20919 }, { "epoch": 0.8670065066931908, "grad_norm": 0.4392503798007965, "learning_rate": 6.651746860624147e-07, "loss": 0.6917, "step": 20920 }, { "epoch": 0.8670479505988644, "grad_norm": 0.4193454086780548, "learning_rate": 6.649674665340463e-07, "loss": 0.6602, "step": 20921 }, { "epoch": 0.8670893945045381, "grad_norm": 0.426750123500824, "learning_rate": 6.647602470056778e-07, "loss": 
0.653, "step": 20922 }, { "epoch": 0.8671308384102118, "grad_norm": 0.4447825849056244, "learning_rate": 6.645530274773094e-07, "loss": 0.7476, "step": 20923 }, { "epoch": 0.8671722823158855, "grad_norm": 0.44760167598724365, "learning_rate": 6.643458079489413e-07, "loss": 0.6819, "step": 20924 }, { "epoch": 0.8672137262215591, "grad_norm": 0.41266539692878723, "learning_rate": 6.641385884205729e-07, "loss": 0.6975, "step": 20925 }, { "epoch": 0.8672551701272327, "grad_norm": 0.4238797724246979, "learning_rate": 6.639313688922044e-07, "loss": 0.6488, "step": 20926 }, { "epoch": 0.8672966140329065, "grad_norm": 0.4176521599292755, "learning_rate": 6.63724149363836e-07, "loss": 0.6619, "step": 20927 }, { "epoch": 0.8673380579385801, "grad_norm": 0.43469467759132385, "learning_rate": 6.635169298354679e-07, "loss": 0.6808, "step": 20928 }, { "epoch": 0.8673795018442538, "grad_norm": 0.41081467270851135, "learning_rate": 6.633097103070995e-07, "loss": 0.6744, "step": 20929 }, { "epoch": 0.8674209457499275, "grad_norm": 0.4399205446243286, "learning_rate": 6.63102490778731e-07, "loss": 0.7065, "step": 20930 }, { "epoch": 0.8674623896556012, "grad_norm": 0.4125112295150757, "learning_rate": 6.628952712503626e-07, "loss": 0.6846, "step": 20931 }, { "epoch": 0.8675038335612748, "grad_norm": 0.5592701435089111, "learning_rate": 6.626880517219942e-07, "loss": 0.7273, "step": 20932 }, { "epoch": 0.8675452774669485, "grad_norm": 0.42536336183547974, "learning_rate": 6.624808321936261e-07, "loss": 0.6655, "step": 20933 }, { "epoch": 0.8675867213726222, "grad_norm": 0.4069126546382904, "learning_rate": 6.622736126652577e-07, "loss": 0.7106, "step": 20934 }, { "epoch": 0.8676281652782958, "grad_norm": 0.4172263443470001, "learning_rate": 6.620663931368892e-07, "loss": 0.6902, "step": 20935 }, { "epoch": 0.8676696091839695, "grad_norm": 0.4257378876209259, "learning_rate": 6.618591736085208e-07, "loss": 0.7275, "step": 20936 }, { "epoch": 0.8677110530896431, "grad_norm": 
0.4231618642807007, "learning_rate": 6.616519540801527e-07, "loss": 0.7102, "step": 20937 }, { "epoch": 0.8677524969953169, "grad_norm": 0.39940279722213745, "learning_rate": 6.614447345517843e-07, "loss": 0.6661, "step": 20938 }, { "epoch": 0.8677939409009905, "grad_norm": 0.3883577883243561, "learning_rate": 6.612375150234159e-07, "loss": 0.6641, "step": 20939 }, { "epoch": 0.8678353848066642, "grad_norm": 0.4278617203235626, "learning_rate": 6.610302954950474e-07, "loss": 0.6866, "step": 20940 }, { "epoch": 0.8678768287123378, "grad_norm": 0.4029589593410492, "learning_rate": 6.608230759666793e-07, "loss": 0.6941, "step": 20941 }, { "epoch": 0.8679182726180115, "grad_norm": 0.4023185968399048, "learning_rate": 6.606158564383109e-07, "loss": 0.5927, "step": 20942 }, { "epoch": 0.8679597165236852, "grad_norm": 0.3980408310890198, "learning_rate": 6.604086369099425e-07, "loss": 0.6074, "step": 20943 }, { "epoch": 0.8680011604293588, "grad_norm": 0.4162020683288574, "learning_rate": 6.60201417381574e-07, "loss": 0.6667, "step": 20944 }, { "epoch": 0.8680426043350326, "grad_norm": 0.41835200786590576, "learning_rate": 6.599941978532056e-07, "loss": 0.6866, "step": 20945 }, { "epoch": 0.8680840482407062, "grad_norm": 0.41209304332733154, "learning_rate": 6.597869783248375e-07, "loss": 0.605, "step": 20946 }, { "epoch": 0.8681254921463799, "grad_norm": 0.4571639895439148, "learning_rate": 6.595797587964691e-07, "loss": 0.6716, "step": 20947 }, { "epoch": 0.8681669360520535, "grad_norm": 0.4019971787929535, "learning_rate": 6.593725392681007e-07, "loss": 0.6605, "step": 20948 }, { "epoch": 0.8682083799577273, "grad_norm": 0.46099260449409485, "learning_rate": 6.591653197397322e-07, "loss": 0.6453, "step": 20949 }, { "epoch": 0.8682498238634009, "grad_norm": 0.41087642312049866, "learning_rate": 6.589581002113641e-07, "loss": 0.6447, "step": 20950 }, { "epoch": 0.8682912677690745, "grad_norm": 0.3926309645175934, "learning_rate": 6.587508806829957e-07, "loss": 0.6365, 
"step": 20951 }, { "epoch": 0.8683327116747482, "grad_norm": 0.38196465373039246, "learning_rate": 6.585436611546273e-07, "loss": 0.6526, "step": 20952 }, { "epoch": 0.8683741555804219, "grad_norm": 0.4099431037902832, "learning_rate": 6.583364416262589e-07, "loss": 0.6643, "step": 20953 }, { "epoch": 0.8684155994860956, "grad_norm": 0.42053699493408203, "learning_rate": 6.581292220978907e-07, "loss": 0.688, "step": 20954 }, { "epoch": 0.8684570433917692, "grad_norm": 0.419247031211853, "learning_rate": 6.579220025695223e-07, "loss": 0.6941, "step": 20955 }, { "epoch": 0.868498487297443, "grad_norm": 0.4384819567203522, "learning_rate": 6.577147830411539e-07, "loss": 0.6731, "step": 20956 }, { "epoch": 0.8685399312031166, "grad_norm": 0.426577091217041, "learning_rate": 6.575075635127855e-07, "loss": 0.6803, "step": 20957 }, { "epoch": 0.8685813751087903, "grad_norm": 0.40765416622161865, "learning_rate": 6.57300343984417e-07, "loss": 0.6384, "step": 20958 }, { "epoch": 0.8686228190144639, "grad_norm": 0.43082523345947266, "learning_rate": 6.570931244560489e-07, "loss": 0.6447, "step": 20959 }, { "epoch": 0.8686642629201375, "grad_norm": 0.4293767213821411, "learning_rate": 6.568859049276805e-07, "loss": 0.6843, "step": 20960 }, { "epoch": 0.8687057068258113, "grad_norm": 0.40518850088119507, "learning_rate": 6.56678685399312e-07, "loss": 0.6561, "step": 20961 }, { "epoch": 0.8687471507314849, "grad_norm": 0.43323907256126404, "learning_rate": 6.564714658709437e-07, "loss": 0.6624, "step": 20962 }, { "epoch": 0.8687885946371586, "grad_norm": 0.42290908098220825, "learning_rate": 6.562642463425755e-07, "loss": 0.6597, "step": 20963 }, { "epoch": 0.8688300385428323, "grad_norm": 0.42473524808883667, "learning_rate": 6.560570268142071e-07, "loss": 0.7131, "step": 20964 }, { "epoch": 0.868871482448506, "grad_norm": 0.4045064449310303, "learning_rate": 6.558498072858387e-07, "loss": 0.6685, "step": 20965 }, { "epoch": 0.8689129263541796, "grad_norm": 0.4135255515575409, 
"learning_rate": 6.556425877574703e-07, "loss": 0.6777, "step": 20966 }, { "epoch": 0.8689543702598533, "grad_norm": 0.396879643201828, "learning_rate": 6.554353682291021e-07, "loss": 0.6187, "step": 20967 }, { "epoch": 0.868995814165527, "grad_norm": 0.4325798451900482, "learning_rate": 6.552281487007337e-07, "loss": 0.6785, "step": 20968 }, { "epoch": 0.8690372580712006, "grad_norm": 0.4022548794746399, "learning_rate": 6.550209291723653e-07, "loss": 0.6643, "step": 20969 }, { "epoch": 0.8690787019768743, "grad_norm": 0.4126604497432709, "learning_rate": 6.548137096439969e-07, "loss": 0.6635, "step": 20970 }, { "epoch": 0.8691201458825479, "grad_norm": 0.42352133989334106, "learning_rate": 6.546064901156285e-07, "loss": 0.6288, "step": 20971 }, { "epoch": 0.8691615897882217, "grad_norm": 0.46328747272491455, "learning_rate": 6.543992705872603e-07, "loss": 0.7012, "step": 20972 }, { "epoch": 0.8692030336938953, "grad_norm": 0.38528740406036377, "learning_rate": 6.541920510588919e-07, "loss": 0.6105, "step": 20973 }, { "epoch": 0.869244477599569, "grad_norm": 0.4061183035373688, "learning_rate": 6.539848315305235e-07, "loss": 0.6709, "step": 20974 }, { "epoch": 0.8692859215052426, "grad_norm": 0.43848398327827454, "learning_rate": 6.53777612002155e-07, "loss": 0.7651, "step": 20975 }, { "epoch": 0.8693273654109164, "grad_norm": 0.4271588623523712, "learning_rate": 6.535703924737869e-07, "loss": 0.6932, "step": 20976 }, { "epoch": 0.86936880931659, "grad_norm": 0.4046817123889923, "learning_rate": 6.533631729454185e-07, "loss": 0.636, "step": 20977 }, { "epoch": 0.8694102532222636, "grad_norm": 0.3679453730583191, "learning_rate": 6.531559534170501e-07, "loss": 0.6003, "step": 20978 }, { "epoch": 0.8694516971279374, "grad_norm": 0.43561503291130066, "learning_rate": 6.529487338886817e-07, "loss": 0.6569, "step": 20979 }, { "epoch": 0.869493141033611, "grad_norm": 0.40223008394241333, "learning_rate": 6.527415143603135e-07, "loss": 0.6663, "step": 20980 }, { "epoch": 
0.8695345849392847, "grad_norm": 0.39952352643013, "learning_rate": 6.525342948319451e-07, "loss": 0.6548, "step": 20981 }, { "epoch": 0.8695760288449583, "grad_norm": 0.4100901186466217, "learning_rate": 6.523270753035767e-07, "loss": 0.6777, "step": 20982 }, { "epoch": 0.8696174727506321, "grad_norm": 0.42813199758529663, "learning_rate": 6.521198557752083e-07, "loss": 0.7083, "step": 20983 }, { "epoch": 0.8696589166563057, "grad_norm": 0.420285701751709, "learning_rate": 6.519126362468399e-07, "loss": 0.6743, "step": 20984 }, { "epoch": 0.8697003605619794, "grad_norm": 0.5559378862380981, "learning_rate": 6.517054167184717e-07, "loss": 0.6818, "step": 20985 }, { "epoch": 0.869741804467653, "grad_norm": 0.43568581342697144, "learning_rate": 6.514981971901033e-07, "loss": 0.7173, "step": 20986 }, { "epoch": 0.8697832483733267, "grad_norm": 0.4175143837928772, "learning_rate": 6.512909776617349e-07, "loss": 0.6581, "step": 20987 }, { "epoch": 0.8698246922790004, "grad_norm": 0.4171862304210663, "learning_rate": 6.510837581333665e-07, "loss": 0.6802, "step": 20988 }, { "epoch": 0.869866136184674, "grad_norm": 0.4128933846950531, "learning_rate": 6.508765386049983e-07, "loss": 0.6733, "step": 20989 }, { "epoch": 0.8699075800903477, "grad_norm": 0.38840848207473755, "learning_rate": 6.506693190766299e-07, "loss": 0.6692, "step": 20990 }, { "epoch": 0.8699490239960214, "grad_norm": 0.4228536784648895, "learning_rate": 6.504620995482615e-07, "loss": 0.6798, "step": 20991 }, { "epoch": 0.8699904679016951, "grad_norm": 0.4262978434562683, "learning_rate": 6.502548800198931e-07, "loss": 0.7113, "step": 20992 }, { "epoch": 0.8700319118073687, "grad_norm": 0.42713019251823425, "learning_rate": 6.500476604915248e-07, "loss": 0.6509, "step": 20993 }, { "epoch": 0.8700733557130425, "grad_norm": 0.42333167791366577, "learning_rate": 6.498404409631565e-07, "loss": 0.6721, "step": 20994 }, { "epoch": 0.8701147996187161, "grad_norm": 0.45988669991493225, "learning_rate": 
6.496332214347881e-07, "loss": 0.6885, "step": 20995 }, { "epoch": 0.8701562435243897, "grad_norm": 0.4318194091320038, "learning_rate": 6.494260019064197e-07, "loss": 0.7512, "step": 20996 }, { "epoch": 0.8701976874300634, "grad_norm": 0.40950191020965576, "learning_rate": 6.492187823780513e-07, "loss": 0.6694, "step": 20997 }, { "epoch": 0.870239131335737, "grad_norm": 0.39523282647132874, "learning_rate": 6.490115628496831e-07, "loss": 0.6381, "step": 20998 }, { "epoch": 0.8702805752414108, "grad_norm": 0.4065968096256256, "learning_rate": 6.488043433213147e-07, "loss": 0.6917, "step": 20999 }, { "epoch": 0.8703220191470844, "grad_norm": 0.39876100420951843, "learning_rate": 6.485971237929463e-07, "loss": 0.6447, "step": 21000 }, { "epoch": 0.8703634630527581, "grad_norm": 0.36696720123291016, "learning_rate": 6.483899042645779e-07, "loss": 0.6635, "step": 21001 }, { "epoch": 0.8704049069584318, "grad_norm": 0.405466765165329, "learning_rate": 6.481826847362097e-07, "loss": 0.6565, "step": 21002 }, { "epoch": 0.8704463508641054, "grad_norm": 0.39545005559921265, "learning_rate": 6.479754652078413e-07, "loss": 0.6193, "step": 21003 }, { "epoch": 0.8704877947697791, "grad_norm": 0.429261177778244, "learning_rate": 6.477682456794729e-07, "loss": 0.6819, "step": 21004 }, { "epoch": 0.8705292386754527, "grad_norm": 0.382962703704834, "learning_rate": 6.475610261511045e-07, "loss": 0.6793, "step": 21005 }, { "epoch": 0.8705706825811265, "grad_norm": 0.3871862292289734, "learning_rate": 6.473538066227362e-07, "loss": 0.6281, "step": 21006 }, { "epoch": 0.8706121264868001, "grad_norm": 0.4119024872779846, "learning_rate": 6.471465870943679e-07, "loss": 0.6272, "step": 21007 }, { "epoch": 0.8706535703924738, "grad_norm": 0.45024019479751587, "learning_rate": 6.469393675659995e-07, "loss": 0.6782, "step": 21008 }, { "epoch": 0.8706950142981474, "grad_norm": 0.3999943137168884, "learning_rate": 6.467321480376311e-07, "loss": 0.6731, "step": 21009 }, { "epoch": 
0.8707364582038212, "grad_norm": 0.4422866404056549, "learning_rate": 6.465249285092628e-07, "loss": 0.6545, "step": 21010 }, { "epoch": 0.8707779021094948, "grad_norm": 0.4573245346546173, "learning_rate": 6.463177089808945e-07, "loss": 0.6886, "step": 21011 }, { "epoch": 0.8708193460151684, "grad_norm": 0.42297235131263733, "learning_rate": 6.461104894525261e-07, "loss": 0.6646, "step": 21012 }, { "epoch": 0.8708607899208421, "grad_norm": 0.4467531442642212, "learning_rate": 6.459032699241577e-07, "loss": 0.6794, "step": 21013 }, { "epoch": 0.8709022338265158, "grad_norm": 0.429007887840271, "learning_rate": 6.456960503957893e-07, "loss": 0.7156, "step": 21014 }, { "epoch": 0.8709436777321895, "grad_norm": 0.39854303002357483, "learning_rate": 6.454888308674211e-07, "loss": 0.7006, "step": 21015 }, { "epoch": 0.8709851216378631, "grad_norm": 0.4026521146297455, "learning_rate": 6.452816113390527e-07, "loss": 0.6826, "step": 21016 }, { "epoch": 0.8710265655435369, "grad_norm": 0.45026010274887085, "learning_rate": 6.450743918106843e-07, "loss": 0.6583, "step": 21017 }, { "epoch": 0.8710680094492105, "grad_norm": 0.41821566224098206, "learning_rate": 6.448671722823159e-07, "loss": 0.7026, "step": 21018 }, { "epoch": 0.8711094533548842, "grad_norm": 0.39174166321754456, "learning_rate": 6.446599527539476e-07, "loss": 0.673, "step": 21019 }, { "epoch": 0.8711508972605578, "grad_norm": 0.40558144450187683, "learning_rate": 6.444527332255793e-07, "loss": 0.6407, "step": 21020 }, { "epoch": 0.8711923411662315, "grad_norm": 0.42113354802131653, "learning_rate": 6.442455136972109e-07, "loss": 0.6854, "step": 21021 }, { "epoch": 0.8712337850719052, "grad_norm": 0.4299522340297699, "learning_rate": 6.440382941688425e-07, "loss": 0.6599, "step": 21022 }, { "epoch": 0.8712752289775788, "grad_norm": 0.38888901472091675, "learning_rate": 6.438310746404742e-07, "loss": 0.6404, "step": 21023 }, { "epoch": 0.8713166728832525, "grad_norm": 0.42769384384155273, "learning_rate": 
6.436238551121059e-07, "loss": 0.7002, "step": 21024 }, { "epoch": 0.8713581167889262, "grad_norm": 0.3867104649543762, "learning_rate": 6.434166355837375e-07, "loss": 0.6853, "step": 21025 }, { "epoch": 0.8713995606945999, "grad_norm": 0.4051859676837921, "learning_rate": 6.432094160553691e-07, "loss": 0.6565, "step": 21026 }, { "epoch": 0.8714410046002735, "grad_norm": 0.4128601551055908, "learning_rate": 6.430021965270008e-07, "loss": 0.6271, "step": 21027 }, { "epoch": 0.8714824485059472, "grad_norm": 0.39655977487564087, "learning_rate": 6.427949769986325e-07, "loss": 0.6671, "step": 21028 }, { "epoch": 0.8715238924116209, "grad_norm": 0.43138906359672546, "learning_rate": 6.425877574702641e-07, "loss": 0.6962, "step": 21029 }, { "epoch": 0.8715653363172945, "grad_norm": 0.41463854908943176, "learning_rate": 6.423805379418957e-07, "loss": 0.6521, "step": 21030 }, { "epoch": 0.8716067802229682, "grad_norm": 0.4136026203632355, "learning_rate": 6.421733184135274e-07, "loss": 0.6509, "step": 21031 }, { "epoch": 0.8716482241286418, "grad_norm": 0.41526171565055847, "learning_rate": 6.41966098885159e-07, "loss": 0.6719, "step": 21032 }, { "epoch": 0.8716896680343156, "grad_norm": 0.44354721903800964, "learning_rate": 6.417588793567907e-07, "loss": 0.698, "step": 21033 }, { "epoch": 0.8717311119399892, "grad_norm": 0.4231584668159485, "learning_rate": 6.415516598284223e-07, "loss": 0.6531, "step": 21034 }, { "epoch": 0.8717725558456629, "grad_norm": 0.3978957533836365, "learning_rate": 6.413444403000539e-07, "loss": 0.6324, "step": 21035 }, { "epoch": 0.8718139997513366, "grad_norm": 0.39762282371520996, "learning_rate": 6.411372207716856e-07, "loss": 0.6205, "step": 21036 }, { "epoch": 0.8718554436570103, "grad_norm": 0.4275680482387543, "learning_rate": 6.409300012433173e-07, "loss": 0.6528, "step": 21037 }, { "epoch": 0.8718968875626839, "grad_norm": 0.45531949400901794, "learning_rate": 6.407227817149489e-07, "loss": 0.7322, "step": 21038 }, { "epoch": 
0.8719383314683575, "grad_norm": 0.4012257754802704, "learning_rate": 6.405155621865805e-07, "loss": 0.6812, "step": 21039 }, { "epoch": 0.8719797753740313, "grad_norm": 0.40079963207244873, "learning_rate": 6.403083426582122e-07, "loss": 0.6064, "step": 21040 }, { "epoch": 0.8720212192797049, "grad_norm": 0.43314310908317566, "learning_rate": 6.401011231298439e-07, "loss": 0.6603, "step": 21041 }, { "epoch": 0.8720626631853786, "grad_norm": 0.37559404969215393, "learning_rate": 6.398939036014755e-07, "loss": 0.6086, "step": 21042 }, { "epoch": 0.8721041070910522, "grad_norm": 0.4518875777721405, "learning_rate": 6.396866840731071e-07, "loss": 0.6499, "step": 21043 }, { "epoch": 0.872145550996726, "grad_norm": 0.4204740524291992, "learning_rate": 6.394794645447388e-07, "loss": 0.655, "step": 21044 }, { "epoch": 0.8721869949023996, "grad_norm": 0.4391556978225708, "learning_rate": 6.392722450163704e-07, "loss": 0.6715, "step": 21045 }, { "epoch": 0.8722284388080733, "grad_norm": 0.3926897644996643, "learning_rate": 6.390650254880021e-07, "loss": 0.6149, "step": 21046 }, { "epoch": 0.872269882713747, "grad_norm": 0.40009206533432007, "learning_rate": 6.388578059596337e-07, "loss": 0.7205, "step": 21047 }, { "epoch": 0.8723113266194206, "grad_norm": 0.42757606506347656, "learning_rate": 6.386505864312654e-07, "loss": 0.6503, "step": 21048 }, { "epoch": 0.8723527705250943, "grad_norm": 0.4311619699001312, "learning_rate": 6.38443366902897e-07, "loss": 0.6276, "step": 21049 }, { "epoch": 0.8723942144307679, "grad_norm": 0.45492252707481384, "learning_rate": 6.382361473745287e-07, "loss": 0.6329, "step": 21050 }, { "epoch": 0.8724356583364417, "grad_norm": 0.3861817717552185, "learning_rate": 6.380289278461603e-07, "loss": 0.6641, "step": 21051 }, { "epoch": 0.8724771022421153, "grad_norm": 0.4098234474658966, "learning_rate": 6.378217083177919e-07, "loss": 0.6205, "step": 21052 }, { "epoch": 0.872518546147789, "grad_norm": 0.43558695912361145, "learning_rate": 
6.376144887894236e-07, "loss": 0.6543, "step": 21053 }, { "epoch": 0.8725599900534626, "grad_norm": 0.38374799489974976, "learning_rate": 6.374072692610552e-07, "loss": 0.6018, "step": 21054 }, { "epoch": 0.8726014339591364, "grad_norm": 0.40227314829826355, "learning_rate": 6.372000497326869e-07, "loss": 0.6847, "step": 21055 }, { "epoch": 0.87264287786481, "grad_norm": 0.4054569900035858, "learning_rate": 6.369928302043185e-07, "loss": 0.6326, "step": 21056 }, { "epoch": 0.8726843217704836, "grad_norm": 0.41419917345046997, "learning_rate": 6.367856106759502e-07, "loss": 0.7007, "step": 21057 }, { "epoch": 0.8727257656761573, "grad_norm": 0.47099941968917847, "learning_rate": 6.365783911475818e-07, "loss": 0.7246, "step": 21058 }, { "epoch": 0.872767209581831, "grad_norm": 0.4324527084827423, "learning_rate": 6.363711716192135e-07, "loss": 0.6759, "step": 21059 }, { "epoch": 0.8728086534875047, "grad_norm": 0.4270777702331543, "learning_rate": 6.361639520908451e-07, "loss": 0.6792, "step": 21060 }, { "epoch": 0.8728500973931783, "grad_norm": 0.38193923234939575, "learning_rate": 6.359567325624768e-07, "loss": 0.6562, "step": 21061 }, { "epoch": 0.872891541298852, "grad_norm": 0.414305180311203, "learning_rate": 6.357495130341084e-07, "loss": 0.7297, "step": 21062 }, { "epoch": 0.8729329852045257, "grad_norm": 0.3815478980541229, "learning_rate": 6.355422935057401e-07, "loss": 0.5847, "step": 21063 }, { "epoch": 0.8729744291101993, "grad_norm": 0.4070812165737152, "learning_rate": 6.353350739773717e-07, "loss": 0.6982, "step": 21064 }, { "epoch": 0.873015873015873, "grad_norm": 0.4201981723308563, "learning_rate": 6.351278544490034e-07, "loss": 0.7017, "step": 21065 }, { "epoch": 0.8730573169215466, "grad_norm": 0.3947572112083435, "learning_rate": 6.34920634920635e-07, "loss": 0.62, "step": 21066 }, { "epoch": 0.8730987608272204, "grad_norm": 0.414168119430542, "learning_rate": 6.347134153922666e-07, "loss": 0.6602, "step": 21067 }, { "epoch": 0.873140204732894, 
"grad_norm": 0.43096923828125, "learning_rate": 6.345061958638983e-07, "loss": 0.649, "step": 21068 }, { "epoch": 0.8731816486385677, "grad_norm": 0.40803661942481995, "learning_rate": 6.342989763355299e-07, "loss": 0.6643, "step": 21069 }, { "epoch": 0.8732230925442414, "grad_norm": 0.40051186084747314, "learning_rate": 6.340917568071616e-07, "loss": 0.6411, "step": 21070 }, { "epoch": 0.8732645364499151, "grad_norm": 0.4332613945007324, "learning_rate": 6.338845372787932e-07, "loss": 0.6499, "step": 21071 }, { "epoch": 0.8733059803555887, "grad_norm": 0.4091212749481201, "learning_rate": 6.336773177504249e-07, "loss": 0.6309, "step": 21072 }, { "epoch": 0.8733474242612623, "grad_norm": 0.403309166431427, "learning_rate": 6.334700982220565e-07, "loss": 0.6385, "step": 21073 }, { "epoch": 0.8733888681669361, "grad_norm": 0.43522611260414124, "learning_rate": 6.332628786936882e-07, "loss": 0.7429, "step": 21074 }, { "epoch": 0.8734303120726097, "grad_norm": 0.4101661443710327, "learning_rate": 6.330556591653198e-07, "loss": 0.6569, "step": 21075 }, { "epoch": 0.8734717559782834, "grad_norm": 0.4063767194747925, "learning_rate": 6.328484396369515e-07, "loss": 0.6647, "step": 21076 }, { "epoch": 0.873513199883957, "grad_norm": 0.4405270516872406, "learning_rate": 6.326412201085831e-07, "loss": 0.7084, "step": 21077 }, { "epoch": 0.8735546437896308, "grad_norm": 0.4283601939678192, "learning_rate": 6.324340005802148e-07, "loss": 0.6158, "step": 21078 }, { "epoch": 0.8735960876953044, "grad_norm": 0.42846760153770447, "learning_rate": 6.322267810518464e-07, "loss": 0.6646, "step": 21079 }, { "epoch": 0.8736375316009781, "grad_norm": 0.40826937556266785, "learning_rate": 6.32019561523478e-07, "loss": 0.6353, "step": 21080 }, { "epoch": 0.8736789755066517, "grad_norm": 0.4547218680381775, "learning_rate": 6.318123419951097e-07, "loss": 0.7053, "step": 21081 }, { "epoch": 0.8737204194123254, "grad_norm": 0.4659343957901001, "learning_rate": 6.316051224667414e-07, "loss": 
0.6785, "step": 21082 }, { "epoch": 0.8737618633179991, "grad_norm": 0.41162532567977905, "learning_rate": 6.31397902938373e-07, "loss": 0.6262, "step": 21083 }, { "epoch": 0.8738033072236727, "grad_norm": 0.43572402000427246, "learning_rate": 6.311906834100046e-07, "loss": 0.748, "step": 21084 }, { "epoch": 0.8738447511293465, "grad_norm": 0.3738400340080261, "learning_rate": 6.309834638816363e-07, "loss": 0.6163, "step": 21085 }, { "epoch": 0.8738861950350201, "grad_norm": 0.43965157866477966, "learning_rate": 6.30776244353268e-07, "loss": 0.7086, "step": 21086 }, { "epoch": 0.8739276389406938, "grad_norm": 0.42424824833869934, "learning_rate": 6.305690248248996e-07, "loss": 0.6914, "step": 21087 }, { "epoch": 0.8739690828463674, "grad_norm": 0.40504661202430725, "learning_rate": 6.303618052965312e-07, "loss": 0.6515, "step": 21088 }, { "epoch": 0.8740105267520412, "grad_norm": 0.3959163725376129, "learning_rate": 6.301545857681629e-07, "loss": 0.6543, "step": 21089 }, { "epoch": 0.8740519706577148, "grad_norm": 0.44820645451545715, "learning_rate": 6.299473662397945e-07, "loss": 0.6427, "step": 21090 }, { "epoch": 0.8740934145633884, "grad_norm": 0.4210261106491089, "learning_rate": 6.297401467114262e-07, "loss": 0.6658, "step": 21091 }, { "epoch": 0.8741348584690621, "grad_norm": 0.39452698826789856, "learning_rate": 6.295329271830578e-07, "loss": 0.6471, "step": 21092 }, { "epoch": 0.8741763023747358, "grad_norm": 0.38127636909484863, "learning_rate": 6.293257076546894e-07, "loss": 0.6726, "step": 21093 }, { "epoch": 0.8742177462804095, "grad_norm": 0.43088850378990173, "learning_rate": 6.291184881263211e-07, "loss": 0.6559, "step": 21094 }, { "epoch": 0.8742591901860831, "grad_norm": 0.43449413776397705, "learning_rate": 6.289112685979528e-07, "loss": 0.6373, "step": 21095 }, { "epoch": 0.8743006340917568, "grad_norm": 0.4328954219818115, "learning_rate": 6.287040490695844e-07, "loss": 0.6877, "step": 21096 }, { "epoch": 0.8743420779974305, "grad_norm": 
0.45773303508758545, "learning_rate": 6.28496829541216e-07, "loss": 0.708, "step": 21097 }, { "epoch": 0.8743835219031042, "grad_norm": 0.3907792270183563, "learning_rate": 6.282896100128477e-07, "loss": 0.6217, "step": 21098 }, { "epoch": 0.8744249658087778, "grad_norm": 0.4148302376270294, "learning_rate": 6.280823904844794e-07, "loss": 0.631, "step": 21099 }, { "epoch": 0.8744664097144514, "grad_norm": 0.4146079123020172, "learning_rate": 6.27875170956111e-07, "loss": 0.6582, "step": 21100 }, { "epoch": 0.8745078536201252, "grad_norm": 0.4227794110774994, "learning_rate": 6.276679514277426e-07, "loss": 0.6294, "step": 21101 }, { "epoch": 0.8745492975257988, "grad_norm": 0.42427343130111694, "learning_rate": 6.274607318993743e-07, "loss": 0.6741, "step": 21102 }, { "epoch": 0.8745907414314725, "grad_norm": 0.3804783523082733, "learning_rate": 6.27253512371006e-07, "loss": 0.6018, "step": 21103 }, { "epoch": 0.8746321853371462, "grad_norm": 0.40360864996910095, "learning_rate": 6.270462928426376e-07, "loss": 0.6256, "step": 21104 }, { "epoch": 0.8746736292428199, "grad_norm": 0.42289596796035767, "learning_rate": 6.268390733142692e-07, "loss": 0.6466, "step": 21105 }, { "epoch": 0.8747150731484935, "grad_norm": 0.3998088836669922, "learning_rate": 6.266318537859008e-07, "loss": 0.66, "step": 21106 }, { "epoch": 0.8747565170541672, "grad_norm": 0.3816741704940796, "learning_rate": 6.264246342575325e-07, "loss": 0.594, "step": 21107 }, { "epoch": 0.8747979609598409, "grad_norm": 0.418963223695755, "learning_rate": 6.262174147291642e-07, "loss": 0.6201, "step": 21108 }, { "epoch": 0.8748394048655145, "grad_norm": 0.4480084180831909, "learning_rate": 6.260101952007958e-07, "loss": 0.6921, "step": 21109 }, { "epoch": 0.8748808487711882, "grad_norm": 0.38828662037849426, "learning_rate": 6.258029756724274e-07, "loss": 0.6001, "step": 21110 }, { "epoch": 0.8749222926768618, "grad_norm": 0.4004436731338501, "learning_rate": 6.255957561440591e-07, "loss": 0.6473, "step": 
21111 }, { "epoch": 0.8749637365825356, "grad_norm": 0.4194926917552948, "learning_rate": 6.253885366156908e-07, "loss": 0.6829, "step": 21112 }, { "epoch": 0.8750051804882092, "grad_norm": 0.3987944424152374, "learning_rate": 6.251813170873224e-07, "loss": 0.6279, "step": 21113 }, { "epoch": 0.8750466243938829, "grad_norm": 0.3936912417411804, "learning_rate": 6.249740975589541e-07, "loss": 0.6919, "step": 21114 }, { "epoch": 0.8750880682995565, "grad_norm": 0.43640032410621643, "learning_rate": 6.247668780305857e-07, "loss": 0.6576, "step": 21115 }, { "epoch": 0.8751295122052303, "grad_norm": 0.39306315779685974, "learning_rate": 6.245596585022173e-07, "loss": 0.6439, "step": 21116 }, { "epoch": 0.8751709561109039, "grad_norm": 0.4292284846305847, "learning_rate": 6.24352438973849e-07, "loss": 0.662, "step": 21117 }, { "epoch": 0.8752124000165775, "grad_norm": 0.43263867497444153, "learning_rate": 6.241452194454806e-07, "loss": 0.679, "step": 21118 }, { "epoch": 0.8752538439222513, "grad_norm": 0.41625720262527466, "learning_rate": 6.239379999171123e-07, "loss": 0.62, "step": 21119 }, { "epoch": 0.8752952878279249, "grad_norm": 0.4633916914463043, "learning_rate": 6.237307803887439e-07, "loss": 0.7173, "step": 21120 }, { "epoch": 0.8753367317335986, "grad_norm": 0.40823599696159363, "learning_rate": 6.235235608603756e-07, "loss": 0.6501, "step": 21121 }, { "epoch": 0.8753781756392722, "grad_norm": 0.44758760929107666, "learning_rate": 6.233163413320072e-07, "loss": 0.7009, "step": 21122 }, { "epoch": 0.875419619544946, "grad_norm": 0.4339844882488251, "learning_rate": 6.231091218036389e-07, "loss": 0.6036, "step": 21123 }, { "epoch": 0.8754610634506196, "grad_norm": 0.411300927400589, "learning_rate": 6.229019022752705e-07, "loss": 0.6858, "step": 21124 }, { "epoch": 0.8755025073562932, "grad_norm": 0.4465458393096924, "learning_rate": 6.226946827469022e-07, "loss": 0.6667, "step": 21125 }, { "epoch": 0.8755439512619669, "grad_norm": 0.43640896677970886, 
"learning_rate": 6.224874632185338e-07, "loss": 0.6432, "step": 21126 }, { "epoch": 0.8755853951676406, "grad_norm": 0.4123358130455017, "learning_rate": 6.222802436901655e-07, "loss": 0.646, "step": 21127 }, { "epoch": 0.8756268390733143, "grad_norm": 0.4537299871444702, "learning_rate": 6.220730241617971e-07, "loss": 0.6877, "step": 21128 }, { "epoch": 0.8756682829789879, "grad_norm": 0.4167579412460327, "learning_rate": 6.218658046334287e-07, "loss": 0.6571, "step": 21129 }, { "epoch": 0.8757097268846616, "grad_norm": 0.41824257373809814, "learning_rate": 6.216585851050604e-07, "loss": 0.6554, "step": 21130 }, { "epoch": 0.8757511707903353, "grad_norm": 0.4184277057647705, "learning_rate": 6.21451365576692e-07, "loss": 0.696, "step": 21131 }, { "epoch": 0.875792614696009, "grad_norm": 0.4107036888599396, "learning_rate": 6.212441460483237e-07, "loss": 0.6631, "step": 21132 }, { "epoch": 0.8758340586016826, "grad_norm": 0.44150468707084656, "learning_rate": 6.210369265199553e-07, "loss": 0.709, "step": 21133 }, { "epoch": 0.8758755025073562, "grad_norm": 0.40125706791877747, "learning_rate": 6.20829706991587e-07, "loss": 0.6221, "step": 21134 }, { "epoch": 0.87591694641303, "grad_norm": 0.4508236050605774, "learning_rate": 6.206224874632186e-07, "loss": 0.7021, "step": 21135 }, { "epoch": 0.8759583903187036, "grad_norm": 0.4027940034866333, "learning_rate": 6.204152679348503e-07, "loss": 0.6931, "step": 21136 }, { "epoch": 0.8759998342243773, "grad_norm": 0.40112560987472534, "learning_rate": 6.202080484064819e-07, "loss": 0.6167, "step": 21137 }, { "epoch": 0.876041278130051, "grad_norm": 0.47703316807746887, "learning_rate": 6.200008288781136e-07, "loss": 0.611, "step": 21138 }, { "epoch": 0.8760827220357247, "grad_norm": 0.41878071427345276, "learning_rate": 6.197936093497452e-07, "loss": 0.6478, "step": 21139 }, { "epoch": 0.8761241659413983, "grad_norm": 0.4195132851600647, "learning_rate": 6.195863898213769e-07, "loss": 0.6746, "step": 21140 }, { "epoch": 
0.876165609847072, "grad_norm": 0.41365063190460205, "learning_rate": 6.193791702930085e-07, "loss": 0.6284, "step": 21141 }, { "epoch": 0.8762070537527457, "grad_norm": 0.4118717610836029, "learning_rate": 6.191719507646401e-07, "loss": 0.6636, "step": 21142 }, { "epoch": 0.8762484976584193, "grad_norm": 0.4001978635787964, "learning_rate": 6.189647312362718e-07, "loss": 0.6678, "step": 21143 }, { "epoch": 0.876289941564093, "grad_norm": 0.40394824743270874, "learning_rate": 6.187575117079034e-07, "loss": 0.6541, "step": 21144 }, { "epoch": 0.8763313854697666, "grad_norm": 0.43684524297714233, "learning_rate": 6.185502921795351e-07, "loss": 0.6406, "step": 21145 }, { "epoch": 0.8763728293754404, "grad_norm": 0.44488877058029175, "learning_rate": 6.183430726511667e-07, "loss": 0.6777, "step": 21146 }, { "epoch": 0.876414273281114, "grad_norm": 0.41524744033813477, "learning_rate": 6.181358531227984e-07, "loss": 0.6229, "step": 21147 }, { "epoch": 0.8764557171867877, "grad_norm": 0.4127786159515381, "learning_rate": 6.1792863359443e-07, "loss": 0.6218, "step": 21148 }, { "epoch": 0.8764971610924613, "grad_norm": 0.44900187849998474, "learning_rate": 6.177214140660617e-07, "loss": 0.6418, "step": 21149 }, { "epoch": 0.8765386049981351, "grad_norm": 0.43120262026786804, "learning_rate": 6.175141945376933e-07, "loss": 0.6667, "step": 21150 }, { "epoch": 0.8765800489038087, "grad_norm": 0.42008501291275024, "learning_rate": 6.17306975009325e-07, "loss": 0.6675, "step": 21151 }, { "epoch": 0.8766214928094823, "grad_norm": 0.40160608291625977, "learning_rate": 6.170997554809566e-07, "loss": 0.6511, "step": 21152 }, { "epoch": 0.876662936715156, "grad_norm": 0.39608532190322876, "learning_rate": 6.168925359525882e-07, "loss": 0.6526, "step": 21153 }, { "epoch": 0.8767043806208297, "grad_norm": 0.424333781003952, "learning_rate": 6.166853164242199e-07, "loss": 0.6997, "step": 21154 }, { "epoch": 0.8767458245265034, "grad_norm": 0.41962510347366333, "learning_rate": 
6.164780968958515e-07, "loss": 0.6531, "step": 21155 }, { "epoch": 0.876787268432177, "grad_norm": 0.4095209836959839, "learning_rate": 6.162708773674832e-07, "loss": 0.6737, "step": 21156 }, { "epoch": 0.8768287123378508, "grad_norm": 0.4183366000652313, "learning_rate": 6.160636578391148e-07, "loss": 0.6625, "step": 21157 }, { "epoch": 0.8768701562435244, "grad_norm": 0.40923160314559937, "learning_rate": 6.158564383107465e-07, "loss": 0.6329, "step": 21158 }, { "epoch": 0.8769116001491981, "grad_norm": 0.4039829671382904, "learning_rate": 6.156492187823781e-07, "loss": 0.6147, "step": 21159 }, { "epoch": 0.8769530440548717, "grad_norm": 0.4238888621330261, "learning_rate": 6.154419992540098e-07, "loss": 0.6587, "step": 21160 }, { "epoch": 0.8769944879605454, "grad_norm": 0.3870724141597748, "learning_rate": 6.152347797256414e-07, "loss": 0.6809, "step": 21161 }, { "epoch": 0.8770359318662191, "grad_norm": 0.43098098039627075, "learning_rate": 6.150275601972731e-07, "loss": 0.6646, "step": 21162 }, { "epoch": 0.8770773757718927, "grad_norm": 0.3778671622276306, "learning_rate": 6.148203406689047e-07, "loss": 0.6141, "step": 21163 }, { "epoch": 0.8771188196775664, "grad_norm": 0.41942545771598816, "learning_rate": 6.146131211405364e-07, "loss": 0.697, "step": 21164 }, { "epoch": 0.8771602635832401, "grad_norm": 0.390924334526062, "learning_rate": 6.14405901612168e-07, "loss": 0.6564, "step": 21165 }, { "epoch": 0.8772017074889138, "grad_norm": 0.4021517038345337, "learning_rate": 6.141986820837996e-07, "loss": 0.663, "step": 21166 }, { "epoch": 0.8772431513945874, "grad_norm": 0.3966197371482849, "learning_rate": 6.139914625554313e-07, "loss": 0.6219, "step": 21167 }, { "epoch": 0.8772845953002611, "grad_norm": 0.4305345118045807, "learning_rate": 6.137842430270629e-07, "loss": 0.7175, "step": 21168 }, { "epoch": 0.8773260392059348, "grad_norm": 0.44699615240097046, "learning_rate": 6.135770234986946e-07, "loss": 0.7095, "step": 21169 }, { "epoch": 
0.8773674831116084, "grad_norm": 0.4194971024990082, "learning_rate": 6.133698039703262e-07, "loss": 0.6501, "step": 21170 }, { "epoch": 0.8774089270172821, "grad_norm": 0.4412505328655243, "learning_rate": 6.131625844419579e-07, "loss": 0.6674, "step": 21171 }, { "epoch": 0.8774503709229557, "grad_norm": 0.41273123025894165, "learning_rate": 6.129553649135895e-07, "loss": 0.6448, "step": 21172 }, { "epoch": 0.8774918148286295, "grad_norm": 0.4396305978298187, "learning_rate": 6.127481453852212e-07, "loss": 0.6968, "step": 21173 }, { "epoch": 0.8775332587343031, "grad_norm": 0.38865870237350464, "learning_rate": 6.125409258568528e-07, "loss": 0.6519, "step": 21174 }, { "epoch": 0.8775747026399768, "grad_norm": 0.39792460203170776, "learning_rate": 6.123337063284845e-07, "loss": 0.6466, "step": 21175 }, { "epoch": 0.8776161465456505, "grad_norm": 0.3990892767906189, "learning_rate": 6.121264868001161e-07, "loss": 0.6431, "step": 21176 }, { "epoch": 0.8776575904513242, "grad_norm": 0.39086923003196716, "learning_rate": 6.119192672717477e-07, "loss": 0.6038, "step": 21177 }, { "epoch": 0.8776990343569978, "grad_norm": 0.4340440034866333, "learning_rate": 6.117120477433794e-07, "loss": 0.6926, "step": 21178 }, { "epoch": 0.8777404782626714, "grad_norm": 0.41801774501800537, "learning_rate": 6.11504828215011e-07, "loss": 0.6538, "step": 21179 }, { "epoch": 0.8777819221683452, "grad_norm": 0.40189599990844727, "learning_rate": 6.112976086866427e-07, "loss": 0.6492, "step": 21180 }, { "epoch": 0.8778233660740188, "grad_norm": 0.4554777145385742, "learning_rate": 6.110903891582743e-07, "loss": 0.658, "step": 21181 }, { "epoch": 0.8778648099796925, "grad_norm": 0.385280579328537, "learning_rate": 6.10883169629906e-07, "loss": 0.6172, "step": 21182 }, { "epoch": 0.8779062538853661, "grad_norm": 0.3970181941986084, "learning_rate": 6.106759501015376e-07, "loss": 0.7048, "step": 21183 }, { "epoch": 0.8779476977910399, "grad_norm": 0.4412635266780853, "learning_rate": 
6.104687305731693e-07, "loss": 0.6836, "step": 21184 }, { "epoch": 0.8779891416967135, "grad_norm": 0.3977997303009033, "learning_rate": 6.102615110448009e-07, "loss": 0.6342, "step": 21185 }, { "epoch": 0.8780305856023871, "grad_norm": 0.42402365803718567, "learning_rate": 6.100542915164326e-07, "loss": 0.6301, "step": 21186 }, { "epoch": 0.8780720295080608, "grad_norm": 0.4120456874370575, "learning_rate": 6.098470719880642e-07, "loss": 0.6562, "step": 21187 }, { "epoch": 0.8781134734137345, "grad_norm": 0.4150981307029724, "learning_rate": 6.096398524596959e-07, "loss": 0.6559, "step": 21188 }, { "epoch": 0.8781549173194082, "grad_norm": 0.46171069145202637, "learning_rate": 6.094326329313275e-07, "loss": 0.7285, "step": 21189 }, { "epoch": 0.8781963612250818, "grad_norm": 0.4841157793998718, "learning_rate": 6.092254134029591e-07, "loss": 0.7439, "step": 21190 }, { "epoch": 0.8782378051307556, "grad_norm": 0.3994734287261963, "learning_rate": 6.090181938745908e-07, "loss": 0.6266, "step": 21191 }, { "epoch": 0.8782792490364292, "grad_norm": 0.4370063543319702, "learning_rate": 6.088109743462224e-07, "loss": 0.7087, "step": 21192 }, { "epoch": 0.8783206929421029, "grad_norm": 0.4016975164413452, "learning_rate": 6.086037548178541e-07, "loss": 0.6544, "step": 21193 }, { "epoch": 0.8783621368477765, "grad_norm": 0.43727266788482666, "learning_rate": 6.083965352894857e-07, "loss": 0.7144, "step": 21194 }, { "epoch": 0.8784035807534502, "grad_norm": 0.4964316189289093, "learning_rate": 6.081893157611174e-07, "loss": 0.7661, "step": 21195 }, { "epoch": 0.8784450246591239, "grad_norm": 0.44300737977027893, "learning_rate": 6.07982096232749e-07, "loss": 0.6588, "step": 21196 }, { "epoch": 0.8784864685647975, "grad_norm": 0.3880154490470886, "learning_rate": 6.077748767043807e-07, "loss": 0.6753, "step": 21197 }, { "epoch": 0.8785279124704712, "grad_norm": 0.39382079243659973, "learning_rate": 6.075676571760123e-07, "loss": 0.6309, "step": 21198 }, { "epoch": 
0.8785693563761449, "grad_norm": 0.4457118809223175, "learning_rate": 6.07360437647644e-07, "loss": 0.673, "step": 21199 }, { "epoch": 0.8786108002818186, "grad_norm": 0.4266894459724426, "learning_rate": 6.071532181192756e-07, "loss": 0.6327, "step": 21200 }, { "epoch": 0.8786522441874922, "grad_norm": 0.3981444835662842, "learning_rate": 6.069459985909073e-07, "loss": 0.6548, "step": 21201 }, { "epoch": 0.878693688093166, "grad_norm": 0.4332673251628876, "learning_rate": 6.067387790625389e-07, "loss": 0.6472, "step": 21202 }, { "epoch": 0.8787351319988396, "grad_norm": 0.40309593081474304, "learning_rate": 6.065315595341705e-07, "loss": 0.6715, "step": 21203 }, { "epoch": 0.8787765759045132, "grad_norm": 0.43588098883628845, "learning_rate": 6.063243400058022e-07, "loss": 0.6614, "step": 21204 }, { "epoch": 0.8788180198101869, "grad_norm": 0.3771733343601227, "learning_rate": 6.061171204774338e-07, "loss": 0.6388, "step": 21205 }, { "epoch": 0.8788594637158605, "grad_norm": 0.4000172019004822, "learning_rate": 6.059099009490655e-07, "loss": 0.6332, "step": 21206 }, { "epoch": 0.8789009076215343, "grad_norm": 0.4109748601913452, "learning_rate": 6.057026814206971e-07, "loss": 0.6387, "step": 21207 }, { "epoch": 0.8789423515272079, "grad_norm": 0.4211612641811371, "learning_rate": 6.054954618923288e-07, "loss": 0.6509, "step": 21208 }, { "epoch": 0.8789837954328816, "grad_norm": 0.41582900285720825, "learning_rate": 6.052882423639604e-07, "loss": 0.7034, "step": 21209 }, { "epoch": 0.8790252393385553, "grad_norm": 0.3982144892215729, "learning_rate": 6.050810228355921e-07, "loss": 0.5945, "step": 21210 }, { "epoch": 0.879066683244229, "grad_norm": 0.40995556116104126, "learning_rate": 6.048738033072237e-07, "loss": 0.6763, "step": 21211 }, { "epoch": 0.8791081271499026, "grad_norm": 0.5177196860313416, "learning_rate": 6.046665837788554e-07, "loss": 0.7014, "step": 21212 }, { "epoch": 0.8791495710555762, "grad_norm": 0.38519400358200073, "learning_rate": 
6.04459364250487e-07, "loss": 0.6329, "step": 21213 }, { "epoch": 0.87919101496125, "grad_norm": 0.3544653058052063, "learning_rate": 6.042521447221186e-07, "loss": 0.5948, "step": 21214 }, { "epoch": 0.8792324588669236, "grad_norm": 0.41371381282806396, "learning_rate": 6.040449251937503e-07, "loss": 0.6869, "step": 21215 }, { "epoch": 0.8792739027725973, "grad_norm": 0.3963320851325989, "learning_rate": 6.038377056653819e-07, "loss": 0.6578, "step": 21216 }, { "epoch": 0.8793153466782709, "grad_norm": 0.44351184368133545, "learning_rate": 6.036304861370136e-07, "loss": 0.663, "step": 21217 }, { "epoch": 0.8793567905839447, "grad_norm": 0.42671236395835876, "learning_rate": 6.034232666086452e-07, "loss": 0.6646, "step": 21218 }, { "epoch": 0.8793982344896183, "grad_norm": 0.41453787684440613, "learning_rate": 6.032160470802769e-07, "loss": 0.6863, "step": 21219 }, { "epoch": 0.879439678395292, "grad_norm": 0.4558846652507782, "learning_rate": 6.030088275519085e-07, "loss": 0.6976, "step": 21220 }, { "epoch": 0.8794811223009656, "grad_norm": 0.3961840569972992, "learning_rate": 6.028016080235402e-07, "loss": 0.6587, "step": 21221 }, { "epoch": 0.8795225662066393, "grad_norm": 0.397318571805954, "learning_rate": 6.025943884951718e-07, "loss": 0.62, "step": 21222 }, { "epoch": 0.879564010112313, "grad_norm": 0.42686906456947327, "learning_rate": 6.023871689668035e-07, "loss": 0.6252, "step": 21223 }, { "epoch": 0.8796054540179866, "grad_norm": 0.3899343013763428, "learning_rate": 6.021799494384351e-07, "loss": 0.64, "step": 21224 }, { "epoch": 0.8796468979236604, "grad_norm": 0.4354437589645386, "learning_rate": 6.019727299100668e-07, "loss": 0.6409, "step": 21225 }, { "epoch": 0.879688341829334, "grad_norm": 0.3970591127872467, "learning_rate": 6.017655103816984e-07, "loss": 0.6385, "step": 21226 }, { "epoch": 0.8797297857350077, "grad_norm": 0.41292405128479004, "learning_rate": 6.0155829085333e-07, "loss": 0.6653, "step": 21227 }, { "epoch": 0.8797712296406813, 
"grad_norm": 0.42619726061820984, "learning_rate": 6.013510713249617e-07, "loss": 0.6836, "step": 21228 }, { "epoch": 0.8798126735463551, "grad_norm": 0.4213118851184845, "learning_rate": 6.011438517965933e-07, "loss": 0.6665, "step": 21229 }, { "epoch": 0.8798541174520287, "grad_norm": 0.4193699359893799, "learning_rate": 6.00936632268225e-07, "loss": 0.7043, "step": 21230 }, { "epoch": 0.8798955613577023, "grad_norm": 0.4275221526622772, "learning_rate": 6.007294127398566e-07, "loss": 0.5917, "step": 21231 }, { "epoch": 0.879937005263376, "grad_norm": 0.4113697409629822, "learning_rate": 6.005221932114883e-07, "loss": 0.6556, "step": 21232 }, { "epoch": 0.8799784491690497, "grad_norm": 0.401626318693161, "learning_rate": 6.003149736831199e-07, "loss": 0.6436, "step": 21233 }, { "epoch": 0.8800198930747234, "grad_norm": 0.42154234647750854, "learning_rate": 6.001077541547516e-07, "loss": 0.6644, "step": 21234 }, { "epoch": 0.880061336980397, "grad_norm": 0.40765756368637085, "learning_rate": 5.999005346263832e-07, "loss": 0.6636, "step": 21235 }, { "epoch": 0.8801027808860707, "grad_norm": 0.4323737323284149, "learning_rate": 5.996933150980149e-07, "loss": 0.6565, "step": 21236 }, { "epoch": 0.8801442247917444, "grad_norm": 0.42075368762016296, "learning_rate": 5.994860955696465e-07, "loss": 0.655, "step": 21237 }, { "epoch": 0.8801856686974181, "grad_norm": 0.4402667284011841, "learning_rate": 5.992788760412782e-07, "loss": 0.6195, "step": 21238 }, { "epoch": 0.8802271126030917, "grad_norm": 0.4020743668079376, "learning_rate": 5.990716565129098e-07, "loss": 0.6056, "step": 21239 }, { "epoch": 0.8802685565087653, "grad_norm": 0.4630289077758789, "learning_rate": 5.988644369845414e-07, "loss": 0.6586, "step": 21240 }, { "epoch": 0.8803100004144391, "grad_norm": 0.4193219244480133, "learning_rate": 5.986572174561731e-07, "loss": 0.6516, "step": 21241 }, { "epoch": 0.8803514443201127, "grad_norm": 0.4290623962879181, "learning_rate": 5.984499979278047e-07, "loss": 
0.6736, "step": 21242 }, { "epoch": 0.8803928882257864, "grad_norm": 0.43410077691078186, "learning_rate": 5.982427783994364e-07, "loss": 0.6534, "step": 21243 }, { "epoch": 0.88043433213146, "grad_norm": 0.40461185574531555, "learning_rate": 5.98035558871068e-07, "loss": 0.6373, "step": 21244 }, { "epoch": 0.8804757760371338, "grad_norm": 0.40267953276634216, "learning_rate": 5.978283393426997e-07, "loss": 0.6079, "step": 21245 }, { "epoch": 0.8805172199428074, "grad_norm": 0.4229053556919098, "learning_rate": 5.976211198143313e-07, "loss": 0.7283, "step": 21246 }, { "epoch": 0.880558663848481, "grad_norm": 0.4041632115840912, "learning_rate": 5.97413900285963e-07, "loss": 0.6349, "step": 21247 }, { "epoch": 0.8806001077541548, "grad_norm": 0.40068912506103516, "learning_rate": 5.972066807575946e-07, "loss": 0.6384, "step": 21248 }, { "epoch": 0.8806415516598284, "grad_norm": 0.49801430106163025, "learning_rate": 5.969994612292263e-07, "loss": 0.7448, "step": 21249 }, { "epoch": 0.8806829955655021, "grad_norm": 0.39630112051963806, "learning_rate": 5.967922417008579e-07, "loss": 0.6049, "step": 21250 }, { "epoch": 0.8807244394711757, "grad_norm": 0.4059813320636749, "learning_rate": 5.965850221724896e-07, "loss": 0.6573, "step": 21251 }, { "epoch": 0.8807658833768495, "grad_norm": 0.41820117831230164, "learning_rate": 5.963778026441212e-07, "loss": 0.6666, "step": 21252 }, { "epoch": 0.8808073272825231, "grad_norm": 0.43901243805885315, "learning_rate": 5.961705831157529e-07, "loss": 0.6279, "step": 21253 }, { "epoch": 0.8808487711881968, "grad_norm": 0.4226698875427246, "learning_rate": 5.959633635873845e-07, "loss": 0.6538, "step": 21254 }, { "epoch": 0.8808902150938704, "grad_norm": 0.44338512420654297, "learning_rate": 5.957561440590162e-07, "loss": 0.6517, "step": 21255 }, { "epoch": 0.8809316589995441, "grad_norm": 0.4916036128997803, "learning_rate": 5.955489245306478e-07, "loss": 0.6963, "step": 21256 }, { "epoch": 0.8809731029052178, "grad_norm": 
0.39940357208251953, "learning_rate": 5.953417050022795e-07, "loss": 0.6791, "step": 21257 }, { "epoch": 0.8810145468108914, "grad_norm": 0.43063634634017944, "learning_rate": 5.951344854739111e-07, "loss": 0.6755, "step": 21258 }, { "epoch": 0.8810559907165652, "grad_norm": 0.4553086757659912, "learning_rate": 5.949272659455427e-07, "loss": 0.6721, "step": 21259 }, { "epoch": 0.8810974346222388, "grad_norm": 0.4346065819263458, "learning_rate": 5.947200464171744e-07, "loss": 0.6426, "step": 21260 }, { "epoch": 0.8811388785279125, "grad_norm": 0.45353439450263977, "learning_rate": 5.94512826888806e-07, "loss": 0.7029, "step": 21261 }, { "epoch": 0.8811803224335861, "grad_norm": 0.4067379832267761, "learning_rate": 5.943056073604377e-07, "loss": 0.6959, "step": 21262 }, { "epoch": 0.8812217663392599, "grad_norm": 0.41099658608436584, "learning_rate": 5.940983878320693e-07, "loss": 0.6663, "step": 21263 }, { "epoch": 0.8812632102449335, "grad_norm": 0.4050459563732147, "learning_rate": 5.93891168303701e-07, "loss": 0.676, "step": 21264 }, { "epoch": 0.8813046541506071, "grad_norm": 0.43283700942993164, "learning_rate": 5.936839487753326e-07, "loss": 0.7485, "step": 21265 }, { "epoch": 0.8813460980562808, "grad_norm": 0.40667301416397095, "learning_rate": 5.934767292469643e-07, "loss": 0.641, "step": 21266 }, { "epoch": 0.8813875419619545, "grad_norm": 0.3854729235172272, "learning_rate": 5.932695097185959e-07, "loss": 0.6256, "step": 21267 }, { "epoch": 0.8814289858676282, "grad_norm": 0.42123374342918396, "learning_rate": 5.930622901902276e-07, "loss": 0.6699, "step": 21268 }, { "epoch": 0.8814704297733018, "grad_norm": 0.39144134521484375, "learning_rate": 5.928550706618592e-07, "loss": 0.6841, "step": 21269 }, { "epoch": 0.8815118736789755, "grad_norm": 0.4290797710418701, "learning_rate": 5.926478511334909e-07, "loss": 0.7054, "step": 21270 }, { "epoch": 0.8815533175846492, "grad_norm": 0.4198606312274933, "learning_rate": 5.924406316051225e-07, "loss": 0.6694, 
"step": 21271 }, { "epoch": 0.8815947614903229, "grad_norm": 0.4345215857028961, "learning_rate": 5.922334120767542e-07, "loss": 0.7074, "step": 21272 }, { "epoch": 0.8816362053959965, "grad_norm": 0.400138795375824, "learning_rate": 5.920261925483858e-07, "loss": 0.6615, "step": 21273 }, { "epoch": 0.8816776493016701, "grad_norm": 0.40557560324668884, "learning_rate": 5.918189730200175e-07, "loss": 0.7346, "step": 21274 }, { "epoch": 0.8817190932073439, "grad_norm": 0.45335280895233154, "learning_rate": 5.916117534916491e-07, "loss": 0.6381, "step": 21275 }, { "epoch": 0.8817605371130175, "grad_norm": 0.41442054510116577, "learning_rate": 5.914045339632808e-07, "loss": 0.7017, "step": 21276 }, { "epoch": 0.8818019810186912, "grad_norm": 0.4143434762954712, "learning_rate": 5.911973144349124e-07, "loss": 0.6693, "step": 21277 }, { "epoch": 0.8818434249243648, "grad_norm": 0.3999609053134918, "learning_rate": 5.90990094906544e-07, "loss": 0.6685, "step": 21278 }, { "epoch": 0.8818848688300386, "grad_norm": 0.4353742301464081, "learning_rate": 5.907828753781757e-07, "loss": 0.688, "step": 21279 }, { "epoch": 0.8819263127357122, "grad_norm": 0.40500181913375854, "learning_rate": 5.905756558498073e-07, "loss": 0.6571, "step": 21280 }, { "epoch": 0.8819677566413859, "grad_norm": 0.3766523599624634, "learning_rate": 5.90368436321439e-07, "loss": 0.6405, "step": 21281 }, { "epoch": 0.8820092005470596, "grad_norm": 0.40084701776504517, "learning_rate": 5.901612167930706e-07, "loss": 0.6388, "step": 21282 }, { "epoch": 0.8820506444527332, "grad_norm": 0.39553922414779663, "learning_rate": 5.899539972647023e-07, "loss": 0.671, "step": 21283 }, { "epoch": 0.8820920883584069, "grad_norm": 0.4461310803890228, "learning_rate": 5.897467777363339e-07, "loss": 0.6897, "step": 21284 }, { "epoch": 0.8821335322640805, "grad_norm": 0.4045344591140747, "learning_rate": 5.895395582079656e-07, "loss": 0.656, "step": 21285 }, { "epoch": 0.8821749761697543, "grad_norm": 0.38338449597358704, 
"learning_rate": 5.893323386795972e-07, "loss": 0.6578, "step": 21286 }, { "epoch": 0.8822164200754279, "grad_norm": 0.4106886386871338, "learning_rate": 5.891251191512289e-07, "loss": 0.6997, "step": 21287 }, { "epoch": 0.8822578639811016, "grad_norm": 0.6928749680519104, "learning_rate": 5.889178996228605e-07, "loss": 0.6553, "step": 21288 }, { "epoch": 0.8822993078867752, "grad_norm": 0.4545588493347168, "learning_rate": 5.887106800944922e-07, "loss": 0.6865, "step": 21289 }, { "epoch": 0.882340751792449, "grad_norm": 0.48300936818122864, "learning_rate": 5.885034605661238e-07, "loss": 0.6462, "step": 21290 }, { "epoch": 0.8823821956981226, "grad_norm": 0.40506988763809204, "learning_rate": 5.882962410377555e-07, "loss": 0.6299, "step": 21291 }, { "epoch": 0.8824236396037962, "grad_norm": 0.4171604514122009, "learning_rate": 5.880890215093871e-07, "loss": 0.7, "step": 21292 }, { "epoch": 0.88246508350947, "grad_norm": 0.40383443236351013, "learning_rate": 5.878818019810188e-07, "loss": 0.674, "step": 21293 }, { "epoch": 0.8825065274151436, "grad_norm": 0.38713788986206055, "learning_rate": 5.876745824526504e-07, "loss": 0.6255, "step": 21294 }, { "epoch": 0.8825479713208173, "grad_norm": 0.4053410291671753, "learning_rate": 5.874673629242821e-07, "loss": 0.6407, "step": 21295 }, { "epoch": 0.8825894152264909, "grad_norm": 0.39805781841278076, "learning_rate": 5.872601433959137e-07, "loss": 0.6461, "step": 21296 }, { "epoch": 0.8826308591321647, "grad_norm": 0.43926721811294556, "learning_rate": 5.870529238675453e-07, "loss": 0.6709, "step": 21297 }, { "epoch": 0.8826723030378383, "grad_norm": 0.4555812180042267, "learning_rate": 5.86845704339177e-07, "loss": 0.6035, "step": 21298 }, { "epoch": 0.882713746943512, "grad_norm": 0.4007062315940857, "learning_rate": 5.866384848108086e-07, "loss": 0.6633, "step": 21299 }, { "epoch": 0.8827551908491856, "grad_norm": 0.3850429058074951, "learning_rate": 5.864312652824403e-07, "loss": 0.6608, "step": 21300 }, { "epoch": 
0.8827966347548593, "grad_norm": 0.45208337903022766, "learning_rate": 5.862240457540719e-07, "loss": 0.6851, "step": 21301 }, { "epoch": 0.882838078660533, "grad_norm": 0.4389796853065491, "learning_rate": 5.860168262257036e-07, "loss": 0.6719, "step": 21302 }, { "epoch": 0.8828795225662066, "grad_norm": 0.39704954624176025, "learning_rate": 5.858096066973352e-07, "loss": 0.6296, "step": 21303 }, { "epoch": 0.8829209664718803, "grad_norm": 0.4411863684654236, "learning_rate": 5.856023871689669e-07, "loss": 0.7302, "step": 21304 }, { "epoch": 0.882962410377554, "grad_norm": 0.3994826376438141, "learning_rate": 5.853951676405985e-07, "loss": 0.6729, "step": 21305 }, { "epoch": 0.8830038542832277, "grad_norm": 0.44235241413116455, "learning_rate": 5.851879481122302e-07, "loss": 0.6794, "step": 21306 }, { "epoch": 0.8830452981889013, "grad_norm": 0.4170055091381073, "learning_rate": 5.849807285838618e-07, "loss": 0.6632, "step": 21307 }, { "epoch": 0.8830867420945749, "grad_norm": 0.3976626992225647, "learning_rate": 5.847735090554935e-07, "loss": 0.6895, "step": 21308 }, { "epoch": 0.8831281860002487, "grad_norm": 0.42260727286338806, "learning_rate": 5.845662895271251e-07, "loss": 0.7009, "step": 21309 }, { "epoch": 0.8831696299059223, "grad_norm": 0.41299498081207275, "learning_rate": 5.843590699987568e-07, "loss": 0.6184, "step": 21310 }, { "epoch": 0.883211073811596, "grad_norm": 0.4086816608905792, "learning_rate": 5.841518504703884e-07, "loss": 0.673, "step": 21311 }, { "epoch": 0.8832525177172696, "grad_norm": 0.434195876121521, "learning_rate": 5.839446309420201e-07, "loss": 0.6509, "step": 21312 }, { "epoch": 0.8832939616229434, "grad_norm": 0.4236471652984619, "learning_rate": 5.837374114136517e-07, "loss": 0.7057, "step": 21313 }, { "epoch": 0.883335405528617, "grad_norm": 0.39274778962135315, "learning_rate": 5.835301918852833e-07, "loss": 0.6697, "step": 21314 }, { "epoch": 0.8833768494342907, "grad_norm": 0.38926005363464355, "learning_rate": 
5.83322972356915e-07, "loss": 0.6917, "step": 21315 }, { "epoch": 0.8834182933399644, "grad_norm": 0.4058479964733124, "learning_rate": 5.831157528285466e-07, "loss": 0.65, "step": 21316 }, { "epoch": 0.883459737245638, "grad_norm": 0.39445558190345764, "learning_rate": 5.829085333001783e-07, "loss": 0.6544, "step": 21317 }, { "epoch": 0.8835011811513117, "grad_norm": 0.4244757294654846, "learning_rate": 5.827013137718099e-07, "loss": 0.7065, "step": 21318 }, { "epoch": 0.8835426250569853, "grad_norm": 0.43243154883384705, "learning_rate": 5.824940942434416e-07, "loss": 0.6478, "step": 21319 }, { "epoch": 0.8835840689626591, "grad_norm": 0.3923470079898834, "learning_rate": 5.822868747150732e-07, "loss": 0.6278, "step": 21320 }, { "epoch": 0.8836255128683327, "grad_norm": 0.44109416007995605, "learning_rate": 5.820796551867049e-07, "loss": 0.6639, "step": 21321 }, { "epoch": 0.8836669567740064, "grad_norm": 0.42157456278800964, "learning_rate": 5.818724356583365e-07, "loss": 0.6692, "step": 21322 }, { "epoch": 0.88370840067968, "grad_norm": 0.4430670738220215, "learning_rate": 5.816652161299682e-07, "loss": 0.655, "step": 21323 }, { "epoch": 0.8837498445853538, "grad_norm": 0.43569067120552063, "learning_rate": 5.814579966015998e-07, "loss": 0.7205, "step": 21324 }, { "epoch": 0.8837912884910274, "grad_norm": 0.42636728286743164, "learning_rate": 5.812507770732314e-07, "loss": 0.6936, "step": 21325 }, { "epoch": 0.883832732396701, "grad_norm": 0.4302840530872345, "learning_rate": 5.810435575448631e-07, "loss": 0.7065, "step": 21326 }, { "epoch": 0.8838741763023747, "grad_norm": 0.4147132635116577, "learning_rate": 5.808363380164947e-07, "loss": 0.6721, "step": 21327 }, { "epoch": 0.8839156202080484, "grad_norm": 0.38858553767204285, "learning_rate": 5.806291184881264e-07, "loss": 0.63, "step": 21328 }, { "epoch": 0.8839570641137221, "grad_norm": 0.3891463279724121, "learning_rate": 5.80421898959758e-07, "loss": 0.6357, "step": 21329 }, { "epoch": 
0.8839985080193957, "grad_norm": 0.3993394672870636, "learning_rate": 5.802146794313897e-07, "loss": 0.64, "step": 21330 }, { "epoch": 0.8840399519250695, "grad_norm": 0.4401186406612396, "learning_rate": 5.800074599030213e-07, "loss": 0.6859, "step": 21331 }, { "epoch": 0.8840813958307431, "grad_norm": 0.4040793180465698, "learning_rate": 5.79800240374653e-07, "loss": 0.6753, "step": 21332 }, { "epoch": 0.8841228397364168, "grad_norm": 0.41444963216781616, "learning_rate": 5.795930208462846e-07, "loss": 0.646, "step": 21333 }, { "epoch": 0.8841642836420904, "grad_norm": 0.37345439195632935, "learning_rate": 5.793858013179163e-07, "loss": 0.6665, "step": 21334 }, { "epoch": 0.884205727547764, "grad_norm": 0.4315361976623535, "learning_rate": 5.791785817895479e-07, "loss": 0.7035, "step": 21335 }, { "epoch": 0.8842471714534378, "grad_norm": 0.3848472237586975, "learning_rate": 5.789713622611796e-07, "loss": 0.6428, "step": 21336 }, { "epoch": 0.8842886153591114, "grad_norm": 0.4175746738910675, "learning_rate": 5.787641427328112e-07, "loss": 0.6957, "step": 21337 }, { "epoch": 0.8843300592647851, "grad_norm": 0.3899771571159363, "learning_rate": 5.785569232044428e-07, "loss": 0.6012, "step": 21338 }, { "epoch": 0.8843715031704588, "grad_norm": 0.4082960784435272, "learning_rate": 5.783497036760745e-07, "loss": 0.6299, "step": 21339 }, { "epoch": 0.8844129470761325, "grad_norm": 0.40709570050239563, "learning_rate": 5.781424841477061e-07, "loss": 0.6084, "step": 21340 }, { "epoch": 0.8844543909818061, "grad_norm": 0.4515201449394226, "learning_rate": 5.779352646193378e-07, "loss": 0.6829, "step": 21341 }, { "epoch": 0.8844958348874798, "grad_norm": 0.4040144383907318, "learning_rate": 5.777280450909694e-07, "loss": 0.6584, "step": 21342 }, { "epoch": 0.8845372787931535, "grad_norm": 0.4204562306404114, "learning_rate": 5.775208255626011e-07, "loss": 0.6538, "step": 21343 }, { "epoch": 0.8845787226988271, "grad_norm": 0.41292986273765564, "learning_rate": 
5.773136060342327e-07, "loss": 0.662, "step": 21344 }, { "epoch": 0.8846201666045008, "grad_norm": 0.4320838749408722, "learning_rate": 5.771063865058644e-07, "loss": 0.6692, "step": 21345 }, { "epoch": 0.8846616105101744, "grad_norm": 0.4192841053009033, "learning_rate": 5.76899166977496e-07, "loss": 0.6758, "step": 21346 }, { "epoch": 0.8847030544158482, "grad_norm": 0.40273743867874146, "learning_rate": 5.766919474491277e-07, "loss": 0.6781, "step": 21347 }, { "epoch": 0.8847444983215218, "grad_norm": 0.409249871969223, "learning_rate": 5.764847279207593e-07, "loss": 0.6335, "step": 21348 }, { "epoch": 0.8847859422271955, "grad_norm": 0.41869544982910156, "learning_rate": 5.76277508392391e-07, "loss": 0.7096, "step": 21349 }, { "epoch": 0.8848273861328692, "grad_norm": 0.41468721628189087, "learning_rate": 5.760702888640226e-07, "loss": 0.6143, "step": 21350 }, { "epoch": 0.8848688300385429, "grad_norm": 0.3962930738925934, "learning_rate": 5.758630693356542e-07, "loss": 0.6349, "step": 21351 }, { "epoch": 0.8849102739442165, "grad_norm": 0.40675088763237, "learning_rate": 5.756558498072859e-07, "loss": 0.6156, "step": 21352 }, { "epoch": 0.8849517178498901, "grad_norm": 0.4155443608760834, "learning_rate": 5.754486302789175e-07, "loss": 0.6562, "step": 21353 }, { "epoch": 0.8849931617555639, "grad_norm": 0.4477280080318451, "learning_rate": 5.752414107505492e-07, "loss": 0.6943, "step": 21354 }, { "epoch": 0.8850346056612375, "grad_norm": 0.4371253550052643, "learning_rate": 5.750341912221808e-07, "loss": 0.6763, "step": 21355 }, { "epoch": 0.8850760495669112, "grad_norm": 0.3861115574836731, "learning_rate": 5.748269716938125e-07, "loss": 0.6855, "step": 21356 }, { "epoch": 0.8851174934725848, "grad_norm": 0.41206416487693787, "learning_rate": 5.746197521654441e-07, "loss": 0.6265, "step": 21357 }, { "epoch": 0.8851589373782586, "grad_norm": 0.42784106731414795, "learning_rate": 5.744125326370758e-07, "loss": 0.6604, "step": 21358 }, { "epoch": 
0.8852003812839322, "grad_norm": 0.4599573314189911, "learning_rate": 5.742053131087074e-07, "loss": 0.6892, "step": 21359 }, { "epoch": 0.8852418251896058, "grad_norm": 0.426604300737381, "learning_rate": 5.739980935803391e-07, "loss": 0.6624, "step": 21360 }, { "epoch": 0.8852832690952795, "grad_norm": 0.40245509147644043, "learning_rate": 5.737908740519707e-07, "loss": 0.6777, "step": 21361 }, { "epoch": 0.8853247130009532, "grad_norm": 0.4394230842590332, "learning_rate": 5.735836545236023e-07, "loss": 0.7209, "step": 21362 }, { "epoch": 0.8853661569066269, "grad_norm": 0.41601940989494324, "learning_rate": 5.73376434995234e-07, "loss": 0.6803, "step": 21363 }, { "epoch": 0.8854076008123005, "grad_norm": 0.4124889075756073, "learning_rate": 5.731692154668656e-07, "loss": 0.7029, "step": 21364 }, { "epoch": 0.8854490447179743, "grad_norm": 0.41583630442619324, "learning_rate": 5.729619959384973e-07, "loss": 0.6561, "step": 21365 }, { "epoch": 0.8854904886236479, "grad_norm": 0.47018930315971375, "learning_rate": 5.727547764101289e-07, "loss": 0.7701, "step": 21366 }, { "epoch": 0.8855319325293216, "grad_norm": 0.4338330328464508, "learning_rate": 5.725475568817606e-07, "loss": 0.6775, "step": 21367 }, { "epoch": 0.8855733764349952, "grad_norm": 0.43430954217910767, "learning_rate": 5.723403373533922e-07, "loss": 0.7552, "step": 21368 }, { "epoch": 0.8856148203406689, "grad_norm": 0.42488640546798706, "learning_rate": 5.721331178250239e-07, "loss": 0.6528, "step": 21369 }, { "epoch": 0.8856562642463426, "grad_norm": 0.4809923768043518, "learning_rate": 5.719258982966555e-07, "loss": 0.6588, "step": 21370 }, { "epoch": 0.8856977081520162, "grad_norm": 0.4260718822479248, "learning_rate": 5.717186787682872e-07, "loss": 0.6392, "step": 21371 }, { "epoch": 0.8857391520576899, "grad_norm": 0.42809367179870605, "learning_rate": 5.715114592399188e-07, "loss": 0.686, "step": 21372 }, { "epoch": 0.8857805959633636, "grad_norm": 0.4245171546936035, "learning_rate": 
5.713042397115505e-07, "loss": 0.6946, "step": 21373 }, { "epoch": 0.8858220398690373, "grad_norm": 0.44846656918525696, "learning_rate": 5.710970201831821e-07, "loss": 0.6609, "step": 21374 }, { "epoch": 0.8858634837747109, "grad_norm": 0.44118067622184753, "learning_rate": 5.708898006548137e-07, "loss": 0.6614, "step": 21375 }, { "epoch": 0.8859049276803846, "grad_norm": 0.42059674859046936, "learning_rate": 5.706825811264454e-07, "loss": 0.6552, "step": 21376 }, { "epoch": 0.8859463715860583, "grad_norm": 0.4402751326560974, "learning_rate": 5.70475361598077e-07, "loss": 0.6614, "step": 21377 }, { "epoch": 0.8859878154917319, "grad_norm": 0.40249964594841003, "learning_rate": 5.702681420697087e-07, "loss": 0.6755, "step": 21378 }, { "epoch": 0.8860292593974056, "grad_norm": 0.4175712466239929, "learning_rate": 5.700609225413403e-07, "loss": 0.6201, "step": 21379 }, { "epoch": 0.8860707033030792, "grad_norm": 0.4190959930419922, "learning_rate": 5.69853703012972e-07, "loss": 0.6951, "step": 21380 }, { "epoch": 0.886112147208753, "grad_norm": 0.4328671395778656, "learning_rate": 5.696464834846036e-07, "loss": 0.6935, "step": 21381 }, { "epoch": 0.8861535911144266, "grad_norm": 0.4444023370742798, "learning_rate": 5.694392639562353e-07, "loss": 0.7029, "step": 21382 }, { "epoch": 0.8861950350201003, "grad_norm": 0.3819735646247864, "learning_rate": 5.692320444278669e-07, "loss": 0.6147, "step": 21383 }, { "epoch": 0.886236478925774, "grad_norm": 0.41260862350463867, "learning_rate": 5.690248248994986e-07, "loss": 0.6453, "step": 21384 }, { "epoch": 0.8862779228314477, "grad_norm": 0.4013561010360718, "learning_rate": 5.688176053711302e-07, "loss": 0.6814, "step": 21385 }, { "epoch": 0.8863193667371213, "grad_norm": 0.4620875418186188, "learning_rate": 5.686103858427619e-07, "loss": 0.7476, "step": 21386 }, { "epoch": 0.8863608106427949, "grad_norm": 0.4387966990470886, "learning_rate": 5.684031663143935e-07, "loss": 0.671, "step": 21387 }, { "epoch": 
0.8864022545484687, "grad_norm": 0.40118181705474854, "learning_rate": 5.681959467860251e-07, "loss": 0.6296, "step": 21388 }, { "epoch": 0.8864436984541423, "grad_norm": 0.426576167345047, "learning_rate": 5.679887272576568e-07, "loss": 0.6582, "step": 21389 }, { "epoch": 0.886485142359816, "grad_norm": 0.3915785551071167, "learning_rate": 5.677815077292884e-07, "loss": 0.6382, "step": 21390 }, { "epoch": 0.8865265862654896, "grad_norm": 0.4120689034461975, "learning_rate": 5.675742882009201e-07, "loss": 0.6677, "step": 21391 }, { "epoch": 0.8865680301711634, "grad_norm": 0.41568928956985474, "learning_rate": 5.673670686725517e-07, "loss": 0.6726, "step": 21392 }, { "epoch": 0.886609474076837, "grad_norm": 0.4572696089744568, "learning_rate": 5.671598491441834e-07, "loss": 0.6711, "step": 21393 }, { "epoch": 0.8866509179825107, "grad_norm": 0.4576156735420227, "learning_rate": 5.66952629615815e-07, "loss": 0.6656, "step": 21394 }, { "epoch": 0.8866923618881843, "grad_norm": 0.42794856429100037, "learning_rate": 5.667454100874467e-07, "loss": 0.6865, "step": 21395 }, { "epoch": 0.886733805793858, "grad_norm": 0.42485445737838745, "learning_rate": 5.665381905590783e-07, "loss": 0.666, "step": 21396 }, { "epoch": 0.8867752496995317, "grad_norm": 0.40749481320381165, "learning_rate": 5.6633097103071e-07, "loss": 0.7169, "step": 21397 }, { "epoch": 0.8868166936052053, "grad_norm": 0.43550944328308105, "learning_rate": 5.661237515023416e-07, "loss": 0.6998, "step": 21398 }, { "epoch": 0.886858137510879, "grad_norm": 0.3828023672103882, "learning_rate": 5.659165319739732e-07, "loss": 0.647, "step": 21399 }, { "epoch": 0.8868995814165527, "grad_norm": 0.4325985908508301, "learning_rate": 5.657093124456049e-07, "loss": 0.6487, "step": 21400 }, { "epoch": 0.8869410253222264, "grad_norm": 0.46142271161079407, "learning_rate": 5.655020929172365e-07, "loss": 0.6792, "step": 21401 }, { "epoch": 0.8869824692279, "grad_norm": 0.4016042649745941, "learning_rate": 
5.652948733888682e-07, "loss": 0.6659, "step": 21402 }, { "epoch": 0.8870239131335738, "grad_norm": 0.41583186388015747, "learning_rate": 5.650876538604998e-07, "loss": 0.6611, "step": 21403 }, { "epoch": 0.8870653570392474, "grad_norm": 0.4193524420261383, "learning_rate": 5.648804343321315e-07, "loss": 0.674, "step": 21404 }, { "epoch": 0.887106800944921, "grad_norm": 0.4389016926288605, "learning_rate": 5.646732148037631e-07, "loss": 0.6627, "step": 21405 }, { "epoch": 0.8871482448505947, "grad_norm": 0.41006413102149963, "learning_rate": 5.644659952753948e-07, "loss": 0.6987, "step": 21406 }, { "epoch": 0.8871896887562684, "grad_norm": 0.4075542986392975, "learning_rate": 5.642587757470264e-07, "loss": 0.6519, "step": 21407 }, { "epoch": 0.8872311326619421, "grad_norm": 0.3941805064678192, "learning_rate": 5.640515562186581e-07, "loss": 0.6753, "step": 21408 }, { "epoch": 0.8872725765676157, "grad_norm": 0.41101667284965515, "learning_rate": 5.638443366902897e-07, "loss": 0.6588, "step": 21409 }, { "epoch": 0.8873140204732894, "grad_norm": 0.4959043562412262, "learning_rate": 5.636371171619214e-07, "loss": 0.6927, "step": 21410 }, { "epoch": 0.8873554643789631, "grad_norm": 0.4106086194515228, "learning_rate": 5.63429897633553e-07, "loss": 0.6764, "step": 21411 }, { "epoch": 0.8873969082846368, "grad_norm": 0.4185197055339813, "learning_rate": 5.632226781051846e-07, "loss": 0.6982, "step": 21412 }, { "epoch": 0.8874383521903104, "grad_norm": 0.4126123785972595, "learning_rate": 5.630154585768163e-07, "loss": 0.6282, "step": 21413 }, { "epoch": 0.887479796095984, "grad_norm": 0.4600255787372589, "learning_rate": 5.628082390484479e-07, "loss": 0.6646, "step": 21414 }, { "epoch": 0.8875212400016578, "grad_norm": 0.42496591806411743, "learning_rate": 5.626010195200796e-07, "loss": 0.672, "step": 21415 }, { "epoch": 0.8875626839073314, "grad_norm": 0.42459532618522644, "learning_rate": 5.623937999917112e-07, "loss": 0.6699, "step": 21416 }, { "epoch": 
0.8876041278130051, "grad_norm": 0.4087842106819153, "learning_rate": 5.621865804633429e-07, "loss": 0.6676, "step": 21417 }, { "epoch": 0.8876455717186787, "grad_norm": 0.41371971368789673, "learning_rate": 5.619793609349745e-07, "loss": 0.6721, "step": 21418 }, { "epoch": 0.8876870156243525, "grad_norm": 0.44439592957496643, "learning_rate": 5.617721414066062e-07, "loss": 0.6731, "step": 21419 }, { "epoch": 0.8877284595300261, "grad_norm": 0.4632638096809387, "learning_rate": 5.615649218782378e-07, "loss": 0.7107, "step": 21420 }, { "epoch": 0.8877699034356997, "grad_norm": 0.4751431345939636, "learning_rate": 5.613577023498695e-07, "loss": 0.7124, "step": 21421 }, { "epoch": 0.8878113473413735, "grad_norm": 0.4198697805404663, "learning_rate": 5.611504828215011e-07, "loss": 0.7062, "step": 21422 }, { "epoch": 0.8878527912470471, "grad_norm": 0.4256114363670349, "learning_rate": 5.609432632931328e-07, "loss": 0.6487, "step": 21423 }, { "epoch": 0.8878942351527208, "grad_norm": 0.44787174463272095, "learning_rate": 5.607360437647644e-07, "loss": 0.7546, "step": 21424 }, { "epoch": 0.8879356790583944, "grad_norm": 0.4393247067928314, "learning_rate": 5.60528824236396e-07, "loss": 0.6642, "step": 21425 }, { "epoch": 0.8879771229640682, "grad_norm": 0.40169140696525574, "learning_rate": 5.603216047080277e-07, "loss": 0.6677, "step": 21426 }, { "epoch": 0.8880185668697418, "grad_norm": 0.43607109785079956, "learning_rate": 5.601143851796593e-07, "loss": 0.6838, "step": 21427 }, { "epoch": 0.8880600107754155, "grad_norm": 0.40074434876441956, "learning_rate": 5.59907165651291e-07, "loss": 0.6422, "step": 21428 }, { "epoch": 0.8881014546810891, "grad_norm": 0.42218637466430664, "learning_rate": 5.596999461229226e-07, "loss": 0.6511, "step": 21429 }, { "epoch": 0.8881428985867628, "grad_norm": 0.4322926700115204, "learning_rate": 5.594927265945543e-07, "loss": 0.7112, "step": 21430 }, { "epoch": 0.8881843424924365, "grad_norm": 0.3780708611011505, "learning_rate": 
5.592855070661859e-07, "loss": 0.6172, "step": 21431 }, { "epoch": 0.8882257863981101, "grad_norm": 0.4227933883666992, "learning_rate": 5.590782875378176e-07, "loss": 0.6642, "step": 21432 }, { "epoch": 0.8882672303037839, "grad_norm": 0.40627148747444153, "learning_rate": 5.588710680094492e-07, "loss": 0.67, "step": 21433 }, { "epoch": 0.8883086742094575, "grad_norm": 0.4355160593986511, "learning_rate": 5.586638484810809e-07, "loss": 0.6846, "step": 21434 }, { "epoch": 0.8883501181151312, "grad_norm": 0.4284313917160034, "learning_rate": 5.584566289527125e-07, "loss": 0.6895, "step": 21435 }, { "epoch": 0.8883915620208048, "grad_norm": 0.4165712893009186, "learning_rate": 5.582494094243441e-07, "loss": 0.6506, "step": 21436 }, { "epoch": 0.8884330059264786, "grad_norm": 0.4220913052558899, "learning_rate": 5.580421898959758e-07, "loss": 0.6646, "step": 21437 }, { "epoch": 0.8884744498321522, "grad_norm": 0.47515106201171875, "learning_rate": 5.578349703676074e-07, "loss": 0.7295, "step": 21438 }, { "epoch": 0.8885158937378258, "grad_norm": 0.42370641231536865, "learning_rate": 5.576277508392391e-07, "loss": 0.7219, "step": 21439 }, { "epoch": 0.8885573376434995, "grad_norm": 0.3939555585384369, "learning_rate": 5.574205313108707e-07, "loss": 0.6401, "step": 21440 }, { "epoch": 0.8885987815491732, "grad_norm": 0.43276670575141907, "learning_rate": 5.572133117825024e-07, "loss": 0.6936, "step": 21441 }, { "epoch": 0.8886402254548469, "grad_norm": 0.4184841215610504, "learning_rate": 5.57006092254134e-07, "loss": 0.6854, "step": 21442 }, { "epoch": 0.8886816693605205, "grad_norm": 0.4263876974582672, "learning_rate": 5.567988727257657e-07, "loss": 0.6948, "step": 21443 }, { "epoch": 0.8887231132661942, "grad_norm": 0.40944844484329224, "learning_rate": 5.565916531973973e-07, "loss": 0.6592, "step": 21444 }, { "epoch": 0.8887645571718679, "grad_norm": 0.39088836312294006, "learning_rate": 5.56384433669029e-07, "loss": 0.6974, "step": 21445 }, { "epoch": 
0.8888060010775416, "grad_norm": 0.42904531955718994, "learning_rate": 5.561772141406606e-07, "loss": 0.6456, "step": 21446 }, { "epoch": 0.8888474449832152, "grad_norm": 0.4065399169921875, "learning_rate": 5.559699946122923e-07, "loss": 0.6094, "step": 21447 }, { "epoch": 0.8888888888888888, "grad_norm": 0.46251675486564636, "learning_rate": 5.557627750839239e-07, "loss": 0.6592, "step": 21448 }, { "epoch": 0.8889303327945626, "grad_norm": 0.4851667284965515, "learning_rate": 5.555555555555555e-07, "loss": 0.6625, "step": 21449 }, { "epoch": 0.8889717767002362, "grad_norm": 0.4046773314476013, "learning_rate": 5.553483360271872e-07, "loss": 0.6942, "step": 21450 }, { "epoch": 0.8890132206059099, "grad_norm": 0.38640618324279785, "learning_rate": 5.551411164988188e-07, "loss": 0.6624, "step": 21451 }, { "epoch": 0.8890546645115835, "grad_norm": 0.4353013336658478, "learning_rate": 5.549338969704505e-07, "loss": 0.6771, "step": 21452 }, { "epoch": 0.8890961084172573, "grad_norm": 0.410611629486084, "learning_rate": 5.547266774420821e-07, "loss": 0.657, "step": 21453 }, { "epoch": 0.8891375523229309, "grad_norm": 0.3647153377532959, "learning_rate": 5.545194579137138e-07, "loss": 0.5778, "step": 21454 }, { "epoch": 0.8891789962286046, "grad_norm": 0.44402986764907837, "learning_rate": 5.543122383853454e-07, "loss": 0.7234, "step": 21455 }, { "epoch": 0.8892204401342783, "grad_norm": 0.470130056142807, "learning_rate": 5.541050188569771e-07, "loss": 0.6587, "step": 21456 }, { "epoch": 0.8892618840399519, "grad_norm": 0.4010375440120697, "learning_rate": 5.538977993286087e-07, "loss": 0.6627, "step": 21457 }, { "epoch": 0.8893033279456256, "grad_norm": 0.4176090359687805, "learning_rate": 5.536905798002404e-07, "loss": 0.6447, "step": 21458 }, { "epoch": 0.8893447718512992, "grad_norm": 0.4478818476200104, "learning_rate": 5.53483360271872e-07, "loss": 0.651, "step": 21459 }, { "epoch": 0.889386215756973, "grad_norm": 0.415207177400589, "learning_rate": 
5.532761407435037e-07, "loss": 0.6689, "step": 21460 }, { "epoch": 0.8894276596626466, "grad_norm": 0.48885300755500793, "learning_rate": 5.530689212151353e-07, "loss": 0.7124, "step": 21461 }, { "epoch": 0.8894691035683203, "grad_norm": 0.38643890619277954, "learning_rate": 5.52861701686767e-07, "loss": 0.6476, "step": 21462 }, { "epoch": 0.8895105474739939, "grad_norm": 0.3775377869606018, "learning_rate": 5.526544821583986e-07, "loss": 0.6235, "step": 21463 }, { "epoch": 0.8895519913796677, "grad_norm": 0.41830676794052124, "learning_rate": 5.524472626300303e-07, "loss": 0.6716, "step": 21464 }, { "epoch": 0.8895934352853413, "grad_norm": 0.413295716047287, "learning_rate": 5.522400431016619e-07, "loss": 0.7122, "step": 21465 }, { "epoch": 0.8896348791910149, "grad_norm": 0.4661704897880554, "learning_rate": 5.520328235732936e-07, "loss": 0.7109, "step": 21466 }, { "epoch": 0.8896763230966886, "grad_norm": 0.4171830117702484, "learning_rate": 5.518256040449252e-07, "loss": 0.7448, "step": 21467 }, { "epoch": 0.8897177670023623, "grad_norm": 0.4147372841835022, "learning_rate": 5.516183845165568e-07, "loss": 0.6704, "step": 21468 }, { "epoch": 0.889759210908036, "grad_norm": 0.41747069358825684, "learning_rate": 5.514111649881885e-07, "loss": 0.6707, "step": 21469 }, { "epoch": 0.8898006548137096, "grad_norm": 0.4109131395816803, "learning_rate": 5.512039454598201e-07, "loss": 0.71, "step": 21470 }, { "epoch": 0.8898420987193834, "grad_norm": 0.4317873418331146, "learning_rate": 5.509967259314518e-07, "loss": 0.6814, "step": 21471 }, { "epoch": 0.889883542625057, "grad_norm": 0.433786004781723, "learning_rate": 5.507895064030834e-07, "loss": 0.6746, "step": 21472 }, { "epoch": 0.8899249865307307, "grad_norm": 0.42062774300575256, "learning_rate": 5.505822868747151e-07, "loss": 0.6777, "step": 21473 }, { "epoch": 0.8899664304364043, "grad_norm": 0.40893206000328064, "learning_rate": 5.503750673463467e-07, "loss": 0.6804, "step": 21474 }, { "epoch": 
0.890007874342078, "grad_norm": 0.412617027759552, "learning_rate": 5.501678478179784e-07, "loss": 0.646, "step": 21475 }, { "epoch": 0.8900493182477517, "grad_norm": 0.41835370659828186, "learning_rate": 5.4996062828961e-07, "loss": 0.6543, "step": 21476 }, { "epoch": 0.8900907621534253, "grad_norm": 0.40206417441368103, "learning_rate": 5.497534087612417e-07, "loss": 0.6427, "step": 21477 }, { "epoch": 0.890132206059099, "grad_norm": 0.38961368799209595, "learning_rate": 5.495461892328733e-07, "loss": 0.6504, "step": 21478 }, { "epoch": 0.8901736499647727, "grad_norm": 0.39861997961997986, "learning_rate": 5.49338969704505e-07, "loss": 0.655, "step": 21479 }, { "epoch": 0.8902150938704464, "grad_norm": 0.3833748996257782, "learning_rate": 5.491317501761366e-07, "loss": 0.6387, "step": 21480 }, { "epoch": 0.89025653777612, "grad_norm": 0.42654335498809814, "learning_rate": 5.489245306477683e-07, "loss": 0.6876, "step": 21481 }, { "epoch": 0.8902979816817936, "grad_norm": 0.4320532977581024, "learning_rate": 5.487173111193999e-07, "loss": 0.7001, "step": 21482 }, { "epoch": 0.8903394255874674, "grad_norm": 0.41375985741615295, "learning_rate": 5.485100915910316e-07, "loss": 0.6874, "step": 21483 }, { "epoch": 0.890380869493141, "grad_norm": 0.39052531123161316, "learning_rate": 5.483028720626632e-07, "loss": 0.6611, "step": 21484 }, { "epoch": 0.8904223133988147, "grad_norm": 0.4876834750175476, "learning_rate": 5.480956525342948e-07, "loss": 0.7561, "step": 21485 }, { "epoch": 0.8904637573044883, "grad_norm": 0.4162476658821106, "learning_rate": 5.478884330059265e-07, "loss": 0.6584, "step": 21486 }, { "epoch": 0.8905052012101621, "grad_norm": 0.4234643876552582, "learning_rate": 5.476812134775581e-07, "loss": 0.6205, "step": 21487 }, { "epoch": 0.8905466451158357, "grad_norm": 0.39526426792144775, "learning_rate": 5.474739939491898e-07, "loss": 0.666, "step": 21488 }, { "epoch": 0.8905880890215094, "grad_norm": 0.3973953127861023, "learning_rate": 
5.472667744208214e-07, "loss": 0.6445, "step": 21489 }, { "epoch": 0.890629532927183, "grad_norm": 0.4397544264793396, "learning_rate": 5.470595548924531e-07, "loss": 0.7252, "step": 21490 }, { "epoch": 0.8906709768328567, "grad_norm": 0.4076325595378876, "learning_rate": 5.468523353640847e-07, "loss": 0.7139, "step": 21491 }, { "epoch": 0.8907124207385304, "grad_norm": 0.4012235701084137, "learning_rate": 5.466451158357164e-07, "loss": 0.6538, "step": 21492 }, { "epoch": 0.890753864644204, "grad_norm": 0.37476491928100586, "learning_rate": 5.46437896307348e-07, "loss": 0.6281, "step": 21493 }, { "epoch": 0.8907953085498778, "grad_norm": 0.4615389108657837, "learning_rate": 5.462306767789797e-07, "loss": 0.7285, "step": 21494 }, { "epoch": 0.8908367524555514, "grad_norm": 0.4027417004108429, "learning_rate": 5.460234572506113e-07, "loss": 0.661, "step": 21495 }, { "epoch": 0.8908781963612251, "grad_norm": 0.4122719168663025, "learning_rate": 5.45816237722243e-07, "loss": 0.6604, "step": 21496 }, { "epoch": 0.8909196402668987, "grad_norm": 0.41437840461730957, "learning_rate": 5.456090181938746e-07, "loss": 0.6523, "step": 21497 }, { "epoch": 0.8909610841725725, "grad_norm": 0.3941861689090729, "learning_rate": 5.454017986655063e-07, "loss": 0.65, "step": 21498 }, { "epoch": 0.8910025280782461, "grad_norm": 0.42280423641204834, "learning_rate": 5.451945791371379e-07, "loss": 0.6383, "step": 21499 }, { "epoch": 0.8910439719839197, "grad_norm": 0.5476798415184021, "learning_rate": 5.449873596087696e-07, "loss": 0.6686, "step": 21500 }, { "epoch": 0.8910854158895934, "grad_norm": 0.4216171205043793, "learning_rate": 5.447801400804012e-07, "loss": 0.676, "step": 21501 }, { "epoch": 0.8911268597952671, "grad_norm": 0.39519500732421875, "learning_rate": 5.445729205520329e-07, "loss": 0.6572, "step": 21502 }, { "epoch": 0.8911683037009408, "grad_norm": 0.424181193113327, "learning_rate": 5.443657010236645e-07, "loss": 0.7041, "step": 21503 }, { "epoch": 0.8912097476066144, 
"grad_norm": 0.4139166474342346, "learning_rate": 5.441584814952961e-07, "loss": 0.6399, "step": 21504 }, { "epoch": 0.8912511915122882, "grad_norm": 0.40471363067626953, "learning_rate": 5.439512619669278e-07, "loss": 0.6039, "step": 21505 }, { "epoch": 0.8912926354179618, "grad_norm": 0.3936883509159088, "learning_rate": 5.437440424385594e-07, "loss": 0.6355, "step": 21506 }, { "epoch": 0.8913340793236355, "grad_norm": 0.400671124458313, "learning_rate": 5.435368229101911e-07, "loss": 0.6658, "step": 21507 }, { "epoch": 0.8913755232293091, "grad_norm": 0.41553181409835815, "learning_rate": 5.433296033818227e-07, "loss": 0.6836, "step": 21508 }, { "epoch": 0.8914169671349828, "grad_norm": 0.3719368875026703, "learning_rate": 5.431223838534544e-07, "loss": 0.649, "step": 21509 }, { "epoch": 0.8914584110406565, "grad_norm": 0.3963344991207123, "learning_rate": 5.42915164325086e-07, "loss": 0.6299, "step": 21510 }, { "epoch": 0.8914998549463301, "grad_norm": 0.3754962682723999, "learning_rate": 5.427079447967177e-07, "loss": 0.6318, "step": 21511 }, { "epoch": 0.8915412988520038, "grad_norm": 0.4597218632698059, "learning_rate": 5.425007252683493e-07, "loss": 0.7097, "step": 21512 }, { "epoch": 0.8915827427576775, "grad_norm": 0.4156400263309479, "learning_rate": 5.42293505739981e-07, "loss": 0.6748, "step": 21513 }, { "epoch": 0.8916241866633512, "grad_norm": 0.3961472511291504, "learning_rate": 5.420862862116126e-07, "loss": 0.658, "step": 21514 }, { "epoch": 0.8916656305690248, "grad_norm": 0.44837409257888794, "learning_rate": 5.418790666832443e-07, "loss": 0.6279, "step": 21515 }, { "epoch": 0.8917070744746985, "grad_norm": 0.4305555522441864, "learning_rate": 5.416718471548759e-07, "loss": 0.6512, "step": 21516 }, { "epoch": 0.8917485183803722, "grad_norm": 0.4055415689945221, "learning_rate": 5.414646276265076e-07, "loss": 0.6332, "step": 21517 }, { "epoch": 0.8917899622860458, "grad_norm": 0.43324723839759827, "learning_rate": 5.412574080981392e-07, "loss": 
0.6665, "step": 21518 }, { "epoch": 0.8918314061917195, "grad_norm": 0.43128296732902527, "learning_rate": 5.410501885697709e-07, "loss": 0.6218, "step": 21519 }, { "epoch": 0.8918728500973931, "grad_norm": 0.3806942105293274, "learning_rate": 5.408429690414025e-07, "loss": 0.6404, "step": 21520 }, { "epoch": 0.8919142940030669, "grad_norm": 0.45334523916244507, "learning_rate": 5.406357495130342e-07, "loss": 0.6849, "step": 21521 }, { "epoch": 0.8919557379087405, "grad_norm": 0.41324469447135925, "learning_rate": 5.404285299846658e-07, "loss": 0.6729, "step": 21522 }, { "epoch": 0.8919971818144142, "grad_norm": 0.4556848406791687, "learning_rate": 5.402213104562974e-07, "loss": 0.6873, "step": 21523 }, { "epoch": 0.8920386257200879, "grad_norm": 0.422860711812973, "learning_rate": 5.400140909279291e-07, "loss": 0.6829, "step": 21524 }, { "epoch": 0.8920800696257616, "grad_norm": 0.44406893849372864, "learning_rate": 5.398068713995607e-07, "loss": 0.6655, "step": 21525 }, { "epoch": 0.8921215135314352, "grad_norm": 0.43663206696510315, "learning_rate": 5.395996518711924e-07, "loss": 0.6688, "step": 21526 }, { "epoch": 0.8921629574371088, "grad_norm": 0.42670127749443054, "learning_rate": 5.39392432342824e-07, "loss": 0.6691, "step": 21527 }, { "epoch": 0.8922044013427826, "grad_norm": 0.4078049659729004, "learning_rate": 5.391852128144557e-07, "loss": 0.6648, "step": 21528 }, { "epoch": 0.8922458452484562, "grad_norm": 0.4168897271156311, "learning_rate": 5.389779932860873e-07, "loss": 0.6283, "step": 21529 }, { "epoch": 0.8922872891541299, "grad_norm": 0.4247773587703705, "learning_rate": 5.38770773757719e-07, "loss": 0.6675, "step": 21530 }, { "epoch": 0.8923287330598035, "grad_norm": 0.41870251297950745, "learning_rate": 5.385635542293506e-07, "loss": 0.6583, "step": 21531 }, { "epoch": 0.8923701769654773, "grad_norm": 0.4011913537979126, "learning_rate": 5.383563347009823e-07, "loss": 0.6646, "step": 21532 }, { "epoch": 0.8924116208711509, "grad_norm": 
0.44034889340400696, "learning_rate": 5.381491151726139e-07, "loss": 0.7085, "step": 21533 }, { "epoch": 0.8924530647768246, "grad_norm": 0.41620439291000366, "learning_rate": 5.379418956442455e-07, "loss": 0.6774, "step": 21534 }, { "epoch": 0.8924945086824982, "grad_norm": 0.42178142070770264, "learning_rate": 5.377346761158772e-07, "loss": 0.6765, "step": 21535 }, { "epoch": 0.8925359525881719, "grad_norm": 0.4151946008205414, "learning_rate": 5.375274565875088e-07, "loss": 0.6395, "step": 21536 }, { "epoch": 0.8925773964938456, "grad_norm": 0.42614197731018066, "learning_rate": 5.373202370591405e-07, "loss": 0.6404, "step": 21537 }, { "epoch": 0.8926188403995192, "grad_norm": 0.4456254839897156, "learning_rate": 5.371130175307721e-07, "loss": 0.6719, "step": 21538 }, { "epoch": 0.892660284305193, "grad_norm": 0.5199732184410095, "learning_rate": 5.369057980024038e-07, "loss": 0.7312, "step": 21539 }, { "epoch": 0.8927017282108666, "grad_norm": 0.40631303191185, "learning_rate": 5.366985784740354e-07, "loss": 0.6605, "step": 21540 }, { "epoch": 0.8927431721165403, "grad_norm": 0.4384613037109375, "learning_rate": 5.364913589456671e-07, "loss": 0.686, "step": 21541 }, { "epoch": 0.8927846160222139, "grad_norm": 0.40261247754096985, "learning_rate": 5.362841394172987e-07, "loss": 0.6603, "step": 21542 }, { "epoch": 0.8928260599278875, "grad_norm": 0.4232719838619232, "learning_rate": 5.360769198889304e-07, "loss": 0.7292, "step": 21543 }, { "epoch": 0.8928675038335613, "grad_norm": 0.40407630801200867, "learning_rate": 5.35869700360562e-07, "loss": 0.6035, "step": 21544 }, { "epoch": 0.8929089477392349, "grad_norm": 0.4483436942100525, "learning_rate": 5.356624808321937e-07, "loss": 0.6797, "step": 21545 }, { "epoch": 0.8929503916449086, "grad_norm": 0.3982340395450592, "learning_rate": 5.354552613038253e-07, "loss": 0.6191, "step": 21546 }, { "epoch": 0.8929918355505823, "grad_norm": 0.39528003334999084, "learning_rate": 5.352480417754569e-07, "loss": 0.6501, 
"step": 21547 }, { "epoch": 0.893033279456256, "grad_norm": 0.5671359896659851, "learning_rate": 5.350408222470886e-07, "loss": 0.6998, "step": 21548 }, { "epoch": 0.8930747233619296, "grad_norm": 0.39818331599235535, "learning_rate": 5.348336027187202e-07, "loss": 0.6721, "step": 21549 }, { "epoch": 0.8931161672676033, "grad_norm": 0.41658154129981995, "learning_rate": 5.346263831903519e-07, "loss": 0.6896, "step": 21550 }, { "epoch": 0.893157611173277, "grad_norm": 0.4083831012248993, "learning_rate": 5.344191636619835e-07, "loss": 0.6437, "step": 21551 }, { "epoch": 0.8931990550789506, "grad_norm": 0.3779272735118866, "learning_rate": 5.342119441336152e-07, "loss": 0.6174, "step": 21552 }, { "epoch": 0.8932404989846243, "grad_norm": 0.40685805678367615, "learning_rate": 5.340047246052468e-07, "loss": 0.6675, "step": 21553 }, { "epoch": 0.8932819428902979, "grad_norm": 0.4362069070339203, "learning_rate": 5.337975050768785e-07, "loss": 0.6394, "step": 21554 }, { "epoch": 0.8933233867959717, "grad_norm": 0.4189010262489319, "learning_rate": 5.335902855485101e-07, "loss": 0.6486, "step": 21555 }, { "epoch": 0.8933648307016453, "grad_norm": 0.42723435163497925, "learning_rate": 5.333830660201418e-07, "loss": 0.7119, "step": 21556 }, { "epoch": 0.893406274607319, "grad_norm": 0.381091445684433, "learning_rate": 5.331758464917734e-07, "loss": 0.6226, "step": 21557 }, { "epoch": 0.8934477185129927, "grad_norm": 0.4040159285068512, "learning_rate": 5.329686269634051e-07, "loss": 0.6368, "step": 21558 }, { "epoch": 0.8934891624186664, "grad_norm": 0.41120877861976624, "learning_rate": 5.327614074350367e-07, "loss": 0.6699, "step": 21559 }, { "epoch": 0.89353060632434, "grad_norm": 0.4002573788166046, "learning_rate": 5.325541879066683e-07, "loss": 0.6218, "step": 21560 }, { "epoch": 0.8935720502300136, "grad_norm": 0.3949008285999298, "learning_rate": 5.323469683783e-07, "loss": 0.655, "step": 21561 }, { "epoch": 0.8936134941356874, "grad_norm": 0.4075847566127777, 
"learning_rate": 5.321397488499316e-07, "loss": 0.6555, "step": 21562 }, { "epoch": 0.893654938041361, "grad_norm": 0.42333805561065674, "learning_rate": 5.319325293215633e-07, "loss": 0.6787, "step": 21563 }, { "epoch": 0.8936963819470347, "grad_norm": 0.42599019408226013, "learning_rate": 5.317253097931949e-07, "loss": 0.6636, "step": 21564 }, { "epoch": 0.8937378258527083, "grad_norm": 0.40359124541282654, "learning_rate": 5.315180902648266e-07, "loss": 0.6493, "step": 21565 }, { "epoch": 0.8937792697583821, "grad_norm": 0.4086727201938629, "learning_rate": 5.313108707364582e-07, "loss": 0.6285, "step": 21566 }, { "epoch": 0.8938207136640557, "grad_norm": 0.4239732325077057, "learning_rate": 5.311036512080899e-07, "loss": 0.6826, "step": 21567 }, { "epoch": 0.8938621575697294, "grad_norm": 0.4134672284126282, "learning_rate": 5.308964316797215e-07, "loss": 0.709, "step": 21568 }, { "epoch": 0.893903601475403, "grad_norm": 0.4345559775829315, "learning_rate": 5.306892121513532e-07, "loss": 0.6874, "step": 21569 }, { "epoch": 0.8939450453810767, "grad_norm": 0.4147503077983856, "learning_rate": 5.304819926229848e-07, "loss": 0.6975, "step": 21570 }, { "epoch": 0.8939864892867504, "grad_norm": 0.42487600445747375, "learning_rate": 5.302747730946164e-07, "loss": 0.6799, "step": 21571 }, { "epoch": 0.894027933192424, "grad_norm": 0.3928578197956085, "learning_rate": 5.300675535662481e-07, "loss": 0.6724, "step": 21572 }, { "epoch": 0.8940693770980978, "grad_norm": 0.4153605103492737, "learning_rate": 5.298603340378797e-07, "loss": 0.6608, "step": 21573 }, { "epoch": 0.8941108210037714, "grad_norm": 0.46701285243034363, "learning_rate": 5.296531145095114e-07, "loss": 0.6914, "step": 21574 }, { "epoch": 0.8941522649094451, "grad_norm": 0.38295358419418335, "learning_rate": 5.29445894981143e-07, "loss": 0.7283, "step": 21575 }, { "epoch": 0.8941937088151187, "grad_norm": 0.4339483082294464, "learning_rate": 5.292386754527747e-07, "loss": 0.7286, "step": 21576 }, { 
"epoch": 0.8942351527207925, "grad_norm": 0.40063896775245667, "learning_rate": 5.290314559244063e-07, "loss": 0.635, "step": 21577 }, { "epoch": 0.8942765966264661, "grad_norm": 0.42637500166893005, "learning_rate": 5.28824236396038e-07, "loss": 0.6895, "step": 21578 }, { "epoch": 0.8943180405321397, "grad_norm": 0.3816472291946411, "learning_rate": 5.286170168676696e-07, "loss": 0.6515, "step": 21579 }, { "epoch": 0.8943594844378134, "grad_norm": 0.40653225779533386, "learning_rate": 5.284097973393013e-07, "loss": 0.6782, "step": 21580 }, { "epoch": 0.8944009283434871, "grad_norm": 0.4274885654449463, "learning_rate": 5.282025778109329e-07, "loss": 0.6753, "step": 21581 }, { "epoch": 0.8944423722491608, "grad_norm": 0.4499562978744507, "learning_rate": 5.279953582825646e-07, "loss": 0.6964, "step": 21582 }, { "epoch": 0.8944838161548344, "grad_norm": 0.40025967359542847, "learning_rate": 5.277881387541962e-07, "loss": 0.647, "step": 21583 }, { "epoch": 0.8945252600605081, "grad_norm": 0.41534289717674255, "learning_rate": 5.275809192258278e-07, "loss": 0.639, "step": 21584 }, { "epoch": 0.8945667039661818, "grad_norm": 0.40859201550483704, "learning_rate": 5.273736996974595e-07, "loss": 0.6899, "step": 21585 }, { "epoch": 0.8946081478718555, "grad_norm": 0.4099723994731903, "learning_rate": 5.271664801690911e-07, "loss": 0.6143, "step": 21586 }, { "epoch": 0.8946495917775291, "grad_norm": 0.4361809194087982, "learning_rate": 5.269592606407228e-07, "loss": 0.678, "step": 21587 }, { "epoch": 0.8946910356832027, "grad_norm": 0.4017447531223297, "learning_rate": 5.267520411123544e-07, "loss": 0.6299, "step": 21588 }, { "epoch": 0.8947324795888765, "grad_norm": 0.4370596706867218, "learning_rate": 5.265448215839861e-07, "loss": 0.6964, "step": 21589 }, { "epoch": 0.8947739234945501, "grad_norm": 0.39070379734039307, "learning_rate": 5.263376020556177e-07, "loss": 0.6411, "step": 21590 }, { "epoch": 0.8948153674002238, "grad_norm": 0.4805527925491333, "learning_rate": 
5.261303825272494e-07, "loss": 0.7256, "step": 21591 }, { "epoch": 0.8948568113058974, "grad_norm": 0.436856746673584, "learning_rate": 5.25923162998881e-07, "loss": 0.7449, "step": 21592 }, { "epoch": 0.8948982552115712, "grad_norm": 0.4093371331691742, "learning_rate": 5.257159434705127e-07, "loss": 0.6842, "step": 21593 }, { "epoch": 0.8949396991172448, "grad_norm": 0.3973616063594818, "learning_rate": 5.255087239421443e-07, "loss": 0.7096, "step": 21594 }, { "epoch": 0.8949811430229185, "grad_norm": 0.4307800829410553, "learning_rate": 5.25301504413776e-07, "loss": 0.6326, "step": 21595 }, { "epoch": 0.8950225869285922, "grad_norm": 0.3821743428707123, "learning_rate": 5.250942848854076e-07, "loss": 0.6288, "step": 21596 }, { "epoch": 0.8950640308342658, "grad_norm": 0.4651239216327667, "learning_rate": 5.248870653570392e-07, "loss": 0.657, "step": 21597 }, { "epoch": 0.8951054747399395, "grad_norm": 0.43149086833000183, "learning_rate": 5.246798458286709e-07, "loss": 0.6558, "step": 21598 }, { "epoch": 0.8951469186456131, "grad_norm": 0.38808968663215637, "learning_rate": 5.244726263003025e-07, "loss": 0.6079, "step": 21599 }, { "epoch": 0.8951883625512869, "grad_norm": 0.4123295545578003, "learning_rate": 5.242654067719342e-07, "loss": 0.6024, "step": 21600 }, { "epoch": 0.8952298064569605, "grad_norm": 0.40237629413604736, "learning_rate": 5.240581872435658e-07, "loss": 0.6516, "step": 21601 }, { "epoch": 0.8952712503626342, "grad_norm": 0.44824451208114624, "learning_rate": 5.238509677151975e-07, "loss": 0.6675, "step": 21602 }, { "epoch": 0.8953126942683078, "grad_norm": 0.3879373371601105, "learning_rate": 5.236437481868291e-07, "loss": 0.6165, "step": 21603 }, { "epoch": 0.8953541381739815, "grad_norm": 0.4469955563545227, "learning_rate": 5.234365286584608e-07, "loss": 0.6924, "step": 21604 }, { "epoch": 0.8953955820796552, "grad_norm": 0.45527762174606323, "learning_rate": 5.232293091300924e-07, "loss": 0.701, "step": 21605 }, { "epoch": 
0.8954370259853288, "grad_norm": 0.3921760022640228, "learning_rate": 5.230220896017241e-07, "loss": 0.6193, "step": 21606 }, { "epoch": 0.8954784698910025, "grad_norm": 0.4063795804977417, "learning_rate": 5.228148700733557e-07, "loss": 0.6562, "step": 21607 }, { "epoch": 0.8955199137966762, "grad_norm": 0.4430139362812042, "learning_rate": 5.226076505449873e-07, "loss": 0.6432, "step": 21608 }, { "epoch": 0.8955613577023499, "grad_norm": 0.41347554326057434, "learning_rate": 5.22400431016619e-07, "loss": 0.6643, "step": 21609 }, { "epoch": 0.8956028016080235, "grad_norm": 0.41163283586502075, "learning_rate": 5.221932114882506e-07, "loss": 0.6726, "step": 21610 }, { "epoch": 0.8956442455136973, "grad_norm": 0.4329853951931, "learning_rate": 5.219859919598823e-07, "loss": 0.6995, "step": 21611 }, { "epoch": 0.8956856894193709, "grad_norm": 0.40636536478996277, "learning_rate": 5.217787724315139e-07, "loss": 0.6442, "step": 21612 }, { "epoch": 0.8957271333250445, "grad_norm": 0.4101714789867401, "learning_rate": 5.215715529031456e-07, "loss": 0.6769, "step": 21613 }, { "epoch": 0.8957685772307182, "grad_norm": 0.46497735381126404, "learning_rate": 5.213643333747772e-07, "loss": 0.687, "step": 21614 }, { "epoch": 0.8958100211363919, "grad_norm": 0.41650301218032837, "learning_rate": 5.21157113846409e-07, "loss": 0.6573, "step": 21615 }, { "epoch": 0.8958514650420656, "grad_norm": 0.41922280192375183, "learning_rate": 5.209498943180405e-07, "loss": 0.6819, "step": 21616 }, { "epoch": 0.8958929089477392, "grad_norm": 0.41188955307006836, "learning_rate": 5.207426747896722e-07, "loss": 0.6069, "step": 21617 }, { "epoch": 0.8959343528534129, "grad_norm": 0.3900759518146515, "learning_rate": 5.205354552613038e-07, "loss": 0.6423, "step": 21618 }, { "epoch": 0.8959757967590866, "grad_norm": 0.41566193103790283, "learning_rate": 5.203282357329355e-07, "loss": 0.647, "step": 21619 }, { "epoch": 0.8960172406647603, "grad_norm": 0.44791263341903687, "learning_rate": 
5.201210162045671e-07, "loss": 0.6772, "step": 21620 }, { "epoch": 0.8960586845704339, "grad_norm": 0.3969278931617737, "learning_rate": 5.199137966761987e-07, "loss": 0.6354, "step": 21621 }, { "epoch": 0.8961001284761075, "grad_norm": 0.41287511587142944, "learning_rate": 5.197065771478304e-07, "loss": 0.687, "step": 21622 }, { "epoch": 0.8961415723817813, "grad_norm": 0.4647110104560852, "learning_rate": 5.19499357619462e-07, "loss": 0.728, "step": 21623 }, { "epoch": 0.8961830162874549, "grad_norm": 0.4311895966529846, "learning_rate": 5.192921380910937e-07, "loss": 0.6873, "step": 21624 }, { "epoch": 0.8962244601931286, "grad_norm": 0.4035327434539795, "learning_rate": 5.190849185627253e-07, "loss": 0.6659, "step": 21625 }, { "epoch": 0.8962659040988022, "grad_norm": 0.4065888524055481, "learning_rate": 5.18877699034357e-07, "loss": 0.6176, "step": 21626 }, { "epoch": 0.896307348004476, "grad_norm": 0.4296950101852417, "learning_rate": 5.186704795059886e-07, "loss": 0.668, "step": 21627 }, { "epoch": 0.8963487919101496, "grad_norm": 0.48781752586364746, "learning_rate": 5.184632599776203e-07, "loss": 0.7815, "step": 21628 }, { "epoch": 0.8963902358158233, "grad_norm": 0.368587851524353, "learning_rate": 5.182560404492519e-07, "loss": 0.6306, "step": 21629 }, { "epoch": 0.896431679721497, "grad_norm": 0.3955225944519043, "learning_rate": 5.180488209208836e-07, "loss": 0.6531, "step": 21630 }, { "epoch": 0.8964731236271706, "grad_norm": 0.39316800236701965, "learning_rate": 5.178416013925152e-07, "loss": 0.6356, "step": 21631 }, { "epoch": 0.8965145675328443, "grad_norm": 0.4311050474643707, "learning_rate": 5.17634381864147e-07, "loss": 0.667, "step": 21632 }, { "epoch": 0.8965560114385179, "grad_norm": 0.4411413073539734, "learning_rate": 5.174271623357785e-07, "loss": 0.6326, "step": 21633 }, { "epoch": 0.8965974553441917, "grad_norm": 0.42417746782302856, "learning_rate": 5.172199428074101e-07, "loss": 0.7006, "step": 21634 }, { "epoch": 0.8966388992498653, 
"grad_norm": 0.3984362781047821, "learning_rate": 5.170127232790418e-07, "loss": 0.6235, "step": 21635 }, { "epoch": 0.896680343155539, "grad_norm": 0.38406893610954285, "learning_rate": 5.168055037506734e-07, "loss": 0.6444, "step": 21636 }, { "epoch": 0.8967217870612126, "grad_norm": 0.423697829246521, "learning_rate": 5.165982842223051e-07, "loss": 0.651, "step": 21637 }, { "epoch": 0.8967632309668864, "grad_norm": 0.4383939504623413, "learning_rate": 5.163910646939367e-07, "loss": 0.6458, "step": 21638 }, { "epoch": 0.89680467487256, "grad_norm": 0.41231778264045715, "learning_rate": 5.161838451655685e-07, "loss": 0.6975, "step": 21639 }, { "epoch": 0.8968461187782336, "grad_norm": 0.41430342197418213, "learning_rate": 5.159766256372e-07, "loss": 0.6658, "step": 21640 }, { "epoch": 0.8968875626839073, "grad_norm": 0.42921629548072815, "learning_rate": 5.157694061088318e-07, "loss": 0.6631, "step": 21641 }, { "epoch": 0.896929006589581, "grad_norm": 0.4215526878833771, "learning_rate": 5.155621865804633e-07, "loss": 0.6368, "step": 21642 }, { "epoch": 0.8969704504952547, "grad_norm": 0.5884553790092468, "learning_rate": 5.15354967052095e-07, "loss": 0.6708, "step": 21643 }, { "epoch": 0.8970118944009283, "grad_norm": 0.39017221331596375, "learning_rate": 5.151477475237266e-07, "loss": 0.6597, "step": 21644 }, { "epoch": 0.897053338306602, "grad_norm": 0.43384143710136414, "learning_rate": 5.149405279953582e-07, "loss": 0.6736, "step": 21645 }, { "epoch": 0.8970947822122757, "grad_norm": 0.4396486282348633, "learning_rate": 5.1473330846699e-07, "loss": 0.6499, "step": 21646 }, { "epoch": 0.8971362261179494, "grad_norm": 0.4012719988822937, "learning_rate": 5.145260889386215e-07, "loss": 0.6333, "step": 21647 }, { "epoch": 0.897177670023623, "grad_norm": 0.381801962852478, "learning_rate": 5.143188694102533e-07, "loss": 0.6539, "step": 21648 }, { "epoch": 0.8972191139292967, "grad_norm": 0.41323745250701904, "learning_rate": 5.141116498818848e-07, "loss": 0.6008, 
"step": 21649 }, { "epoch": 0.8972605578349704, "grad_norm": 0.4583244323730469, "learning_rate": 5.139044303535166e-07, "loss": 0.7344, "step": 21650 }, { "epoch": 0.897302001740644, "grad_norm": 0.41953304409980774, "learning_rate": 5.136972108251481e-07, "loss": 0.6482, "step": 21651 }, { "epoch": 0.8973434456463177, "grad_norm": 0.42950865626335144, "learning_rate": 5.134899912967799e-07, "loss": 0.6195, "step": 21652 }, { "epoch": 0.8973848895519914, "grad_norm": 0.4639635384082794, "learning_rate": 5.132827717684114e-07, "loss": 0.6992, "step": 21653 }, { "epoch": 0.8974263334576651, "grad_norm": 0.3812353014945984, "learning_rate": 5.130755522400432e-07, "loss": 0.6555, "step": 21654 }, { "epoch": 0.8974677773633387, "grad_norm": 0.4112999141216278, "learning_rate": 5.128683327116747e-07, "loss": 0.6699, "step": 21655 }, { "epoch": 0.8975092212690124, "grad_norm": 0.3868524432182312, "learning_rate": 5.126611131833065e-07, "loss": 0.6378, "step": 21656 }, { "epoch": 0.8975506651746861, "grad_norm": 0.43214574456214905, "learning_rate": 5.12453893654938e-07, "loss": 0.7219, "step": 21657 }, { "epoch": 0.8975921090803597, "grad_norm": 0.4264596998691559, "learning_rate": 5.122466741265696e-07, "loss": 0.6356, "step": 21658 }, { "epoch": 0.8976335529860334, "grad_norm": 0.3986511826515198, "learning_rate": 5.120394545982014e-07, "loss": 0.625, "step": 21659 }, { "epoch": 0.897674996891707, "grad_norm": 0.45593035221099854, "learning_rate": 5.11832235069833e-07, "loss": 0.6582, "step": 21660 }, { "epoch": 0.8977164407973808, "grad_norm": 0.4224576950073242, "learning_rate": 5.116250155414647e-07, "loss": 0.73, "step": 21661 }, { "epoch": 0.8977578847030544, "grad_norm": 0.4360947608947754, "learning_rate": 5.114177960130962e-07, "loss": 0.7012, "step": 21662 }, { "epoch": 0.8977993286087281, "grad_norm": 0.4460400640964508, "learning_rate": 5.11210576484728e-07, "loss": 0.6938, "step": 21663 }, { "epoch": 0.8978407725144018, "grad_norm": 0.4628468155860901, 
"learning_rate": 5.110033569563596e-07, "loss": 0.6288, "step": 21664 }, { "epoch": 0.8978822164200754, "grad_norm": 0.3927665650844574, "learning_rate": 5.107961374279913e-07, "loss": 0.6349, "step": 21665 }, { "epoch": 0.8979236603257491, "grad_norm": 0.40729469060897827, "learning_rate": 5.105889178996229e-07, "loss": 0.6531, "step": 21666 }, { "epoch": 0.8979651042314227, "grad_norm": 0.40527626872062683, "learning_rate": 5.103816983712546e-07, "loss": 0.6794, "step": 21667 }, { "epoch": 0.8980065481370965, "grad_norm": 0.42476168274879456, "learning_rate": 5.101744788428862e-07, "loss": 0.6943, "step": 21668 }, { "epoch": 0.8980479920427701, "grad_norm": 0.4106142222881317, "learning_rate": 5.099672593145179e-07, "loss": 0.6406, "step": 21669 }, { "epoch": 0.8980894359484438, "grad_norm": 0.4043257236480713, "learning_rate": 5.097600397861495e-07, "loss": 0.6854, "step": 21670 }, { "epoch": 0.8981308798541174, "grad_norm": 0.43675118684768677, "learning_rate": 5.095528202577812e-07, "loss": 0.644, "step": 21671 }, { "epoch": 0.8981723237597912, "grad_norm": 0.42304709553718567, "learning_rate": 5.093456007294128e-07, "loss": 0.6484, "step": 21672 }, { "epoch": 0.8982137676654648, "grad_norm": 0.38777920603752136, "learning_rate": 5.091383812010445e-07, "loss": 0.5853, "step": 21673 }, { "epoch": 0.8982552115711384, "grad_norm": 0.40734386444091797, "learning_rate": 5.089311616726761e-07, "loss": 0.667, "step": 21674 }, { "epoch": 0.8982966554768121, "grad_norm": 0.3839884102344513, "learning_rate": 5.087239421443077e-07, "loss": 0.6184, "step": 21675 }, { "epoch": 0.8983380993824858, "grad_norm": 0.4026780426502228, "learning_rate": 5.085167226159394e-07, "loss": 0.6503, "step": 21676 }, { "epoch": 0.8983795432881595, "grad_norm": 0.4248570203781128, "learning_rate": 5.08309503087571e-07, "loss": 0.6741, "step": 21677 }, { "epoch": 0.8984209871938331, "grad_norm": 0.40241941809654236, "learning_rate": 5.081022835592027e-07, "loss": 0.6475, "step": 21678 }, { 
"epoch": 0.8984624310995069, "grad_norm": 0.42992886900901794, "learning_rate": 5.078950640308343e-07, "loss": 0.67, "step": 21679 }, { "epoch": 0.8985038750051805, "grad_norm": 0.39304637908935547, "learning_rate": 5.07687844502466e-07, "loss": 0.6493, "step": 21680 }, { "epoch": 0.8985453189108542, "grad_norm": 0.42343226075172424, "learning_rate": 5.074806249740976e-07, "loss": 0.6458, "step": 21681 }, { "epoch": 0.8985867628165278, "grad_norm": 0.43401435017585754, "learning_rate": 5.072734054457293e-07, "loss": 0.684, "step": 21682 }, { "epoch": 0.8986282067222015, "grad_norm": 0.43685391545295715, "learning_rate": 5.070661859173609e-07, "loss": 0.6578, "step": 21683 }, { "epoch": 0.8986696506278752, "grad_norm": 0.4203037619590759, "learning_rate": 5.068589663889926e-07, "loss": 0.6521, "step": 21684 }, { "epoch": 0.8987110945335488, "grad_norm": 0.40927252173423767, "learning_rate": 5.066517468606242e-07, "loss": 0.6743, "step": 21685 }, { "epoch": 0.8987525384392225, "grad_norm": 0.4302552342414856, "learning_rate": 5.064445273322559e-07, "loss": 0.6611, "step": 21686 }, { "epoch": 0.8987939823448962, "grad_norm": 0.3948485851287842, "learning_rate": 5.062373078038875e-07, "loss": 0.6707, "step": 21687 }, { "epoch": 0.8988354262505699, "grad_norm": 0.42815446853637695, "learning_rate": 5.060300882755192e-07, "loss": 0.6884, "step": 21688 }, { "epoch": 0.8988768701562435, "grad_norm": 0.45705458521842957, "learning_rate": 5.058228687471508e-07, "loss": 0.6604, "step": 21689 }, { "epoch": 0.8989183140619172, "grad_norm": 0.38672807812690735, "learning_rate": 5.056156492187825e-07, "loss": 0.6357, "step": 21690 }, { "epoch": 0.8989597579675909, "grad_norm": 0.41418200731277466, "learning_rate": 5.054084296904141e-07, "loss": 0.729, "step": 21691 }, { "epoch": 0.8990012018732645, "grad_norm": 0.416007399559021, "learning_rate": 5.052012101620458e-07, "loss": 0.7388, "step": 21692 }, { "epoch": 0.8990426457789382, "grad_norm": 0.41595259308815, "learning_rate": 
5.049939906336774e-07, "loss": 0.6658, "step": 21693 }, { "epoch": 0.8990840896846118, "grad_norm": 0.49643537402153015, "learning_rate": 5.04786771105309e-07, "loss": 0.7134, "step": 21694 }, { "epoch": 0.8991255335902856, "grad_norm": 0.4049939513206482, "learning_rate": 5.045795515769407e-07, "loss": 0.6526, "step": 21695 }, { "epoch": 0.8991669774959592, "grad_norm": 0.4014173746109009, "learning_rate": 5.043723320485723e-07, "loss": 0.6814, "step": 21696 }, { "epoch": 0.8992084214016329, "grad_norm": 0.4296542704105377, "learning_rate": 5.04165112520204e-07, "loss": 0.644, "step": 21697 }, { "epoch": 0.8992498653073066, "grad_norm": 0.4030790627002716, "learning_rate": 5.039578929918356e-07, "loss": 0.6282, "step": 21698 }, { "epoch": 0.8992913092129803, "grad_norm": 0.39832839369773865, "learning_rate": 5.037506734634673e-07, "loss": 0.6473, "step": 21699 }, { "epoch": 0.8993327531186539, "grad_norm": 0.4235720932483673, "learning_rate": 5.035434539350989e-07, "loss": 0.6575, "step": 21700 }, { "epoch": 0.8993741970243275, "grad_norm": 0.4068671762943268, "learning_rate": 5.033362344067306e-07, "loss": 0.6416, "step": 21701 }, { "epoch": 0.8994156409300013, "grad_norm": 0.39585357904434204, "learning_rate": 5.031290148783622e-07, "loss": 0.651, "step": 21702 }, { "epoch": 0.8994570848356749, "grad_norm": 0.40354934334754944, "learning_rate": 5.029217953499939e-07, "loss": 0.6775, "step": 21703 }, { "epoch": 0.8994985287413486, "grad_norm": 0.416501522064209, "learning_rate": 5.027145758216255e-07, "loss": 0.603, "step": 21704 }, { "epoch": 0.8995399726470222, "grad_norm": 0.42291387915611267, "learning_rate": 5.025073562932572e-07, "loss": 0.675, "step": 21705 }, { "epoch": 0.899581416552696, "grad_norm": 0.37700366973876953, "learning_rate": 5.023001367648888e-07, "loss": 0.6324, "step": 21706 }, { "epoch": 0.8996228604583696, "grad_norm": 0.45422127842903137, "learning_rate": 5.020929172365205e-07, "loss": 0.6661, "step": 21707 }, { "epoch": 
0.8996643043640433, "grad_norm": 0.39959803223609924, "learning_rate": 5.018856977081521e-07, "loss": 0.6039, "step": 21708 }, { "epoch": 0.8997057482697169, "grad_norm": 0.4188365340232849, "learning_rate": 5.016784781797838e-07, "loss": 0.6692, "step": 21709 }, { "epoch": 0.8997471921753906, "grad_norm": 0.42786991596221924, "learning_rate": 5.014712586514154e-07, "loss": 0.6711, "step": 21710 }, { "epoch": 0.8997886360810643, "grad_norm": 0.40679505467414856, "learning_rate": 5.012640391230471e-07, "loss": 0.7031, "step": 21711 }, { "epoch": 0.8998300799867379, "grad_norm": 0.3900405466556549, "learning_rate": 5.010568195946787e-07, "loss": 0.6313, "step": 21712 }, { "epoch": 0.8998715238924117, "grad_norm": 0.3941775858402252, "learning_rate": 5.008496000663103e-07, "loss": 0.6938, "step": 21713 }, { "epoch": 0.8999129677980853, "grad_norm": 0.40731289982795715, "learning_rate": 5.00642380537942e-07, "loss": 0.6608, "step": 21714 }, { "epoch": 0.899954411703759, "grad_norm": 0.3974337875843048, "learning_rate": 5.004351610095736e-07, "loss": 0.6483, "step": 21715 }, { "epoch": 0.8999958556094326, "grad_norm": 0.42606380581855774, "learning_rate": 5.002279414812053e-07, "loss": 0.6595, "step": 21716 }, { "epoch": 0.9000372995151062, "grad_norm": 0.4259994328022003, "learning_rate": 5.000207219528369e-07, "loss": 0.6316, "step": 21717 }, { "epoch": 0.90007874342078, "grad_norm": 0.44175952672958374, "learning_rate": 4.998135024244686e-07, "loss": 0.6918, "step": 21718 }, { "epoch": 0.9001201873264536, "grad_norm": 0.3832865059375763, "learning_rate": 4.996062828961002e-07, "loss": 0.6283, "step": 21719 }, { "epoch": 0.9001616312321273, "grad_norm": 0.4191640615463257, "learning_rate": 4.993990633677319e-07, "loss": 0.7319, "step": 21720 }, { "epoch": 0.900203075137801, "grad_norm": 0.42710813879966736, "learning_rate": 4.991918438393635e-07, "loss": 0.6819, "step": 21721 }, { "epoch": 0.9002445190434747, "grad_norm": 0.4307880103588104, "learning_rate": 
4.989846243109952e-07, "loss": 0.6685, "step": 21722 }, { "epoch": 0.9002859629491483, "grad_norm": 0.40458813309669495, "learning_rate": 4.987774047826268e-07, "loss": 0.6506, "step": 21723 }, { "epoch": 0.900327406854822, "grad_norm": 0.4133318066596985, "learning_rate": 4.985701852542585e-07, "loss": 0.6466, "step": 21724 }, { "epoch": 0.9003688507604957, "grad_norm": 0.4259662628173828, "learning_rate": 4.983629657258901e-07, "loss": 0.7036, "step": 21725 }, { "epoch": 0.9004102946661693, "grad_norm": 0.4297091066837311, "learning_rate": 4.981557461975218e-07, "loss": 0.6233, "step": 21726 }, { "epoch": 0.900451738571843, "grad_norm": 0.39043527841567993, "learning_rate": 4.979485266691534e-07, "loss": 0.641, "step": 21727 }, { "epoch": 0.9004931824775166, "grad_norm": 0.4170440137386322, "learning_rate": 4.977413071407851e-07, "loss": 0.6475, "step": 21728 }, { "epoch": 0.9005346263831904, "grad_norm": 0.3916419446468353, "learning_rate": 4.975340876124167e-07, "loss": 0.6041, "step": 21729 }, { "epoch": 0.900576070288864, "grad_norm": 0.43758535385131836, "learning_rate": 4.973268680840483e-07, "loss": 0.6274, "step": 21730 }, { "epoch": 0.9006175141945377, "grad_norm": 0.4131115972995758, "learning_rate": 4.9711964855568e-07, "loss": 0.699, "step": 21731 }, { "epoch": 0.9006589581002113, "grad_norm": 0.4266568422317505, "learning_rate": 4.969124290273116e-07, "loss": 0.687, "step": 21732 }, { "epoch": 0.9007004020058851, "grad_norm": 0.3950921297073364, "learning_rate": 4.967052094989433e-07, "loss": 0.6989, "step": 21733 }, { "epoch": 0.9007418459115587, "grad_norm": 0.44160592555999756, "learning_rate": 4.964979899705749e-07, "loss": 0.6877, "step": 21734 }, { "epoch": 0.9007832898172323, "grad_norm": 0.4287833273410797, "learning_rate": 4.962907704422066e-07, "loss": 0.6804, "step": 21735 }, { "epoch": 0.9008247337229061, "grad_norm": 0.3995215892791748, "learning_rate": 4.960835509138382e-07, "loss": 0.7102, "step": 21736 }, { "epoch": 
0.9008661776285797, "grad_norm": 0.38380053639411926, "learning_rate": 4.958763313854699e-07, "loss": 0.6267, "step": 21737 }, { "epoch": 0.9009076215342534, "grad_norm": 0.40147364139556885, "learning_rate": 4.956691118571015e-07, "loss": 0.616, "step": 21738 }, { "epoch": 0.900949065439927, "grad_norm": 0.4564894437789917, "learning_rate": 4.954618923287332e-07, "loss": 0.6484, "step": 21739 }, { "epoch": 0.9009905093456008, "grad_norm": 0.4524424970149994, "learning_rate": 4.952546728003648e-07, "loss": 0.7368, "step": 21740 }, { "epoch": 0.9010319532512744, "grad_norm": 0.4386501908302307, "learning_rate": 4.950474532719965e-07, "loss": 0.7346, "step": 21741 }, { "epoch": 0.9010733971569481, "grad_norm": 0.3903694748878479, "learning_rate": 4.948402337436281e-07, "loss": 0.6395, "step": 21742 }, { "epoch": 0.9011148410626217, "grad_norm": 0.4003303050994873, "learning_rate": 4.946330142152597e-07, "loss": 0.6736, "step": 21743 }, { "epoch": 0.9011562849682954, "grad_norm": 0.42025068402290344, "learning_rate": 4.944257946868914e-07, "loss": 0.7606, "step": 21744 }, { "epoch": 0.9011977288739691, "grad_norm": 0.3982343375682831, "learning_rate": 4.94218575158523e-07, "loss": 0.623, "step": 21745 }, { "epoch": 0.9012391727796427, "grad_norm": 0.4200702905654907, "learning_rate": 4.940113556301547e-07, "loss": 0.6672, "step": 21746 }, { "epoch": 0.9012806166853164, "grad_norm": 0.38132521510124207, "learning_rate": 4.938041361017863e-07, "loss": 0.658, "step": 21747 }, { "epoch": 0.9013220605909901, "grad_norm": 0.4360782206058502, "learning_rate": 4.93596916573418e-07, "loss": 0.6978, "step": 21748 }, { "epoch": 0.9013635044966638, "grad_norm": 0.427347332239151, "learning_rate": 4.933896970450496e-07, "loss": 0.6709, "step": 21749 }, { "epoch": 0.9014049484023374, "grad_norm": 0.4409312605857849, "learning_rate": 4.931824775166813e-07, "loss": 0.6678, "step": 21750 }, { "epoch": 0.9014463923080112, "grad_norm": 0.4005622863769531, "learning_rate": 
4.929752579883129e-07, "loss": 0.6703, "step": 21751 }, { "epoch": 0.9014878362136848, "grad_norm": 0.4347497522830963, "learning_rate": 4.927680384599446e-07, "loss": 0.697, "step": 21752 }, { "epoch": 0.9015292801193584, "grad_norm": 0.384727418422699, "learning_rate": 4.925608189315762e-07, "loss": 0.6282, "step": 21753 }, { "epoch": 0.9015707240250321, "grad_norm": 0.4598029553890228, "learning_rate": 4.923535994032079e-07, "loss": 0.6947, "step": 21754 }, { "epoch": 0.9016121679307058, "grad_norm": 0.4261048138141632, "learning_rate": 4.921463798748395e-07, "loss": 0.6271, "step": 21755 }, { "epoch": 0.9016536118363795, "grad_norm": 0.43014076352119446, "learning_rate": 4.919391603464711e-07, "loss": 0.6313, "step": 21756 }, { "epoch": 0.9016950557420531, "grad_norm": 0.42324361205101013, "learning_rate": 4.917319408181028e-07, "loss": 0.6033, "step": 21757 }, { "epoch": 0.9017364996477268, "grad_norm": 0.3989635705947876, "learning_rate": 4.915247212897344e-07, "loss": 0.6724, "step": 21758 }, { "epoch": 0.9017779435534005, "grad_norm": 0.4641168415546417, "learning_rate": 4.913175017613661e-07, "loss": 0.708, "step": 21759 }, { "epoch": 0.9018193874590742, "grad_norm": 0.47041693329811096, "learning_rate": 4.911102822329977e-07, "loss": 0.6971, "step": 21760 }, { "epoch": 0.9018608313647478, "grad_norm": 0.42518502473831177, "learning_rate": 4.909030627046294e-07, "loss": 0.6741, "step": 21761 }, { "epoch": 0.9019022752704214, "grad_norm": 0.3835727274417877, "learning_rate": 4.90695843176261e-07, "loss": 0.6494, "step": 21762 }, { "epoch": 0.9019437191760952, "grad_norm": 0.4638034701347351, "learning_rate": 4.904886236478927e-07, "loss": 0.6993, "step": 21763 }, { "epoch": 0.9019851630817688, "grad_norm": 0.4210434854030609, "learning_rate": 4.902814041195243e-07, "loss": 0.6855, "step": 21764 }, { "epoch": 0.9020266069874425, "grad_norm": 0.4069121181964874, "learning_rate": 4.90074184591156e-07, "loss": 0.6544, "step": 21765 }, { "epoch": 
0.9020680508931161, "grad_norm": 0.4243365526199341, "learning_rate": 4.898669650627876e-07, "loss": 0.6738, "step": 21766 }, { "epoch": 0.9021094947987899, "grad_norm": 0.44677332043647766, "learning_rate": 4.896597455344193e-07, "loss": 0.6775, "step": 21767 }, { "epoch": 0.9021509387044635, "grad_norm": 0.3879317343235016, "learning_rate": 4.894525260060509e-07, "loss": 0.6553, "step": 21768 }, { "epoch": 0.9021923826101372, "grad_norm": 0.4478183388710022, "learning_rate": 4.892453064776825e-07, "loss": 0.679, "step": 21769 }, { "epoch": 0.9022338265158109, "grad_norm": 0.4068681001663208, "learning_rate": 4.890380869493142e-07, "loss": 0.6697, "step": 21770 }, { "epoch": 0.9022752704214845, "grad_norm": 0.4193711280822754, "learning_rate": 4.888308674209458e-07, "loss": 0.7144, "step": 21771 }, { "epoch": 0.9023167143271582, "grad_norm": 0.43674561381340027, "learning_rate": 4.886236478925775e-07, "loss": 0.6571, "step": 21772 }, { "epoch": 0.9023581582328318, "grad_norm": 0.3922615051269531, "learning_rate": 4.884164283642091e-07, "loss": 0.6565, "step": 21773 }, { "epoch": 0.9023996021385056, "grad_norm": 0.4094826281070709, "learning_rate": 4.882092088358408e-07, "loss": 0.6272, "step": 21774 }, { "epoch": 0.9024410460441792, "grad_norm": 0.4444669187068939, "learning_rate": 4.880019893074724e-07, "loss": 0.7183, "step": 21775 }, { "epoch": 0.9024824899498529, "grad_norm": 0.4068131446838379, "learning_rate": 4.877947697791041e-07, "loss": 0.661, "step": 21776 }, { "epoch": 0.9025239338555265, "grad_norm": 0.41802167892456055, "learning_rate": 4.875875502507357e-07, "loss": 0.6248, "step": 21777 }, { "epoch": 0.9025653777612002, "grad_norm": 0.4208430051803589, "learning_rate": 4.873803307223674e-07, "loss": 0.6805, "step": 21778 }, { "epoch": 0.9026068216668739, "grad_norm": 0.4063315987586975, "learning_rate": 4.87173111193999e-07, "loss": 0.5923, "step": 21779 }, { "epoch": 0.9026482655725475, "grad_norm": 0.40667447447776794, "learning_rate": 
4.869658916656306e-07, "loss": 0.6437, "step": 21780 }, { "epoch": 0.9026897094782212, "grad_norm": 0.4293023347854614, "learning_rate": 4.867586721372623e-07, "loss": 0.7148, "step": 21781 }, { "epoch": 0.9027311533838949, "grad_norm": 0.41112715005874634, "learning_rate": 4.865514526088939e-07, "loss": 0.6965, "step": 21782 }, { "epoch": 0.9027725972895686, "grad_norm": 0.39250093698501587, "learning_rate": 4.863442330805256e-07, "loss": 0.6075, "step": 21783 }, { "epoch": 0.9028140411952422, "grad_norm": 0.42396581172943115, "learning_rate": 4.861370135521572e-07, "loss": 0.6631, "step": 21784 }, { "epoch": 0.902855485100916, "grad_norm": 0.3919146955013275, "learning_rate": 4.859297940237889e-07, "loss": 0.6434, "step": 21785 }, { "epoch": 0.9028969290065896, "grad_norm": 0.4356183111667633, "learning_rate": 4.857225744954205e-07, "loss": 0.6401, "step": 21786 }, { "epoch": 0.9029383729122632, "grad_norm": 0.3960706889629364, "learning_rate": 4.855153549670522e-07, "loss": 0.6464, "step": 21787 }, { "epoch": 0.9029798168179369, "grad_norm": 0.43283525109291077, "learning_rate": 4.853081354386838e-07, "loss": 0.6786, "step": 21788 }, { "epoch": 0.9030212607236106, "grad_norm": 0.4411073625087738, "learning_rate": 4.851009159103155e-07, "loss": 0.668, "step": 21789 }, { "epoch": 0.9030627046292843, "grad_norm": 0.43951287865638733, "learning_rate": 4.848936963819471e-07, "loss": 0.698, "step": 21790 }, { "epoch": 0.9031041485349579, "grad_norm": 0.391604483127594, "learning_rate": 4.846864768535788e-07, "loss": 0.6636, "step": 21791 }, { "epoch": 0.9031455924406316, "grad_norm": 0.4240918457508087, "learning_rate": 4.844792573252104e-07, "loss": 0.6539, "step": 21792 }, { "epoch": 0.9031870363463053, "grad_norm": 0.4464181959629059, "learning_rate": 4.84272037796842e-07, "loss": 0.6599, "step": 21793 }, { "epoch": 0.903228480251979, "grad_norm": 0.4299499988555908, "learning_rate": 4.840648182684737e-07, "loss": 0.7209, "step": 21794 }, { "epoch": 
0.9032699241576526, "grad_norm": 0.39692774415016174, "learning_rate": 4.838575987401053e-07, "loss": 0.6685, "step": 21795 }, { "epoch": 0.9033113680633262, "grad_norm": 0.39123156666755676, "learning_rate": 4.83650379211737e-07, "loss": 0.6401, "step": 21796 }, { "epoch": 0.903352811969, "grad_norm": 0.4436837136745453, "learning_rate": 4.834431596833686e-07, "loss": 0.7235, "step": 21797 }, { "epoch": 0.9033942558746736, "grad_norm": 0.3717900514602661, "learning_rate": 4.832359401550003e-07, "loss": 0.6525, "step": 21798 }, { "epoch": 0.9034356997803473, "grad_norm": 0.41760775446891785, "learning_rate": 4.830287206266319e-07, "loss": 0.6498, "step": 21799 }, { "epoch": 0.903477143686021, "grad_norm": 0.4355441629886627, "learning_rate": 4.828215010982636e-07, "loss": 0.6812, "step": 21800 }, { "epoch": 0.9035185875916947, "grad_norm": 0.4129011034965515, "learning_rate": 4.826142815698952e-07, "loss": 0.6224, "step": 21801 }, { "epoch": 0.9035600314973683, "grad_norm": 0.44283023476600647, "learning_rate": 4.824070620415269e-07, "loss": 0.6927, "step": 21802 }, { "epoch": 0.903601475403042, "grad_norm": 0.38623684644699097, "learning_rate": 4.821998425131585e-07, "loss": 0.6517, "step": 21803 }, { "epoch": 0.9036429193087157, "grad_norm": 0.42322438955307007, "learning_rate": 4.819926229847902e-07, "loss": 0.626, "step": 21804 }, { "epoch": 0.9036843632143893, "grad_norm": 0.42495718598365784, "learning_rate": 4.817854034564218e-07, "loss": 0.6725, "step": 21805 }, { "epoch": 0.903725807120063, "grad_norm": 0.40512368083000183, "learning_rate": 4.815781839280534e-07, "loss": 0.6428, "step": 21806 }, { "epoch": 0.9037672510257366, "grad_norm": 0.4157402217388153, "learning_rate": 4.813709643996851e-07, "loss": 0.694, "step": 21807 }, { "epoch": 0.9038086949314104, "grad_norm": 0.3808349668979645, "learning_rate": 4.811637448713167e-07, "loss": 0.6125, "step": 21808 }, { "epoch": 0.903850138837084, "grad_norm": 0.4222026765346527, "learning_rate": 
4.809565253429484e-07, "loss": 0.6724, "step": 21809 }, { "epoch": 0.9038915827427577, "grad_norm": 0.3946753442287445, "learning_rate": 4.8074930581458e-07, "loss": 0.6426, "step": 21810 }, { "epoch": 0.9039330266484313, "grad_norm": 0.42505571246147156, "learning_rate": 4.805420862862117e-07, "loss": 0.6462, "step": 21811 }, { "epoch": 0.9039744705541051, "grad_norm": 0.4211755394935608, "learning_rate": 4.803348667578433e-07, "loss": 0.6456, "step": 21812 }, { "epoch": 0.9040159144597787, "grad_norm": 0.42822906374931335, "learning_rate": 4.80127647229475e-07, "loss": 0.6517, "step": 21813 }, { "epoch": 0.9040573583654523, "grad_norm": 0.38781803846359253, "learning_rate": 4.799204277011066e-07, "loss": 0.5944, "step": 21814 }, { "epoch": 0.904098802271126, "grad_norm": 0.4216715395450592, "learning_rate": 4.797132081727383e-07, "loss": 0.6654, "step": 21815 }, { "epoch": 0.9041402461767997, "grad_norm": 0.41587114334106445, "learning_rate": 4.795059886443699e-07, "loss": 0.6495, "step": 21816 }, { "epoch": 0.9041816900824734, "grad_norm": 0.45223137736320496, "learning_rate": 4.792987691160015e-07, "loss": 0.6786, "step": 21817 }, { "epoch": 0.904223133988147, "grad_norm": 0.39984941482543945, "learning_rate": 4.790915495876332e-07, "loss": 0.6467, "step": 21818 }, { "epoch": 0.9042645778938208, "grad_norm": 0.5335744023323059, "learning_rate": 4.788843300592648e-07, "loss": 0.6859, "step": 21819 }, { "epoch": 0.9043060217994944, "grad_norm": 0.42930686473846436, "learning_rate": 4.786771105308965e-07, "loss": 0.6436, "step": 21820 }, { "epoch": 0.9043474657051681, "grad_norm": 0.41653481125831604, "learning_rate": 4.784698910025281e-07, "loss": 0.5924, "step": 21821 }, { "epoch": 0.9043889096108417, "grad_norm": 0.4115052819252014, "learning_rate": 4.782626714741598e-07, "loss": 0.649, "step": 21822 }, { "epoch": 0.9044303535165154, "grad_norm": 0.4311596751213074, "learning_rate": 4.780554519457914e-07, "loss": 0.6598, "step": 21823 }, { "epoch": 
0.9044717974221891, "grad_norm": 0.4343392848968506, "learning_rate": 4.778482324174231e-07, "loss": 0.688, "step": 21824 }, { "epoch": 0.9045132413278627, "grad_norm": 0.383236289024353, "learning_rate": 4.776410128890547e-07, "loss": 0.6514, "step": 21825 }, { "epoch": 0.9045546852335364, "grad_norm": 0.4686683714389801, "learning_rate": 4.774337933606864e-07, "loss": 0.6909, "step": 21826 }, { "epoch": 0.9045961291392101, "grad_norm": 0.44237831234931946, "learning_rate": 4.77226573832318e-07, "loss": 0.6925, "step": 21827 }, { "epoch": 0.9046375730448838, "grad_norm": 0.4202333688735962, "learning_rate": 4.770193543039497e-07, "loss": 0.6427, "step": 21828 }, { "epoch": 0.9046790169505574, "grad_norm": 0.43009433150291443, "learning_rate": 4.768121347755813e-07, "loss": 0.6407, "step": 21829 }, { "epoch": 0.9047204608562311, "grad_norm": 0.4319974482059479, "learning_rate": 4.766049152472129e-07, "loss": 0.6824, "step": 21830 }, { "epoch": 0.9047619047619048, "grad_norm": 0.40904760360717773, "learning_rate": 4.763976957188446e-07, "loss": 0.6404, "step": 21831 }, { "epoch": 0.9048033486675784, "grad_norm": 0.4286786615848541, "learning_rate": 4.7619047619047623e-07, "loss": 0.6536, "step": 21832 }, { "epoch": 0.9048447925732521, "grad_norm": 0.415861040353775, "learning_rate": 4.759832566621079e-07, "loss": 0.6387, "step": 21833 }, { "epoch": 0.9048862364789257, "grad_norm": 0.4225499927997589, "learning_rate": 4.7577603713373953e-07, "loss": 0.6683, "step": 21834 }, { "epoch": 0.9049276803845995, "grad_norm": 0.4072605073451996, "learning_rate": 4.755688176053712e-07, "loss": 0.6633, "step": 21835 }, { "epoch": 0.9049691242902731, "grad_norm": 0.42319801449775696, "learning_rate": 4.753615980770028e-07, "loss": 0.6719, "step": 21836 }, { "epoch": 0.9050105681959468, "grad_norm": 0.4009440839290619, "learning_rate": 4.751543785486345e-07, "loss": 0.6562, "step": 21837 }, { "epoch": 0.9050520121016205, "grad_norm": 0.4773741364479065, "learning_rate": 
4.749471590202661e-07, "loss": 0.6685, "step": 21838 }, { "epoch": 0.9050934560072941, "grad_norm": 0.3910205066204071, "learning_rate": 4.747399394918978e-07, "loss": 0.6338, "step": 21839 }, { "epoch": 0.9051348999129678, "grad_norm": 0.3949735462665558, "learning_rate": 4.745327199635294e-07, "loss": 0.6263, "step": 21840 }, { "epoch": 0.9051763438186414, "grad_norm": 0.4101082384586334, "learning_rate": 4.743255004351611e-07, "loss": 0.6216, "step": 21841 }, { "epoch": 0.9052177877243152, "grad_norm": 0.4428708851337433, "learning_rate": 4.741182809067927e-07, "loss": 0.6558, "step": 21842 }, { "epoch": 0.9052592316299888, "grad_norm": 0.3821789026260376, "learning_rate": 4.7391106137842433e-07, "loss": 0.5985, "step": 21843 }, { "epoch": 0.9053006755356625, "grad_norm": 0.43065911531448364, "learning_rate": 4.73703841850056e-07, "loss": 0.678, "step": 21844 }, { "epoch": 0.9053421194413361, "grad_norm": 0.4532856047153473, "learning_rate": 4.7349662232168763e-07, "loss": 0.6516, "step": 21845 }, { "epoch": 0.9053835633470099, "grad_norm": 0.519549548625946, "learning_rate": 4.732894027933193e-07, "loss": 0.6705, "step": 21846 }, { "epoch": 0.9054250072526835, "grad_norm": 0.42888814210891724, "learning_rate": 4.7308218326495093e-07, "loss": 0.6409, "step": 21847 }, { "epoch": 0.9054664511583571, "grad_norm": 0.44154980778694153, "learning_rate": 4.728749637365826e-07, "loss": 0.7161, "step": 21848 }, { "epoch": 0.9055078950640308, "grad_norm": 0.4338739812374115, "learning_rate": 4.7266774420821423e-07, "loss": 0.7103, "step": 21849 }, { "epoch": 0.9055493389697045, "grad_norm": 0.44967222213745117, "learning_rate": 4.724605246798459e-07, "loss": 0.6931, "step": 21850 }, { "epoch": 0.9055907828753782, "grad_norm": 0.40397676825523376, "learning_rate": 4.7225330515147753e-07, "loss": 0.6584, "step": 21851 }, { "epoch": 0.9056322267810518, "grad_norm": 0.41361865401268005, "learning_rate": 4.720460856231092e-07, "loss": 0.6486, "step": 21852 }, { "epoch": 
0.9056736706867256, "grad_norm": 0.42927393317222595, "learning_rate": 4.7183886609474083e-07, "loss": 0.6436, "step": 21853 }, { "epoch": 0.9057151145923992, "grad_norm": 0.4177165925502777, "learning_rate": 4.7163164656637243e-07, "loss": 0.6389, "step": 21854 }, { "epoch": 0.9057565584980729, "grad_norm": 0.4381028115749359, "learning_rate": 4.714244270380041e-07, "loss": 0.6863, "step": 21855 }, { "epoch": 0.9057980024037465, "grad_norm": 0.41930004954338074, "learning_rate": 4.7121720750963573e-07, "loss": 0.6681, "step": 21856 }, { "epoch": 0.9058394463094201, "grad_norm": 0.46559232473373413, "learning_rate": 4.710099879812674e-07, "loss": 0.6669, "step": 21857 }, { "epoch": 0.9058808902150939, "grad_norm": 0.43353271484375, "learning_rate": 4.7080276845289903e-07, "loss": 0.6804, "step": 21858 }, { "epoch": 0.9059223341207675, "grad_norm": 0.4458071291446686, "learning_rate": 4.705955489245307e-07, "loss": 0.6609, "step": 21859 }, { "epoch": 0.9059637780264412, "grad_norm": 0.4055100679397583, "learning_rate": 4.7038832939616233e-07, "loss": 0.6891, "step": 21860 }, { "epoch": 0.9060052219321149, "grad_norm": 0.407653272151947, "learning_rate": 4.70181109867794e-07, "loss": 0.5922, "step": 21861 }, { "epoch": 0.9060466658377886, "grad_norm": 0.4469847083091736, "learning_rate": 4.6997389033942563e-07, "loss": 0.7156, "step": 21862 }, { "epoch": 0.9060881097434622, "grad_norm": 0.4088365435600281, "learning_rate": 4.697666708110573e-07, "loss": 0.6755, "step": 21863 }, { "epoch": 0.9061295536491359, "grad_norm": 0.40674006938934326, "learning_rate": 4.6955945128268893e-07, "loss": 0.6138, "step": 21864 }, { "epoch": 0.9061709975548096, "grad_norm": 0.4210219085216522, "learning_rate": 4.693522317543206e-07, "loss": 0.6562, "step": 21865 }, { "epoch": 0.9062124414604832, "grad_norm": 0.426429808139801, "learning_rate": 4.6914501222595223e-07, "loss": 0.636, "step": 21866 }, { "epoch": 0.9062538853661569, "grad_norm": 0.4359915554523468, "learning_rate": 
4.6893779269758383e-07, "loss": 0.752, "step": 21867 }, { "epoch": 0.9062953292718305, "grad_norm": 0.4718107581138611, "learning_rate": 4.6873057316921553e-07, "loss": 0.7227, "step": 21868 }, { "epoch": 0.9063367731775043, "grad_norm": 0.4304783344268799, "learning_rate": 4.6852335364084713e-07, "loss": 0.6841, "step": 21869 }, { "epoch": 0.9063782170831779, "grad_norm": 0.4141804277896881, "learning_rate": 4.6831613411247883e-07, "loss": 0.6493, "step": 21870 }, { "epoch": 0.9064196609888516, "grad_norm": 0.45378121733665466, "learning_rate": 4.6810891458411043e-07, "loss": 0.7488, "step": 21871 }, { "epoch": 0.9064611048945252, "grad_norm": 0.40535059571266174, "learning_rate": 4.679016950557421e-07, "loss": 0.6337, "step": 21872 }, { "epoch": 0.906502548800199, "grad_norm": 0.4472917318344116, "learning_rate": 4.6769447552737373e-07, "loss": 0.6449, "step": 21873 }, { "epoch": 0.9065439927058726, "grad_norm": 0.39975082874298096, "learning_rate": 4.674872559990054e-07, "loss": 0.615, "step": 21874 }, { "epoch": 0.9065854366115462, "grad_norm": 0.38931435346603394, "learning_rate": 4.6728003647063703e-07, "loss": 0.631, "step": 21875 }, { "epoch": 0.90662688051722, "grad_norm": 0.4334350824356079, "learning_rate": 4.670728169422687e-07, "loss": 0.6769, "step": 21876 }, { "epoch": 0.9066683244228936, "grad_norm": 0.4389737546443939, "learning_rate": 4.6686559741390033e-07, "loss": 0.6421, "step": 21877 }, { "epoch": 0.9067097683285673, "grad_norm": 0.4046443700790405, "learning_rate": 4.66658377885532e-07, "loss": 0.6304, "step": 21878 }, { "epoch": 0.9067512122342409, "grad_norm": 0.40728917717933655, "learning_rate": 4.6645115835716363e-07, "loss": 0.6309, "step": 21879 }, { "epoch": 0.9067926561399147, "grad_norm": 0.41511136293411255, "learning_rate": 4.6624393882879523e-07, "loss": 0.6647, "step": 21880 }, { "epoch": 0.9068341000455883, "grad_norm": 0.5885186195373535, "learning_rate": 4.6603671930042694e-07, "loss": 0.6678, "step": 21881 }, { "epoch": 
0.906875543951262, "grad_norm": 0.4216212034225464, "learning_rate": 4.6582949977205853e-07, "loss": 0.7012, "step": 21882 }, { "epoch": 0.9069169878569356, "grad_norm": 0.4141342043876648, "learning_rate": 4.6562228024369024e-07, "loss": 0.6354, "step": 21883 }, { "epoch": 0.9069584317626093, "grad_norm": 0.47348377108573914, "learning_rate": 4.6541506071532183e-07, "loss": 0.7502, "step": 21884 }, { "epoch": 0.906999875668283, "grad_norm": 0.41425594687461853, "learning_rate": 4.6520784118695354e-07, "loss": 0.6205, "step": 21885 }, { "epoch": 0.9070413195739566, "grad_norm": 0.4626140296459198, "learning_rate": 4.6500062165858513e-07, "loss": 0.6932, "step": 21886 }, { "epoch": 0.9070827634796303, "grad_norm": 0.42274755239486694, "learning_rate": 4.6479340213021684e-07, "loss": 0.6451, "step": 21887 }, { "epoch": 0.907124207385304, "grad_norm": 0.40548062324523926, "learning_rate": 4.6458618260184843e-07, "loss": 0.6821, "step": 21888 }, { "epoch": 0.9071656512909777, "grad_norm": 0.4095577299594879, "learning_rate": 4.6437896307348014e-07, "loss": 0.6631, "step": 21889 }, { "epoch": 0.9072070951966513, "grad_norm": 0.4232381582260132, "learning_rate": 4.6417174354511174e-07, "loss": 0.6317, "step": 21890 }, { "epoch": 0.9072485391023251, "grad_norm": 0.4019046127796173, "learning_rate": 4.6396452401674333e-07, "loss": 0.6663, "step": 21891 }, { "epoch": 0.9072899830079987, "grad_norm": 0.4106959402561188, "learning_rate": 4.6375730448837504e-07, "loss": 0.6738, "step": 21892 }, { "epoch": 0.9073314269136723, "grad_norm": 0.4423587918281555, "learning_rate": 4.6355008496000663e-07, "loss": 0.6569, "step": 21893 }, { "epoch": 0.907372870819346, "grad_norm": 0.4258795380592346, "learning_rate": 4.6334286543163834e-07, "loss": 0.7046, "step": 21894 }, { "epoch": 0.9074143147250197, "grad_norm": 0.4396324157714844, "learning_rate": 4.6313564590326993e-07, "loss": 0.7324, "step": 21895 }, { "epoch": 0.9074557586306934, "grad_norm": 0.4092320203781128, 
"learning_rate": 4.6292842637490164e-07, "loss": 0.6588, "step": 21896 }, { "epoch": 0.907497202536367, "grad_norm": 0.46263691782951355, "learning_rate": 4.6272120684653324e-07, "loss": 0.6875, "step": 21897 }, { "epoch": 0.9075386464420407, "grad_norm": 0.4211302101612091, "learning_rate": 4.6251398731816494e-07, "loss": 0.6006, "step": 21898 }, { "epoch": 0.9075800903477144, "grad_norm": 0.42266300320625305, "learning_rate": 4.6230676778979654e-07, "loss": 0.6685, "step": 21899 }, { "epoch": 0.907621534253388, "grad_norm": 0.4128749370574951, "learning_rate": 4.6209954826142824e-07, "loss": 0.6694, "step": 21900 }, { "epoch": 0.9076629781590617, "grad_norm": 0.40134397149086, "learning_rate": 4.6189232873305984e-07, "loss": 0.6202, "step": 21901 }, { "epoch": 0.9077044220647353, "grad_norm": 0.4068869650363922, "learning_rate": 4.6168510920469154e-07, "loss": 0.6194, "step": 21902 }, { "epoch": 0.9077458659704091, "grad_norm": 0.40625011920928955, "learning_rate": 4.6147788967632314e-07, "loss": 0.6892, "step": 21903 }, { "epoch": 0.9077873098760827, "grad_norm": 0.4061359465122223, "learning_rate": 4.6127067014795473e-07, "loss": 0.6736, "step": 21904 }, { "epoch": 0.9078287537817564, "grad_norm": 0.4065978229045868, "learning_rate": 4.6106345061958644e-07, "loss": 0.5699, "step": 21905 }, { "epoch": 0.90787019768743, "grad_norm": 0.45524531602859497, "learning_rate": 4.6085623109121804e-07, "loss": 0.7012, "step": 21906 }, { "epoch": 0.9079116415931038, "grad_norm": 0.39973676204681396, "learning_rate": 4.6064901156284974e-07, "loss": 0.6704, "step": 21907 }, { "epoch": 0.9079530854987774, "grad_norm": 0.44014686346054077, "learning_rate": 4.6044179203448134e-07, "loss": 0.6669, "step": 21908 }, { "epoch": 0.907994529404451, "grad_norm": 0.4096478819847107, "learning_rate": 4.6023457250611304e-07, "loss": 0.6682, "step": 21909 }, { "epoch": 0.9080359733101248, "grad_norm": 0.4659821391105652, "learning_rate": 4.6002735297774464e-07, "loss": 0.6686, "step": 
21910 }, { "epoch": 0.9080774172157984, "grad_norm": 0.4361088275909424, "learning_rate": 4.5982013344937634e-07, "loss": 0.679, "step": 21911 }, { "epoch": 0.9081188611214721, "grad_norm": 0.4240829050540924, "learning_rate": 4.5961291392100794e-07, "loss": 0.6421, "step": 21912 }, { "epoch": 0.9081603050271457, "grad_norm": 0.4424685537815094, "learning_rate": 4.5940569439263964e-07, "loss": 0.7422, "step": 21913 }, { "epoch": 0.9082017489328195, "grad_norm": 0.4150722026824951, "learning_rate": 4.5919847486427124e-07, "loss": 0.6754, "step": 21914 }, { "epoch": 0.9082431928384931, "grad_norm": 0.45071372389793396, "learning_rate": 4.5899125533590294e-07, "loss": 0.6362, "step": 21915 }, { "epoch": 0.9082846367441668, "grad_norm": 0.4109066128730774, "learning_rate": 4.5878403580753454e-07, "loss": 0.665, "step": 21916 }, { "epoch": 0.9083260806498404, "grad_norm": 0.39218297600746155, "learning_rate": 4.5857681627916614e-07, "loss": 0.6315, "step": 21917 }, { "epoch": 0.9083675245555141, "grad_norm": 0.4137602150440216, "learning_rate": 4.5836959675079784e-07, "loss": 0.6589, "step": 21918 }, { "epoch": 0.9084089684611878, "grad_norm": 0.4427436292171478, "learning_rate": 4.5816237722242944e-07, "loss": 0.7095, "step": 21919 }, { "epoch": 0.9084504123668614, "grad_norm": 0.4001578390598297, "learning_rate": 4.5795515769406114e-07, "loss": 0.6466, "step": 21920 }, { "epoch": 0.9084918562725351, "grad_norm": 0.3878651261329651, "learning_rate": 4.5774793816569274e-07, "loss": 0.5944, "step": 21921 }, { "epoch": 0.9085333001782088, "grad_norm": 0.40754836797714233, "learning_rate": 4.5754071863732444e-07, "loss": 0.6643, "step": 21922 }, { "epoch": 0.9085747440838825, "grad_norm": 0.4177807867527008, "learning_rate": 4.5733349910895604e-07, "loss": 0.6506, "step": 21923 }, { "epoch": 0.9086161879895561, "grad_norm": 0.3908924460411072, "learning_rate": 4.5712627958058774e-07, "loss": 0.6453, "step": 21924 }, { "epoch": 0.9086576318952299, "grad_norm": 
0.4043509364128113, "learning_rate": 4.5691906005221934e-07, "loss": 0.6527, "step": 21925 }, { "epoch": 0.9086990758009035, "grad_norm": 0.434023916721344, "learning_rate": 4.5671184052385104e-07, "loss": 0.6661, "step": 21926 }, { "epoch": 0.9087405197065771, "grad_norm": 0.4161599576473236, "learning_rate": 4.5650462099548264e-07, "loss": 0.717, "step": 21927 }, { "epoch": 0.9087819636122508, "grad_norm": 0.38105711340904236, "learning_rate": 4.562974014671143e-07, "loss": 0.5566, "step": 21928 }, { "epoch": 0.9088234075179245, "grad_norm": 0.43955835700035095, "learning_rate": 4.5609018193874594e-07, "loss": 0.647, "step": 21929 }, { "epoch": 0.9088648514235982, "grad_norm": 0.436076819896698, "learning_rate": 4.5588296241037754e-07, "loss": 0.6602, "step": 21930 }, { "epoch": 0.9089062953292718, "grad_norm": 0.4477609694004059, "learning_rate": 4.5567574288200924e-07, "loss": 0.6985, "step": 21931 }, { "epoch": 0.9089477392349455, "grad_norm": 0.39782240986824036, "learning_rate": 4.5546852335364084e-07, "loss": 0.6471, "step": 21932 }, { "epoch": 0.9089891831406192, "grad_norm": 0.3890943229198456, "learning_rate": 4.5526130382527254e-07, "loss": 0.6461, "step": 21933 }, { "epoch": 0.9090306270462929, "grad_norm": 0.3940759003162384, "learning_rate": 4.5505408429690414e-07, "loss": 0.6405, "step": 21934 }, { "epoch": 0.9090720709519665, "grad_norm": 0.4153643548488617, "learning_rate": 4.5484686476853584e-07, "loss": 0.6406, "step": 21935 }, { "epoch": 0.9091135148576401, "grad_norm": 0.39626652002334595, "learning_rate": 4.5463964524016744e-07, "loss": 0.6571, "step": 21936 }, { "epoch": 0.9091549587633139, "grad_norm": 0.4438709020614624, "learning_rate": 4.5443242571179914e-07, "loss": 0.6987, "step": 21937 }, { "epoch": 0.9091964026689875, "grad_norm": 0.4175708591938019, "learning_rate": 4.5422520618343074e-07, "loss": 0.6433, "step": 21938 }, { "epoch": 0.9092378465746612, "grad_norm": 0.385701984167099, "learning_rate": 4.5401798665506244e-07, "loss": 
0.6625, "step": 21939 }, { "epoch": 0.9092792904803348, "grad_norm": 0.4024161994457245, "learning_rate": 4.5381076712669404e-07, "loss": 0.6106, "step": 21940 }, { "epoch": 0.9093207343860086, "grad_norm": 0.44621536135673523, "learning_rate": 4.536035475983257e-07, "loss": 0.7097, "step": 21941 }, { "epoch": 0.9093621782916822, "grad_norm": 0.40440747141838074, "learning_rate": 4.5339632806995734e-07, "loss": 0.6637, "step": 21942 }, { "epoch": 0.9094036221973559, "grad_norm": 0.39670151472091675, "learning_rate": 4.53189108541589e-07, "loss": 0.6516, "step": 21943 }, { "epoch": 0.9094450661030296, "grad_norm": 0.4108840525150299, "learning_rate": 4.5298188901322064e-07, "loss": 0.649, "step": 21944 }, { "epoch": 0.9094865100087032, "grad_norm": 0.4383496344089508, "learning_rate": 4.527746694848523e-07, "loss": 0.6748, "step": 21945 }, { "epoch": 0.9095279539143769, "grad_norm": 0.4075096547603607, "learning_rate": 4.5256744995648394e-07, "loss": 0.6283, "step": 21946 }, { "epoch": 0.9095693978200505, "grad_norm": 0.4298741817474365, "learning_rate": 4.523602304281156e-07, "loss": 0.6432, "step": 21947 }, { "epoch": 0.9096108417257243, "grad_norm": 0.4200645089149475, "learning_rate": 4.5215301089974724e-07, "loss": 0.6765, "step": 21948 }, { "epoch": 0.9096522856313979, "grad_norm": 0.42554399371147156, "learning_rate": 4.5194579137137884e-07, "loss": 0.6326, "step": 21949 }, { "epoch": 0.9096937295370716, "grad_norm": 0.51891028881073, "learning_rate": 4.5173857184301054e-07, "loss": 0.6926, "step": 21950 }, { "epoch": 0.9097351734427452, "grad_norm": 0.39836055040359497, "learning_rate": 4.5153135231464214e-07, "loss": 0.6775, "step": 21951 }, { "epoch": 0.909776617348419, "grad_norm": 0.44383516907691956, "learning_rate": 4.5132413278627385e-07, "loss": 0.6877, "step": 21952 }, { "epoch": 0.9098180612540926, "grad_norm": 0.38334357738494873, "learning_rate": 4.5111691325790544e-07, "loss": 0.621, "step": 21953 }, { "epoch": 0.9098595051597662, "grad_norm": 
0.4602614939212799, "learning_rate": 4.509096937295371e-07, "loss": 0.6599, "step": 21954 }, { "epoch": 0.90990094906544, "grad_norm": 0.39133772253990173, "learning_rate": 4.5070247420116874e-07, "loss": 0.6481, "step": 21955 }, { "epoch": 0.9099423929711136, "grad_norm": 0.39993515610694885, "learning_rate": 4.504952546728004e-07, "loss": 0.6229, "step": 21956 }, { "epoch": 0.9099838368767873, "grad_norm": 0.4412272572517395, "learning_rate": 4.5028803514443204e-07, "loss": 0.6851, "step": 21957 }, { "epoch": 0.9100252807824609, "grad_norm": 0.4108656644821167, "learning_rate": 4.500808156160637e-07, "loss": 0.6387, "step": 21958 }, { "epoch": 0.9100667246881347, "grad_norm": 0.4274214208126068, "learning_rate": 4.4987359608769535e-07, "loss": 0.6506, "step": 21959 }, { "epoch": 0.9101081685938083, "grad_norm": 0.4025028944015503, "learning_rate": 4.49666376559327e-07, "loss": 0.6488, "step": 21960 }, { "epoch": 0.9101496124994819, "grad_norm": 0.46380704641342163, "learning_rate": 4.4945915703095865e-07, "loss": 0.7078, "step": 21961 }, { "epoch": 0.9101910564051556, "grad_norm": 0.41215527057647705, "learning_rate": 4.492519375025903e-07, "loss": 0.6287, "step": 21962 }, { "epoch": 0.9102325003108293, "grad_norm": 0.4708963632583618, "learning_rate": 4.4904471797422195e-07, "loss": 0.6975, "step": 21963 }, { "epoch": 0.910273944216503, "grad_norm": 0.4384475350379944, "learning_rate": 4.488374984458536e-07, "loss": 0.6367, "step": 21964 }, { "epoch": 0.9103153881221766, "grad_norm": 0.45328259468078613, "learning_rate": 4.486302789174852e-07, "loss": 0.6735, "step": 21965 }, { "epoch": 0.9103568320278503, "grad_norm": 0.4006426930427551, "learning_rate": 4.484230593891169e-07, "loss": 0.6594, "step": 21966 }, { "epoch": 0.910398275933524, "grad_norm": 0.45825493335723877, "learning_rate": 4.482158398607485e-07, "loss": 0.7468, "step": 21967 }, { "epoch": 0.9104397198391977, "grad_norm": 0.4354480504989624, "learning_rate": 4.4800862033238015e-07, "loss": 
0.6713, "step": 21968 }, { "epoch": 0.9104811637448713, "grad_norm": 0.4258207082748413, "learning_rate": 4.478014008040118e-07, "loss": 0.6912, "step": 21969 }, { "epoch": 0.9105226076505449, "grad_norm": 0.3814166188240051, "learning_rate": 4.4759418127564345e-07, "loss": 0.6454, "step": 21970 }, { "epoch": 0.9105640515562187, "grad_norm": 0.466389536857605, "learning_rate": 4.473869617472751e-07, "loss": 0.7026, "step": 21971 }, { "epoch": 0.9106054954618923, "grad_norm": 0.42857474088668823, "learning_rate": 4.4717974221890675e-07, "loss": 0.6694, "step": 21972 }, { "epoch": 0.910646939367566, "grad_norm": 0.44757792353630066, "learning_rate": 4.469725226905384e-07, "loss": 0.7053, "step": 21973 }, { "epoch": 0.9106883832732396, "grad_norm": 0.3950522243976593, "learning_rate": 4.4676530316217005e-07, "loss": 0.6348, "step": 21974 }, { "epoch": 0.9107298271789134, "grad_norm": 0.3857004642486572, "learning_rate": 4.465580836338017e-07, "loss": 0.6711, "step": 21975 }, { "epoch": 0.910771271084587, "grad_norm": 0.40685707330703735, "learning_rate": 4.4635086410543335e-07, "loss": 0.6753, "step": 21976 }, { "epoch": 0.9108127149902607, "grad_norm": 0.42790019512176514, "learning_rate": 4.46143644577065e-07, "loss": 0.6578, "step": 21977 }, { "epoch": 0.9108541588959344, "grad_norm": 0.4190075099468231, "learning_rate": 4.459364250486966e-07, "loss": 0.7056, "step": 21978 }, { "epoch": 0.910895602801608, "grad_norm": 0.4036070704460144, "learning_rate": 4.457292055203283e-07, "loss": 0.656, "step": 21979 }, { "epoch": 0.9109370467072817, "grad_norm": 0.40884602069854736, "learning_rate": 4.455219859919599e-07, "loss": 0.6571, "step": 21980 }, { "epoch": 0.9109784906129553, "grad_norm": 0.4441774785518646, "learning_rate": 4.453147664635916e-07, "loss": 0.6443, "step": 21981 }, { "epoch": 0.9110199345186291, "grad_norm": 0.41033732891082764, "learning_rate": 4.451075469352232e-07, "loss": 0.6296, "step": 21982 }, { "epoch": 0.9110613784243027, "grad_norm": 
0.4200485050678253, "learning_rate": 4.449003274068549e-07, "loss": 0.6843, "step": 21983 }, { "epoch": 0.9111028223299764, "grad_norm": 0.4122755825519562, "learning_rate": 4.446931078784865e-07, "loss": 0.6691, "step": 21984 }, { "epoch": 0.91114426623565, "grad_norm": 0.41610583662986755, "learning_rate": 4.4448588835011815e-07, "loss": 0.6558, "step": 21985 }, { "epoch": 0.9111857101413238, "grad_norm": 0.4091094136238098, "learning_rate": 4.442786688217498e-07, "loss": 0.6469, "step": 21986 }, { "epoch": 0.9112271540469974, "grad_norm": 0.4278522729873657, "learning_rate": 4.4407144929338145e-07, "loss": 0.657, "step": 21987 }, { "epoch": 0.911268597952671, "grad_norm": 0.4479804039001465, "learning_rate": 4.438642297650131e-07, "loss": 0.6539, "step": 21988 }, { "epoch": 0.9113100418583447, "grad_norm": 0.39609208703041077, "learning_rate": 4.4365701023664475e-07, "loss": 0.6855, "step": 21989 }, { "epoch": 0.9113514857640184, "grad_norm": 0.4221287667751312, "learning_rate": 4.434497907082764e-07, "loss": 0.6411, "step": 21990 }, { "epoch": 0.9113929296696921, "grad_norm": 0.41076070070266724, "learning_rate": 4.43242571179908e-07, "loss": 0.6555, "step": 21991 }, { "epoch": 0.9114343735753657, "grad_norm": 0.44083017110824585, "learning_rate": 4.430353516515397e-07, "loss": 0.6818, "step": 21992 }, { "epoch": 0.9114758174810395, "grad_norm": 0.4445989727973938, "learning_rate": 4.428281321231713e-07, "loss": 0.6917, "step": 21993 }, { "epoch": 0.9115172613867131, "grad_norm": 0.41486185789108276, "learning_rate": 4.42620912594803e-07, "loss": 0.6536, "step": 21994 }, { "epoch": 0.9115587052923868, "grad_norm": 0.43957579135894775, "learning_rate": 4.424136930664346e-07, "loss": 0.6379, "step": 21995 }, { "epoch": 0.9116001491980604, "grad_norm": 0.41291743516921997, "learning_rate": 4.422064735380663e-07, "loss": 0.6539, "step": 21996 }, { "epoch": 0.911641593103734, "grad_norm": 0.416046142578125, "learning_rate": 4.419992540096979e-07, "loss": 0.6846, 
"step": 21997 }, { "epoch": 0.9116830370094078, "grad_norm": 0.4313727617263794, "learning_rate": 4.417920344813296e-07, "loss": 0.6499, "step": 21998 }, { "epoch": 0.9117244809150814, "grad_norm": 0.4496467113494873, "learning_rate": 4.415848149529612e-07, "loss": 0.7135, "step": 21999 }, { "epoch": 0.9117659248207551, "grad_norm": 0.4458792805671692, "learning_rate": 4.413775954245929e-07, "loss": 0.657, "step": 22000 }, { "epoch": 0.9118073687264288, "grad_norm": 0.39992889761924744, "learning_rate": 4.411703758962245e-07, "loss": 0.658, "step": 22001 }, { "epoch": 0.9118488126321025, "grad_norm": 0.40975651144981384, "learning_rate": 4.409631563678561e-07, "loss": 0.6447, "step": 22002 }, { "epoch": 0.9118902565377761, "grad_norm": 0.5595182180404663, "learning_rate": 4.407559368394878e-07, "loss": 0.7329, "step": 22003 }, { "epoch": 0.9119317004434498, "grad_norm": 0.40182971954345703, "learning_rate": 4.405487173111194e-07, "loss": 0.6836, "step": 22004 }, { "epoch": 0.9119731443491235, "grad_norm": 0.42242270708084106, "learning_rate": 4.403414977827511e-07, "loss": 0.6414, "step": 22005 }, { "epoch": 0.9120145882547971, "grad_norm": 0.38808000087738037, "learning_rate": 4.401342782543827e-07, "loss": 0.6488, "step": 22006 }, { "epoch": 0.9120560321604708, "grad_norm": 0.3949698209762573, "learning_rate": 4.399270587260144e-07, "loss": 0.6497, "step": 22007 }, { "epoch": 0.9120974760661444, "grad_norm": 0.41160544753074646, "learning_rate": 4.39719839197646e-07, "loss": 0.6862, "step": 22008 }, { "epoch": 0.9121389199718182, "grad_norm": 0.4011923372745514, "learning_rate": 4.395126196692777e-07, "loss": 0.65, "step": 22009 }, { "epoch": 0.9121803638774918, "grad_norm": 0.4130443036556244, "learning_rate": 4.393054001409093e-07, "loss": 0.6526, "step": 22010 }, { "epoch": 0.9122218077831655, "grad_norm": 0.39396265149116516, "learning_rate": 4.39098180612541e-07, "loss": 0.6409, "step": 22011 }, { "epoch": 0.9122632516888391, "grad_norm": 0.433217853307724, 
"learning_rate": 4.388909610841726e-07, "loss": 0.7363, "step": 22012 }, { "epoch": 0.9123046955945129, "grad_norm": 0.4155697226524353, "learning_rate": 4.386837415558043e-07, "loss": 0.6562, "step": 22013 }, { "epoch": 0.9123461395001865, "grad_norm": 0.4628601372241974, "learning_rate": 4.384765220274359e-07, "loss": 0.6797, "step": 22014 }, { "epoch": 0.9123875834058601, "grad_norm": 0.46119269728660583, "learning_rate": 4.382693024990675e-07, "loss": 0.6533, "step": 22015 }, { "epoch": 0.9124290273115339, "grad_norm": 0.42125385999679565, "learning_rate": 4.380620829706992e-07, "loss": 0.6631, "step": 22016 }, { "epoch": 0.9124704712172075, "grad_norm": 0.4196785092353821, "learning_rate": 4.378548634423308e-07, "loss": 0.668, "step": 22017 }, { "epoch": 0.9125119151228812, "grad_norm": 0.4031142294406891, "learning_rate": 4.376476439139625e-07, "loss": 0.6443, "step": 22018 }, { "epoch": 0.9125533590285548, "grad_norm": 0.431690514087677, "learning_rate": 4.374404243855941e-07, "loss": 0.6937, "step": 22019 }, { "epoch": 0.9125948029342286, "grad_norm": 0.4232026934623718, "learning_rate": 4.372332048572258e-07, "loss": 0.609, "step": 22020 }, { "epoch": 0.9126362468399022, "grad_norm": 0.4330886900424957, "learning_rate": 4.370259853288574e-07, "loss": 0.6357, "step": 22021 }, { "epoch": 0.9126776907455758, "grad_norm": 0.38470128178596497, "learning_rate": 4.368187658004891e-07, "loss": 0.6125, "step": 22022 }, { "epoch": 0.9127191346512495, "grad_norm": 0.41551080346107483, "learning_rate": 4.366115462721207e-07, "loss": 0.6606, "step": 22023 }, { "epoch": 0.9127605785569232, "grad_norm": 0.3932059407234192, "learning_rate": 4.364043267437524e-07, "loss": 0.6472, "step": 22024 }, { "epoch": 0.9128020224625969, "grad_norm": 0.4135592579841614, "learning_rate": 4.36197107215384e-07, "loss": 0.608, "step": 22025 }, { "epoch": 0.9128434663682705, "grad_norm": 0.40718215703964233, "learning_rate": 4.359898876870156e-07, "loss": 0.6643, "step": 22026 }, { 
"epoch": 0.9128849102739443, "grad_norm": 0.40444228053092957, "learning_rate": 4.357826681586473e-07, "loss": 0.646, "step": 22027 }, { "epoch": 0.9129263541796179, "grad_norm": 0.4239163398742676, "learning_rate": 4.355754486302789e-07, "loss": 0.6718, "step": 22028 }, { "epoch": 0.9129677980852916, "grad_norm": 0.4534132778644562, "learning_rate": 4.353682291019106e-07, "loss": 0.7458, "step": 22029 }, { "epoch": 0.9130092419909652, "grad_norm": 0.3860693871974945, "learning_rate": 4.351610095735422e-07, "loss": 0.6191, "step": 22030 }, { "epoch": 0.9130506858966388, "grad_norm": 0.3922826051712036, "learning_rate": 4.349537900451739e-07, "loss": 0.655, "step": 22031 }, { "epoch": 0.9130921298023126, "grad_norm": 0.43598267436027527, "learning_rate": 4.347465705168055e-07, "loss": 0.6982, "step": 22032 }, { "epoch": 0.9131335737079862, "grad_norm": 0.4274459779262543, "learning_rate": 4.345393509884372e-07, "loss": 0.6753, "step": 22033 }, { "epoch": 0.9131750176136599, "grad_norm": 0.44872531294822693, "learning_rate": 4.343321314600688e-07, "loss": 0.6226, "step": 22034 }, { "epoch": 0.9132164615193336, "grad_norm": 0.4802679419517517, "learning_rate": 4.341249119317005e-07, "loss": 0.7542, "step": 22035 }, { "epoch": 0.9132579054250073, "grad_norm": 0.45738622546195984, "learning_rate": 4.339176924033321e-07, "loss": 0.7058, "step": 22036 }, { "epoch": 0.9132993493306809, "grad_norm": 0.39424073696136475, "learning_rate": 4.337104728749638e-07, "loss": 0.6538, "step": 22037 }, { "epoch": 0.9133407932363546, "grad_norm": 0.40317121148109436, "learning_rate": 4.335032533465954e-07, "loss": 0.6401, "step": 22038 }, { "epoch": 0.9133822371420283, "grad_norm": 0.40300115942955017, "learning_rate": 4.3329603381822706e-07, "loss": 0.6306, "step": 22039 }, { "epoch": 0.9134236810477019, "grad_norm": 0.41830310225486755, "learning_rate": 4.330888142898587e-07, "loss": 0.6422, "step": 22040 }, { "epoch": 0.9134651249533756, "grad_norm": 0.4213850796222687, 
"learning_rate": 4.3288159476149036e-07, "loss": 0.7317, "step": 22041 }, { "epoch": 0.9135065688590492, "grad_norm": 0.41475746035575867, "learning_rate": 4.32674375233122e-07, "loss": 0.6157, "step": 22042 }, { "epoch": 0.913548012764723, "grad_norm": 0.42407694458961487, "learning_rate": 4.324671557047536e-07, "loss": 0.6875, "step": 22043 }, { "epoch": 0.9135894566703966, "grad_norm": 0.4370182454586029, "learning_rate": 4.322599361763853e-07, "loss": 0.672, "step": 22044 }, { "epoch": 0.9136309005760703, "grad_norm": 0.4042162299156189, "learning_rate": 4.320527166480169e-07, "loss": 0.6309, "step": 22045 }, { "epoch": 0.913672344481744, "grad_norm": 0.4311542510986328, "learning_rate": 4.318454971196486e-07, "loss": 0.7224, "step": 22046 }, { "epoch": 0.9137137883874177, "grad_norm": 0.4370914101600647, "learning_rate": 4.316382775912802e-07, "loss": 0.689, "step": 22047 }, { "epoch": 0.9137552322930913, "grad_norm": 0.4042140245437622, "learning_rate": 4.314310580629119e-07, "loss": 0.6653, "step": 22048 }, { "epoch": 0.9137966761987649, "grad_norm": 0.41949939727783203, "learning_rate": 4.312238385345435e-07, "loss": 0.6208, "step": 22049 }, { "epoch": 0.9138381201044387, "grad_norm": 0.38674020767211914, "learning_rate": 4.310166190061752e-07, "loss": 0.6428, "step": 22050 }, { "epoch": 0.9138795640101123, "grad_norm": 0.41467535495758057, "learning_rate": 4.308093994778068e-07, "loss": 0.6255, "step": 22051 }, { "epoch": 0.913921007915786, "grad_norm": 0.42031770944595337, "learning_rate": 4.3060217994943846e-07, "loss": 0.6649, "step": 22052 }, { "epoch": 0.9139624518214596, "grad_norm": 0.4564233422279358, "learning_rate": 4.303949604210701e-07, "loss": 0.6639, "step": 22053 }, { "epoch": 0.9140038957271334, "grad_norm": 0.4058915674686432, "learning_rate": 4.3018774089270176e-07, "loss": 0.6449, "step": 22054 }, { "epoch": 0.914045339632807, "grad_norm": 0.4156801998615265, "learning_rate": 4.299805213643334e-07, "loss": 0.6193, "step": 22055 }, { 
"epoch": 0.9140867835384807, "grad_norm": 0.39422816038131714, "learning_rate": 4.2977330183596506e-07, "loss": 0.6525, "step": 22056 }, { "epoch": 0.9141282274441543, "grad_norm": 0.38649216294288635, "learning_rate": 4.295660823075967e-07, "loss": 0.6765, "step": 22057 }, { "epoch": 0.914169671349828, "grad_norm": 0.41150787472724915, "learning_rate": 4.2935886277922836e-07, "loss": 0.6249, "step": 22058 }, { "epoch": 0.9142111152555017, "grad_norm": 0.42460593581199646, "learning_rate": 4.2915164325086e-07, "loss": 0.6697, "step": 22059 }, { "epoch": 0.9142525591611753, "grad_norm": 0.43042606115341187, "learning_rate": 4.2894442372249166e-07, "loss": 0.6335, "step": 22060 }, { "epoch": 0.914294003066849, "grad_norm": 0.39188334345817566, "learning_rate": 4.287372041941233e-07, "loss": 0.657, "step": 22061 }, { "epoch": 0.9143354469725227, "grad_norm": 0.4087007939815521, "learning_rate": 4.285299846657549e-07, "loss": 0.668, "step": 22062 }, { "epoch": 0.9143768908781964, "grad_norm": 0.41290420293807983, "learning_rate": 4.2832276513738656e-07, "loss": 0.6279, "step": 22063 }, { "epoch": 0.91441833478387, "grad_norm": 0.45419225096702576, "learning_rate": 4.281155456090182e-07, "loss": 0.6479, "step": 22064 }, { "epoch": 0.9144597786895438, "grad_norm": 0.41928571462631226, "learning_rate": 4.2790832608064986e-07, "loss": 0.6855, "step": 22065 }, { "epoch": 0.9145012225952174, "grad_norm": 0.4196389615535736, "learning_rate": 4.277011065522815e-07, "loss": 0.6785, "step": 22066 }, { "epoch": 0.914542666500891, "grad_norm": 0.39780735969543457, "learning_rate": 4.2749388702391316e-07, "loss": 0.6121, "step": 22067 }, { "epoch": 0.9145841104065647, "grad_norm": 0.4089689552783966, "learning_rate": 4.272866674955448e-07, "loss": 0.64, "step": 22068 }, { "epoch": 0.9146255543122384, "grad_norm": 0.4306671917438507, "learning_rate": 4.2707944796717646e-07, "loss": 0.6172, "step": 22069 }, { "epoch": 0.9146669982179121, "grad_norm": 0.42286109924316406, 
"learning_rate": 4.268722284388081e-07, "loss": 0.605, "step": 22070 }, { "epoch": 0.9147084421235857, "grad_norm": 0.41175487637519836, "learning_rate": 4.2666500891043976e-07, "loss": 0.6726, "step": 22071 }, { "epoch": 0.9147498860292594, "grad_norm": 0.41872844099998474, "learning_rate": 4.264577893820714e-07, "loss": 0.6763, "step": 22072 }, { "epoch": 0.9147913299349331, "grad_norm": 0.4220089912414551, "learning_rate": 4.2625056985370306e-07, "loss": 0.6331, "step": 22073 }, { "epoch": 0.9148327738406067, "grad_norm": 0.4154084622859955, "learning_rate": 4.260433503253347e-07, "loss": 0.6609, "step": 22074 }, { "epoch": 0.9148742177462804, "grad_norm": 0.4035089910030365, "learning_rate": 4.2583613079696636e-07, "loss": 0.6476, "step": 22075 }, { "epoch": 0.914915661651954, "grad_norm": 0.42882323265075684, "learning_rate": 4.2562891126859796e-07, "loss": 0.6831, "step": 22076 }, { "epoch": 0.9149571055576278, "grad_norm": 0.43196332454681396, "learning_rate": 4.2542169174022966e-07, "loss": 0.6411, "step": 22077 }, { "epoch": 0.9149985494633014, "grad_norm": 0.4543108642101288, "learning_rate": 4.2521447221186126e-07, "loss": 0.6669, "step": 22078 }, { "epoch": 0.9150399933689751, "grad_norm": 0.3963073492050171, "learning_rate": 4.2500725268349296e-07, "loss": 0.6545, "step": 22079 }, { "epoch": 0.9150814372746487, "grad_norm": 0.41498783230781555, "learning_rate": 4.2480003315512456e-07, "loss": 0.679, "step": 22080 }, { "epoch": 0.9151228811803225, "grad_norm": 0.38928982615470886, "learning_rate": 4.245928136267562e-07, "loss": 0.6365, "step": 22081 }, { "epoch": 0.9151643250859961, "grad_norm": 0.4207240641117096, "learning_rate": 4.2438559409838786e-07, "loss": 0.6577, "step": 22082 }, { "epoch": 0.9152057689916697, "grad_norm": 0.4153026342391968, "learning_rate": 4.241783745700195e-07, "loss": 0.6445, "step": 22083 }, { "epoch": 0.9152472128973435, "grad_norm": 0.43801453709602356, "learning_rate": 4.2397115504165116e-07, "loss": 0.7031, "step": 
22084 }, { "epoch": 0.9152886568030171, "grad_norm": 0.4115074872970581, "learning_rate": 4.237639355132828e-07, "loss": 0.6948, "step": 22085 }, { "epoch": 0.9153301007086908, "grad_norm": 0.4357074797153473, "learning_rate": 4.2355671598491446e-07, "loss": 0.6864, "step": 22086 }, { "epoch": 0.9153715446143644, "grad_norm": 0.44178858399391174, "learning_rate": 4.233494964565461e-07, "loss": 0.6357, "step": 22087 }, { "epoch": 0.9154129885200382, "grad_norm": 0.4617457091808319, "learning_rate": 4.2314227692817776e-07, "loss": 0.6183, "step": 22088 }, { "epoch": 0.9154544324257118, "grad_norm": 0.43514496088027954, "learning_rate": 4.2293505739980936e-07, "loss": 0.6959, "step": 22089 }, { "epoch": 0.9154958763313855, "grad_norm": 0.3878042995929718, "learning_rate": 4.2272783787144106e-07, "loss": 0.611, "step": 22090 }, { "epoch": 0.9155373202370591, "grad_norm": 0.4126483201980591, "learning_rate": 4.2252061834307266e-07, "loss": 0.6809, "step": 22091 }, { "epoch": 0.9155787641427328, "grad_norm": 0.4401007294654846, "learning_rate": 4.2231339881470437e-07, "loss": 0.7075, "step": 22092 }, { "epoch": 0.9156202080484065, "grad_norm": 0.4173707365989685, "learning_rate": 4.2210617928633596e-07, "loss": 0.6227, "step": 22093 }, { "epoch": 0.9156616519540801, "grad_norm": 0.4250272810459137, "learning_rate": 4.2189895975796767e-07, "loss": 0.6339, "step": 22094 }, { "epoch": 0.9157030958597538, "grad_norm": 0.38277488946914673, "learning_rate": 4.2169174022959926e-07, "loss": 0.6501, "step": 22095 }, { "epoch": 0.9157445397654275, "grad_norm": 0.45190346240997314, "learning_rate": 4.2148452070123097e-07, "loss": 0.6846, "step": 22096 }, { "epoch": 0.9157859836711012, "grad_norm": 0.40072646737098694, "learning_rate": 4.2127730117286256e-07, "loss": 0.5901, "step": 22097 }, { "epoch": 0.9158274275767748, "grad_norm": 0.40093716979026794, "learning_rate": 4.2107008164449427e-07, "loss": 0.647, "step": 22098 }, { "epoch": 0.9158688714824486, "grad_norm": 
0.3733333349227905, "learning_rate": 4.2086286211612586e-07, "loss": 0.6316, "step": 22099 }, { "epoch": 0.9159103153881222, "grad_norm": 0.44776588678359985, "learning_rate": 4.2065564258775746e-07, "loss": 0.6304, "step": 22100 }, { "epoch": 0.9159517592937958, "grad_norm": 0.4183082580566406, "learning_rate": 4.2044842305938917e-07, "loss": 0.6985, "step": 22101 }, { "epoch": 0.9159932031994695, "grad_norm": 0.43177926540374756, "learning_rate": 4.2024120353102076e-07, "loss": 0.7092, "step": 22102 }, { "epoch": 0.9160346471051432, "grad_norm": 0.4306720495223999, "learning_rate": 4.2003398400265247e-07, "loss": 0.6677, "step": 22103 }, { "epoch": 0.9160760910108169, "grad_norm": 0.4406459331512451, "learning_rate": 4.1982676447428406e-07, "loss": 0.6584, "step": 22104 }, { "epoch": 0.9161175349164905, "grad_norm": 0.4169570207595825, "learning_rate": 4.1961954494591577e-07, "loss": 0.6575, "step": 22105 }, { "epoch": 0.9161589788221642, "grad_norm": 0.4203692078590393, "learning_rate": 4.1941232541754736e-07, "loss": 0.6599, "step": 22106 }, { "epoch": 0.9162004227278379, "grad_norm": 0.42578038573265076, "learning_rate": 4.1920510588917907e-07, "loss": 0.687, "step": 22107 }, { "epoch": 0.9162418666335116, "grad_norm": 0.41912221908569336, "learning_rate": 4.1899788636081067e-07, "loss": 0.6926, "step": 22108 }, { "epoch": 0.9162833105391852, "grad_norm": 0.4718819260597229, "learning_rate": 4.1879066683244237e-07, "loss": 0.71, "step": 22109 }, { "epoch": 0.9163247544448588, "grad_norm": 0.4216209352016449, "learning_rate": 4.1858344730407397e-07, "loss": 0.7125, "step": 22110 }, { "epoch": 0.9163661983505326, "grad_norm": 0.41354256868362427, "learning_rate": 4.1837622777570567e-07, "loss": 0.7076, "step": 22111 }, { "epoch": 0.9164076422562062, "grad_norm": 0.42258474230766296, "learning_rate": 4.1816900824733727e-07, "loss": 0.6454, "step": 22112 }, { "epoch": 0.9164490861618799, "grad_norm": 0.4206172525882721, "learning_rate": 4.1796178871896886e-07, 
"loss": 0.7007, "step": 22113 }, { "epoch": 0.9164905300675535, "grad_norm": 0.4062540531158447, "learning_rate": 4.1775456919060057e-07, "loss": 0.6541, "step": 22114 }, { "epoch": 0.9165319739732273, "grad_norm": 0.39082372188568115, "learning_rate": 4.1754734966223216e-07, "loss": 0.6409, "step": 22115 }, { "epoch": 0.9165734178789009, "grad_norm": 0.42778483033180237, "learning_rate": 4.1734013013386387e-07, "loss": 0.7219, "step": 22116 }, { "epoch": 0.9166148617845746, "grad_norm": 0.4433804154396057, "learning_rate": 4.1713291060549547e-07, "loss": 0.7152, "step": 22117 }, { "epoch": 0.9166563056902483, "grad_norm": 0.3983723521232605, "learning_rate": 4.1692569107712717e-07, "loss": 0.618, "step": 22118 }, { "epoch": 0.9166977495959219, "grad_norm": 0.42044684290885925, "learning_rate": 4.1671847154875877e-07, "loss": 0.6731, "step": 22119 }, { "epoch": 0.9167391935015956, "grad_norm": 0.3724820613861084, "learning_rate": 4.1651125202039047e-07, "loss": 0.627, "step": 22120 }, { "epoch": 0.9167806374072692, "grad_norm": 0.461250901222229, "learning_rate": 4.1630403249202207e-07, "loss": 0.6454, "step": 22121 }, { "epoch": 0.916822081312943, "grad_norm": 0.40330570936203003, "learning_rate": 4.1609681296365377e-07, "loss": 0.667, "step": 22122 }, { "epoch": 0.9168635252186166, "grad_norm": 0.40236085653305054, "learning_rate": 4.1588959343528537e-07, "loss": 0.6366, "step": 22123 }, { "epoch": 0.9169049691242903, "grad_norm": 0.47887974977493286, "learning_rate": 4.1568237390691707e-07, "loss": 0.6954, "step": 22124 }, { "epoch": 0.9169464130299639, "grad_norm": 0.4403918981552124, "learning_rate": 4.1547515437854867e-07, "loss": 0.6582, "step": 22125 }, { "epoch": 0.9169878569356377, "grad_norm": 0.40729039907455444, "learning_rate": 4.1526793485018027e-07, "loss": 0.6371, "step": 22126 }, { "epoch": 0.9170293008413113, "grad_norm": 0.4006808400154114, "learning_rate": 4.1506071532181197e-07, "loss": 0.6934, "step": 22127 }, { "epoch": 0.9170707447469849, 
"grad_norm": 0.4384099543094635, "learning_rate": 4.1485349579344357e-07, "loss": 0.687, "step": 22128 }, { "epoch": 0.9171121886526586, "grad_norm": 0.4215103089809418, "learning_rate": 4.1464627626507527e-07, "loss": 0.6768, "step": 22129 }, { "epoch": 0.9171536325583323, "grad_norm": 0.43386855721473694, "learning_rate": 4.1443905673670687e-07, "loss": 0.6904, "step": 22130 }, { "epoch": 0.917195076464006, "grad_norm": 0.4184907078742981, "learning_rate": 4.1423183720833857e-07, "loss": 0.6757, "step": 22131 }, { "epoch": 0.9172365203696796, "grad_norm": 0.3844473958015442, "learning_rate": 4.1402461767997017e-07, "loss": 0.6368, "step": 22132 }, { "epoch": 0.9172779642753534, "grad_norm": 0.418864905834198, "learning_rate": 4.1381739815160187e-07, "loss": 0.6106, "step": 22133 }, { "epoch": 0.917319408181027, "grad_norm": 0.4102354347705841, "learning_rate": 4.1361017862323347e-07, "loss": 0.6317, "step": 22134 }, { "epoch": 0.9173608520867006, "grad_norm": 0.41153115034103394, "learning_rate": 4.1340295909486517e-07, "loss": 0.665, "step": 22135 }, { "epoch": 0.9174022959923743, "grad_norm": 0.41083332896232605, "learning_rate": 4.1319573956649677e-07, "loss": 0.6643, "step": 22136 }, { "epoch": 0.917443739898048, "grad_norm": 0.4087878465652466, "learning_rate": 4.129885200381284e-07, "loss": 0.6724, "step": 22137 }, { "epoch": 0.9174851838037217, "grad_norm": 0.4624066650867462, "learning_rate": 4.1278130050976007e-07, "loss": 0.7079, "step": 22138 }, { "epoch": 0.9175266277093953, "grad_norm": 0.40069225430488586, "learning_rate": 4.1257408098139167e-07, "loss": 0.6512, "step": 22139 }, { "epoch": 0.917568071615069, "grad_norm": 0.4340575933456421, "learning_rate": 4.1236686145302337e-07, "loss": 0.7229, "step": 22140 }, { "epoch": 0.9176095155207427, "grad_norm": 0.43180498480796814, "learning_rate": 4.1215964192465497e-07, "loss": 0.6735, "step": 22141 }, { "epoch": 0.9176509594264164, "grad_norm": 0.45644280314445496, "learning_rate": 
4.1195242239628667e-07, "loss": 0.6956, "step": 22142 }, { "epoch": 0.91769240333209, "grad_norm": 0.4217568039894104, "learning_rate": 4.1174520286791827e-07, "loss": 0.6978, "step": 22143 }, { "epoch": 0.9177338472377636, "grad_norm": 0.40324866771698, "learning_rate": 4.1153798333954997e-07, "loss": 0.662, "step": 22144 }, { "epoch": 0.9177752911434374, "grad_norm": 0.4438250958919525, "learning_rate": 4.1133076381118157e-07, "loss": 0.6971, "step": 22145 }, { "epoch": 0.917816735049111, "grad_norm": 0.4567849040031433, "learning_rate": 4.1112354428281327e-07, "loss": 0.6703, "step": 22146 }, { "epoch": 0.9178581789547847, "grad_norm": 0.4421851634979248, "learning_rate": 4.1091632475444487e-07, "loss": 0.6924, "step": 22147 }, { "epoch": 0.9178996228604583, "grad_norm": 0.37911370396614075, "learning_rate": 4.1070910522607657e-07, "loss": 0.6204, "step": 22148 }, { "epoch": 0.9179410667661321, "grad_norm": 0.3992007076740265, "learning_rate": 4.1050188569770817e-07, "loss": 0.6184, "step": 22149 }, { "epoch": 0.9179825106718057, "grad_norm": 0.370268315076828, "learning_rate": 4.102946661693398e-07, "loss": 0.618, "step": 22150 }, { "epoch": 0.9180239545774794, "grad_norm": 0.45464763045310974, "learning_rate": 4.1008744664097147e-07, "loss": 0.6882, "step": 22151 }, { "epoch": 0.918065398483153, "grad_norm": 0.3815627694129944, "learning_rate": 4.098802271126031e-07, "loss": 0.6111, "step": 22152 }, { "epoch": 0.9181068423888267, "grad_norm": 0.42523372173309326, "learning_rate": 4.0967300758423477e-07, "loss": 0.6437, "step": 22153 }, { "epoch": 0.9181482862945004, "grad_norm": 0.41412872076034546, "learning_rate": 4.094657880558664e-07, "loss": 0.6329, "step": 22154 }, { "epoch": 0.918189730200174, "grad_norm": 0.43132030963897705, "learning_rate": 4.0925856852749807e-07, "loss": 0.6401, "step": 22155 }, { "epoch": 0.9182311741058478, "grad_norm": 0.4029728174209595, "learning_rate": 4.0905134899912967e-07, "loss": 0.62, "step": 22156 }, { "epoch": 
0.9182726180115214, "grad_norm": 0.5114448070526123, "learning_rate": 4.088441294707614e-07, "loss": 0.6687, "step": 22157 }, { "epoch": 0.9183140619171951, "grad_norm": 0.3804416060447693, "learning_rate": 4.0863690994239297e-07, "loss": 0.5996, "step": 22158 }, { "epoch": 0.9183555058228687, "grad_norm": 0.41832447052001953, "learning_rate": 4.084296904140247e-07, "loss": 0.6686, "step": 22159 }, { "epoch": 0.9183969497285425, "grad_norm": 0.3986947536468506, "learning_rate": 4.0822247088565627e-07, "loss": 0.6166, "step": 22160 }, { "epoch": 0.9184383936342161, "grad_norm": 0.44590428471565247, "learning_rate": 4.08015251357288e-07, "loss": 0.6731, "step": 22161 }, { "epoch": 0.9184798375398897, "grad_norm": 0.4256608188152313, "learning_rate": 4.0780803182891957e-07, "loss": 0.6646, "step": 22162 }, { "epoch": 0.9185212814455634, "grad_norm": 0.42692098021507263, "learning_rate": 4.076008123005512e-07, "loss": 0.6296, "step": 22163 }, { "epoch": 0.9185627253512371, "grad_norm": 0.4565250873565674, "learning_rate": 4.0739359277218287e-07, "loss": 0.7126, "step": 22164 }, { "epoch": 0.9186041692569108, "grad_norm": 0.39262405037879944, "learning_rate": 4.071863732438145e-07, "loss": 0.6113, "step": 22165 }, { "epoch": 0.9186456131625844, "grad_norm": 0.4332748055458069, "learning_rate": 4.069791537154462e-07, "loss": 0.6444, "step": 22166 }, { "epoch": 0.9186870570682582, "grad_norm": 0.4202212989330292, "learning_rate": 4.067719341870778e-07, "loss": 0.6565, "step": 22167 }, { "epoch": 0.9187285009739318, "grad_norm": 0.4355592131614685, "learning_rate": 4.065647146587095e-07, "loss": 0.6653, "step": 22168 }, { "epoch": 0.9187699448796055, "grad_norm": 0.4152291715145111, "learning_rate": 4.063574951303411e-07, "loss": 0.6592, "step": 22169 }, { "epoch": 0.9188113887852791, "grad_norm": 0.4618321657180786, "learning_rate": 4.061502756019728e-07, "loss": 0.7183, "step": 22170 }, { "epoch": 0.9188528326909527, "grad_norm": 0.44260236620903015, "learning_rate": 
4.059430560736044e-07, "loss": 0.6848, "step": 22171 }, { "epoch": 0.9188942765966265, "grad_norm": 0.4012022912502289, "learning_rate": 4.057358365452361e-07, "loss": 0.5914, "step": 22172 }, { "epoch": 0.9189357205023001, "grad_norm": 0.4166465997695923, "learning_rate": 4.055286170168677e-07, "loss": 0.6831, "step": 22173 }, { "epoch": 0.9189771644079738, "grad_norm": 0.3947274386882782, "learning_rate": 4.053213974884993e-07, "loss": 0.6215, "step": 22174 }, { "epoch": 0.9190186083136475, "grad_norm": 0.42787227034568787, "learning_rate": 4.05114177960131e-07, "loss": 0.6644, "step": 22175 }, { "epoch": 0.9190600522193212, "grad_norm": 0.4242860674858093, "learning_rate": 4.049069584317626e-07, "loss": 0.698, "step": 22176 }, { "epoch": 0.9191014961249948, "grad_norm": 0.40455371141433716, "learning_rate": 4.046997389033943e-07, "loss": 0.5961, "step": 22177 }, { "epoch": 0.9191429400306685, "grad_norm": 0.46630680561065674, "learning_rate": 4.044925193750259e-07, "loss": 0.6616, "step": 22178 }, { "epoch": 0.9191843839363422, "grad_norm": 0.43062227964401245, "learning_rate": 4.042852998466576e-07, "loss": 0.661, "step": 22179 }, { "epoch": 0.9192258278420158, "grad_norm": 0.4337824285030365, "learning_rate": 4.040780803182892e-07, "loss": 0.67, "step": 22180 }, { "epoch": 0.9192672717476895, "grad_norm": 0.42257601022720337, "learning_rate": 4.038708607899209e-07, "loss": 0.67, "step": 22181 }, { "epoch": 0.9193087156533631, "grad_norm": 0.4307631850242615, "learning_rate": 4.0366364126155253e-07, "loss": 0.6742, "step": 22182 }, { "epoch": 0.9193501595590369, "grad_norm": 0.45422986149787903, "learning_rate": 4.034564217331842e-07, "loss": 0.6643, "step": 22183 }, { "epoch": 0.9193916034647105, "grad_norm": 0.39353176951408386, "learning_rate": 4.0324920220481583e-07, "loss": 0.6332, "step": 22184 }, { "epoch": 0.9194330473703842, "grad_norm": 0.43233948945999146, "learning_rate": 4.030419826764475e-07, "loss": 0.6454, "step": 22185 }, { "epoch": 
0.9194744912760578, "grad_norm": 0.398840993642807, "learning_rate": 4.0283476314807913e-07, "loss": 0.5741, "step": 22186 }, { "epoch": 0.9195159351817316, "grad_norm": 0.3804643452167511, "learning_rate": 4.026275436197107e-07, "loss": 0.6682, "step": 22187 }, { "epoch": 0.9195573790874052, "grad_norm": 0.4021351933479309, "learning_rate": 4.0242032409134243e-07, "loss": 0.688, "step": 22188 }, { "epoch": 0.9195988229930788, "grad_norm": 0.41401660442352295, "learning_rate": 4.02213104562974e-07, "loss": 0.6674, "step": 22189 }, { "epoch": 0.9196402668987526, "grad_norm": 0.44052591919898987, "learning_rate": 4.0200588503460573e-07, "loss": 0.6726, "step": 22190 }, { "epoch": 0.9196817108044262, "grad_norm": 0.38475707173347473, "learning_rate": 4.0179866550623733e-07, "loss": 0.6694, "step": 22191 }, { "epoch": 0.9197231547100999, "grad_norm": 0.3973960280418396, "learning_rate": 4.0159144597786903e-07, "loss": 0.6128, "step": 22192 }, { "epoch": 0.9197645986157735, "grad_norm": 0.43617480993270874, "learning_rate": 4.0138422644950063e-07, "loss": 0.6523, "step": 22193 }, { "epoch": 0.9198060425214473, "grad_norm": 0.392958402633667, "learning_rate": 4.011770069211323e-07, "loss": 0.6201, "step": 22194 }, { "epoch": 0.9198474864271209, "grad_norm": 0.3944983184337616, "learning_rate": 4.0096978739276393e-07, "loss": 0.6851, "step": 22195 }, { "epoch": 0.9198889303327945, "grad_norm": 0.40340930223464966, "learning_rate": 4.007625678643956e-07, "loss": 0.6499, "step": 22196 }, { "epoch": 0.9199303742384682, "grad_norm": 0.39751264452934265, "learning_rate": 4.0055534833602723e-07, "loss": 0.6449, "step": 22197 }, { "epoch": 0.9199718181441419, "grad_norm": 0.43647652864456177, "learning_rate": 4.003481288076589e-07, "loss": 0.6189, "step": 22198 }, { "epoch": 0.9200132620498156, "grad_norm": 0.3826138377189636, "learning_rate": 4.0014090927929053e-07, "loss": 0.6426, "step": 22199 }, { "epoch": 0.9200547059554892, "grad_norm": 0.4139733910560608, "learning_rate": 
3.9993368975092213e-07, "loss": 0.6978, "step": 22200 }, { "epoch": 0.920096149861163, "grad_norm": 0.4198732078075409, "learning_rate": 3.9972647022255383e-07, "loss": 0.6327, "step": 22201 }, { "epoch": 0.9201375937668366, "grad_norm": 0.3935845196247101, "learning_rate": 3.9951925069418543e-07, "loss": 0.6676, "step": 22202 }, { "epoch": 0.9201790376725103, "grad_norm": 0.4179231524467468, "learning_rate": 3.9931203116581713e-07, "loss": 0.6917, "step": 22203 }, { "epoch": 0.9202204815781839, "grad_norm": 0.4452146887779236, "learning_rate": 3.9910481163744873e-07, "loss": 0.6738, "step": 22204 }, { "epoch": 0.9202619254838575, "grad_norm": 0.43203210830688477, "learning_rate": 3.9889759210908043e-07, "loss": 0.5864, "step": 22205 }, { "epoch": 0.9203033693895313, "grad_norm": 0.37480705976486206, "learning_rate": 3.9869037258071203e-07, "loss": 0.6183, "step": 22206 }, { "epoch": 0.9203448132952049, "grad_norm": 0.4728090465068817, "learning_rate": 3.9848315305234373e-07, "loss": 0.7705, "step": 22207 }, { "epoch": 0.9203862572008786, "grad_norm": 0.41070929169654846, "learning_rate": 3.9827593352397533e-07, "loss": 0.6298, "step": 22208 }, { "epoch": 0.9204277011065523, "grad_norm": 0.4104289412498474, "learning_rate": 3.9806871399560703e-07, "loss": 0.6788, "step": 22209 }, { "epoch": 0.920469145012226, "grad_norm": 0.4289223253726959, "learning_rate": 3.9786149446723863e-07, "loss": 0.619, "step": 22210 }, { "epoch": 0.9205105889178996, "grad_norm": 0.383742094039917, "learning_rate": 3.9765427493887023e-07, "loss": 0.6434, "step": 22211 }, { "epoch": 0.9205520328235733, "grad_norm": 0.42177265882492065, "learning_rate": 3.9744705541050193e-07, "loss": 0.667, "step": 22212 }, { "epoch": 0.920593476729247, "grad_norm": 0.4245764911174774, "learning_rate": 3.9723983588213353e-07, "loss": 0.7073, "step": 22213 }, { "epoch": 0.9206349206349206, "grad_norm": 0.42813989520072937, "learning_rate": 3.9703261635376523e-07, "loss": 0.6753, "step": 22214 }, { "epoch": 
0.9206763645405943, "grad_norm": 0.38868942856788635, "learning_rate": 3.9682539682539683e-07, "loss": 0.5836, "step": 22215 }, { "epoch": 0.9207178084462679, "grad_norm": 0.4881787598133087, "learning_rate": 3.9661817729702853e-07, "loss": 0.741, "step": 22216 }, { "epoch": 0.9207592523519417, "grad_norm": 0.4125223457813263, "learning_rate": 3.9641095776866013e-07, "loss": 0.6808, "step": 22217 }, { "epoch": 0.9208006962576153, "grad_norm": 0.43530043959617615, "learning_rate": 3.9620373824029183e-07, "loss": 0.6553, "step": 22218 }, { "epoch": 0.920842140163289, "grad_norm": 0.4123053550720215, "learning_rate": 3.9599651871192343e-07, "loss": 0.6707, "step": 22219 }, { "epoch": 0.9208835840689626, "grad_norm": 0.40143659710884094, "learning_rate": 3.9578929918355513e-07, "loss": 0.6642, "step": 22220 }, { "epoch": 0.9209250279746364, "grad_norm": 0.39907631278038025, "learning_rate": 3.9558207965518673e-07, "loss": 0.5948, "step": 22221 }, { "epoch": 0.92096647188031, "grad_norm": 0.4393535554409027, "learning_rate": 3.9537486012681843e-07, "loss": 0.6959, "step": 22222 }, { "epoch": 0.9210079157859836, "grad_norm": 0.4174802303314209, "learning_rate": 3.9516764059845003e-07, "loss": 0.6058, "step": 22223 }, { "epoch": 0.9210493596916574, "grad_norm": 0.41545554995536804, "learning_rate": 3.9496042107008163e-07, "loss": 0.7029, "step": 22224 }, { "epoch": 0.921090803597331, "grad_norm": 0.3938498795032501, "learning_rate": 3.9475320154171333e-07, "loss": 0.6531, "step": 22225 }, { "epoch": 0.9211322475030047, "grad_norm": 0.44151178002357483, "learning_rate": 3.9454598201334493e-07, "loss": 0.6904, "step": 22226 }, { "epoch": 0.9211736914086783, "grad_norm": 0.4208865463733673, "learning_rate": 3.9433876248497663e-07, "loss": 0.6453, "step": 22227 }, { "epoch": 0.9212151353143521, "grad_norm": 0.4188539385795593, "learning_rate": 3.9413154295660823e-07, "loss": 0.6448, "step": 22228 }, { "epoch": 0.9212565792200257, "grad_norm": 0.37795382738113403, 
"learning_rate": 3.9392432342823993e-07, "loss": 0.5927, "step": 22229 }, { "epoch": 0.9212980231256994, "grad_norm": 0.4400404095649719, "learning_rate": 3.9371710389987153e-07, "loss": 0.6738, "step": 22230 }, { "epoch": 0.921339467031373, "grad_norm": 0.42634958028793335, "learning_rate": 3.9350988437150323e-07, "loss": 0.6829, "step": 22231 }, { "epoch": 0.9213809109370467, "grad_norm": 0.42551514506340027, "learning_rate": 3.9330266484313483e-07, "loss": 0.6827, "step": 22232 }, { "epoch": 0.9214223548427204, "grad_norm": 0.42764368653297424, "learning_rate": 3.9309544531476654e-07, "loss": 0.6526, "step": 22233 }, { "epoch": 0.921463798748394, "grad_norm": 0.4405936598777771, "learning_rate": 3.9288822578639813e-07, "loss": 0.6869, "step": 22234 }, { "epoch": 0.9215052426540677, "grad_norm": 0.43019893765449524, "learning_rate": 3.9268100625802984e-07, "loss": 0.6882, "step": 22235 }, { "epoch": 0.9215466865597414, "grad_norm": 0.42577120661735535, "learning_rate": 3.9247378672966143e-07, "loss": 0.6783, "step": 22236 }, { "epoch": 0.9215881304654151, "grad_norm": 0.43746110796928406, "learning_rate": 3.9226656720129303e-07, "loss": 0.7174, "step": 22237 }, { "epoch": 0.9216295743710887, "grad_norm": 0.38967365026474, "learning_rate": 3.9205934767292473e-07, "loss": 0.6376, "step": 22238 }, { "epoch": 0.9216710182767625, "grad_norm": 0.4589861035346985, "learning_rate": 3.9185212814455633e-07, "loss": 0.6965, "step": 22239 }, { "epoch": 0.9217124621824361, "grad_norm": 0.4128563106060028, "learning_rate": 3.9164490861618804e-07, "loss": 0.7007, "step": 22240 }, { "epoch": 0.9217539060881097, "grad_norm": 0.43714439868927, "learning_rate": 3.9143768908781963e-07, "loss": 0.6401, "step": 22241 }, { "epoch": 0.9217953499937834, "grad_norm": 0.4154449701309204, "learning_rate": 3.9123046955945134e-07, "loss": 0.6279, "step": 22242 }, { "epoch": 0.921836793899457, "grad_norm": 0.4214541018009186, "learning_rate": 3.9102325003108293e-07, "loss": 0.6663, "step": 
22243 }, { "epoch": 0.9218782378051308, "grad_norm": 0.3632116913795471, "learning_rate": 3.9081603050271464e-07, "loss": 0.6108, "step": 22244 }, { "epoch": 0.9219196817108044, "grad_norm": 0.421127051115036, "learning_rate": 3.9060881097434623e-07, "loss": 0.646, "step": 22245 }, { "epoch": 0.9219611256164781, "grad_norm": 0.4117325246334076, "learning_rate": 3.9040159144597794e-07, "loss": 0.6608, "step": 22246 }, { "epoch": 0.9220025695221518, "grad_norm": 0.37873950600624084, "learning_rate": 3.9019437191760953e-07, "loss": 0.6443, "step": 22247 }, { "epoch": 0.9220440134278255, "grad_norm": 0.3980756998062134, "learning_rate": 3.899871523892412e-07, "loss": 0.6646, "step": 22248 }, { "epoch": 0.9220854573334991, "grad_norm": 0.40306130051612854, "learning_rate": 3.8977993286087284e-07, "loss": 0.6427, "step": 22249 }, { "epoch": 0.9221269012391727, "grad_norm": 0.40291473269462585, "learning_rate": 3.895727133325045e-07, "loss": 0.6312, "step": 22250 }, { "epoch": 0.9221683451448465, "grad_norm": 0.4417590796947479, "learning_rate": 3.8936549380413614e-07, "loss": 0.6606, "step": 22251 }, { "epoch": 0.9222097890505201, "grad_norm": 0.4224981963634491, "learning_rate": 3.8915827427576773e-07, "loss": 0.6362, "step": 22252 }, { "epoch": 0.9222512329561938, "grad_norm": 0.4432431757450104, "learning_rate": 3.8895105474739944e-07, "loss": 0.6736, "step": 22253 }, { "epoch": 0.9222926768618674, "grad_norm": 0.4231889843940735, "learning_rate": 3.8874383521903103e-07, "loss": 0.6399, "step": 22254 }, { "epoch": 0.9223341207675412, "grad_norm": 0.3831135332584381, "learning_rate": 3.8853661569066274e-07, "loss": 0.646, "step": 22255 }, { "epoch": 0.9223755646732148, "grad_norm": 0.41470733284950256, "learning_rate": 3.8832939616229433e-07, "loss": 0.614, "step": 22256 }, { "epoch": 0.9224170085788884, "grad_norm": 0.4269106388092041, "learning_rate": 3.8812217663392604e-07, "loss": 0.7148, "step": 22257 }, { "epoch": 0.9224584524845622, "grad_norm": 
0.43638163805007935, "learning_rate": 3.8791495710555764e-07, "loss": 0.6493, "step": 22258 }, { "epoch": 0.9224998963902358, "grad_norm": 0.45652151107788086, "learning_rate": 3.8770773757718934e-07, "loss": 0.6677, "step": 22259 }, { "epoch": 0.9225413402959095, "grad_norm": 0.434817910194397, "learning_rate": 3.8750051804882094e-07, "loss": 0.6677, "step": 22260 }, { "epoch": 0.9225827842015831, "grad_norm": 0.40999308228492737, "learning_rate": 3.872932985204526e-07, "loss": 0.7249, "step": 22261 }, { "epoch": 0.9226242281072569, "grad_norm": 0.4012381136417389, "learning_rate": 3.8708607899208424e-07, "loss": 0.6793, "step": 22262 }, { "epoch": 0.9226656720129305, "grad_norm": 0.44949448108673096, "learning_rate": 3.868788594637159e-07, "loss": 0.6729, "step": 22263 }, { "epoch": 0.9227071159186042, "grad_norm": 0.4265710413455963, "learning_rate": 3.8667163993534754e-07, "loss": 0.6295, "step": 22264 }, { "epoch": 0.9227485598242778, "grad_norm": 0.3944624066352844, "learning_rate": 3.864644204069792e-07, "loss": 0.6619, "step": 22265 }, { "epoch": 0.9227900037299515, "grad_norm": 0.4055692255496979, "learning_rate": 3.8625720087861084e-07, "loss": 0.5896, "step": 22266 }, { "epoch": 0.9228314476356252, "grad_norm": 0.4230136573314667, "learning_rate": 3.860499813502425e-07, "loss": 0.6871, "step": 22267 }, { "epoch": 0.9228728915412988, "grad_norm": 0.4314816892147064, "learning_rate": 3.8584276182187414e-07, "loss": 0.6733, "step": 22268 }, { "epoch": 0.9229143354469725, "grad_norm": 0.4521991014480591, "learning_rate": 3.8563554229350574e-07, "loss": 0.666, "step": 22269 }, { "epoch": 0.9229557793526462, "grad_norm": 0.41855400800704956, "learning_rate": 3.8542832276513744e-07, "loss": 0.7229, "step": 22270 }, { "epoch": 0.9229972232583199, "grad_norm": 0.42210933566093445, "learning_rate": 3.8522110323676904e-07, "loss": 0.71, "step": 22271 }, { "epoch": 0.9230386671639935, "grad_norm": 0.38391363620758057, "learning_rate": 3.8501388370840074e-07, "loss": 
0.6051, "step": 22272 }, { "epoch": 0.9230801110696673, "grad_norm": 0.4737195372581482, "learning_rate": 3.8480666418003234e-07, "loss": 0.6715, "step": 22273 }, { "epoch": 0.9231215549753409, "grad_norm": 0.4364435374736786, "learning_rate": 3.84599444651664e-07, "loss": 0.6816, "step": 22274 }, { "epoch": 0.9231629988810145, "grad_norm": 0.44413185119628906, "learning_rate": 3.8439222512329564e-07, "loss": 0.6519, "step": 22275 }, { "epoch": 0.9232044427866882, "grad_norm": 0.4837568700313568, "learning_rate": 3.841850055949273e-07, "loss": 0.6941, "step": 22276 }, { "epoch": 0.9232458866923619, "grad_norm": 0.41548749804496765, "learning_rate": 3.8397778606655894e-07, "loss": 0.6486, "step": 22277 }, { "epoch": 0.9232873305980356, "grad_norm": 0.6428281664848328, "learning_rate": 3.837705665381906e-07, "loss": 0.6951, "step": 22278 }, { "epoch": 0.9233287745037092, "grad_norm": 0.4326765239238739, "learning_rate": 3.8356334700982224e-07, "loss": 0.6753, "step": 22279 }, { "epoch": 0.9233702184093829, "grad_norm": 0.4001523554325104, "learning_rate": 3.833561274814539e-07, "loss": 0.6816, "step": 22280 }, { "epoch": 0.9234116623150566, "grad_norm": 0.46318763494491577, "learning_rate": 3.8314890795308554e-07, "loss": 0.7075, "step": 22281 }, { "epoch": 0.9234531062207303, "grad_norm": 0.38921278715133667, "learning_rate": 3.829416884247172e-07, "loss": 0.605, "step": 22282 }, { "epoch": 0.9234945501264039, "grad_norm": 0.4211929142475128, "learning_rate": 3.8273446889634884e-07, "loss": 0.649, "step": 22283 }, { "epoch": 0.9235359940320775, "grad_norm": 0.4073669910430908, "learning_rate": 3.825272493679805e-07, "loss": 0.6866, "step": 22284 }, { "epoch": 0.9235774379377513, "grad_norm": 0.38739991188049316, "learning_rate": 3.823200298396121e-07, "loss": 0.6279, "step": 22285 }, { "epoch": 0.9236188818434249, "grad_norm": 0.39897942543029785, "learning_rate": 3.821128103112438e-07, "loss": 0.6874, "step": 22286 }, { "epoch": 0.9236603257490986, "grad_norm": 
0.39754992723464966, "learning_rate": 3.819055907828754e-07, "loss": 0.6572, "step": 22287 }, { "epoch": 0.9237017696547722, "grad_norm": 0.4159892797470093, "learning_rate": 3.8169837125450704e-07, "loss": 0.672, "step": 22288 }, { "epoch": 0.923743213560446, "grad_norm": 0.4716191291809082, "learning_rate": 3.814911517261387e-07, "loss": 0.7478, "step": 22289 }, { "epoch": 0.9237846574661196, "grad_norm": 0.4174673557281494, "learning_rate": 3.8128393219777034e-07, "loss": 0.6775, "step": 22290 }, { "epoch": 0.9238261013717933, "grad_norm": 0.4108074903488159, "learning_rate": 3.81076712669402e-07, "loss": 0.6827, "step": 22291 }, { "epoch": 0.923867545277467, "grad_norm": 0.3985379636287689, "learning_rate": 3.8086949314103364e-07, "loss": 0.6844, "step": 22292 }, { "epoch": 0.9239089891831406, "grad_norm": 0.37764260172843933, "learning_rate": 3.806622736126653e-07, "loss": 0.6245, "step": 22293 }, { "epoch": 0.9239504330888143, "grad_norm": 0.39742356538772583, "learning_rate": 3.8045505408429694e-07, "loss": 0.6708, "step": 22294 }, { "epoch": 0.9239918769944879, "grad_norm": 0.3933504819869995, "learning_rate": 3.802478345559286e-07, "loss": 0.7126, "step": 22295 }, { "epoch": 0.9240333209001617, "grad_norm": 0.4181134104728699, "learning_rate": 3.8004061502756024e-07, "loss": 0.6335, "step": 22296 }, { "epoch": 0.9240747648058353, "grad_norm": 0.43315309286117554, "learning_rate": 3.798333954991919e-07, "loss": 0.7173, "step": 22297 }, { "epoch": 0.924116208711509, "grad_norm": 0.40219634771347046, "learning_rate": 3.796261759708235e-07, "loss": 0.6688, "step": 22298 }, { "epoch": 0.9241576526171826, "grad_norm": 0.41858118772506714, "learning_rate": 3.794189564424552e-07, "loss": 0.6696, "step": 22299 }, { "epoch": 0.9241990965228564, "grad_norm": 0.40735116600990295, "learning_rate": 3.792117369140868e-07, "loss": 0.6833, "step": 22300 }, { "epoch": 0.92424054042853, "grad_norm": 0.4013233482837677, "learning_rate": 3.790045173857185e-07, "loss": 0.6575, 
"step": 22301 }, { "epoch": 0.9242819843342036, "grad_norm": 0.43694594502449036, "learning_rate": 3.787972978573501e-07, "loss": 0.7086, "step": 22302 }, { "epoch": 0.9243234282398773, "grad_norm": 0.40606066584587097, "learning_rate": 3.785900783289818e-07, "loss": 0.6672, "step": 22303 }, { "epoch": 0.924364872145551, "grad_norm": 0.43133068084716797, "learning_rate": 3.783828588006134e-07, "loss": 0.6897, "step": 22304 }, { "epoch": 0.9244063160512247, "grad_norm": 0.42128878831863403, "learning_rate": 3.781756392722451e-07, "loss": 0.6545, "step": 22305 }, { "epoch": 0.9244477599568983, "grad_norm": 0.4442581534385681, "learning_rate": 3.779684197438767e-07, "loss": 0.6787, "step": 22306 }, { "epoch": 0.924489203862572, "grad_norm": 0.46650707721710205, "learning_rate": 3.7776120021550834e-07, "loss": 0.6731, "step": 22307 }, { "epoch": 0.9245306477682457, "grad_norm": 0.42433956265449524, "learning_rate": 3.7755398068714e-07, "loss": 0.6501, "step": 22308 }, { "epoch": 0.9245720916739194, "grad_norm": 0.44297778606414795, "learning_rate": 3.773467611587716e-07, "loss": 0.6851, "step": 22309 }, { "epoch": 0.924613535579593, "grad_norm": 0.4343014657497406, "learning_rate": 3.771395416304033e-07, "loss": 0.6331, "step": 22310 }, { "epoch": 0.9246549794852666, "grad_norm": 0.41484177112579346, "learning_rate": 3.769323221020349e-07, "loss": 0.6577, "step": 22311 }, { "epoch": 0.9246964233909404, "grad_norm": 0.4072597026824951, "learning_rate": 3.767251025736666e-07, "loss": 0.7261, "step": 22312 }, { "epoch": 0.924737867296614, "grad_norm": 0.40252262353897095, "learning_rate": 3.765178830452982e-07, "loss": 0.6587, "step": 22313 }, { "epoch": 0.9247793112022877, "grad_norm": 0.4436371624469757, "learning_rate": 3.763106635169299e-07, "loss": 0.6655, "step": 22314 }, { "epoch": 0.9248207551079614, "grad_norm": 0.38914090394973755, "learning_rate": 3.761034439885615e-07, "loss": 0.644, "step": 22315 }, { "epoch": 0.9248621990136351, "grad_norm": 
0.42281731963157654, "learning_rate": 3.758962244601932e-07, "loss": 0.6711, "step": 22316 }, { "epoch": 0.9249036429193087, "grad_norm": 0.40579429268836975, "learning_rate": 3.756890049318248e-07, "loss": 0.6565, "step": 22317 }, { "epoch": 0.9249450868249823, "grad_norm": 0.4136389493942261, "learning_rate": 3.754817854034565e-07, "loss": 0.6559, "step": 22318 }, { "epoch": 0.9249865307306561, "grad_norm": 0.43421193957328796, "learning_rate": 3.752745658750881e-07, "loss": 0.6481, "step": 22319 }, { "epoch": 0.9250279746363297, "grad_norm": 0.39531201124191284, "learning_rate": 3.750673463467198e-07, "loss": 0.6343, "step": 22320 }, { "epoch": 0.9250694185420034, "grad_norm": 0.4292892813682556, "learning_rate": 3.748601268183514e-07, "loss": 0.6388, "step": 22321 }, { "epoch": 0.925110862447677, "grad_norm": 0.42729225754737854, "learning_rate": 3.74652907289983e-07, "loss": 0.6355, "step": 22322 }, { "epoch": 0.9251523063533508, "grad_norm": 0.3878740072250366, "learning_rate": 3.744456877616147e-07, "loss": 0.6288, "step": 22323 }, { "epoch": 0.9251937502590244, "grad_norm": 0.4222938120365143, "learning_rate": 3.742384682332463e-07, "loss": 0.7173, "step": 22324 }, { "epoch": 0.9252351941646981, "grad_norm": 0.45415201783180237, "learning_rate": 3.74031248704878e-07, "loss": 0.6831, "step": 22325 }, { "epoch": 0.9252766380703717, "grad_norm": 0.43415573239326477, "learning_rate": 3.738240291765096e-07, "loss": 0.6713, "step": 22326 }, { "epoch": 0.9253180819760454, "grad_norm": 0.4511180818080902, "learning_rate": 3.736168096481413e-07, "loss": 0.7261, "step": 22327 }, { "epoch": 0.9253595258817191, "grad_norm": 0.39859965443611145, "learning_rate": 3.734095901197729e-07, "loss": 0.6218, "step": 22328 }, { "epoch": 0.9254009697873927, "grad_norm": 0.415436327457428, "learning_rate": 3.732023705914046e-07, "loss": 0.6366, "step": 22329 }, { "epoch": 0.9254424136930665, "grad_norm": 0.4083605110645294, "learning_rate": 3.729951510630362e-07, "loss": 0.6324, 
"step": 22330 }, { "epoch": 0.9254838575987401, "grad_norm": 0.396239310503006, "learning_rate": 3.727879315346679e-07, "loss": 0.6146, "step": 22331 }, { "epoch": 0.9255253015044138, "grad_norm": 0.4331321120262146, "learning_rate": 3.725807120062995e-07, "loss": 0.6821, "step": 22332 }, { "epoch": 0.9255667454100874, "grad_norm": 0.4008884131908417, "learning_rate": 3.723734924779312e-07, "loss": 0.6613, "step": 22333 }, { "epoch": 0.9256081893157612, "grad_norm": 0.432350218296051, "learning_rate": 3.721662729495628e-07, "loss": 0.7405, "step": 22334 }, { "epoch": 0.9256496332214348, "grad_norm": 0.4215299189090729, "learning_rate": 3.719590534211944e-07, "loss": 0.6714, "step": 22335 }, { "epoch": 0.9256910771271084, "grad_norm": 0.41322043538093567, "learning_rate": 3.717518338928261e-07, "loss": 0.6182, "step": 22336 }, { "epoch": 0.9257325210327821, "grad_norm": 0.4187880754470825, "learning_rate": 3.715446143644577e-07, "loss": 0.6825, "step": 22337 }, { "epoch": 0.9257739649384558, "grad_norm": 0.39930951595306396, "learning_rate": 3.713373948360894e-07, "loss": 0.666, "step": 22338 }, { "epoch": 0.9258154088441295, "grad_norm": 0.4128972589969635, "learning_rate": 3.71130175307721e-07, "loss": 0.6642, "step": 22339 }, { "epoch": 0.9258568527498031, "grad_norm": 0.4360491633415222, "learning_rate": 3.709229557793527e-07, "loss": 0.6334, "step": 22340 }, { "epoch": 0.9258982966554768, "grad_norm": 0.4114395081996918, "learning_rate": 3.707157362509843e-07, "loss": 0.6855, "step": 22341 }, { "epoch": 0.9259397405611505, "grad_norm": 0.4181136190891266, "learning_rate": 3.70508516722616e-07, "loss": 0.6924, "step": 22342 }, { "epoch": 0.9259811844668242, "grad_norm": 0.42109012603759766, "learning_rate": 3.703012971942476e-07, "loss": 0.6553, "step": 22343 }, { "epoch": 0.9260226283724978, "grad_norm": 0.4225921630859375, "learning_rate": 3.700940776658793e-07, "loss": 0.6582, "step": 22344 }, { "epoch": 0.9260640722781714, "grad_norm": 0.4129653871059418, 
"learning_rate": 3.698868581375109e-07, "loss": 0.6176, "step": 22345 }, { "epoch": 0.9261055161838452, "grad_norm": 0.4387122392654419, "learning_rate": 3.696796386091425e-07, "loss": 0.7214, "step": 22346 }, { "epoch": 0.9261469600895188, "grad_norm": 0.4470447301864624, "learning_rate": 3.694724190807742e-07, "loss": 0.6636, "step": 22347 }, { "epoch": 0.9261884039951925, "grad_norm": 0.4072017967700958, "learning_rate": 3.692651995524058e-07, "loss": 0.682, "step": 22348 }, { "epoch": 0.9262298479008662, "grad_norm": 0.4034588038921356, "learning_rate": 3.690579800240375e-07, "loss": 0.6029, "step": 22349 }, { "epoch": 0.9262712918065399, "grad_norm": 0.44186776876449585, "learning_rate": 3.688507604956691e-07, "loss": 0.7224, "step": 22350 }, { "epoch": 0.9263127357122135, "grad_norm": 0.39881622791290283, "learning_rate": 3.686435409673008e-07, "loss": 0.6754, "step": 22351 }, { "epoch": 0.9263541796178872, "grad_norm": 0.43790096044540405, "learning_rate": 3.684363214389324e-07, "loss": 0.6663, "step": 22352 }, { "epoch": 0.9263956235235609, "grad_norm": 0.3885153830051422, "learning_rate": 3.682291019105641e-07, "loss": 0.6702, "step": 22353 }, { "epoch": 0.9264370674292345, "grad_norm": 0.4624767005443573, "learning_rate": 3.680218823821957e-07, "loss": 0.6471, "step": 22354 }, { "epoch": 0.9264785113349082, "grad_norm": 0.42617201805114746, "learning_rate": 3.678146628538274e-07, "loss": 0.6804, "step": 22355 }, { "epoch": 0.9265199552405818, "grad_norm": 0.41617870330810547, "learning_rate": 3.67607443325459e-07, "loss": 0.6208, "step": 22356 }, { "epoch": 0.9265613991462556, "grad_norm": 0.3823138475418091, "learning_rate": 3.674002237970907e-07, "loss": 0.6506, "step": 22357 }, { "epoch": 0.9266028430519292, "grad_norm": 0.40782982110977173, "learning_rate": 3.671930042687223e-07, "loss": 0.6117, "step": 22358 }, { "epoch": 0.9266442869576029, "grad_norm": 0.4224918484687805, "learning_rate": 3.6698578474035395e-07, "loss": 0.6823, "step": 22359 }, { 
"epoch": 0.9266857308632765, "grad_norm": 0.4176846146583557, "learning_rate": 3.667785652119856e-07, "loss": 0.6891, "step": 22360 }, { "epoch": 0.9267271747689503, "grad_norm": 0.4424492418766022, "learning_rate": 3.6657134568361725e-07, "loss": 0.7213, "step": 22361 }, { "epoch": 0.9267686186746239, "grad_norm": 0.40268415212631226, "learning_rate": 3.663641261552489e-07, "loss": 0.6346, "step": 22362 }, { "epoch": 0.9268100625802975, "grad_norm": 0.4163341224193573, "learning_rate": 3.6615690662688055e-07, "loss": 0.6362, "step": 22363 }, { "epoch": 0.9268515064859713, "grad_norm": 0.4012050926685333, "learning_rate": 3.659496870985122e-07, "loss": 0.6571, "step": 22364 }, { "epoch": 0.9268929503916449, "grad_norm": 0.42011502385139465, "learning_rate": 3.657424675701438e-07, "loss": 0.6448, "step": 22365 }, { "epoch": 0.9269343942973186, "grad_norm": 0.3702411353588104, "learning_rate": 3.655352480417755e-07, "loss": 0.5994, "step": 22366 }, { "epoch": 0.9269758382029922, "grad_norm": 0.4240339696407318, "learning_rate": 3.653280285134071e-07, "loss": 0.6565, "step": 22367 }, { "epoch": 0.927017282108666, "grad_norm": 0.4463580846786499, "learning_rate": 3.651208089850388e-07, "loss": 0.6693, "step": 22368 }, { "epoch": 0.9270587260143396, "grad_norm": 0.4004729092121124, "learning_rate": 3.649135894566704e-07, "loss": 0.6614, "step": 22369 }, { "epoch": 0.9271001699200133, "grad_norm": 0.39380308985710144, "learning_rate": 3.647063699283021e-07, "loss": 0.6135, "step": 22370 }, { "epoch": 0.9271416138256869, "grad_norm": 0.4014785587787628, "learning_rate": 3.644991503999337e-07, "loss": 0.6014, "step": 22371 }, { "epoch": 0.9271830577313606, "grad_norm": 0.44208085536956787, "learning_rate": 3.6429193087156535e-07, "loss": 0.6417, "step": 22372 }, { "epoch": 0.9272245016370343, "grad_norm": 0.3795013725757599, "learning_rate": 3.64084711343197e-07, "loss": 0.6302, "step": 22373 }, { "epoch": 0.9272659455427079, "grad_norm": 0.42747366428375244, 
"learning_rate": 3.6387749181482865e-07, "loss": 0.6625, "step": 22374 }, { "epoch": 0.9273073894483816, "grad_norm": 0.4012719690799713, "learning_rate": 3.636702722864603e-07, "loss": 0.6661, "step": 22375 }, { "epoch": 0.9273488333540553, "grad_norm": 0.41593748331069946, "learning_rate": 3.6346305275809195e-07, "loss": 0.6858, "step": 22376 }, { "epoch": 0.927390277259729, "grad_norm": 0.4109537899494171, "learning_rate": 3.632558332297236e-07, "loss": 0.6841, "step": 22377 }, { "epoch": 0.9274317211654026, "grad_norm": 0.42824918031692505, "learning_rate": 3.6304861370135525e-07, "loss": 0.7009, "step": 22378 }, { "epoch": 0.9274731650710762, "grad_norm": 0.4123459756374359, "learning_rate": 3.628413941729869e-07, "loss": 0.6736, "step": 22379 }, { "epoch": 0.92751460897675, "grad_norm": 0.45608964562416077, "learning_rate": 3.6263417464461855e-07, "loss": 0.6521, "step": 22380 }, { "epoch": 0.9275560528824236, "grad_norm": 0.5152004361152649, "learning_rate": 3.624269551162502e-07, "loss": 0.6631, "step": 22381 }, { "epoch": 0.9275974967880973, "grad_norm": 0.4209660589694977, "learning_rate": 3.6221973558788186e-07, "loss": 0.6982, "step": 22382 }, { "epoch": 0.927638940693771, "grad_norm": 0.42127805948257446, "learning_rate": 3.6201251605951345e-07, "loss": 0.6545, "step": 22383 }, { "epoch": 0.9276803845994447, "grad_norm": 0.4284980595111847, "learning_rate": 3.618052965311451e-07, "loss": 0.6825, "step": 22384 }, { "epoch": 0.9277218285051183, "grad_norm": 0.40579432249069214, "learning_rate": 3.6159807700277675e-07, "loss": 0.7092, "step": 22385 }, { "epoch": 0.927763272410792, "grad_norm": 0.40481293201446533, "learning_rate": 3.613908574744084e-07, "loss": 0.6311, "step": 22386 }, { "epoch": 0.9278047163164657, "grad_norm": 0.4197527766227722, "learning_rate": 3.6118363794604005e-07, "loss": 0.631, "step": 22387 }, { "epoch": 0.9278461602221393, "grad_norm": 0.42228105664253235, "learning_rate": 3.609764184176717e-07, "loss": 0.7075, "step": 22388 }, 
{ "epoch": 0.927887604127813, "grad_norm": 0.38583412766456604, "learning_rate": 3.6076919888930336e-07, "loss": 0.6112, "step": 22389 }, { "epoch": 0.9279290480334866, "grad_norm": 0.42533639073371887, "learning_rate": 3.60561979360935e-07, "loss": 0.658, "step": 22390 }, { "epoch": 0.9279704919391604, "grad_norm": 0.4376464784145355, "learning_rate": 3.6035475983256666e-07, "loss": 0.6604, "step": 22391 }, { "epoch": 0.928011935844834, "grad_norm": 0.4134712517261505, "learning_rate": 3.601475403041983e-07, "loss": 0.6365, "step": 22392 }, { "epoch": 0.9280533797505077, "grad_norm": 0.4058242440223694, "learning_rate": 3.5994032077582996e-07, "loss": 0.6287, "step": 22393 }, { "epoch": 0.9280948236561813, "grad_norm": 0.40809330344200134, "learning_rate": 3.597331012474616e-07, "loss": 0.6388, "step": 22394 }, { "epoch": 0.9281362675618551, "grad_norm": 0.441999614238739, "learning_rate": 3.5952588171909326e-07, "loss": 0.7515, "step": 22395 }, { "epoch": 0.9281777114675287, "grad_norm": 0.4518662095069885, "learning_rate": 3.5931866219072485e-07, "loss": 0.6694, "step": 22396 }, { "epoch": 0.9282191553732023, "grad_norm": 0.4308764338493347, "learning_rate": 3.5911144266235656e-07, "loss": 0.6072, "step": 22397 }, { "epoch": 0.928260599278876, "grad_norm": 0.3827759623527527, "learning_rate": 3.5890422313398816e-07, "loss": 0.6428, "step": 22398 }, { "epoch": 0.9283020431845497, "grad_norm": 0.44304701685905457, "learning_rate": 3.5869700360561986e-07, "loss": 0.7048, "step": 22399 }, { "epoch": 0.9283434870902234, "grad_norm": 0.4073714017868042, "learning_rate": 3.5848978407725146e-07, "loss": 0.6301, "step": 22400 }, { "epoch": 0.928384930995897, "grad_norm": 0.4313697814941406, "learning_rate": 3.582825645488831e-07, "loss": 0.6672, "step": 22401 }, { "epoch": 0.9284263749015708, "grad_norm": 0.4722842574119568, "learning_rate": 3.5807534502051476e-07, "loss": 0.7222, "step": 22402 }, { "epoch": 0.9284678188072444, "grad_norm": 0.44513779878616333, 
"learning_rate": 3.578681254921464e-07, "loss": 0.6284, "step": 22403 }, { "epoch": 0.9285092627129181, "grad_norm": 0.4363020062446594, "learning_rate": 3.5766090596377806e-07, "loss": 0.6954, "step": 22404 }, { "epoch": 0.9285507066185917, "grad_norm": 0.45416370034217834, "learning_rate": 3.574536864354097e-07, "loss": 0.6967, "step": 22405 }, { "epoch": 0.9285921505242654, "grad_norm": 0.41454970836639404, "learning_rate": 3.5724646690704136e-07, "loss": 0.6663, "step": 22406 }, { "epoch": 0.9286335944299391, "grad_norm": 0.42112571001052856, "learning_rate": 3.57039247378673e-07, "loss": 0.665, "step": 22407 }, { "epoch": 0.9286750383356127, "grad_norm": 0.461509644985199, "learning_rate": 3.5683202785030466e-07, "loss": 0.662, "step": 22408 }, { "epoch": 0.9287164822412864, "grad_norm": 0.4556443393230438, "learning_rate": 3.5662480832193626e-07, "loss": 0.708, "step": 22409 }, { "epoch": 0.9287579261469601, "grad_norm": 0.4383894205093384, "learning_rate": 3.5641758879356796e-07, "loss": 0.6902, "step": 22410 }, { "epoch": 0.9287993700526338, "grad_norm": 0.43385639786720276, "learning_rate": 3.5621036926519956e-07, "loss": 0.7045, "step": 22411 }, { "epoch": 0.9288408139583074, "grad_norm": 0.4054213762283325, "learning_rate": 3.5600314973683126e-07, "loss": 0.7266, "step": 22412 }, { "epoch": 0.9288822578639812, "grad_norm": 0.4278903603553772, "learning_rate": 3.5579593020846286e-07, "loss": 0.6786, "step": 22413 }, { "epoch": 0.9289237017696548, "grad_norm": 0.4777695834636688, "learning_rate": 3.5558871068009456e-07, "loss": 0.6865, "step": 22414 }, { "epoch": 0.9289651456753284, "grad_norm": 0.4187951385974884, "learning_rate": 3.5538149115172616e-07, "loss": 0.6267, "step": 22415 }, { "epoch": 0.9290065895810021, "grad_norm": 0.45424923300743103, "learning_rate": 3.5517427162335786e-07, "loss": 0.7108, "step": 22416 }, { "epoch": 0.9290480334866758, "grad_norm": 0.4252395033836365, "learning_rate": 3.5496705209498946e-07, "loss": 0.6643, "step": 22417 
}, { "epoch": 0.9290894773923495, "grad_norm": 0.41241398453712463, "learning_rate": 3.5475983256662116e-07, "loss": 0.6526, "step": 22418 }, { "epoch": 0.9291309212980231, "grad_norm": 0.38142287731170654, "learning_rate": 3.5455261303825276e-07, "loss": 0.6444, "step": 22419 }, { "epoch": 0.9291723652036968, "grad_norm": 0.43044668436050415, "learning_rate": 3.5434539350988436e-07, "loss": 0.6615, "step": 22420 }, { "epoch": 0.9292138091093705, "grad_norm": 0.39762264490127563, "learning_rate": 3.5413817398151606e-07, "loss": 0.6707, "step": 22421 }, { "epoch": 0.9292552530150442, "grad_norm": 0.4123538136482239, "learning_rate": 3.5393095445314766e-07, "loss": 0.7273, "step": 22422 }, { "epoch": 0.9292966969207178, "grad_norm": 0.4516674876213074, "learning_rate": 3.5372373492477936e-07, "loss": 0.6775, "step": 22423 }, { "epoch": 0.9293381408263914, "grad_norm": 0.3963054418563843, "learning_rate": 3.5351651539641096e-07, "loss": 0.6466, "step": 22424 }, { "epoch": 0.9293795847320652, "grad_norm": 0.38929614424705505, "learning_rate": 3.5330929586804266e-07, "loss": 0.6365, "step": 22425 }, { "epoch": 0.9294210286377388, "grad_norm": 0.41165411472320557, "learning_rate": 3.5310207633967426e-07, "loss": 0.6729, "step": 22426 }, { "epoch": 0.9294624725434125, "grad_norm": 0.42684558033943176, "learning_rate": 3.5289485681130596e-07, "loss": 0.6539, "step": 22427 }, { "epoch": 0.9295039164490861, "grad_norm": 0.4151720106601715, "learning_rate": 3.5268763728293756e-07, "loss": 0.6545, "step": 22428 }, { "epoch": 0.9295453603547599, "grad_norm": 0.4097486138343811, "learning_rate": 3.5248041775456926e-07, "loss": 0.6613, "step": 22429 }, { "epoch": 0.9295868042604335, "grad_norm": 0.4908868074417114, "learning_rate": 3.5227319822620086e-07, "loss": 0.6929, "step": 22430 }, { "epoch": 0.9296282481661072, "grad_norm": 0.3948611319065094, "learning_rate": 3.5206597869783256e-07, "loss": 0.672, "step": 22431 }, { "epoch": 0.9296696920717809, "grad_norm": 
0.4200514853000641, "learning_rate": 3.5185875916946416e-07, "loss": 0.665, "step": 22432 }, { "epoch": 0.9297111359774545, "grad_norm": 0.42276307940483093, "learning_rate": 3.5165153964109576e-07, "loss": 0.7258, "step": 22433 }, { "epoch": 0.9297525798831282, "grad_norm": 0.42619457840919495, "learning_rate": 3.5144432011272746e-07, "loss": 0.7268, "step": 22434 }, { "epoch": 0.9297940237888018, "grad_norm": 0.38453376293182373, "learning_rate": 3.5123710058435906e-07, "loss": 0.6217, "step": 22435 }, { "epoch": 0.9298354676944756, "grad_norm": 0.39644718170166016, "learning_rate": 3.5102988105599076e-07, "loss": 0.6584, "step": 22436 }, { "epoch": 0.9298769116001492, "grad_norm": 0.4060280919075012, "learning_rate": 3.5082266152762236e-07, "loss": 0.6833, "step": 22437 }, { "epoch": 0.9299183555058229, "grad_norm": 0.39882612228393555, "learning_rate": 3.5061544199925406e-07, "loss": 0.6533, "step": 22438 }, { "epoch": 0.9299597994114965, "grad_norm": 0.3972475826740265, "learning_rate": 3.5040822247088566e-07, "loss": 0.6736, "step": 22439 }, { "epoch": 0.9300012433171702, "grad_norm": 0.4339313209056854, "learning_rate": 3.5020100294251736e-07, "loss": 0.6536, "step": 22440 }, { "epoch": 0.9300426872228439, "grad_norm": 0.4450444281101227, "learning_rate": 3.4999378341414896e-07, "loss": 0.6682, "step": 22441 }, { "epoch": 0.9300841311285175, "grad_norm": 0.4280235767364502, "learning_rate": 3.4978656388578067e-07, "loss": 0.6781, "step": 22442 }, { "epoch": 0.9301255750341912, "grad_norm": 0.4501829147338867, "learning_rate": 3.4957934435741226e-07, "loss": 0.6455, "step": 22443 }, { "epoch": 0.9301670189398649, "grad_norm": 0.40820568799972534, "learning_rate": 3.4937212482904397e-07, "loss": 0.6909, "step": 22444 }, { "epoch": 0.9302084628455386, "grad_norm": 0.41135138273239136, "learning_rate": 3.4916490530067556e-07, "loss": 0.6816, "step": 22445 }, { "epoch": 0.9302499067512122, "grad_norm": 0.4309143126010895, "learning_rate": 3.4895768577230716e-07, 
"loss": 0.5996, "step": 22446 }, { "epoch": 0.930291350656886, "grad_norm": 0.4103061556816101, "learning_rate": 3.4875046624393886e-07, "loss": 0.6311, "step": 22447 }, { "epoch": 0.9303327945625596, "grad_norm": 0.4207097589969635, "learning_rate": 3.4854324671557046e-07, "loss": 0.6221, "step": 22448 }, { "epoch": 0.9303742384682332, "grad_norm": 0.38750946521759033, "learning_rate": 3.4833602718720216e-07, "loss": 0.6354, "step": 22449 }, { "epoch": 0.9304156823739069, "grad_norm": 0.427666574716568, "learning_rate": 3.4812880765883376e-07, "loss": 0.677, "step": 22450 }, { "epoch": 0.9304571262795805, "grad_norm": 0.42890846729278564, "learning_rate": 3.4792158813046547e-07, "loss": 0.6663, "step": 22451 }, { "epoch": 0.9304985701852543, "grad_norm": 0.43159276247024536, "learning_rate": 3.4771436860209706e-07, "loss": 0.6667, "step": 22452 }, { "epoch": 0.9305400140909279, "grad_norm": 0.3827398419380188, "learning_rate": 3.4750714907372877e-07, "loss": 0.6272, "step": 22453 }, { "epoch": 0.9305814579966016, "grad_norm": 0.4345005452632904, "learning_rate": 3.4729992954536036e-07, "loss": 0.688, "step": 22454 }, { "epoch": 0.9306229019022753, "grad_norm": 0.3971203565597534, "learning_rate": 3.4709271001699207e-07, "loss": 0.6494, "step": 22455 }, { "epoch": 0.930664345807949, "grad_norm": 0.41512519121170044, "learning_rate": 3.4688549048862366e-07, "loss": 0.6655, "step": 22456 }, { "epoch": 0.9307057897136226, "grad_norm": 0.4498893916606903, "learning_rate": 3.466782709602553e-07, "loss": 0.6562, "step": 22457 }, { "epoch": 0.9307472336192962, "grad_norm": 0.42801666259765625, "learning_rate": 3.4647105143188696e-07, "loss": 0.6863, "step": 22458 }, { "epoch": 0.93078867752497, "grad_norm": 0.44655290246009827, "learning_rate": 3.4626383190351856e-07, "loss": 0.6892, "step": 22459 }, { "epoch": 0.9308301214306436, "grad_norm": 0.4149303436279297, "learning_rate": 3.4605661237515027e-07, "loss": 0.6915, "step": 22460 }, { "epoch": 0.9308715653363173, 
"grad_norm": 0.44622233510017395, "learning_rate": 3.4584939284678186e-07, "loss": 0.6776, "step": 22461 }, { "epoch": 0.9309130092419909, "grad_norm": 0.42673763632774353, "learning_rate": 3.4564217331841357e-07, "loss": 0.6396, "step": 22462 }, { "epoch": 0.9309544531476647, "grad_norm": 0.4450651705265045, "learning_rate": 3.4543495379004516e-07, "loss": 0.6953, "step": 22463 }, { "epoch": 0.9309958970533383, "grad_norm": 0.3957541882991791, "learning_rate": 3.4522773426167687e-07, "loss": 0.6909, "step": 22464 }, { "epoch": 0.931037340959012, "grad_norm": 0.4338967502117157, "learning_rate": 3.4502051473330846e-07, "loss": 0.6537, "step": 22465 }, { "epoch": 0.9310787848646856, "grad_norm": 0.4376203417778015, "learning_rate": 3.4481329520494017e-07, "loss": 0.6582, "step": 22466 }, { "epoch": 0.9311202287703593, "grad_norm": 0.4419507682323456, "learning_rate": 3.4460607567657176e-07, "loss": 0.7, "step": 22467 }, { "epoch": 0.931161672676033, "grad_norm": 0.39703530073165894, "learning_rate": 3.4439885614820347e-07, "loss": 0.6705, "step": 22468 }, { "epoch": 0.9312031165817066, "grad_norm": 0.4446874260902405, "learning_rate": 3.4419163661983507e-07, "loss": 0.6749, "step": 22469 }, { "epoch": 0.9312445604873804, "grad_norm": 0.4514203369617462, "learning_rate": 3.439844170914667e-07, "loss": 0.6825, "step": 22470 }, { "epoch": 0.931286004393054, "grad_norm": 0.42847374081611633, "learning_rate": 3.4377719756309837e-07, "loss": 0.6809, "step": 22471 }, { "epoch": 0.9313274482987277, "grad_norm": 0.4392583966255188, "learning_rate": 3.4356997803473e-07, "loss": 0.6964, "step": 22472 }, { "epoch": 0.9313688922044013, "grad_norm": 0.41295143961906433, "learning_rate": 3.4336275850636167e-07, "loss": 0.5714, "step": 22473 }, { "epoch": 0.9314103361100751, "grad_norm": 0.4675564765930176, "learning_rate": 3.431555389779933e-07, "loss": 0.6958, "step": 22474 }, { "epoch": 0.9314517800157487, "grad_norm": 0.39130446314811707, "learning_rate": 
3.4294831944962497e-07, "loss": 0.6613, "step": 22475 }, { "epoch": 0.9314932239214223, "grad_norm": 0.44917210936546326, "learning_rate": 3.427410999212566e-07, "loss": 0.5966, "step": 22476 }, { "epoch": 0.931534667827096, "grad_norm": 0.3709532618522644, "learning_rate": 3.4253388039288827e-07, "loss": 0.6444, "step": 22477 }, { "epoch": 0.9315761117327697, "grad_norm": 0.413872092962265, "learning_rate": 3.4232666086451987e-07, "loss": 0.62, "step": 22478 }, { "epoch": 0.9316175556384434, "grad_norm": 0.4304647743701935, "learning_rate": 3.4211944133615157e-07, "loss": 0.7083, "step": 22479 }, { "epoch": 0.931658999544117, "grad_norm": 0.39772829413414, "learning_rate": 3.4191222180778317e-07, "loss": 0.6647, "step": 22480 }, { "epoch": 0.9317004434497907, "grad_norm": 0.44184941053390503, "learning_rate": 3.4170500227941487e-07, "loss": 0.6489, "step": 22481 }, { "epoch": 0.9317418873554644, "grad_norm": 0.38860422372817993, "learning_rate": 3.4149778275104647e-07, "loss": 0.6124, "step": 22482 }, { "epoch": 0.9317833312611381, "grad_norm": 0.3913264274597168, "learning_rate": 3.412905632226781e-07, "loss": 0.6427, "step": 22483 }, { "epoch": 0.9318247751668117, "grad_norm": 0.43540334701538086, "learning_rate": 3.4108334369430977e-07, "loss": 0.6736, "step": 22484 }, { "epoch": 0.9318662190724853, "grad_norm": 0.4227714240550995, "learning_rate": 3.408761241659414e-07, "loss": 0.6658, "step": 22485 }, { "epoch": 0.9319076629781591, "grad_norm": 0.42171013355255127, "learning_rate": 3.4066890463757307e-07, "loss": 0.7168, "step": 22486 }, { "epoch": 0.9319491068838327, "grad_norm": 0.45419901609420776, "learning_rate": 3.404616851092047e-07, "loss": 0.6091, "step": 22487 }, { "epoch": 0.9319905507895064, "grad_norm": 0.447968989610672, "learning_rate": 3.4025446558083637e-07, "loss": 0.6824, "step": 22488 }, { "epoch": 0.93203199469518, "grad_norm": 0.4223446846008301, "learning_rate": 3.40047246052468e-07, "loss": 0.6949, "step": 22489 }, { "epoch": 
0.9320734386008538, "grad_norm": 0.3904838562011719, "learning_rate": 3.3984002652409967e-07, "loss": 0.5995, "step": 22490 }, { "epoch": 0.9321148825065274, "grad_norm": 0.4448259770870209, "learning_rate": 3.396328069957313e-07, "loss": 0.6755, "step": 22491 }, { "epoch": 0.932156326412201, "grad_norm": 0.426695317029953, "learning_rate": 3.3942558746736297e-07, "loss": 0.6599, "step": 22492 }, { "epoch": 0.9321977703178748, "grad_norm": 0.41158419847488403, "learning_rate": 3.392183679389946e-07, "loss": 0.689, "step": 22493 }, { "epoch": 0.9322392142235484, "grad_norm": 0.4201909899711609, "learning_rate": 3.390111484106262e-07, "loss": 0.656, "step": 22494 }, { "epoch": 0.9322806581292221, "grad_norm": 0.46222546696662903, "learning_rate": 3.388039288822579e-07, "loss": 0.6979, "step": 22495 }, { "epoch": 0.9323221020348957, "grad_norm": 0.4192471504211426, "learning_rate": 3.385967093538895e-07, "loss": 0.6494, "step": 22496 }, { "epoch": 0.9323635459405695, "grad_norm": 0.43279245495796204, "learning_rate": 3.3838948982552117e-07, "loss": 0.6575, "step": 22497 }, { "epoch": 0.9324049898462431, "grad_norm": 0.4200434684753418, "learning_rate": 3.381822702971528e-07, "loss": 0.7162, "step": 22498 }, { "epoch": 0.9324464337519168, "grad_norm": 0.48031294345855713, "learning_rate": 3.3797505076878447e-07, "loss": 0.6552, "step": 22499 }, { "epoch": 0.9324878776575904, "grad_norm": 0.417064905166626, "learning_rate": 3.377678312404161e-07, "loss": 0.6528, "step": 22500 }, { "epoch": 0.9325293215632641, "grad_norm": 0.40902429819107056, "learning_rate": 3.3756061171204777e-07, "loss": 0.5928, "step": 22501 }, { "epoch": 0.9325707654689378, "grad_norm": 0.42229366302490234, "learning_rate": 3.373533921836794e-07, "loss": 0.6895, "step": 22502 }, { "epoch": 0.9326122093746114, "grad_norm": 0.4400928020477295, "learning_rate": 3.3714617265531107e-07, "loss": 0.6702, "step": 22503 }, { "epoch": 0.9326536532802852, "grad_norm": 0.4230530560016632, "learning_rate": 
3.369389531269427e-07, "loss": 0.7115, "step": 22504 }, { "epoch": 0.9326950971859588, "grad_norm": 0.4365421235561371, "learning_rate": 3.3673173359857437e-07, "loss": 0.672, "step": 22505 }, { "epoch": 0.9327365410916325, "grad_norm": 0.4257984757423401, "learning_rate": 3.36524514070206e-07, "loss": 0.6639, "step": 22506 }, { "epoch": 0.9327779849973061, "grad_norm": 0.41713088750839233, "learning_rate": 3.363172945418376e-07, "loss": 0.6263, "step": 22507 }, { "epoch": 0.9328194289029799, "grad_norm": 0.42098501324653625, "learning_rate": 3.361100750134693e-07, "loss": 0.6473, "step": 22508 }, { "epoch": 0.9328608728086535, "grad_norm": 0.4304363429546356, "learning_rate": 3.359028554851009e-07, "loss": 0.7419, "step": 22509 }, { "epoch": 0.9329023167143271, "grad_norm": 0.38049963116645813, "learning_rate": 3.356956359567326e-07, "loss": 0.6171, "step": 22510 }, { "epoch": 0.9329437606200008, "grad_norm": 0.4703044891357422, "learning_rate": 3.354884164283642e-07, "loss": 0.709, "step": 22511 }, { "epoch": 0.9329852045256745, "grad_norm": 0.44463521242141724, "learning_rate": 3.352811968999959e-07, "loss": 0.6324, "step": 22512 }, { "epoch": 0.9330266484313482, "grad_norm": 0.43506574630737305, "learning_rate": 3.350739773716275e-07, "loss": 0.6697, "step": 22513 }, { "epoch": 0.9330680923370218, "grad_norm": 0.40987923741340637, "learning_rate": 3.348667578432592e-07, "loss": 0.6053, "step": 22514 }, { "epoch": 0.9331095362426955, "grad_norm": 0.4395192563533783, "learning_rate": 3.346595383148908e-07, "loss": 0.6606, "step": 22515 }, { "epoch": 0.9331509801483692, "grad_norm": 0.43187767267227173, "learning_rate": 3.3445231878652247e-07, "loss": 0.7041, "step": 22516 }, { "epoch": 0.9331924240540429, "grad_norm": 0.5478802919387817, "learning_rate": 3.342450992581541e-07, "loss": 0.7148, "step": 22517 }, { "epoch": 0.9332338679597165, "grad_norm": 0.43575769662857056, "learning_rate": 3.340378797297858e-07, "loss": 0.6698, "step": 22518 }, { "epoch": 
0.9332753118653901, "grad_norm": 0.3952096104621887, "learning_rate": 3.338306602014174e-07, "loss": 0.5858, "step": 22519 }, { "epoch": 0.9333167557710639, "grad_norm": 0.4112103581428528, "learning_rate": 3.33623440673049e-07, "loss": 0.625, "step": 22520 }, { "epoch": 0.9333581996767375, "grad_norm": 0.39789533615112305, "learning_rate": 3.334162211446807e-07, "loss": 0.6191, "step": 22521 }, { "epoch": 0.9333996435824112, "grad_norm": 0.3806796371936798, "learning_rate": 3.332090016163123e-07, "loss": 0.6299, "step": 22522 }, { "epoch": 0.9334410874880849, "grad_norm": 0.4431036412715912, "learning_rate": 3.33001782087944e-07, "loss": 0.6608, "step": 22523 }, { "epoch": 0.9334825313937586, "grad_norm": 0.4300888478755951, "learning_rate": 3.327945625595756e-07, "loss": 0.6429, "step": 22524 }, { "epoch": 0.9335239752994322, "grad_norm": 0.4719328284263611, "learning_rate": 3.3258734303120733e-07, "loss": 0.7184, "step": 22525 }, { "epoch": 0.9335654192051059, "grad_norm": 0.464844286441803, "learning_rate": 3.323801235028389e-07, "loss": 0.6747, "step": 22526 }, { "epoch": 0.9336068631107796, "grad_norm": 0.42974865436553955, "learning_rate": 3.3217290397447063e-07, "loss": 0.6743, "step": 22527 }, { "epoch": 0.9336483070164532, "grad_norm": 0.45739665627479553, "learning_rate": 3.319656844461022e-07, "loss": 0.6903, "step": 22528 }, { "epoch": 0.9336897509221269, "grad_norm": 0.39657679200172424, "learning_rate": 3.3175846491773393e-07, "loss": 0.6257, "step": 22529 }, { "epoch": 0.9337311948278005, "grad_norm": 0.4236172139644623, "learning_rate": 3.315512453893655e-07, "loss": 0.7118, "step": 22530 }, { "epoch": 0.9337726387334743, "grad_norm": 0.42161887884140015, "learning_rate": 3.313440258609971e-07, "loss": 0.6987, "step": 22531 }, { "epoch": 0.9338140826391479, "grad_norm": 0.40426599979400635, "learning_rate": 3.311368063326288e-07, "loss": 0.6415, "step": 22532 }, { "epoch": 0.9338555265448216, "grad_norm": 0.3915784955024719, "learning_rate": 
3.309295868042604e-07, "loss": 0.6221, "step": 22533 }, { "epoch": 0.9338969704504952, "grad_norm": 0.4045061767101288, "learning_rate": 3.3072236727589213e-07, "loss": 0.6108, "step": 22534 }, { "epoch": 0.933938414356169, "grad_norm": 0.42910143733024597, "learning_rate": 3.305151477475237e-07, "loss": 0.6755, "step": 22535 }, { "epoch": 0.9339798582618426, "grad_norm": 0.41924622654914856, "learning_rate": 3.3030792821915543e-07, "loss": 0.6992, "step": 22536 }, { "epoch": 0.9340213021675162, "grad_norm": 0.4237860143184662, "learning_rate": 3.30100708690787e-07, "loss": 0.6746, "step": 22537 }, { "epoch": 0.93406274607319, "grad_norm": 0.404877245426178, "learning_rate": 3.2989348916241873e-07, "loss": 0.678, "step": 22538 }, { "epoch": 0.9341041899788636, "grad_norm": 0.413104385137558, "learning_rate": 3.296862696340503e-07, "loss": 0.7146, "step": 22539 }, { "epoch": 0.9341456338845373, "grad_norm": 0.43517807126045227, "learning_rate": 3.2947905010568203e-07, "loss": 0.6653, "step": 22540 }, { "epoch": 0.9341870777902109, "grad_norm": 0.41642457246780396, "learning_rate": 3.292718305773136e-07, "loss": 0.6696, "step": 22541 }, { "epoch": 0.9342285216958847, "grad_norm": 0.4401613473892212, "learning_rate": 3.2906461104894533e-07, "loss": 0.6564, "step": 22542 }, { "epoch": 0.9342699656015583, "grad_norm": 0.40061819553375244, "learning_rate": 3.2885739152057693e-07, "loss": 0.6029, "step": 22543 }, { "epoch": 0.934311409507232, "grad_norm": 0.44017326831817627, "learning_rate": 3.286501719922085e-07, "loss": 0.6219, "step": 22544 }, { "epoch": 0.9343528534129056, "grad_norm": 0.4017145037651062, "learning_rate": 3.2844295246384023e-07, "loss": 0.6577, "step": 22545 }, { "epoch": 0.9343942973185793, "grad_norm": 0.4330856204032898, "learning_rate": 3.282357329354718e-07, "loss": 0.6885, "step": 22546 }, { "epoch": 0.934435741224253, "grad_norm": 0.4124925136566162, "learning_rate": 3.2802851340710353e-07, "loss": 0.6338, "step": 22547 }, { "epoch": 
0.9344771851299266, "grad_norm": 0.44773757457733154, "learning_rate": 3.278212938787351e-07, "loss": 0.6849, "step": 22548 }, { "epoch": 0.9345186290356003, "grad_norm": 0.3872513771057129, "learning_rate": 3.2761407435036683e-07, "loss": 0.6151, "step": 22549 }, { "epoch": 0.934560072941274, "grad_norm": 0.4315352439880371, "learning_rate": 3.2740685482199843e-07, "loss": 0.6829, "step": 22550 }, { "epoch": 0.9346015168469477, "grad_norm": 0.41650962829589844, "learning_rate": 3.2719963529363013e-07, "loss": 0.673, "step": 22551 }, { "epoch": 0.9346429607526213, "grad_norm": 0.39676204323768616, "learning_rate": 3.2699241576526173e-07, "loss": 0.6478, "step": 22552 }, { "epoch": 0.9346844046582949, "grad_norm": 0.4246250092983246, "learning_rate": 3.2678519623689343e-07, "loss": 0.6503, "step": 22553 }, { "epoch": 0.9347258485639687, "grad_norm": 0.4099022150039673, "learning_rate": 3.2657797670852503e-07, "loss": 0.667, "step": 22554 }, { "epoch": 0.9347672924696423, "grad_norm": 0.4494040608406067, "learning_rate": 3.2637075718015673e-07, "loss": 0.645, "step": 22555 }, { "epoch": 0.934808736375316, "grad_norm": 0.3926289677619934, "learning_rate": 3.2616353765178833e-07, "loss": 0.6244, "step": 22556 }, { "epoch": 0.9348501802809897, "grad_norm": 0.42267096042633057, "learning_rate": 3.259563181234199e-07, "loss": 0.624, "step": 22557 }, { "epoch": 0.9348916241866634, "grad_norm": 0.40026867389678955, "learning_rate": 3.2574909859505163e-07, "loss": 0.6619, "step": 22558 }, { "epoch": 0.934933068092337, "grad_norm": 0.4497218132019043, "learning_rate": 3.2554187906668323e-07, "loss": 0.6685, "step": 22559 }, { "epoch": 0.9349745119980107, "grad_norm": 0.40292811393737793, "learning_rate": 3.2533465953831493e-07, "loss": 0.663, "step": 22560 }, { "epoch": 0.9350159559036844, "grad_norm": 0.43290334939956665, "learning_rate": 3.2512744000994653e-07, "loss": 0.6932, "step": 22561 }, { "epoch": 0.935057399809358, "grad_norm": 0.3959939181804657, "learning_rate": 
3.2492022048157823e-07, "loss": 0.6021, "step": 22562 }, { "epoch": 0.9350988437150317, "grad_norm": 0.4570123553276062, "learning_rate": 3.2471300095320983e-07, "loss": 0.6576, "step": 22563 }, { "epoch": 0.9351402876207053, "grad_norm": 0.429307222366333, "learning_rate": 3.2450578142484153e-07, "loss": 0.6775, "step": 22564 }, { "epoch": 0.9351817315263791, "grad_norm": 0.43759632110595703, "learning_rate": 3.2429856189647313e-07, "loss": 0.6755, "step": 22565 }, { "epoch": 0.9352231754320527, "grad_norm": 0.3922669589519501, "learning_rate": 3.2409134236810483e-07, "loss": 0.6909, "step": 22566 }, { "epoch": 0.9352646193377264, "grad_norm": 0.41394883394241333, "learning_rate": 3.2388412283973643e-07, "loss": 0.6711, "step": 22567 }, { "epoch": 0.9353060632434, "grad_norm": 0.43639811873435974, "learning_rate": 3.236769033113681e-07, "loss": 0.6536, "step": 22568 }, { "epoch": 0.9353475071490738, "grad_norm": 0.40334808826446533, "learning_rate": 3.2346968378299973e-07, "loss": 0.6211, "step": 22569 }, { "epoch": 0.9353889510547474, "grad_norm": 0.39216935634613037, "learning_rate": 3.232624642546314e-07, "loss": 0.6305, "step": 22570 }, { "epoch": 0.935430394960421, "grad_norm": 0.40508219599723816, "learning_rate": 3.2305524472626303e-07, "loss": 0.6725, "step": 22571 }, { "epoch": 0.9354718388660948, "grad_norm": 0.46662476658821106, "learning_rate": 3.2284802519789463e-07, "loss": 0.7003, "step": 22572 }, { "epoch": 0.9355132827717684, "grad_norm": 0.4066019356250763, "learning_rate": 3.2264080566952633e-07, "loss": 0.6772, "step": 22573 }, { "epoch": 0.9355547266774421, "grad_norm": 0.44691184163093567, "learning_rate": 3.2243358614115793e-07, "loss": 0.6202, "step": 22574 }, { "epoch": 0.9355961705831157, "grad_norm": 0.4330269396305084, "learning_rate": 3.2222636661278963e-07, "loss": 0.6873, "step": 22575 }, { "epoch": 0.9356376144887895, "grad_norm": 0.40682119131088257, "learning_rate": 3.2201914708442123e-07, "loss": 0.6425, "step": 22576 }, { 
"epoch": 0.9356790583944631, "grad_norm": 0.40092039108276367, "learning_rate": 3.2181192755605293e-07, "loss": 0.6183, "step": 22577 }, { "epoch": 0.9357205023001368, "grad_norm": 0.4146396219730377, "learning_rate": 3.2160470802768453e-07, "loss": 0.605, "step": 22578 }, { "epoch": 0.9357619462058104, "grad_norm": 0.38058701157569885, "learning_rate": 3.2139748849931623e-07, "loss": 0.6647, "step": 22579 }, { "epoch": 0.9358033901114841, "grad_norm": 0.42083314061164856, "learning_rate": 3.2119026897094783e-07, "loss": 0.7112, "step": 22580 }, { "epoch": 0.9358448340171578, "grad_norm": 0.3826419413089752, "learning_rate": 3.209830494425795e-07, "loss": 0.6575, "step": 22581 }, { "epoch": 0.9358862779228314, "grad_norm": 0.42726337909698486, "learning_rate": 3.2077582991421113e-07, "loss": 0.6863, "step": 22582 }, { "epoch": 0.9359277218285051, "grad_norm": 0.4279736876487732, "learning_rate": 3.205686103858428e-07, "loss": 0.6967, "step": 22583 }, { "epoch": 0.9359691657341788, "grad_norm": 0.3983338475227356, "learning_rate": 3.2036139085747443e-07, "loss": 0.6658, "step": 22584 }, { "epoch": 0.9360106096398525, "grad_norm": 0.40128687024116516, "learning_rate": 3.201541713291061e-07, "loss": 0.6497, "step": 22585 }, { "epoch": 0.9360520535455261, "grad_norm": 0.42378413677215576, "learning_rate": 3.1994695180073773e-07, "loss": 0.6321, "step": 22586 }, { "epoch": 0.9360934974511999, "grad_norm": 0.4139254689216614, "learning_rate": 3.197397322723694e-07, "loss": 0.6423, "step": 22587 }, { "epoch": 0.9361349413568735, "grad_norm": 0.4287931025028229, "learning_rate": 3.1953251274400103e-07, "loss": 0.656, "step": 22588 }, { "epoch": 0.9361763852625471, "grad_norm": 0.42412757873535156, "learning_rate": 3.193252932156327e-07, "loss": 0.7063, "step": 22589 }, { "epoch": 0.9362178291682208, "grad_norm": 0.4218384325504303, "learning_rate": 3.1911807368726433e-07, "loss": 0.6741, "step": 22590 }, { "epoch": 0.9362592730738944, "grad_norm": 0.40070295333862305, 
"learning_rate": 3.1891085415889593e-07, "loss": 0.6333, "step": 22591 }, { "epoch": 0.9363007169795682, "grad_norm": 0.42076897621154785, "learning_rate": 3.187036346305276e-07, "loss": 0.6736, "step": 22592 }, { "epoch": 0.9363421608852418, "grad_norm": 0.45353585481643677, "learning_rate": 3.1849641510215923e-07, "loss": 0.6747, "step": 22593 }, { "epoch": 0.9363836047909155, "grad_norm": 0.4181041121482849, "learning_rate": 3.182891955737909e-07, "loss": 0.6736, "step": 22594 }, { "epoch": 0.9364250486965892, "grad_norm": 0.43138203024864197, "learning_rate": 3.1808197604542253e-07, "loss": 0.7139, "step": 22595 }, { "epoch": 0.9364664926022629, "grad_norm": 0.45184755325317383, "learning_rate": 3.178747565170542e-07, "loss": 0.6989, "step": 22596 }, { "epoch": 0.9365079365079365, "grad_norm": 0.4023093283176422, "learning_rate": 3.1766753698868583e-07, "loss": 0.6487, "step": 22597 }, { "epoch": 0.9365493804136101, "grad_norm": 0.41347697377204895, "learning_rate": 3.174603174603175e-07, "loss": 0.6707, "step": 22598 }, { "epoch": 0.9365908243192839, "grad_norm": 0.4041733741760254, "learning_rate": 3.1725309793194913e-07, "loss": 0.6156, "step": 22599 }, { "epoch": 0.9366322682249575, "grad_norm": 0.4014715850353241, "learning_rate": 3.170458784035808e-07, "loss": 0.6373, "step": 22600 }, { "epoch": 0.9366737121306312, "grad_norm": 0.4268902540206909, "learning_rate": 3.1683865887521244e-07, "loss": 0.6792, "step": 22601 }, { "epoch": 0.9367151560363048, "grad_norm": 0.448744535446167, "learning_rate": 3.166314393468441e-07, "loss": 0.7025, "step": 22602 }, { "epoch": 0.9367565999419786, "grad_norm": 0.4212503135204315, "learning_rate": 3.1642421981847574e-07, "loss": 0.6621, "step": 22603 }, { "epoch": 0.9367980438476522, "grad_norm": 0.47785696387290955, "learning_rate": 3.162170002901074e-07, "loss": 0.7067, "step": 22604 }, { "epoch": 0.9368394877533259, "grad_norm": 0.3724975883960724, "learning_rate": 3.16009780761739e-07, "loss": 0.6205, "step": 22605 
}, { "epoch": 0.9368809316589995, "grad_norm": 0.402670294046402, "learning_rate": 3.158025612333707e-07, "loss": 0.6335, "step": 22606 }, { "epoch": 0.9369223755646732, "grad_norm": 0.3873659074306488, "learning_rate": 3.155953417050023e-07, "loss": 0.6704, "step": 22607 }, { "epoch": 0.9369638194703469, "grad_norm": 0.4435826241970062, "learning_rate": 3.15388122176634e-07, "loss": 0.6842, "step": 22608 }, { "epoch": 0.9370052633760205, "grad_norm": 0.3981541097164154, "learning_rate": 3.151809026482656e-07, "loss": 0.6494, "step": 22609 }, { "epoch": 0.9370467072816943, "grad_norm": 0.4442363977432251, "learning_rate": 3.1497368311989724e-07, "loss": 0.7067, "step": 22610 }, { "epoch": 0.9370881511873679, "grad_norm": 0.4465920627117157, "learning_rate": 3.147664635915289e-07, "loss": 0.6761, "step": 22611 }, { "epoch": 0.9371295950930416, "grad_norm": 0.4386685788631439, "learning_rate": 3.1455924406316054e-07, "loss": 0.701, "step": 22612 }, { "epoch": 0.9371710389987152, "grad_norm": 0.37436750531196594, "learning_rate": 3.143520245347922e-07, "loss": 0.6237, "step": 22613 }, { "epoch": 0.9372124829043889, "grad_norm": 0.3905163109302521, "learning_rate": 3.1414480500642384e-07, "loss": 0.5999, "step": 22614 }, { "epoch": 0.9372539268100626, "grad_norm": 0.407635897397995, "learning_rate": 3.139375854780555e-07, "loss": 0.7092, "step": 22615 }, { "epoch": 0.9372953707157362, "grad_norm": 0.42757824063301086, "learning_rate": 3.1373036594968714e-07, "loss": 0.675, "step": 22616 }, { "epoch": 0.9373368146214099, "grad_norm": 0.41417235136032104, "learning_rate": 3.135231464213188e-07, "loss": 0.6869, "step": 22617 }, { "epoch": 0.9373782585270836, "grad_norm": 0.4252808392047882, "learning_rate": 3.133159268929504e-07, "loss": 0.6813, "step": 22618 }, { "epoch": 0.9374197024327573, "grad_norm": 0.4096668064594269, "learning_rate": 3.131087073645821e-07, "loss": 0.608, "step": 22619 }, { "epoch": 0.9374611463384309, "grad_norm": 0.5335261821746826, 
"learning_rate": 3.129014878362137e-07, "loss": 0.6775, "step": 22620 }, { "epoch": 0.9375025902441046, "grad_norm": 0.39103323221206665, "learning_rate": 3.126942683078454e-07, "loss": 0.6304, "step": 22621 }, { "epoch": 0.9375440341497783, "grad_norm": 0.444345623254776, "learning_rate": 3.1248704877947704e-07, "loss": 0.7036, "step": 22622 }, { "epoch": 0.9375854780554519, "grad_norm": 0.3969419300556183, "learning_rate": 3.1227982925110864e-07, "loss": 0.6774, "step": 22623 }, { "epoch": 0.9376269219611256, "grad_norm": 0.41894611716270447, "learning_rate": 3.120726097227403e-07, "loss": 0.6765, "step": 22624 }, { "epoch": 0.9376683658667992, "grad_norm": 0.4521391987800598, "learning_rate": 3.1186539019437194e-07, "loss": 0.687, "step": 22625 }, { "epoch": 0.937709809772473, "grad_norm": 0.4342905879020691, "learning_rate": 3.116581706660036e-07, "loss": 0.681, "step": 22626 }, { "epoch": 0.9377512536781466, "grad_norm": 0.43111875653266907, "learning_rate": 3.1145095113763524e-07, "loss": 0.684, "step": 22627 }, { "epoch": 0.9377926975838203, "grad_norm": 0.43110212683677673, "learning_rate": 3.112437316092669e-07, "loss": 0.6765, "step": 22628 }, { "epoch": 0.937834141489494, "grad_norm": 0.40399089455604553, "learning_rate": 3.1103651208089854e-07, "loss": 0.6296, "step": 22629 }, { "epoch": 0.9378755853951677, "grad_norm": 0.48418989777565, "learning_rate": 3.108292925525302e-07, "loss": 0.7214, "step": 22630 }, { "epoch": 0.9379170293008413, "grad_norm": 0.4238094091415405, "learning_rate": 3.1062207302416184e-07, "loss": 0.6979, "step": 22631 }, { "epoch": 0.9379584732065149, "grad_norm": 0.3733350932598114, "learning_rate": 3.104148534957935e-07, "loss": 0.6323, "step": 22632 }, { "epoch": 0.9379999171121887, "grad_norm": 0.41005539894104004, "learning_rate": 3.1020763396742514e-07, "loss": 0.6648, "step": 22633 }, { "epoch": 0.9380413610178623, "grad_norm": 0.41264620423316956, "learning_rate": 3.100004144390568e-07, "loss": 0.6005, "step": 22634 }, { 
"epoch": 0.938082804923536, "grad_norm": 0.4105056822299957, "learning_rate": 3.0979319491068844e-07, "loss": 0.6941, "step": 22635 }, { "epoch": 0.9381242488292096, "grad_norm": 0.4443499445915222, "learning_rate": 3.0958597538232004e-07, "loss": 0.6683, "step": 22636 }, { "epoch": 0.9381656927348834, "grad_norm": 0.4108963906764984, "learning_rate": 3.093787558539517e-07, "loss": 0.639, "step": 22637 }, { "epoch": 0.938207136640557, "grad_norm": 0.4111112952232361, "learning_rate": 3.0917153632558334e-07, "loss": 0.6523, "step": 22638 }, { "epoch": 0.9382485805462307, "grad_norm": 0.4012923836708069, "learning_rate": 3.08964316797215e-07, "loss": 0.6331, "step": 22639 }, { "epoch": 0.9382900244519043, "grad_norm": 0.39918848872184753, "learning_rate": 3.0875709726884664e-07, "loss": 0.6758, "step": 22640 }, { "epoch": 0.938331468357578, "grad_norm": 0.41612181067466736, "learning_rate": 3.085498777404783e-07, "loss": 0.6351, "step": 22641 }, { "epoch": 0.9383729122632517, "grad_norm": 0.41747620701789856, "learning_rate": 3.0834265821210994e-07, "loss": 0.6891, "step": 22642 }, { "epoch": 0.9384143561689253, "grad_norm": 0.4157271087169647, "learning_rate": 3.081354386837416e-07, "loss": 0.6527, "step": 22643 }, { "epoch": 0.9384558000745991, "grad_norm": 0.40750008821487427, "learning_rate": 3.0792821915537324e-07, "loss": 0.6462, "step": 22644 }, { "epoch": 0.9384972439802727, "grad_norm": 0.4657857120037079, "learning_rate": 3.077209996270049e-07, "loss": 0.7078, "step": 22645 }, { "epoch": 0.9385386878859464, "grad_norm": 0.4145195186138153, "learning_rate": 3.0751378009863654e-07, "loss": 0.6648, "step": 22646 }, { "epoch": 0.93858013179162, "grad_norm": 0.3850816488265991, "learning_rate": 3.073065605702682e-07, "loss": 0.6161, "step": 22647 }, { "epoch": 0.9386215756972938, "grad_norm": 0.4316581189632416, "learning_rate": 3.070993410418998e-07, "loss": 0.6847, "step": 22648 }, { "epoch": 0.9386630196029674, "grad_norm": 0.4093256890773773, 
"learning_rate": 3.0689212151353144e-07, "loss": 0.6587, "step": 22649 }, { "epoch": 0.938704463508641, "grad_norm": 0.41901537775993347, "learning_rate": 3.066849019851631e-07, "loss": 0.7061, "step": 22650 }, { "epoch": 0.9387459074143147, "grad_norm": 0.40523555874824524, "learning_rate": 3.0647768245679474e-07, "loss": 0.6927, "step": 22651 }, { "epoch": 0.9387873513199884, "grad_norm": 0.4257374107837677, "learning_rate": 3.062704629284264e-07, "loss": 0.6603, "step": 22652 }, { "epoch": 0.9388287952256621, "grad_norm": 0.40490153431892395, "learning_rate": 3.0606324340005804e-07, "loss": 0.6597, "step": 22653 }, { "epoch": 0.9388702391313357, "grad_norm": 0.4396701753139496, "learning_rate": 3.058560238716897e-07, "loss": 0.667, "step": 22654 }, { "epoch": 0.9389116830370094, "grad_norm": 0.4322687089443207, "learning_rate": 3.0564880434332134e-07, "loss": 0.7506, "step": 22655 }, { "epoch": 0.9389531269426831, "grad_norm": 0.42184755206108093, "learning_rate": 3.05441584814953e-07, "loss": 0.6685, "step": 22656 }, { "epoch": 0.9389945708483568, "grad_norm": 0.48660042881965637, "learning_rate": 3.0523436528658464e-07, "loss": 0.6975, "step": 22657 }, { "epoch": 0.9390360147540304, "grad_norm": 0.43585479259490967, "learning_rate": 3.050271457582163e-07, "loss": 0.6646, "step": 22658 }, { "epoch": 0.939077458659704, "grad_norm": 0.3921436667442322, "learning_rate": 3.0481992622984794e-07, "loss": 0.6346, "step": 22659 }, { "epoch": 0.9391189025653778, "grad_norm": 0.41703155636787415, "learning_rate": 3.0461270670147954e-07, "loss": 0.7029, "step": 22660 }, { "epoch": 0.9391603464710514, "grad_norm": 0.4036855399608612, "learning_rate": 3.044054871731112e-07, "loss": 0.698, "step": 22661 }, { "epoch": 0.9392017903767251, "grad_norm": 0.3861207067966461, "learning_rate": 3.0419826764474284e-07, "loss": 0.6543, "step": 22662 }, { "epoch": 0.9392432342823988, "grad_norm": 0.42968934774398804, "learning_rate": 3.039910481163745e-07, "loss": 0.6274, "step": 22663 
}, { "epoch": 0.9392846781880725, "grad_norm": 0.45244643092155457, "learning_rate": 3.0378382858800614e-07, "loss": 0.6895, "step": 22664 }, { "epoch": 0.9393261220937461, "grad_norm": 0.4216071367263794, "learning_rate": 3.035766090596378e-07, "loss": 0.6938, "step": 22665 }, { "epoch": 0.9393675659994198, "grad_norm": 0.4145006239414215, "learning_rate": 3.0336938953126944e-07, "loss": 0.6569, "step": 22666 }, { "epoch": 0.9394090099050935, "grad_norm": 0.3545956611633301, "learning_rate": 3.031621700029011e-07, "loss": 0.6086, "step": 22667 }, { "epoch": 0.9394504538107671, "grad_norm": 0.420364648103714, "learning_rate": 3.0295495047453274e-07, "loss": 0.6499, "step": 22668 }, { "epoch": 0.9394918977164408, "grad_norm": 0.4074712097644806, "learning_rate": 3.027477309461644e-07, "loss": 0.6473, "step": 22669 }, { "epoch": 0.9395333416221144, "grad_norm": 0.452358603477478, "learning_rate": 3.0254051141779605e-07, "loss": 0.6967, "step": 22670 }, { "epoch": 0.9395747855277882, "grad_norm": 0.4092451333999634, "learning_rate": 3.023332918894277e-07, "loss": 0.6831, "step": 22671 }, { "epoch": 0.9396162294334618, "grad_norm": 0.4189174473285675, "learning_rate": 3.021260723610593e-07, "loss": 0.6687, "step": 22672 }, { "epoch": 0.9396576733391355, "grad_norm": 0.3928716778755188, "learning_rate": 3.0191885283269094e-07, "loss": 0.6487, "step": 22673 }, { "epoch": 0.9396991172448091, "grad_norm": 0.41957369446754456, "learning_rate": 3.017116333043226e-07, "loss": 0.6141, "step": 22674 }, { "epoch": 0.9397405611504828, "grad_norm": 0.44678106904029846, "learning_rate": 3.0150441377595424e-07, "loss": 0.6312, "step": 22675 }, { "epoch": 0.9397820050561565, "grad_norm": 0.4766293466091156, "learning_rate": 3.012971942475859e-07, "loss": 0.668, "step": 22676 }, { "epoch": 0.9398234489618301, "grad_norm": 0.3994397521018982, "learning_rate": 3.0108997471921754e-07, "loss": 0.6456, "step": 22677 }, { "epoch": 0.9398648928675039, "grad_norm": 0.44541677832603455, 
"learning_rate": 3.008827551908492e-07, "loss": 0.6414, "step": 22678 }, { "epoch": 0.9399063367731775, "grad_norm": 0.3981247544288635, "learning_rate": 3.0067553566248085e-07, "loss": 0.7126, "step": 22679 }, { "epoch": 0.9399477806788512, "grad_norm": 0.41834592819213867, "learning_rate": 3.004683161341125e-07, "loss": 0.6638, "step": 22680 }, { "epoch": 0.9399892245845248, "grad_norm": 0.5145294666290283, "learning_rate": 3.0026109660574415e-07, "loss": 0.741, "step": 22681 }, { "epoch": 0.9400306684901986, "grad_norm": 0.47122928500175476, "learning_rate": 3.000538770773758e-07, "loss": 0.6801, "step": 22682 }, { "epoch": 0.9400721123958722, "grad_norm": 0.3984459340572357, "learning_rate": 2.9984665754900745e-07, "loss": 0.621, "step": 22683 }, { "epoch": 0.9401135563015458, "grad_norm": 0.4163937568664551, "learning_rate": 2.996394380206391e-07, "loss": 0.6693, "step": 22684 }, { "epoch": 0.9401550002072195, "grad_norm": 0.4243709146976471, "learning_rate": 2.994322184922707e-07, "loss": 0.6832, "step": 22685 }, { "epoch": 0.9401964441128932, "grad_norm": 0.40118080377578735, "learning_rate": 2.9922499896390234e-07, "loss": 0.6736, "step": 22686 }, { "epoch": 0.9402378880185669, "grad_norm": 0.39724454283714294, "learning_rate": 2.99017779435534e-07, "loss": 0.605, "step": 22687 }, { "epoch": 0.9402793319242405, "grad_norm": 0.3788558840751648, "learning_rate": 2.9881055990716565e-07, "loss": 0.6029, "step": 22688 }, { "epoch": 0.9403207758299142, "grad_norm": 0.3969612717628479, "learning_rate": 2.986033403787973e-07, "loss": 0.6504, "step": 22689 }, { "epoch": 0.9403622197355879, "grad_norm": 0.4430015981197357, "learning_rate": 2.9839612085042895e-07, "loss": 0.6353, "step": 22690 }, { "epoch": 0.9404036636412616, "grad_norm": 0.433103084564209, "learning_rate": 2.981889013220606e-07, "loss": 0.661, "step": 22691 }, { "epoch": 0.9404451075469352, "grad_norm": 0.39984339475631714, "learning_rate": 2.9798168179369225e-07, "loss": 0.6357, "step": 22692 }, { 
"epoch": 0.9404865514526088, "grad_norm": 0.4219370484352112, "learning_rate": 2.977744622653239e-07, "loss": 0.6628, "step": 22693 }, { "epoch": 0.9405279953582826, "grad_norm": 0.4019707143306732, "learning_rate": 2.9756724273695555e-07, "loss": 0.594, "step": 22694 }, { "epoch": 0.9405694392639562, "grad_norm": 0.4206814169883728, "learning_rate": 2.973600232085872e-07, "loss": 0.6469, "step": 22695 }, { "epoch": 0.9406108831696299, "grad_norm": 0.40712857246398926, "learning_rate": 2.9715280368021885e-07, "loss": 0.6577, "step": 22696 }, { "epoch": 0.9406523270753036, "grad_norm": 0.38911762833595276, "learning_rate": 2.969455841518505e-07, "loss": 0.5895, "step": 22697 }, { "epoch": 0.9406937709809773, "grad_norm": 0.42631709575653076, "learning_rate": 2.9673836462348215e-07, "loss": 0.7179, "step": 22698 }, { "epoch": 0.9407352148866509, "grad_norm": 0.41164296865463257, "learning_rate": 2.965311450951138e-07, "loss": 0.6664, "step": 22699 }, { "epoch": 0.9407766587923246, "grad_norm": 0.40951773524284363, "learning_rate": 2.9632392556674545e-07, "loss": 0.701, "step": 22700 }, { "epoch": 0.9408181026979983, "grad_norm": 0.41902661323547363, "learning_rate": 2.961167060383771e-07, "loss": 0.5952, "step": 22701 }, { "epoch": 0.9408595466036719, "grad_norm": 0.44185319542884827, "learning_rate": 2.9590948651000875e-07, "loss": 0.6719, "step": 22702 }, { "epoch": 0.9409009905093456, "grad_norm": 0.446958988904953, "learning_rate": 2.957022669816404e-07, "loss": 0.6926, "step": 22703 }, { "epoch": 0.9409424344150192, "grad_norm": 0.4236953854560852, "learning_rate": 2.95495047453272e-07, "loss": 0.6534, "step": 22704 }, { "epoch": 0.940983878320693, "grad_norm": 0.4064887762069702, "learning_rate": 2.9528782792490365e-07, "loss": 0.6588, "step": 22705 }, { "epoch": 0.9410253222263666, "grad_norm": 0.42141008377075195, "learning_rate": 2.950806083965353e-07, "loss": 0.7013, "step": 22706 }, { "epoch": 0.9410667661320403, "grad_norm": 0.4182586967945099, 
"learning_rate": 2.9487338886816695e-07, "loss": 0.6973, "step": 22707 }, { "epoch": 0.9411082100377139, "grad_norm": 0.45304182171821594, "learning_rate": 2.946661693397986e-07, "loss": 0.704, "step": 22708 }, { "epoch": 0.9411496539433877, "grad_norm": 0.47902780771255493, "learning_rate": 2.9445894981143025e-07, "loss": 0.7422, "step": 22709 }, { "epoch": 0.9411910978490613, "grad_norm": 0.4208369255065918, "learning_rate": 2.942517302830619e-07, "loss": 0.6577, "step": 22710 }, { "epoch": 0.9412325417547349, "grad_norm": 0.4209844172000885, "learning_rate": 2.9404451075469355e-07, "loss": 0.7283, "step": 22711 }, { "epoch": 0.9412739856604087, "grad_norm": 0.4426361620426178, "learning_rate": 2.938372912263252e-07, "loss": 0.6523, "step": 22712 }, { "epoch": 0.9413154295660823, "grad_norm": 0.43733128905296326, "learning_rate": 2.9363007169795685e-07, "loss": 0.688, "step": 22713 }, { "epoch": 0.941356873471756, "grad_norm": 0.4314127266407013, "learning_rate": 2.934228521695885e-07, "loss": 0.6344, "step": 22714 }, { "epoch": 0.9413983173774296, "grad_norm": 0.41051992774009705, "learning_rate": 2.9321563264122015e-07, "loss": 0.6548, "step": 22715 }, { "epoch": 0.9414397612831034, "grad_norm": 0.4073718190193176, "learning_rate": 2.930084131128518e-07, "loss": 0.7301, "step": 22716 }, { "epoch": 0.941481205188777, "grad_norm": 0.40204429626464844, "learning_rate": 2.9280119358448345e-07, "loss": 0.5961, "step": 22717 }, { "epoch": 0.9415226490944507, "grad_norm": 0.4238159954547882, "learning_rate": 2.925939740561151e-07, "loss": 0.6633, "step": 22718 }, { "epoch": 0.9415640930001243, "grad_norm": 0.39215710759162903, "learning_rate": 2.9238675452774675e-07, "loss": 0.6113, "step": 22719 }, { "epoch": 0.941605536905798, "grad_norm": 0.4143853783607483, "learning_rate": 2.921795349993784e-07, "loss": 0.6362, "step": 22720 }, { "epoch": 0.9416469808114717, "grad_norm": 0.4640759527683258, "learning_rate": 2.9197231547101005e-07, "loss": 0.6946, "step": 22721 }, 
{ "epoch": 0.9416884247171453, "grad_norm": 0.41405633091926575, "learning_rate": 2.9176509594264165e-07, "loss": 0.672, "step": 22722 }, { "epoch": 0.941729868622819, "grad_norm": 0.42910036444664, "learning_rate": 2.915578764142733e-07, "loss": 0.6741, "step": 22723 }, { "epoch": 0.9417713125284927, "grad_norm": 0.4006488025188446, "learning_rate": 2.9135065688590495e-07, "loss": 0.6862, "step": 22724 }, { "epoch": 0.9418127564341664, "grad_norm": 0.4322017431259155, "learning_rate": 2.911434373575366e-07, "loss": 0.6354, "step": 22725 }, { "epoch": 0.94185420033984, "grad_norm": 0.3877887427806854, "learning_rate": 2.9093621782916825e-07, "loss": 0.6064, "step": 22726 }, { "epoch": 0.9418956442455138, "grad_norm": 0.4459737241268158, "learning_rate": 2.907289983007999e-07, "loss": 0.6624, "step": 22727 }, { "epoch": 0.9419370881511874, "grad_norm": 0.39123016595840454, "learning_rate": 2.9052177877243155e-07, "loss": 0.6533, "step": 22728 }, { "epoch": 0.941978532056861, "grad_norm": 0.4387635290622711, "learning_rate": 2.903145592440632e-07, "loss": 0.6948, "step": 22729 }, { "epoch": 0.9420199759625347, "grad_norm": 0.48579201102256775, "learning_rate": 2.9010733971569485e-07, "loss": 0.6659, "step": 22730 }, { "epoch": 0.9420614198682083, "grad_norm": 0.40345367789268494, "learning_rate": 2.899001201873265e-07, "loss": 0.6675, "step": 22731 }, { "epoch": 0.9421028637738821, "grad_norm": 0.4185834527015686, "learning_rate": 2.8969290065895816e-07, "loss": 0.6396, "step": 22732 }, { "epoch": 0.9421443076795557, "grad_norm": 0.4086865782737732, "learning_rate": 2.894856811305898e-07, "loss": 0.6692, "step": 22733 }, { "epoch": 0.9421857515852294, "grad_norm": 0.3893241882324219, "learning_rate": 2.892784616022214e-07, "loss": 0.6245, "step": 22734 }, { "epoch": 0.9422271954909031, "grad_norm": 0.42727822065353394, "learning_rate": 2.8907124207385305e-07, "loss": 0.7101, "step": 22735 }, { "epoch": 0.9422686393965767, "grad_norm": 0.42357343435287476, 
"learning_rate": 2.888640225454847e-07, "loss": 0.7048, "step": 22736 }, { "epoch": 0.9423100833022504, "grad_norm": 0.41281503438949585, "learning_rate": 2.8865680301711635e-07, "loss": 0.6055, "step": 22737 }, { "epoch": 0.942351527207924, "grad_norm": 0.39140456914901733, "learning_rate": 2.88449583488748e-07, "loss": 0.626, "step": 22738 }, { "epoch": 0.9423929711135978, "grad_norm": 0.4127078950405121, "learning_rate": 2.8824236396037965e-07, "loss": 0.6504, "step": 22739 }, { "epoch": 0.9424344150192714, "grad_norm": 0.4043368399143219, "learning_rate": 2.880351444320113e-07, "loss": 0.6653, "step": 22740 }, { "epoch": 0.9424758589249451, "grad_norm": 0.4386996030807495, "learning_rate": 2.8782792490364296e-07, "loss": 0.6967, "step": 22741 }, { "epoch": 0.9425173028306187, "grad_norm": 0.3970065712928772, "learning_rate": 2.876207053752746e-07, "loss": 0.6575, "step": 22742 }, { "epoch": 0.9425587467362925, "grad_norm": 0.41672858595848083, "learning_rate": 2.8741348584690626e-07, "loss": 0.6594, "step": 22743 }, { "epoch": 0.9426001906419661, "grad_norm": 0.4536133110523224, "learning_rate": 2.872062663185379e-07, "loss": 0.693, "step": 22744 }, { "epoch": 0.9426416345476397, "grad_norm": 0.42414590716362, "learning_rate": 2.8699904679016956e-07, "loss": 0.686, "step": 22745 }, { "epoch": 0.9426830784533134, "grad_norm": 0.42621704936027527, "learning_rate": 2.8679182726180115e-07, "loss": 0.6869, "step": 22746 }, { "epoch": 0.9427245223589871, "grad_norm": 0.3844846785068512, "learning_rate": 2.865846077334328e-07, "loss": 0.6677, "step": 22747 }, { "epoch": 0.9427659662646608, "grad_norm": 0.4149191677570343, "learning_rate": 2.8637738820506446e-07, "loss": 0.6179, "step": 22748 }, { "epoch": 0.9428074101703344, "grad_norm": 0.41492363810539246, "learning_rate": 2.861701686766961e-07, "loss": 0.6765, "step": 22749 }, { "epoch": 0.9428488540760082, "grad_norm": 0.4472828507423401, "learning_rate": 2.8596294914832776e-07, "loss": 0.6107, "step": 22750 }, { 
"epoch": 0.9428902979816818, "grad_norm": 0.40962597727775574, "learning_rate": 2.857557296199594e-07, "loss": 0.7069, "step": 22751 }, { "epoch": 0.9429317418873555, "grad_norm": 0.43634462356567383, "learning_rate": 2.8554851009159106e-07, "loss": 0.6343, "step": 22752 }, { "epoch": 0.9429731857930291, "grad_norm": 0.4072112739086151, "learning_rate": 2.853412905632227e-07, "loss": 0.6438, "step": 22753 }, { "epoch": 0.9430146296987028, "grad_norm": 0.42363491654396057, "learning_rate": 2.8513407103485436e-07, "loss": 0.6327, "step": 22754 }, { "epoch": 0.9430560736043765, "grad_norm": 0.4420884847640991, "learning_rate": 2.84926851506486e-07, "loss": 0.6705, "step": 22755 }, { "epoch": 0.9430975175100501, "grad_norm": 0.39288148283958435, "learning_rate": 2.8471963197811766e-07, "loss": 0.6508, "step": 22756 }, { "epoch": 0.9431389614157238, "grad_norm": 0.42284977436065674, "learning_rate": 2.845124124497493e-07, "loss": 0.6479, "step": 22757 }, { "epoch": 0.9431804053213975, "grad_norm": 0.4109274446964264, "learning_rate": 2.8430519292138096e-07, "loss": 0.702, "step": 22758 }, { "epoch": 0.9432218492270712, "grad_norm": 0.3990035653114319, "learning_rate": 2.8409797339301256e-07, "loss": 0.6489, "step": 22759 }, { "epoch": 0.9432632931327448, "grad_norm": 0.41258591413497925, "learning_rate": 2.838907538646442e-07, "loss": 0.6492, "step": 22760 }, { "epoch": 0.9433047370384186, "grad_norm": 0.422921746969223, "learning_rate": 2.8368353433627586e-07, "loss": 0.6682, "step": 22761 }, { "epoch": 0.9433461809440922, "grad_norm": 0.4034198820590973, "learning_rate": 2.834763148079075e-07, "loss": 0.6427, "step": 22762 }, { "epoch": 0.9433876248497658, "grad_norm": 0.4882027506828308, "learning_rate": 2.8326909527953916e-07, "loss": 0.6904, "step": 22763 }, { "epoch": 0.9434290687554395, "grad_norm": 0.4321634769439697, "learning_rate": 2.830618757511708e-07, "loss": 0.6154, "step": 22764 }, { "epoch": 0.9434705126611131, "grad_norm": 0.41709962487220764, 
"learning_rate": 2.8285465622280246e-07, "loss": 0.7153, "step": 22765 }, { "epoch": 0.9435119565667869, "grad_norm": 0.407401978969574, "learning_rate": 2.826474366944341e-07, "loss": 0.6119, "step": 22766 }, { "epoch": 0.9435534004724605, "grad_norm": 0.3820617198944092, "learning_rate": 2.8244021716606576e-07, "loss": 0.6637, "step": 22767 }, { "epoch": 0.9435948443781342, "grad_norm": 0.41792768239974976, "learning_rate": 2.822329976376974e-07, "loss": 0.646, "step": 22768 }, { "epoch": 0.9436362882838079, "grad_norm": 0.44623687863349915, "learning_rate": 2.8202577810932906e-07, "loss": 0.7092, "step": 22769 }, { "epoch": 0.9436777321894816, "grad_norm": 0.4191036820411682, "learning_rate": 2.818185585809607e-07, "loss": 0.6536, "step": 22770 }, { "epoch": 0.9437191760951552, "grad_norm": 0.43237483501434326, "learning_rate": 2.816113390525923e-07, "loss": 0.6614, "step": 22771 }, { "epoch": 0.9437606200008288, "grad_norm": 0.4314989149570465, "learning_rate": 2.8140411952422396e-07, "loss": 0.6752, "step": 22772 }, { "epoch": 0.9438020639065026, "grad_norm": 0.4296998679637909, "learning_rate": 2.811968999958556e-07, "loss": 0.6774, "step": 22773 }, { "epoch": 0.9438435078121762, "grad_norm": 0.4172271192073822, "learning_rate": 2.8098968046748726e-07, "loss": 0.658, "step": 22774 }, { "epoch": 0.9438849517178499, "grad_norm": 0.4085381031036377, "learning_rate": 2.807824609391189e-07, "loss": 0.646, "step": 22775 }, { "epoch": 0.9439263956235235, "grad_norm": 0.4323885142803192, "learning_rate": 2.8057524141075056e-07, "loss": 0.7078, "step": 22776 }, { "epoch": 0.9439678395291973, "grad_norm": 0.4209352433681488, "learning_rate": 2.803680218823822e-07, "loss": 0.6238, "step": 22777 }, { "epoch": 0.9440092834348709, "grad_norm": 0.4218093752861023, "learning_rate": 2.8016080235401386e-07, "loss": 0.6708, "step": 22778 }, { "epoch": 0.9440507273405446, "grad_norm": 0.45900022983551025, "learning_rate": 2.799535828256455e-07, "loss": 0.6559, "step": 22779 }, { 
"epoch": 0.9440921712462182, "grad_norm": 0.44049981236457825, "learning_rate": 2.7974636329727716e-07, "loss": 0.6567, "step": 22780 }, { "epoch": 0.9441336151518919, "grad_norm": 0.4077194333076477, "learning_rate": 2.795391437689088e-07, "loss": 0.6791, "step": 22781 }, { "epoch": 0.9441750590575656, "grad_norm": 0.3887229859828949, "learning_rate": 2.7933192424054046e-07, "loss": 0.6438, "step": 22782 }, { "epoch": 0.9442165029632392, "grad_norm": 0.42631518840789795, "learning_rate": 2.7912470471217206e-07, "loss": 0.6743, "step": 22783 }, { "epoch": 0.944257946868913, "grad_norm": 0.3999282121658325, "learning_rate": 2.789174851838037e-07, "loss": 0.6665, "step": 22784 }, { "epoch": 0.9442993907745866, "grad_norm": 0.45306265354156494, "learning_rate": 2.7871026565543536e-07, "loss": 0.7222, "step": 22785 }, { "epoch": 0.9443408346802603, "grad_norm": 0.38545554876327515, "learning_rate": 2.78503046127067e-07, "loss": 0.6072, "step": 22786 }, { "epoch": 0.9443822785859339, "grad_norm": 0.44729822874069214, "learning_rate": 2.7829582659869866e-07, "loss": 0.6917, "step": 22787 }, { "epoch": 0.9444237224916077, "grad_norm": 0.4546288251876831, "learning_rate": 2.780886070703303e-07, "loss": 0.7275, "step": 22788 }, { "epoch": 0.9444651663972813, "grad_norm": 0.44708070158958435, "learning_rate": 2.7788138754196196e-07, "loss": 0.729, "step": 22789 }, { "epoch": 0.9445066103029549, "grad_norm": 0.41593965888023376, "learning_rate": 2.776741680135936e-07, "loss": 0.7064, "step": 22790 }, { "epoch": 0.9445480542086286, "grad_norm": 0.46121135354042053, "learning_rate": 2.7746694848522526e-07, "loss": 0.7017, "step": 22791 }, { "epoch": 0.9445894981143023, "grad_norm": 0.3997088670730591, "learning_rate": 2.772597289568569e-07, "loss": 0.6292, "step": 22792 }, { "epoch": 0.944630942019976, "grad_norm": 0.4015962481498718, "learning_rate": 2.7705250942848856e-07, "loss": 0.686, "step": 22793 }, { "epoch": 0.9446723859256496, "grad_norm": 0.4062272310256958, 
"learning_rate": 2.768452899001202e-07, "loss": 0.7415, "step": 22794 }, { "epoch": 0.9447138298313233, "grad_norm": 0.42459723353385925, "learning_rate": 2.7663807037175186e-07, "loss": 0.6528, "step": 22795 }, { "epoch": 0.944755273736997, "grad_norm": 0.41581830382347107, "learning_rate": 2.764308508433835e-07, "loss": 0.6472, "step": 22796 }, { "epoch": 0.9447967176426706, "grad_norm": 0.39395421743392944, "learning_rate": 2.7622363131501516e-07, "loss": 0.601, "step": 22797 }, { "epoch": 0.9448381615483443, "grad_norm": 0.4263230860233307, "learning_rate": 2.760164117866468e-07, "loss": 0.6388, "step": 22798 }, { "epoch": 0.944879605454018, "grad_norm": 0.4168474078178406, "learning_rate": 2.758091922582784e-07, "loss": 0.6658, "step": 22799 }, { "epoch": 0.9449210493596917, "grad_norm": 0.38852357864379883, "learning_rate": 2.7560197272991006e-07, "loss": 0.6072, "step": 22800 }, { "epoch": 0.9449624932653653, "grad_norm": 0.4184841215610504, "learning_rate": 2.753947532015417e-07, "loss": 0.6681, "step": 22801 }, { "epoch": 0.945003937171039, "grad_norm": 0.42349672317504883, "learning_rate": 2.7518753367317336e-07, "loss": 0.6328, "step": 22802 }, { "epoch": 0.9450453810767127, "grad_norm": 0.437913715839386, "learning_rate": 2.74980314144805e-07, "loss": 0.7043, "step": 22803 }, { "epoch": 0.9450868249823864, "grad_norm": 0.42048004269599915, "learning_rate": 2.7477309461643666e-07, "loss": 0.6553, "step": 22804 }, { "epoch": 0.94512826888806, "grad_norm": 0.4124428927898407, "learning_rate": 2.745658750880683e-07, "loss": 0.6924, "step": 22805 }, { "epoch": 0.9451697127937336, "grad_norm": 0.42635539174079895, "learning_rate": 2.7435865555969996e-07, "loss": 0.7002, "step": 22806 }, { "epoch": 0.9452111566994074, "grad_norm": 0.4151478111743927, "learning_rate": 2.741514360313316e-07, "loss": 0.6647, "step": 22807 }, { "epoch": 0.945252600605081, "grad_norm": 0.4213005602359772, "learning_rate": 2.7394421650296326e-07, "loss": 0.725, "step": 22808 }, { 
"epoch": 0.9452940445107547, "grad_norm": 0.4288274645805359, "learning_rate": 2.737369969745949e-07, "loss": 0.6467, "step": 22809 }, { "epoch": 0.9453354884164283, "grad_norm": 0.3863148093223572, "learning_rate": 2.7352977744622657e-07, "loss": 0.6394, "step": 22810 }, { "epoch": 0.9453769323221021, "grad_norm": 0.4247457683086395, "learning_rate": 2.733225579178582e-07, "loss": 0.6707, "step": 22811 }, { "epoch": 0.9454183762277757, "grad_norm": 0.3899017870426178, "learning_rate": 2.7311533838948987e-07, "loss": 0.6265, "step": 22812 }, { "epoch": 0.9454598201334494, "grad_norm": 0.4104098677635193, "learning_rate": 2.729081188611215e-07, "loss": 0.6155, "step": 22813 }, { "epoch": 0.945501264039123, "grad_norm": 0.40642639994621277, "learning_rate": 2.7270089933275317e-07, "loss": 0.6609, "step": 22814 }, { "epoch": 0.9455427079447967, "grad_norm": 0.4625271260738373, "learning_rate": 2.724936798043848e-07, "loss": 0.7, "step": 22815 }, { "epoch": 0.9455841518504704, "grad_norm": 0.40861526131629944, "learning_rate": 2.7228646027601647e-07, "loss": 0.5981, "step": 22816 }, { "epoch": 0.945625595756144, "grad_norm": 0.41150790452957153, "learning_rate": 2.7207924074764806e-07, "loss": 0.6605, "step": 22817 }, { "epoch": 0.9456670396618178, "grad_norm": 0.4292409121990204, "learning_rate": 2.718720212192797e-07, "loss": 0.6953, "step": 22818 }, { "epoch": 0.9457084835674914, "grad_norm": 0.4093433916568756, "learning_rate": 2.7166480169091137e-07, "loss": 0.6442, "step": 22819 }, { "epoch": 0.9457499274731651, "grad_norm": 0.4469381868839264, "learning_rate": 2.71457582162543e-07, "loss": 0.6758, "step": 22820 }, { "epoch": 0.9457913713788387, "grad_norm": 0.4092901945114136, "learning_rate": 2.7125036263417467e-07, "loss": 0.6187, "step": 22821 }, { "epoch": 0.9458328152845125, "grad_norm": 0.43391701579093933, "learning_rate": 2.710431431058063e-07, "loss": 0.7025, "step": 22822 }, { "epoch": 0.9458742591901861, "grad_norm": 0.4669155776500702, 
"learning_rate": 2.7083592357743797e-07, "loss": 0.6602, "step": 22823 }, { "epoch": 0.9459157030958597, "grad_norm": 0.39093348383903503, "learning_rate": 2.706287040490696e-07, "loss": 0.5902, "step": 22824 }, { "epoch": 0.9459571470015334, "grad_norm": 0.428759902715683, "learning_rate": 2.7042148452070127e-07, "loss": 0.7198, "step": 22825 }, { "epoch": 0.9459985909072071, "grad_norm": 0.485566645860672, "learning_rate": 2.702142649923329e-07, "loss": 0.7222, "step": 22826 }, { "epoch": 0.9460400348128808, "grad_norm": 0.39952725172042847, "learning_rate": 2.7000704546396457e-07, "loss": 0.6692, "step": 22827 }, { "epoch": 0.9460814787185544, "grad_norm": 0.5552158355712891, "learning_rate": 2.697998259355962e-07, "loss": 0.6833, "step": 22828 }, { "epoch": 0.9461229226242281, "grad_norm": 0.4321841597557068, "learning_rate": 2.6959260640722787e-07, "loss": 0.6433, "step": 22829 }, { "epoch": 0.9461643665299018, "grad_norm": 0.4281070828437805, "learning_rate": 2.693853868788595e-07, "loss": 0.6023, "step": 22830 }, { "epoch": 0.9462058104355755, "grad_norm": 0.4176303446292877, "learning_rate": 2.6917816735049117e-07, "loss": 0.5981, "step": 22831 }, { "epoch": 0.9462472543412491, "grad_norm": 0.4318159818649292, "learning_rate": 2.6897094782212277e-07, "loss": 0.6721, "step": 22832 }, { "epoch": 0.9462886982469227, "grad_norm": 0.4278686046600342, "learning_rate": 2.687637282937544e-07, "loss": 0.6924, "step": 22833 }, { "epoch": 0.9463301421525965, "grad_norm": 0.4061988592147827, "learning_rate": 2.6855650876538607e-07, "loss": 0.6475, "step": 22834 }, { "epoch": 0.9463715860582701, "grad_norm": 0.46817705035209656, "learning_rate": 2.683492892370177e-07, "loss": 0.672, "step": 22835 }, { "epoch": 0.9464130299639438, "grad_norm": 0.42470812797546387, "learning_rate": 2.6814206970864937e-07, "loss": 0.687, "step": 22836 }, { "epoch": 0.9464544738696175, "grad_norm": 0.44553080201148987, "learning_rate": 2.67934850180281e-07, "loss": 0.634, "step": 22837 }, { 
"epoch": 0.9464959177752912, "grad_norm": 0.43136778473854065, "learning_rate": 2.6772763065191267e-07, "loss": 0.6805, "step": 22838 }, { "epoch": 0.9465373616809648, "grad_norm": 0.4268726408481598, "learning_rate": 2.675204111235443e-07, "loss": 0.7079, "step": 22839 }, { "epoch": 0.9465788055866385, "grad_norm": 0.392160564661026, "learning_rate": 2.6731319159517597e-07, "loss": 0.5809, "step": 22840 }, { "epoch": 0.9466202494923122, "grad_norm": 0.3885416090488434, "learning_rate": 2.671059720668076e-07, "loss": 0.6221, "step": 22841 }, { "epoch": 0.9466616933979858, "grad_norm": 0.4072680175304413, "learning_rate": 2.6689875253843927e-07, "loss": 0.5942, "step": 22842 }, { "epoch": 0.9467031373036595, "grad_norm": 0.4582754969596863, "learning_rate": 2.666915330100709e-07, "loss": 0.663, "step": 22843 }, { "epoch": 0.9467445812093331, "grad_norm": 0.4269886016845703, "learning_rate": 2.6648431348170257e-07, "loss": 0.6903, "step": 22844 }, { "epoch": 0.9467860251150069, "grad_norm": 0.4284595847129822, "learning_rate": 2.6627709395333417e-07, "loss": 0.6603, "step": 22845 }, { "epoch": 0.9468274690206805, "grad_norm": 0.42095744609832764, "learning_rate": 2.660698744249658e-07, "loss": 0.6538, "step": 22846 }, { "epoch": 0.9468689129263542, "grad_norm": 0.4087023437023163, "learning_rate": 2.6586265489659747e-07, "loss": 0.6884, "step": 22847 }, { "epoch": 0.9469103568320278, "grad_norm": 0.4188758134841919, "learning_rate": 2.656554353682291e-07, "loss": 0.6233, "step": 22848 }, { "epoch": 0.9469518007377015, "grad_norm": 0.41798171401023865, "learning_rate": 2.6544821583986077e-07, "loss": 0.6653, "step": 22849 }, { "epoch": 0.9469932446433752, "grad_norm": 0.43697234988212585, "learning_rate": 2.652409963114924e-07, "loss": 0.6388, "step": 22850 }, { "epoch": 0.9470346885490488, "grad_norm": 0.47930365800857544, "learning_rate": 2.6503377678312407e-07, "loss": 0.7336, "step": 22851 }, { "epoch": 0.9470761324547226, "grad_norm": 0.40789684653282166, 
"learning_rate": 2.648265572547557e-07, "loss": 0.6978, "step": 22852 }, { "epoch": 0.9471175763603962, "grad_norm": 0.41941335797309875, "learning_rate": 2.6461933772638737e-07, "loss": 0.7112, "step": 22853 }, { "epoch": 0.9471590202660699, "grad_norm": 0.4139295518398285, "learning_rate": 2.64412118198019e-07, "loss": 0.624, "step": 22854 }, { "epoch": 0.9472004641717435, "grad_norm": 0.3950515687465668, "learning_rate": 2.6420489866965067e-07, "loss": 0.6213, "step": 22855 }, { "epoch": 0.9472419080774173, "grad_norm": 0.40832066535949707, "learning_rate": 2.639976791412823e-07, "loss": 0.671, "step": 22856 }, { "epoch": 0.9472833519830909, "grad_norm": 0.4133493900299072, "learning_rate": 2.637904596129139e-07, "loss": 0.6117, "step": 22857 }, { "epoch": 0.9473247958887645, "grad_norm": 0.40054136514663696, "learning_rate": 2.6358324008454557e-07, "loss": 0.6437, "step": 22858 }, { "epoch": 0.9473662397944382, "grad_norm": 0.42023786902427673, "learning_rate": 2.633760205561772e-07, "loss": 0.7009, "step": 22859 }, { "epoch": 0.9474076837001119, "grad_norm": 0.4199494421482086, "learning_rate": 2.6316880102780887e-07, "loss": 0.6765, "step": 22860 }, { "epoch": 0.9474491276057856, "grad_norm": 0.4012482166290283, "learning_rate": 2.629615814994405e-07, "loss": 0.6699, "step": 22861 }, { "epoch": 0.9474905715114592, "grad_norm": 0.3931578993797302, "learning_rate": 2.6275436197107217e-07, "loss": 0.668, "step": 22862 }, { "epoch": 0.947532015417133, "grad_norm": 0.41210150718688965, "learning_rate": 2.625471424427038e-07, "loss": 0.7166, "step": 22863 }, { "epoch": 0.9475734593228066, "grad_norm": 0.4258553981781006, "learning_rate": 2.6233992291433547e-07, "loss": 0.7047, "step": 22864 }, { "epoch": 0.9476149032284803, "grad_norm": 0.40003249049186707, "learning_rate": 2.621327033859671e-07, "loss": 0.6443, "step": 22865 }, { "epoch": 0.9476563471341539, "grad_norm": 0.4364911913871765, "learning_rate": 2.6192548385759877e-07, "loss": 0.6741, "step": 22866 }, 
{ "epoch": 0.9476977910398275, "grad_norm": 0.44728341698646545, "learning_rate": 2.617182643292304e-07, "loss": 0.6799, "step": 22867 }, { "epoch": 0.9477392349455013, "grad_norm": 0.43094602227211, "learning_rate": 2.615110448008621e-07, "loss": 0.6697, "step": 22868 }, { "epoch": 0.9477806788511749, "grad_norm": 0.4405622184276581, "learning_rate": 2.6130382527249367e-07, "loss": 0.6689, "step": 22869 }, { "epoch": 0.9478221227568486, "grad_norm": 0.4337182343006134, "learning_rate": 2.610966057441253e-07, "loss": 0.6938, "step": 22870 }, { "epoch": 0.9478635666625223, "grad_norm": 0.4185543656349182, "learning_rate": 2.6088938621575697e-07, "loss": 0.6766, "step": 22871 }, { "epoch": 0.947905010568196, "grad_norm": 0.42378294467926025, "learning_rate": 2.606821666873886e-07, "loss": 0.6145, "step": 22872 }, { "epoch": 0.9479464544738696, "grad_norm": 0.36861488223075867, "learning_rate": 2.6047494715902027e-07, "loss": 0.6483, "step": 22873 }, { "epoch": 0.9479878983795433, "grad_norm": 0.4233914315700531, "learning_rate": 2.602677276306519e-07, "loss": 0.7084, "step": 22874 }, { "epoch": 0.948029342285217, "grad_norm": 0.40444132685661316, "learning_rate": 2.6006050810228357e-07, "loss": 0.6547, "step": 22875 }, { "epoch": 0.9480707861908906, "grad_norm": 0.4329718053340912, "learning_rate": 2.598532885739152e-07, "loss": 0.667, "step": 22876 }, { "epoch": 0.9481122300965643, "grad_norm": 0.40779465436935425, "learning_rate": 2.596460690455469e-07, "loss": 0.6305, "step": 22877 }, { "epoch": 0.9481536740022379, "grad_norm": 0.3921043276786804, "learning_rate": 2.594388495171785e-07, "loss": 0.6624, "step": 22878 }, { "epoch": 0.9481951179079117, "grad_norm": 0.4168822169303894, "learning_rate": 2.592316299888102e-07, "loss": 0.681, "step": 22879 }, { "epoch": 0.9482365618135853, "grad_norm": 0.42491114139556885, "learning_rate": 2.590244104604418e-07, "loss": 0.6523, "step": 22880 }, { "epoch": 0.948278005719259, "grad_norm": 0.4007750153541565, 
"learning_rate": 2.588171909320735e-07, "loss": 0.6335, "step": 22881 }, { "epoch": 0.9483194496249326, "grad_norm": 0.4305400848388672, "learning_rate": 2.5860997140370507e-07, "loss": 0.7456, "step": 22882 }, { "epoch": 0.9483608935306064, "grad_norm": 0.4000861644744873, "learning_rate": 2.584027518753367e-07, "loss": 0.6755, "step": 22883 }, { "epoch": 0.94840233743628, "grad_norm": 0.43147340416908264, "learning_rate": 2.5819553234696837e-07, "loss": 0.7332, "step": 22884 }, { "epoch": 0.9484437813419536, "grad_norm": 0.47997432947158813, "learning_rate": 2.579883128186e-07, "loss": 0.6685, "step": 22885 }, { "epoch": 0.9484852252476274, "grad_norm": 0.40064743161201477, "learning_rate": 2.577810932902317e-07, "loss": 0.6028, "step": 22886 }, { "epoch": 0.948526669153301, "grad_norm": 0.4713772237300873, "learning_rate": 2.575738737618633e-07, "loss": 0.7024, "step": 22887 }, { "epoch": 0.9485681130589747, "grad_norm": 0.3695751130580902, "learning_rate": 2.57366654233495e-07, "loss": 0.6147, "step": 22888 }, { "epoch": 0.9486095569646483, "grad_norm": 0.40204527974128723, "learning_rate": 2.571594347051266e-07, "loss": 0.6278, "step": 22889 }, { "epoch": 0.9486510008703221, "grad_norm": 0.3713224530220032, "learning_rate": 2.569522151767583e-07, "loss": 0.598, "step": 22890 }, { "epoch": 0.9486924447759957, "grad_norm": 0.44002947211265564, "learning_rate": 2.567449956483899e-07, "loss": 0.6479, "step": 22891 }, { "epoch": 0.9487338886816694, "grad_norm": 0.3893745243549347, "learning_rate": 2.565377761200216e-07, "loss": 0.6149, "step": 22892 }, { "epoch": 0.948775332587343, "grad_norm": 0.4052067697048187, "learning_rate": 2.5633055659165323e-07, "loss": 0.6476, "step": 22893 }, { "epoch": 0.9488167764930167, "grad_norm": 0.4145209491252899, "learning_rate": 2.561233370632848e-07, "loss": 0.7284, "step": 22894 }, { "epoch": 0.9488582203986904, "grad_norm": 0.3993420898914337, "learning_rate": 2.559161175349165e-07, "loss": 0.6791, "step": 22895 }, { 
"epoch": 0.948899664304364, "grad_norm": 0.42741960287094116, "learning_rate": 2.557088980065481e-07, "loss": 0.6432, "step": 22896 }, { "epoch": 0.9489411082100377, "grad_norm": 0.43356314301490784, "learning_rate": 2.555016784781798e-07, "loss": 0.714, "step": 22897 }, { "epoch": 0.9489825521157114, "grad_norm": 0.4041145443916321, "learning_rate": 2.552944589498114e-07, "loss": 0.6562, "step": 22898 }, { "epoch": 0.9490239960213851, "grad_norm": 0.40037861466407776, "learning_rate": 2.550872394214431e-07, "loss": 0.615, "step": 22899 }, { "epoch": 0.9490654399270587, "grad_norm": 0.4392162263393402, "learning_rate": 2.548800198930747e-07, "loss": 0.6332, "step": 22900 }, { "epoch": 0.9491068838327325, "grad_norm": 0.40276139974594116, "learning_rate": 2.546728003647064e-07, "loss": 0.6669, "step": 22901 }, { "epoch": 0.9491483277384061, "grad_norm": 0.3904343247413635, "learning_rate": 2.5446558083633803e-07, "loss": 0.6271, "step": 22902 }, { "epoch": 0.9491897716440797, "grad_norm": 0.4627732038497925, "learning_rate": 2.542583613079697e-07, "loss": 0.6963, "step": 22903 }, { "epoch": 0.9492312155497534, "grad_norm": 0.39827394485473633, "learning_rate": 2.5405114177960133e-07, "loss": 0.615, "step": 22904 }, { "epoch": 0.949272659455427, "grad_norm": 0.4118174612522125, "learning_rate": 2.53843922251233e-07, "loss": 0.6204, "step": 22905 }, { "epoch": 0.9493141033611008, "grad_norm": 0.4250461459159851, "learning_rate": 2.5363670272286463e-07, "loss": 0.6661, "step": 22906 }, { "epoch": 0.9493555472667744, "grad_norm": 0.4134157598018646, "learning_rate": 2.534294831944963e-07, "loss": 0.6985, "step": 22907 }, { "epoch": 0.9493969911724481, "grad_norm": 0.4255031645298004, "learning_rate": 2.5322226366612793e-07, "loss": 0.6219, "step": 22908 }, { "epoch": 0.9494384350781218, "grad_norm": 0.42101195454597473, "learning_rate": 2.530150441377596e-07, "loss": 0.6514, "step": 22909 }, { "epoch": 0.9494798789837954, "grad_norm": 0.43490487337112427, 
"learning_rate": 2.5280782460939123e-07, "loss": 0.6541, "step": 22910 }, { "epoch": 0.9495213228894691, "grad_norm": 0.4604805111885071, "learning_rate": 2.526006050810229e-07, "loss": 0.6451, "step": 22911 }, { "epoch": 0.9495627667951427, "grad_norm": 0.3938830494880676, "learning_rate": 2.523933855526545e-07, "loss": 0.6542, "step": 22912 }, { "epoch": 0.9496042107008165, "grad_norm": 0.42697417736053467, "learning_rate": 2.5218616602428613e-07, "loss": 0.6995, "step": 22913 }, { "epoch": 0.9496456546064901, "grad_norm": 0.4559043347835541, "learning_rate": 2.519789464959178e-07, "loss": 0.6899, "step": 22914 }, { "epoch": 0.9496870985121638, "grad_norm": 0.41328009963035583, "learning_rate": 2.5177172696754943e-07, "loss": 0.6429, "step": 22915 }, { "epoch": 0.9497285424178374, "grad_norm": 0.4519127905368805, "learning_rate": 2.515645074391811e-07, "loss": 0.6907, "step": 22916 }, { "epoch": 0.9497699863235112, "grad_norm": 0.4523499011993408, "learning_rate": 2.5135728791081273e-07, "loss": 0.7476, "step": 22917 }, { "epoch": 0.9498114302291848, "grad_norm": 0.4186107814311981, "learning_rate": 2.511500683824444e-07, "loss": 0.6885, "step": 22918 }, { "epoch": 0.9498528741348584, "grad_norm": 0.4260886311531067, "learning_rate": 2.5094284885407603e-07, "loss": 0.7083, "step": 22919 }, { "epoch": 0.9498943180405321, "grad_norm": 0.417978972196579, "learning_rate": 2.507356293257077e-07, "loss": 0.6451, "step": 22920 }, { "epoch": 0.9499357619462058, "grad_norm": 0.4097512662410736, "learning_rate": 2.5052840979733933e-07, "loss": 0.6539, "step": 22921 }, { "epoch": 0.9499772058518795, "grad_norm": 0.40072301030158997, "learning_rate": 2.50321190268971e-07, "loss": 0.6523, "step": 22922 }, { "epoch": 0.9500186497575531, "grad_norm": 0.4339374303817749, "learning_rate": 2.5011397074060263e-07, "loss": 0.6506, "step": 22923 }, { "epoch": 0.9500600936632269, "grad_norm": 0.4084415137767792, "learning_rate": 2.499067512122343e-07, "loss": 0.6705, "step": 22924 }, 
{ "epoch": 0.9501015375689005, "grad_norm": 0.4454505741596222, "learning_rate": 2.4969953168386593e-07, "loss": 0.6506, "step": 22925 }, { "epoch": 0.9501429814745742, "grad_norm": 0.40532633662223816, "learning_rate": 2.494923121554976e-07, "loss": 0.6613, "step": 22926 }, { "epoch": 0.9501844253802478, "grad_norm": 0.37040647864341736, "learning_rate": 2.4928509262712923e-07, "loss": 0.6329, "step": 22927 }, { "epoch": 0.9502258692859215, "grad_norm": 0.3976791799068451, "learning_rate": 2.490778730987609e-07, "loss": 0.6458, "step": 22928 }, { "epoch": 0.9502673131915952, "grad_norm": 0.3973597288131714, "learning_rate": 2.4887065357039253e-07, "loss": 0.614, "step": 22929 }, { "epoch": 0.9503087570972688, "grad_norm": 0.41508814692497253, "learning_rate": 2.4866343404202413e-07, "loss": 0.6534, "step": 22930 }, { "epoch": 0.9503502010029425, "grad_norm": 0.4126274287700653, "learning_rate": 2.484562145136558e-07, "loss": 0.6375, "step": 22931 }, { "epoch": 0.9503916449086162, "grad_norm": 0.43353089690208435, "learning_rate": 2.4824899498528743e-07, "loss": 0.7323, "step": 22932 }, { "epoch": 0.9504330888142899, "grad_norm": 0.4166928231716156, "learning_rate": 2.480417754569191e-07, "loss": 0.6677, "step": 22933 }, { "epoch": 0.9504745327199635, "grad_norm": 0.4163173735141754, "learning_rate": 2.4783455592855073e-07, "loss": 0.6666, "step": 22934 }, { "epoch": 0.9505159766256372, "grad_norm": 0.44188305735588074, "learning_rate": 2.476273364001824e-07, "loss": 0.6826, "step": 22935 }, { "epoch": 0.9505574205313109, "grad_norm": 0.4279739260673523, "learning_rate": 2.4742011687181403e-07, "loss": 0.6399, "step": 22936 }, { "epoch": 0.9505988644369845, "grad_norm": 0.4257028102874756, "learning_rate": 2.472128973434457e-07, "loss": 0.6783, "step": 22937 }, { "epoch": 0.9506403083426582, "grad_norm": 0.4130859076976776, "learning_rate": 2.4700567781507733e-07, "loss": 0.699, "step": 22938 }, { "epoch": 0.9506817522483318, "grad_norm": 0.39376822113990784, 
"learning_rate": 2.46798458286709e-07, "loss": 0.6729, "step": 22939 }, { "epoch": 0.9507231961540056, "grad_norm": 0.42714056372642517, "learning_rate": 2.4659123875834063e-07, "loss": 0.642, "step": 22940 }, { "epoch": 0.9507646400596792, "grad_norm": 0.43861863017082214, "learning_rate": 2.463840192299723e-07, "loss": 0.7269, "step": 22941 }, { "epoch": 0.9508060839653529, "grad_norm": 0.42031535506248474, "learning_rate": 2.4617679970160394e-07, "loss": 0.6306, "step": 22942 }, { "epoch": 0.9508475278710266, "grad_norm": 0.3881395161151886, "learning_rate": 2.4596958017323553e-07, "loss": 0.6467, "step": 22943 }, { "epoch": 0.9508889717767003, "grad_norm": 0.46742671728134155, "learning_rate": 2.457623606448672e-07, "loss": 0.7313, "step": 22944 }, { "epoch": 0.9509304156823739, "grad_norm": 0.47064971923828125, "learning_rate": 2.4555514111649883e-07, "loss": 0.6967, "step": 22945 }, { "epoch": 0.9509718595880475, "grad_norm": 0.3907201886177063, "learning_rate": 2.453479215881305e-07, "loss": 0.6646, "step": 22946 }, { "epoch": 0.9510133034937213, "grad_norm": 0.4067206382751465, "learning_rate": 2.4514070205976213e-07, "loss": 0.675, "step": 22947 }, { "epoch": 0.9510547473993949, "grad_norm": 0.4150945246219635, "learning_rate": 2.449334825313938e-07, "loss": 0.6254, "step": 22948 }, { "epoch": 0.9510961913050686, "grad_norm": 0.4254530668258667, "learning_rate": 2.4472626300302543e-07, "loss": 0.6643, "step": 22949 }, { "epoch": 0.9511376352107422, "grad_norm": 0.44320616126060486, "learning_rate": 2.445190434746571e-07, "loss": 0.7347, "step": 22950 }, { "epoch": 0.951179079116416, "grad_norm": 0.40031999349594116, "learning_rate": 2.4431182394628874e-07, "loss": 0.6725, "step": 22951 }, { "epoch": 0.9512205230220896, "grad_norm": 0.41472044587135315, "learning_rate": 2.441046044179204e-07, "loss": 0.6758, "step": 22952 }, { "epoch": 0.9512619669277633, "grad_norm": 0.45670685172080994, "learning_rate": 2.4389738488955204e-07, "loss": 0.698, "step": 22953 
}, { "epoch": 0.951303410833437, "grad_norm": 0.4573492705821991, "learning_rate": 2.436901653611837e-07, "loss": 0.6726, "step": 22954 }, { "epoch": 0.9513448547391106, "grad_norm": 0.4401511251926422, "learning_rate": 2.434829458328153e-07, "loss": 0.692, "step": 22955 }, { "epoch": 0.9513862986447843, "grad_norm": 0.4167815148830414, "learning_rate": 2.4327572630444693e-07, "loss": 0.6638, "step": 22956 }, { "epoch": 0.9514277425504579, "grad_norm": 0.44382917881011963, "learning_rate": 2.430685067760786e-07, "loss": 0.6699, "step": 22957 }, { "epoch": 0.9514691864561317, "grad_norm": 0.4075579047203064, "learning_rate": 2.4286128724771023e-07, "loss": 0.6155, "step": 22958 }, { "epoch": 0.9515106303618053, "grad_norm": 0.436689555644989, "learning_rate": 2.426540677193419e-07, "loss": 0.738, "step": 22959 }, { "epoch": 0.951552074267479, "grad_norm": 0.4287460446357727, "learning_rate": 2.4244684819097354e-07, "loss": 0.6866, "step": 22960 }, { "epoch": 0.9515935181731526, "grad_norm": 0.419484406709671, "learning_rate": 2.422396286626052e-07, "loss": 0.6858, "step": 22961 }, { "epoch": 0.9516349620788264, "grad_norm": 0.40869638323783875, "learning_rate": 2.4203240913423684e-07, "loss": 0.666, "step": 22962 }, { "epoch": 0.9516764059845, "grad_norm": 0.4054705500602722, "learning_rate": 2.418251896058685e-07, "loss": 0.6129, "step": 22963 }, { "epoch": 0.9517178498901736, "grad_norm": 0.435607373714447, "learning_rate": 2.4161797007750014e-07, "loss": 0.6481, "step": 22964 }, { "epoch": 0.9517592937958473, "grad_norm": 0.3902180790901184, "learning_rate": 2.414107505491318e-07, "loss": 0.634, "step": 22965 }, { "epoch": 0.951800737701521, "grad_norm": 0.447815865278244, "learning_rate": 2.4120353102076344e-07, "loss": 0.6857, "step": 22966 }, { "epoch": 0.9518421816071947, "grad_norm": 0.4189189076423645, "learning_rate": 2.409963114923951e-07, "loss": 0.6213, "step": 22967 }, { "epoch": 0.9518836255128683, "grad_norm": 0.4183293282985687, "learning_rate": 
2.407890919640267e-07, "loss": 0.6366, "step": 22968 }, { "epoch": 0.951925069418542, "grad_norm": 0.4004623591899872, "learning_rate": 2.4058187243565834e-07, "loss": 0.6526, "step": 22969 }, { "epoch": 0.9519665133242157, "grad_norm": 0.38660067319869995, "learning_rate": 2.4037465290729e-07, "loss": 0.6337, "step": 22970 }, { "epoch": 0.9520079572298893, "grad_norm": 0.40654176473617554, "learning_rate": 2.4016743337892164e-07, "loss": 0.6285, "step": 22971 }, { "epoch": 0.952049401135563, "grad_norm": 0.42269065976142883, "learning_rate": 2.399602138505533e-07, "loss": 0.6378, "step": 22972 }, { "epoch": 0.9520908450412366, "grad_norm": 0.3705494999885559, "learning_rate": 2.3975299432218494e-07, "loss": 0.6172, "step": 22973 }, { "epoch": 0.9521322889469104, "grad_norm": 0.41681766510009766, "learning_rate": 2.395457747938166e-07, "loss": 0.6454, "step": 22974 }, { "epoch": 0.952173732852584, "grad_norm": 0.40389660000801086, "learning_rate": 2.3933855526544824e-07, "loss": 0.7, "step": 22975 }, { "epoch": 0.9522151767582577, "grad_norm": 0.4558272659778595, "learning_rate": 2.391313357370799e-07, "loss": 0.6421, "step": 22976 }, { "epoch": 0.9522566206639314, "grad_norm": 0.49605125188827515, "learning_rate": 2.3892411620871154e-07, "loss": 0.7195, "step": 22977 }, { "epoch": 0.9522980645696051, "grad_norm": 0.395453542470932, "learning_rate": 2.387168966803432e-07, "loss": 0.6638, "step": 22978 }, { "epoch": 0.9523395084752787, "grad_norm": 0.39343881607055664, "learning_rate": 2.3850967715197484e-07, "loss": 0.6213, "step": 22979 }, { "epoch": 0.9523809523809523, "grad_norm": 0.45467105507850647, "learning_rate": 2.3830245762360646e-07, "loss": 0.6516, "step": 22980 }, { "epoch": 0.9524223962866261, "grad_norm": 0.45640087127685547, "learning_rate": 2.3809523809523811e-07, "loss": 0.71, "step": 22981 }, { "epoch": 0.9524638401922997, "grad_norm": 0.44020524621009827, "learning_rate": 2.3788801856686976e-07, "loss": 0.7397, "step": 22982 }, { "epoch": 
0.9525052840979734, "grad_norm": 0.42731040716171265, "learning_rate": 2.376807990385014e-07, "loss": 0.6517, "step": 22983 }, { "epoch": 0.952546728003647, "grad_norm": 0.4042796492576599, "learning_rate": 2.3747357951013304e-07, "loss": 0.6093, "step": 22984 }, { "epoch": 0.9525881719093208, "grad_norm": 0.40690121054649353, "learning_rate": 2.372663599817647e-07, "loss": 0.6555, "step": 22985 }, { "epoch": 0.9526296158149944, "grad_norm": 0.4129328429698944, "learning_rate": 2.3705914045339634e-07, "loss": 0.6495, "step": 22986 }, { "epoch": 0.9526710597206681, "grad_norm": 0.41668522357940674, "learning_rate": 2.36851920925028e-07, "loss": 0.6602, "step": 22987 }, { "epoch": 0.9527125036263417, "grad_norm": 0.40695318579673767, "learning_rate": 2.3664470139665964e-07, "loss": 0.6689, "step": 22988 }, { "epoch": 0.9527539475320154, "grad_norm": 0.4231056272983551, "learning_rate": 2.364374818682913e-07, "loss": 0.6636, "step": 22989 }, { "epoch": 0.9527953914376891, "grad_norm": 0.4279882311820984, "learning_rate": 2.3623026233992294e-07, "loss": 0.6952, "step": 22990 }, { "epoch": 0.9528368353433627, "grad_norm": 0.47367140650749207, "learning_rate": 2.360230428115546e-07, "loss": 0.6721, "step": 22991 }, { "epoch": 0.9528782792490365, "grad_norm": 0.39083296060562134, "learning_rate": 2.3581582328318621e-07, "loss": 0.6801, "step": 22992 }, { "epoch": 0.9529197231547101, "grad_norm": 0.4076195955276489, "learning_rate": 2.3560860375481786e-07, "loss": 0.6808, "step": 22993 }, { "epoch": 0.9529611670603838, "grad_norm": 0.3925579786300659, "learning_rate": 2.3540138422644952e-07, "loss": 0.613, "step": 22994 }, { "epoch": 0.9530026109660574, "grad_norm": 0.418381005525589, "learning_rate": 2.3519416469808117e-07, "loss": 0.6672, "step": 22995 }, { "epoch": 0.9530440548717312, "grad_norm": 0.4180576205253601, "learning_rate": 2.3498694516971282e-07, "loss": 0.65, "step": 22996 }, { "epoch": 0.9530854987774048, "grad_norm": 0.44758865237236023, "learning_rate": 
2.3477972564134447e-07, "loss": 0.6959, "step": 22997 }, { "epoch": 0.9531269426830784, "grad_norm": 0.41935187578201294, "learning_rate": 2.3457250611297612e-07, "loss": 0.6936, "step": 22998 }, { "epoch": 0.9531683865887521, "grad_norm": 0.41272836923599243, "learning_rate": 2.3436528658460777e-07, "loss": 0.6594, "step": 22999 }, { "epoch": 0.9532098304944258, "grad_norm": 0.4224584698677063, "learning_rate": 2.3415806705623942e-07, "loss": 0.6449, "step": 23000 }, { "epoch": 0.9532512744000995, "grad_norm": 0.4248402416706085, "learning_rate": 2.3395084752787104e-07, "loss": 0.6957, "step": 23001 }, { "epoch": 0.9532927183057731, "grad_norm": 0.4413665235042572, "learning_rate": 2.337436279995027e-07, "loss": 0.7336, "step": 23002 }, { "epoch": 0.9533341622114468, "grad_norm": 0.4118202030658722, "learning_rate": 2.3353640847113434e-07, "loss": 0.6589, "step": 23003 }, { "epoch": 0.9533756061171205, "grad_norm": 0.4617410898208618, "learning_rate": 2.33329188942766e-07, "loss": 0.687, "step": 23004 }, { "epoch": 0.9534170500227942, "grad_norm": 0.3688286244869232, "learning_rate": 2.3312196941439762e-07, "loss": 0.6215, "step": 23005 }, { "epoch": 0.9534584939284678, "grad_norm": 0.4183925688266754, "learning_rate": 2.3291474988602927e-07, "loss": 0.6309, "step": 23006 }, { "epoch": 0.9534999378341414, "grad_norm": 0.4117042124271393, "learning_rate": 2.3270753035766092e-07, "loss": 0.6705, "step": 23007 }, { "epoch": 0.9535413817398152, "grad_norm": 0.4411187469959259, "learning_rate": 2.3250031082929257e-07, "loss": 0.6984, "step": 23008 }, { "epoch": 0.9535828256454888, "grad_norm": 0.4096396565437317, "learning_rate": 2.3229309130092422e-07, "loss": 0.6514, "step": 23009 }, { "epoch": 0.9536242695511625, "grad_norm": 0.40306371450424194, "learning_rate": 2.3208587177255587e-07, "loss": 0.625, "step": 23010 }, { "epoch": 0.9536657134568362, "grad_norm": 0.457781583070755, "learning_rate": 2.3187865224418752e-07, "loss": 0.699, "step": 23011 }, { "epoch": 
0.9537071573625099, "grad_norm": 0.38474053144454956, "learning_rate": 2.3167143271581917e-07, "loss": 0.6302, "step": 23012 }, { "epoch": 0.9537486012681835, "grad_norm": 0.38026633858680725, "learning_rate": 2.3146421318745082e-07, "loss": 0.6575, "step": 23013 }, { "epoch": 0.9537900451738572, "grad_norm": 0.4118310511112213, "learning_rate": 2.3125699365908247e-07, "loss": 0.6543, "step": 23014 }, { "epoch": 0.9538314890795309, "grad_norm": 0.40507233142852783, "learning_rate": 2.3104977413071412e-07, "loss": 0.6051, "step": 23015 }, { "epoch": 0.9538729329852045, "grad_norm": 0.4533459544181824, "learning_rate": 2.3084255460234577e-07, "loss": 0.7201, "step": 23016 }, { "epoch": 0.9539143768908782, "grad_norm": 0.4498926103115082, "learning_rate": 2.3063533507397737e-07, "loss": 0.6963, "step": 23017 }, { "epoch": 0.9539558207965518, "grad_norm": 0.4431069791316986, "learning_rate": 2.3042811554560902e-07, "loss": 0.6622, "step": 23018 }, { "epoch": 0.9539972647022256, "grad_norm": 0.4086289703845978, "learning_rate": 2.3022089601724067e-07, "loss": 0.6536, "step": 23019 }, { "epoch": 0.9540387086078992, "grad_norm": 0.4287601113319397, "learning_rate": 2.3001367648887232e-07, "loss": 0.6681, "step": 23020 }, { "epoch": 0.9540801525135729, "grad_norm": 0.48039767146110535, "learning_rate": 2.2980645696050397e-07, "loss": 0.6984, "step": 23021 }, { "epoch": 0.9541215964192465, "grad_norm": 0.42725619673728943, "learning_rate": 2.2959923743213562e-07, "loss": 0.6406, "step": 23022 }, { "epoch": 0.9541630403249203, "grad_norm": 0.4328678250312805, "learning_rate": 2.2939201790376727e-07, "loss": 0.6418, "step": 23023 }, { "epoch": 0.9542044842305939, "grad_norm": 0.4361988306045532, "learning_rate": 2.2918479837539892e-07, "loss": 0.6851, "step": 23024 }, { "epoch": 0.9542459281362675, "grad_norm": 0.43480053544044495, "learning_rate": 2.2897757884703057e-07, "loss": 0.6946, "step": 23025 }, { "epoch": 0.9542873720419413, "grad_norm": 0.40493062138557434, 
"learning_rate": 2.2877035931866222e-07, "loss": 0.6211, "step": 23026 }, { "epoch": 0.9543288159476149, "grad_norm": 0.41657501459121704, "learning_rate": 2.2856313979029387e-07, "loss": 0.6969, "step": 23027 }, { "epoch": 0.9543702598532886, "grad_norm": 0.4044494032859802, "learning_rate": 2.2835592026192552e-07, "loss": 0.6205, "step": 23028 }, { "epoch": 0.9544117037589622, "grad_norm": 0.43438801169395447, "learning_rate": 2.2814870073355715e-07, "loss": 0.6586, "step": 23029 }, { "epoch": 0.954453147664636, "grad_norm": 0.390813946723938, "learning_rate": 2.2794148120518877e-07, "loss": 0.6815, "step": 23030 }, { "epoch": 0.9544945915703096, "grad_norm": 0.4380500316619873, "learning_rate": 2.2773426167682042e-07, "loss": 0.6812, "step": 23031 }, { "epoch": 0.9545360354759832, "grad_norm": 0.4174291789531708, "learning_rate": 2.2752704214845207e-07, "loss": 0.6625, "step": 23032 }, { "epoch": 0.9545774793816569, "grad_norm": 0.4351099729537964, "learning_rate": 2.2731982262008372e-07, "loss": 0.6438, "step": 23033 }, { "epoch": 0.9546189232873306, "grad_norm": 0.4168747365474701, "learning_rate": 2.2711260309171537e-07, "loss": 0.6958, "step": 23034 }, { "epoch": 0.9546603671930043, "grad_norm": 0.425367146730423, "learning_rate": 2.2690538356334702e-07, "loss": 0.6633, "step": 23035 }, { "epoch": 0.9547018110986779, "grad_norm": 0.4160795211791992, "learning_rate": 2.2669816403497867e-07, "loss": 0.6683, "step": 23036 }, { "epoch": 0.9547432550043516, "grad_norm": 0.4083781838417053, "learning_rate": 2.2649094450661032e-07, "loss": 0.6843, "step": 23037 }, { "epoch": 0.9547846989100253, "grad_norm": 0.427225261926651, "learning_rate": 2.2628372497824197e-07, "loss": 0.7087, "step": 23038 }, { "epoch": 0.954826142815699, "grad_norm": 0.4449876546859741, "learning_rate": 2.2607650544987362e-07, "loss": 0.6279, "step": 23039 }, { "epoch": 0.9548675867213726, "grad_norm": 0.4162338674068451, "learning_rate": 2.2586928592150527e-07, "loss": 0.7003, "step": 23040 
}, { "epoch": 0.9549090306270462, "grad_norm": 0.525565505027771, "learning_rate": 2.2566206639313692e-07, "loss": 0.5938, "step": 23041 }, { "epoch": 0.95495047453272, "grad_norm": 0.39031946659088135, "learning_rate": 2.2545484686476855e-07, "loss": 0.6511, "step": 23042 }, { "epoch": 0.9549919184383936, "grad_norm": 0.419796884059906, "learning_rate": 2.252476273364002e-07, "loss": 0.6792, "step": 23043 }, { "epoch": 0.9550333623440673, "grad_norm": 0.3977256417274475, "learning_rate": 2.2504040780803185e-07, "loss": 0.6805, "step": 23044 }, { "epoch": 0.955074806249741, "grad_norm": 0.4180990159511566, "learning_rate": 2.248331882796635e-07, "loss": 0.657, "step": 23045 }, { "epoch": 0.9551162501554147, "grad_norm": 0.4230247735977173, "learning_rate": 2.2462596875129515e-07, "loss": 0.6469, "step": 23046 }, { "epoch": 0.9551576940610883, "grad_norm": 0.420587956905365, "learning_rate": 2.244187492229268e-07, "loss": 0.6693, "step": 23047 }, { "epoch": 0.955199137966762, "grad_norm": 0.42101094126701355, "learning_rate": 2.2421152969455845e-07, "loss": 0.6979, "step": 23048 }, { "epoch": 0.9552405818724357, "grad_norm": 0.40215256810188293, "learning_rate": 2.2400431016619007e-07, "loss": 0.6577, "step": 23049 }, { "epoch": 0.9552820257781093, "grad_norm": 0.43815138936042786, "learning_rate": 2.2379709063782172e-07, "loss": 0.6948, "step": 23050 }, { "epoch": 0.955323469683783, "grad_norm": 0.42006292939186096, "learning_rate": 2.2358987110945337e-07, "loss": 0.6606, "step": 23051 }, { "epoch": 0.9553649135894566, "grad_norm": 0.42261528968811035, "learning_rate": 2.2338265158108502e-07, "loss": 0.6527, "step": 23052 }, { "epoch": 0.9554063574951304, "grad_norm": 0.44373568892478943, "learning_rate": 2.2317543205271667e-07, "loss": 0.698, "step": 23053 }, { "epoch": 0.955447801400804, "grad_norm": 0.4723448157310486, "learning_rate": 2.229682125243483e-07, "loss": 0.6993, "step": 23054 }, { "epoch": 0.9554892453064777, "grad_norm": 0.4002114534378052, 
"learning_rate": 2.2276099299597995e-07, "loss": 0.7041, "step": 23055 }, { "epoch": 0.9555306892121513, "grad_norm": 0.40913933515548706, "learning_rate": 2.225537734676116e-07, "loss": 0.6572, "step": 23056 }, { "epoch": 0.9555721331178251, "grad_norm": 0.41568508744239807, "learning_rate": 2.2234655393924325e-07, "loss": 0.6678, "step": 23057 }, { "epoch": 0.9556135770234987, "grad_norm": 0.44930580258369446, "learning_rate": 2.221393344108749e-07, "loss": 0.7056, "step": 23058 }, { "epoch": 0.9556550209291723, "grad_norm": 0.45008140802383423, "learning_rate": 2.2193211488250655e-07, "loss": 0.6672, "step": 23059 }, { "epoch": 0.955696464834846, "grad_norm": 0.414188027381897, "learning_rate": 2.217248953541382e-07, "loss": 0.6321, "step": 23060 }, { "epoch": 0.9557379087405197, "grad_norm": 0.4118972420692444, "learning_rate": 2.2151767582576985e-07, "loss": 0.7104, "step": 23061 }, { "epoch": 0.9557793526461934, "grad_norm": 0.44999557733535767, "learning_rate": 2.213104562974015e-07, "loss": 0.7661, "step": 23062 }, { "epoch": 0.955820796551867, "grad_norm": 0.4481130838394165, "learning_rate": 2.2110323676903315e-07, "loss": 0.6376, "step": 23063 }, { "epoch": 0.9558622404575408, "grad_norm": 0.3855637013912201, "learning_rate": 2.208960172406648e-07, "loss": 0.6522, "step": 23064 }, { "epoch": 0.9559036843632144, "grad_norm": 0.3998076021671295, "learning_rate": 2.2068879771229645e-07, "loss": 0.639, "step": 23065 }, { "epoch": 0.9559451282688881, "grad_norm": 0.4299071133136749, "learning_rate": 2.2048157818392805e-07, "loss": 0.6542, "step": 23066 }, { "epoch": 0.9559865721745617, "grad_norm": 0.41824570298194885, "learning_rate": 2.202743586555597e-07, "loss": 0.6442, "step": 23067 }, { "epoch": 0.9560280160802354, "grad_norm": 0.40059101581573486, "learning_rate": 2.2006713912719135e-07, "loss": 0.6426, "step": 23068 }, { "epoch": 0.9560694599859091, "grad_norm": 0.3977491855621338, "learning_rate": 2.19859919598823e-07, "loss": 0.6306, "step": 23069 
}, { "epoch": 0.9561109038915827, "grad_norm": 0.40396496653556824, "learning_rate": 2.1965270007045465e-07, "loss": 0.5979, "step": 23070 }, { "epoch": 0.9561523477972564, "grad_norm": 0.4345090985298157, "learning_rate": 2.194454805420863e-07, "loss": 0.6562, "step": 23071 }, { "epoch": 0.9561937917029301, "grad_norm": 0.3842495381832123, "learning_rate": 2.1923826101371795e-07, "loss": 0.6096, "step": 23072 }, { "epoch": 0.9562352356086038, "grad_norm": 0.47109782695770264, "learning_rate": 2.190310414853496e-07, "loss": 0.6877, "step": 23073 }, { "epoch": 0.9562766795142774, "grad_norm": 0.4509117901325226, "learning_rate": 2.1882382195698125e-07, "loss": 0.6711, "step": 23074 }, { "epoch": 0.9563181234199511, "grad_norm": 0.4120785593986511, "learning_rate": 2.186166024286129e-07, "loss": 0.6938, "step": 23075 }, { "epoch": 0.9563595673256248, "grad_norm": 0.3917151391506195, "learning_rate": 2.1840938290024455e-07, "loss": 0.6084, "step": 23076 }, { "epoch": 0.9564010112312984, "grad_norm": 0.40734294056892395, "learning_rate": 2.182021633718762e-07, "loss": 0.6909, "step": 23077 }, { "epoch": 0.9564424551369721, "grad_norm": 0.3935093879699707, "learning_rate": 2.179949438435078e-07, "loss": 0.619, "step": 23078 }, { "epoch": 0.9564838990426457, "grad_norm": 0.44054827094078064, "learning_rate": 2.1778772431513945e-07, "loss": 0.6005, "step": 23079 }, { "epoch": 0.9565253429483195, "grad_norm": 0.4292285442352295, "learning_rate": 2.175805047867711e-07, "loss": 0.6846, "step": 23080 }, { "epoch": 0.9565667868539931, "grad_norm": 0.40296241641044617, "learning_rate": 2.1737328525840275e-07, "loss": 0.662, "step": 23081 }, { "epoch": 0.9566082307596668, "grad_norm": 0.47924676537513733, "learning_rate": 2.171660657300344e-07, "loss": 0.6146, "step": 23082 }, { "epoch": 0.9566496746653405, "grad_norm": 0.3806125819683075, "learning_rate": 2.1695884620166605e-07, "loss": 0.6229, "step": 23083 }, { "epoch": 0.9566911185710142, "grad_norm": 0.3953922688961029, 
"learning_rate": 2.167516266732977e-07, "loss": 0.6655, "step": 23084 }, { "epoch": 0.9567325624766878, "grad_norm": 0.37801459431648254, "learning_rate": 2.1654440714492935e-07, "loss": 0.6189, "step": 23085 }, { "epoch": 0.9567740063823614, "grad_norm": 0.45688873529434204, "learning_rate": 2.16337187616561e-07, "loss": 0.6298, "step": 23086 }, { "epoch": 0.9568154502880352, "grad_norm": 0.42127177119255066, "learning_rate": 2.1612996808819265e-07, "loss": 0.6602, "step": 23087 }, { "epoch": 0.9568568941937088, "grad_norm": 0.4120872914791107, "learning_rate": 2.159227485598243e-07, "loss": 0.6592, "step": 23088 }, { "epoch": 0.9568983380993825, "grad_norm": 0.4545368552207947, "learning_rate": 2.1571552903145595e-07, "loss": 0.6045, "step": 23089 }, { "epoch": 0.9569397820050561, "grad_norm": 0.4282217025756836, "learning_rate": 2.155083095030876e-07, "loss": 0.6501, "step": 23090 }, { "epoch": 0.9569812259107299, "grad_norm": 0.45731642842292786, "learning_rate": 2.1530108997471923e-07, "loss": 0.6592, "step": 23091 }, { "epoch": 0.9570226698164035, "grad_norm": 0.38938209414482117, "learning_rate": 2.1509387044635088e-07, "loss": 0.6467, "step": 23092 }, { "epoch": 0.9570641137220771, "grad_norm": 0.39338698983192444, "learning_rate": 2.1488665091798253e-07, "loss": 0.6711, "step": 23093 }, { "epoch": 0.9571055576277508, "grad_norm": 0.40484172105789185, "learning_rate": 2.1467943138961418e-07, "loss": 0.6904, "step": 23094 }, { "epoch": 0.9571470015334245, "grad_norm": 0.39045190811157227, "learning_rate": 2.1447221186124583e-07, "loss": 0.645, "step": 23095 }, { "epoch": 0.9571884454390982, "grad_norm": 0.387788861989975, "learning_rate": 2.1426499233287745e-07, "loss": 0.6158, "step": 23096 }, { "epoch": 0.9572298893447718, "grad_norm": 0.41141822934150696, "learning_rate": 2.140577728045091e-07, "loss": 0.6646, "step": 23097 }, { "epoch": 0.9572713332504456, "grad_norm": 0.42459625005722046, "learning_rate": 2.1385055327614075e-07, "loss": 0.6808, "step": 
23098 }, { "epoch": 0.9573127771561192, "grad_norm": 0.41666072607040405, "learning_rate": 2.136433337477724e-07, "loss": 0.6876, "step": 23099 }, { "epoch": 0.9573542210617929, "grad_norm": 0.38848885893821716, "learning_rate": 2.1343611421940406e-07, "loss": 0.6378, "step": 23100 }, { "epoch": 0.9573956649674665, "grad_norm": 0.4206179976463318, "learning_rate": 2.132288946910357e-07, "loss": 0.7119, "step": 23101 }, { "epoch": 0.9574371088731402, "grad_norm": 0.41347256302833557, "learning_rate": 2.1302167516266736e-07, "loss": 0.6685, "step": 23102 }, { "epoch": 0.9574785527788139, "grad_norm": 0.44028353691101074, "learning_rate": 2.1281445563429898e-07, "loss": 0.7117, "step": 23103 }, { "epoch": 0.9575199966844875, "grad_norm": 0.42882731556892395, "learning_rate": 2.1260723610593063e-07, "loss": 0.6819, "step": 23104 }, { "epoch": 0.9575614405901612, "grad_norm": 0.4082034230232239, "learning_rate": 2.1240001657756228e-07, "loss": 0.6541, "step": 23105 }, { "epoch": 0.9576028844958349, "grad_norm": 0.40479129552841187, "learning_rate": 2.1219279704919393e-07, "loss": 0.655, "step": 23106 }, { "epoch": 0.9576443284015086, "grad_norm": 0.39620548486709595, "learning_rate": 2.1198557752082558e-07, "loss": 0.6228, "step": 23107 }, { "epoch": 0.9576857723071822, "grad_norm": 0.39385801553726196, "learning_rate": 2.1177835799245723e-07, "loss": 0.6733, "step": 23108 }, { "epoch": 0.957727216212856, "grad_norm": 0.39574769139289856, "learning_rate": 2.1157113846408888e-07, "loss": 0.6376, "step": 23109 }, { "epoch": 0.9577686601185296, "grad_norm": 0.396316260099411, "learning_rate": 2.1136391893572053e-07, "loss": 0.6843, "step": 23110 }, { "epoch": 0.9578101040242032, "grad_norm": 0.3944850265979767, "learning_rate": 2.1115669940735218e-07, "loss": 0.6301, "step": 23111 }, { "epoch": 0.9578515479298769, "grad_norm": 0.42518535256385803, "learning_rate": 2.1094947987898383e-07, "loss": 0.6514, "step": 23112 }, { "epoch": 0.9578929918355505, "grad_norm": 
0.4587007462978363, "learning_rate": 2.1074226035061548e-07, "loss": 0.6907, "step": 23113 }, { "epoch": 0.9579344357412243, "grad_norm": 0.4469851851463318, "learning_rate": 2.1053504082224713e-07, "loss": 0.6753, "step": 23114 }, { "epoch": 0.9579758796468979, "grad_norm": 0.42378219962120056, "learning_rate": 2.1032782129387873e-07, "loss": 0.6765, "step": 23115 }, { "epoch": 0.9580173235525716, "grad_norm": 0.3928704857826233, "learning_rate": 2.1012060176551038e-07, "loss": 0.616, "step": 23116 }, { "epoch": 0.9580587674582453, "grad_norm": 0.43857342004776, "learning_rate": 2.0991338223714203e-07, "loss": 0.6711, "step": 23117 }, { "epoch": 0.958100211363919, "grad_norm": 0.4277779757976532, "learning_rate": 2.0970616270877368e-07, "loss": 0.6951, "step": 23118 }, { "epoch": 0.9581416552695926, "grad_norm": 0.4310479164123535, "learning_rate": 2.0949894318040533e-07, "loss": 0.6996, "step": 23119 }, { "epoch": 0.9581830991752662, "grad_norm": 0.3995084762573242, "learning_rate": 2.0929172365203698e-07, "loss": 0.6211, "step": 23120 }, { "epoch": 0.95822454308094, "grad_norm": 0.40500330924987793, "learning_rate": 2.0908450412366863e-07, "loss": 0.6395, "step": 23121 }, { "epoch": 0.9582659869866136, "grad_norm": 0.3900543451309204, "learning_rate": 2.0887728459530028e-07, "loss": 0.6575, "step": 23122 }, { "epoch": 0.9583074308922873, "grad_norm": 0.4262588918209076, "learning_rate": 2.0867006506693193e-07, "loss": 0.7283, "step": 23123 }, { "epoch": 0.9583488747979609, "grad_norm": 0.42917948961257935, "learning_rate": 2.0846284553856358e-07, "loss": 0.7189, "step": 23124 }, { "epoch": 0.9583903187036347, "grad_norm": 0.3942415714263916, "learning_rate": 2.0825562601019523e-07, "loss": 0.6084, "step": 23125 }, { "epoch": 0.9584317626093083, "grad_norm": 0.4708394408226013, "learning_rate": 2.0804840648182689e-07, "loss": 0.6707, "step": 23126 }, { "epoch": 0.958473206514982, "grad_norm": 0.4115959107875824, "learning_rate": 2.0784118695345854e-07, "loss": 
0.7085, "step": 23127 }, { "epoch": 0.9585146504206556, "grad_norm": 0.4072836637496948, "learning_rate": 2.0763396742509013e-07, "loss": 0.6497, "step": 23128 }, { "epoch": 0.9585560943263293, "grad_norm": 0.37720367312431335, "learning_rate": 2.0742674789672178e-07, "loss": 0.6409, "step": 23129 }, { "epoch": 0.958597538232003, "grad_norm": 0.4143693149089813, "learning_rate": 2.0721952836835343e-07, "loss": 0.7084, "step": 23130 }, { "epoch": 0.9586389821376766, "grad_norm": 0.4518071711063385, "learning_rate": 2.0701230883998508e-07, "loss": 0.6763, "step": 23131 }, { "epoch": 0.9586804260433504, "grad_norm": 0.4292539954185486, "learning_rate": 2.0680508931161673e-07, "loss": 0.6653, "step": 23132 }, { "epoch": 0.958721869949024, "grad_norm": 0.40304943919181824, "learning_rate": 2.0659786978324838e-07, "loss": 0.7002, "step": 23133 }, { "epoch": 0.9587633138546977, "grad_norm": 0.4577230215072632, "learning_rate": 2.0639065025488003e-07, "loss": 0.7302, "step": 23134 }, { "epoch": 0.9588047577603713, "grad_norm": 0.39477604627609253, "learning_rate": 2.0618343072651169e-07, "loss": 0.6445, "step": 23135 }, { "epoch": 0.9588462016660451, "grad_norm": 0.40126991271972656, "learning_rate": 2.0597621119814334e-07, "loss": 0.6195, "step": 23136 }, { "epoch": 0.9588876455717187, "grad_norm": 0.4666047692298889, "learning_rate": 2.0576899166977499e-07, "loss": 0.7156, "step": 23137 }, { "epoch": 0.9589290894773923, "grad_norm": 0.42344728112220764, "learning_rate": 2.0556177214140664e-07, "loss": 0.6887, "step": 23138 }, { "epoch": 0.958970533383066, "grad_norm": 0.3777908980846405, "learning_rate": 2.0535455261303829e-07, "loss": 0.6597, "step": 23139 }, { "epoch": 0.9590119772887397, "grad_norm": 0.4349067509174347, "learning_rate": 2.051473330846699e-07, "loss": 0.6501, "step": 23140 }, { "epoch": 0.9590534211944134, "grad_norm": 0.44103920459747314, "learning_rate": 2.0494011355630156e-07, "loss": 0.6707, "step": 23141 }, { "epoch": 0.959094865100087, 
"grad_norm": 0.3859243094921112, "learning_rate": 2.047328940279332e-07, "loss": 0.6287, "step": 23142 }, { "epoch": 0.9591363090057607, "grad_norm": 0.4258202314376831, "learning_rate": 2.0452567449956484e-07, "loss": 0.6042, "step": 23143 }, { "epoch": 0.9591777529114344, "grad_norm": 0.4573183059692383, "learning_rate": 2.0431845497119649e-07, "loss": 0.6853, "step": 23144 }, { "epoch": 0.9592191968171081, "grad_norm": 0.42446082830429077, "learning_rate": 2.0411123544282814e-07, "loss": 0.6934, "step": 23145 }, { "epoch": 0.9592606407227817, "grad_norm": 0.38893240690231323, "learning_rate": 2.0390401591445979e-07, "loss": 0.651, "step": 23146 }, { "epoch": 0.9593020846284553, "grad_norm": 0.41613274812698364, "learning_rate": 2.0369679638609144e-07, "loss": 0.6831, "step": 23147 }, { "epoch": 0.9593435285341291, "grad_norm": 0.40390047430992126, "learning_rate": 2.034895768577231e-07, "loss": 0.5852, "step": 23148 }, { "epoch": 0.9593849724398027, "grad_norm": 0.40002626180648804, "learning_rate": 2.0328235732935474e-07, "loss": 0.6213, "step": 23149 }, { "epoch": 0.9594264163454764, "grad_norm": 0.41857755184173584, "learning_rate": 2.030751378009864e-07, "loss": 0.6534, "step": 23150 }, { "epoch": 0.95946786025115, "grad_norm": 0.3895922601222992, "learning_rate": 2.0286791827261804e-07, "loss": 0.691, "step": 23151 }, { "epoch": 0.9595093041568238, "grad_norm": 0.4271538257598877, "learning_rate": 2.0266069874424966e-07, "loss": 0.6812, "step": 23152 }, { "epoch": 0.9595507480624974, "grad_norm": 0.4382900297641754, "learning_rate": 2.024534792158813e-07, "loss": 0.6721, "step": 23153 }, { "epoch": 0.959592191968171, "grad_norm": 0.4384820759296417, "learning_rate": 2.0224625968751296e-07, "loss": 0.6833, "step": 23154 }, { "epoch": 0.9596336358738448, "grad_norm": 0.39148852229118347, "learning_rate": 2.020390401591446e-07, "loss": 0.6587, "step": 23155 }, { "epoch": 0.9596750797795184, "grad_norm": 0.43319573998451233, "learning_rate": 
2.0183182063077626e-07, "loss": 0.7266, "step": 23156 }, { "epoch": 0.9597165236851921, "grad_norm": 0.43426328897476196, "learning_rate": 2.0162460110240791e-07, "loss": 0.7261, "step": 23157 }, { "epoch": 0.9597579675908657, "grad_norm": 0.4503077268600464, "learning_rate": 2.0141738157403956e-07, "loss": 0.7109, "step": 23158 }, { "epoch": 0.9597994114965395, "grad_norm": 0.43513306975364685, "learning_rate": 2.0121016204567121e-07, "loss": 0.6322, "step": 23159 }, { "epoch": 0.9598408554022131, "grad_norm": 0.42748013138771057, "learning_rate": 2.0100294251730286e-07, "loss": 0.6545, "step": 23160 }, { "epoch": 0.9598822993078868, "grad_norm": 0.4084666073322296, "learning_rate": 2.0079572298893452e-07, "loss": 0.7018, "step": 23161 }, { "epoch": 0.9599237432135604, "grad_norm": 0.40143901109695435, "learning_rate": 2.0058850346056614e-07, "loss": 0.6278, "step": 23162 }, { "epoch": 0.9599651871192341, "grad_norm": 0.4238412082195282, "learning_rate": 2.003812839321978e-07, "loss": 0.6865, "step": 23163 }, { "epoch": 0.9600066310249078, "grad_norm": 0.42535850405693054, "learning_rate": 2.0017406440382944e-07, "loss": 0.6759, "step": 23164 }, { "epoch": 0.9600480749305814, "grad_norm": 0.4023435413837433, "learning_rate": 1.9996684487546106e-07, "loss": 0.6285, "step": 23165 }, { "epoch": 0.9600895188362552, "grad_norm": 0.4379653036594391, "learning_rate": 1.9975962534709271e-07, "loss": 0.678, "step": 23166 }, { "epoch": 0.9601309627419288, "grad_norm": 0.3986414670944214, "learning_rate": 1.9955240581872436e-07, "loss": 0.6555, "step": 23167 }, { "epoch": 0.9601724066476025, "grad_norm": 0.42397162318229675, "learning_rate": 1.9934518629035601e-07, "loss": 0.6631, "step": 23168 }, { "epoch": 0.9602138505532761, "grad_norm": 0.43795710802078247, "learning_rate": 1.9913796676198766e-07, "loss": 0.7224, "step": 23169 }, { "epoch": 0.9602552944589499, "grad_norm": 0.37666749954223633, "learning_rate": 1.9893074723361932e-07, "loss": 0.6166, "step": 23170 }, { 
"epoch": 0.9602967383646235, "grad_norm": 0.4144953191280365, "learning_rate": 1.9872352770525097e-07, "loss": 0.6517, "step": 23171 }, { "epoch": 0.9603381822702971, "grad_norm": 0.43220314383506775, "learning_rate": 1.9851630817688262e-07, "loss": 0.6978, "step": 23172 }, { "epoch": 0.9603796261759708, "grad_norm": 0.42193520069122314, "learning_rate": 1.9830908864851427e-07, "loss": 0.6458, "step": 23173 }, { "epoch": 0.9604210700816445, "grad_norm": 0.4525734484195709, "learning_rate": 1.9810186912014592e-07, "loss": 0.6388, "step": 23174 }, { "epoch": 0.9604625139873182, "grad_norm": 0.38898661732673645, "learning_rate": 1.9789464959177757e-07, "loss": 0.6469, "step": 23175 }, { "epoch": 0.9605039578929918, "grad_norm": 0.4502175748348236, "learning_rate": 1.9768743006340922e-07, "loss": 0.6682, "step": 23176 }, { "epoch": 0.9605454017986655, "grad_norm": 0.418739914894104, "learning_rate": 1.9748021053504081e-07, "loss": 0.7018, "step": 23177 }, { "epoch": 0.9605868457043392, "grad_norm": 0.4222378730773926, "learning_rate": 1.9727299100667247e-07, "loss": 0.6573, "step": 23178 }, { "epoch": 0.9606282896100129, "grad_norm": 0.4074171483516693, "learning_rate": 1.9706577147830412e-07, "loss": 0.6429, "step": 23179 }, { "epoch": 0.9606697335156865, "grad_norm": 0.44411569833755493, "learning_rate": 1.9685855194993577e-07, "loss": 0.649, "step": 23180 }, { "epoch": 0.9607111774213601, "grad_norm": 0.4248681664466858, "learning_rate": 1.9665133242156742e-07, "loss": 0.6376, "step": 23181 }, { "epoch": 0.9607526213270339, "grad_norm": 0.41487404704093933, "learning_rate": 1.9644411289319907e-07, "loss": 0.6588, "step": 23182 }, { "epoch": 0.9607940652327075, "grad_norm": 0.40593189001083374, "learning_rate": 1.9623689336483072e-07, "loss": 0.6688, "step": 23183 }, { "epoch": 0.9608355091383812, "grad_norm": 0.4116593301296234, "learning_rate": 1.9602967383646237e-07, "loss": 0.6313, "step": 23184 }, { "epoch": 0.9608769530440548, "grad_norm": 0.42997676134109497, 
"learning_rate": 1.9582245430809402e-07, "loss": 0.677, "step": 23185 }, { "epoch": 0.9609183969497286, "grad_norm": 0.4413765072822571, "learning_rate": 1.9561523477972567e-07, "loss": 0.7112, "step": 23186 }, { "epoch": 0.9609598408554022, "grad_norm": 0.41863149404525757, "learning_rate": 1.9540801525135732e-07, "loss": 0.6829, "step": 23187 }, { "epoch": 0.9610012847610759, "grad_norm": 0.42147454619407654, "learning_rate": 1.9520079572298897e-07, "loss": 0.6508, "step": 23188 }, { "epoch": 0.9610427286667496, "grad_norm": 0.38393649458885193, "learning_rate": 1.949935761946206e-07, "loss": 0.615, "step": 23189 }, { "epoch": 0.9610841725724232, "grad_norm": 0.40934789180755615, "learning_rate": 1.9478635666625224e-07, "loss": 0.671, "step": 23190 }, { "epoch": 0.9611256164780969, "grad_norm": 0.45235881209373474, "learning_rate": 1.9457913713788387e-07, "loss": 0.6771, "step": 23191 }, { "epoch": 0.9611670603837705, "grad_norm": 0.3910123109817505, "learning_rate": 1.9437191760951552e-07, "loss": 0.6278, "step": 23192 }, { "epoch": 0.9612085042894443, "grad_norm": 0.39734891057014465, "learning_rate": 1.9416469808114717e-07, "loss": 0.636, "step": 23193 }, { "epoch": 0.9612499481951179, "grad_norm": 0.4058263599872589, "learning_rate": 1.9395747855277882e-07, "loss": 0.6783, "step": 23194 }, { "epoch": 0.9612913921007916, "grad_norm": 0.412992388010025, "learning_rate": 1.9375025902441047e-07, "loss": 0.684, "step": 23195 }, { "epoch": 0.9613328360064652, "grad_norm": 0.422380268573761, "learning_rate": 1.9354303949604212e-07, "loss": 0.6432, "step": 23196 }, { "epoch": 0.961374279912139, "grad_norm": 0.4061347246170044, "learning_rate": 1.9333581996767377e-07, "loss": 0.6598, "step": 23197 }, { "epoch": 0.9614157238178126, "grad_norm": 0.430497944355011, "learning_rate": 1.9312860043930542e-07, "loss": 0.6721, "step": 23198 }, { "epoch": 0.9614571677234862, "grad_norm": 0.4027978777885437, "learning_rate": 1.9292138091093707e-07, "loss": 0.6396, "step": 23199 
}, { "epoch": 0.96149861162916, "grad_norm": 0.41532155871391296, "learning_rate": 1.9271416138256872e-07, "loss": 0.6735, "step": 23200 }, { "epoch": 0.9615400555348336, "grad_norm": 0.4190988838672638, "learning_rate": 1.9250694185420037e-07, "loss": 0.6173, "step": 23201 }, { "epoch": 0.9615814994405073, "grad_norm": 0.4334971606731415, "learning_rate": 1.92299722325832e-07, "loss": 0.6802, "step": 23202 }, { "epoch": 0.9616229433461809, "grad_norm": 0.43776872754096985, "learning_rate": 1.9209250279746364e-07, "loss": 0.6747, "step": 23203 }, { "epoch": 0.9616643872518547, "grad_norm": 0.38229429721832275, "learning_rate": 1.918852832690953e-07, "loss": 0.6342, "step": 23204 }, { "epoch": 0.9617058311575283, "grad_norm": 0.378037691116333, "learning_rate": 1.9167806374072695e-07, "loss": 0.6338, "step": 23205 }, { "epoch": 0.9617472750632019, "grad_norm": 0.4342402517795563, "learning_rate": 1.914708442123586e-07, "loss": 0.6759, "step": 23206 }, { "epoch": 0.9617887189688756, "grad_norm": 0.3797514736652374, "learning_rate": 1.9126362468399025e-07, "loss": 0.6145, "step": 23207 }, { "epoch": 0.9618301628745493, "grad_norm": 0.44878053665161133, "learning_rate": 1.910564051556219e-07, "loss": 0.6819, "step": 23208 }, { "epoch": 0.961871606780223, "grad_norm": 0.395308256149292, "learning_rate": 1.9084918562725352e-07, "loss": 0.652, "step": 23209 }, { "epoch": 0.9619130506858966, "grad_norm": 0.3902066946029663, "learning_rate": 1.9064196609888517e-07, "loss": 0.6118, "step": 23210 }, { "epoch": 0.9619544945915703, "grad_norm": 0.4185203015804291, "learning_rate": 1.9043474657051682e-07, "loss": 0.6404, "step": 23211 }, { "epoch": 0.961995938497244, "grad_norm": 0.38643380999565125, "learning_rate": 1.9022752704214847e-07, "loss": 0.6516, "step": 23212 }, { "epoch": 0.9620373824029177, "grad_norm": 0.3884592056274414, "learning_rate": 1.9002030751378012e-07, "loss": 0.6237, "step": 23213 }, { "epoch": 0.9620788263085913, "grad_norm": 0.4123048782348633, 
"learning_rate": 1.8981308798541175e-07, "loss": 0.7129, "step": 23214 }, { "epoch": 0.9621202702142649, "grad_norm": 0.4210478365421295, "learning_rate": 1.896058684570434e-07, "loss": 0.6989, "step": 23215 }, { "epoch": 0.9621617141199387, "grad_norm": 0.45603370666503906, "learning_rate": 1.8939864892867505e-07, "loss": 0.6981, "step": 23216 }, { "epoch": 0.9622031580256123, "grad_norm": 0.41936248540878296, "learning_rate": 1.891914294003067e-07, "loss": 0.6575, "step": 23217 }, { "epoch": 0.962244601931286, "grad_norm": 0.4217749536037445, "learning_rate": 1.8898420987193835e-07, "loss": 0.6357, "step": 23218 }, { "epoch": 0.9622860458369596, "grad_norm": 0.39954033493995667, "learning_rate": 1.8877699034357e-07, "loss": 0.7048, "step": 23219 }, { "epoch": 0.9623274897426334, "grad_norm": 0.42419958114624023, "learning_rate": 1.8856977081520165e-07, "loss": 0.6323, "step": 23220 }, { "epoch": 0.962368933648307, "grad_norm": 0.4251878261566162, "learning_rate": 1.883625512868333e-07, "loss": 0.673, "step": 23221 }, { "epoch": 0.9624103775539807, "grad_norm": 0.4311068058013916, "learning_rate": 1.8815533175846495e-07, "loss": 0.6742, "step": 23222 }, { "epoch": 0.9624518214596544, "grad_norm": 0.42706409096717834, "learning_rate": 1.879481122300966e-07, "loss": 0.6907, "step": 23223 }, { "epoch": 0.962493265365328, "grad_norm": 0.430532842874527, "learning_rate": 1.8774089270172825e-07, "loss": 0.6721, "step": 23224 }, { "epoch": 0.9625347092710017, "grad_norm": 0.41283872723579407, "learning_rate": 1.875336731733599e-07, "loss": 0.6777, "step": 23225 }, { "epoch": 0.9625761531766753, "grad_norm": 0.4132024049758911, "learning_rate": 1.873264536449915e-07, "loss": 0.6156, "step": 23226 }, { "epoch": 0.9626175970823491, "grad_norm": 0.40249085426330566, "learning_rate": 1.8711923411662315e-07, "loss": 0.6277, "step": 23227 }, { "epoch": 0.9626590409880227, "grad_norm": 0.4029373228549957, "learning_rate": 1.869120145882548e-07, "loss": 0.6053, "step": 23228 }, { 
"epoch": 0.9627004848936964, "grad_norm": 0.38418957591056824, "learning_rate": 1.8670479505988645e-07, "loss": 0.6237, "step": 23229 }, { "epoch": 0.96274192879937, "grad_norm": 0.4161483347415924, "learning_rate": 1.864975755315181e-07, "loss": 0.6384, "step": 23230 }, { "epoch": 0.9627833727050438, "grad_norm": 0.4120124876499176, "learning_rate": 1.8629035600314975e-07, "loss": 0.6611, "step": 23231 }, { "epoch": 0.9628248166107174, "grad_norm": 0.41910380125045776, "learning_rate": 1.860831364747814e-07, "loss": 0.6692, "step": 23232 }, { "epoch": 0.962866260516391, "grad_norm": 0.40932485461235046, "learning_rate": 1.8587591694641305e-07, "loss": 0.6499, "step": 23233 }, { "epoch": 0.9629077044220647, "grad_norm": 0.4058624505996704, "learning_rate": 1.856686974180447e-07, "loss": 0.6067, "step": 23234 }, { "epoch": 0.9629491483277384, "grad_norm": 0.406186580657959, "learning_rate": 1.8546147788967635e-07, "loss": 0.7041, "step": 23235 }, { "epoch": 0.9629905922334121, "grad_norm": 0.41728949546813965, "learning_rate": 1.85254258361308e-07, "loss": 0.63, "step": 23236 }, { "epoch": 0.9630320361390857, "grad_norm": 0.41570979356765747, "learning_rate": 1.8504703883293965e-07, "loss": 0.6804, "step": 23237 }, { "epoch": 0.9630734800447595, "grad_norm": 0.4367997944355011, "learning_rate": 1.8483981930457125e-07, "loss": 0.675, "step": 23238 }, { "epoch": 0.9631149239504331, "grad_norm": 0.41493576765060425, "learning_rate": 1.846325997762029e-07, "loss": 0.6614, "step": 23239 }, { "epoch": 0.9631563678561068, "grad_norm": 0.42952725291252136, "learning_rate": 1.8442538024783455e-07, "loss": 0.6482, "step": 23240 }, { "epoch": 0.9631978117617804, "grad_norm": 0.4610096216201782, "learning_rate": 1.842181607194662e-07, "loss": 0.7068, "step": 23241 }, { "epoch": 0.963239255667454, "grad_norm": 0.4690104126930237, "learning_rate": 1.8401094119109785e-07, "loss": 0.7045, "step": 23242 }, { "epoch": 0.9632806995731278, "grad_norm": 0.43758708238601685, 
"learning_rate": 1.838037216627295e-07, "loss": 0.7327, "step": 23243 }, { "epoch": 0.9633221434788014, "grad_norm": 0.42984187602996826, "learning_rate": 1.8359650213436115e-07, "loss": 0.7129, "step": 23244 }, { "epoch": 0.9633635873844751, "grad_norm": 0.41961970925331116, "learning_rate": 1.833892826059928e-07, "loss": 0.6415, "step": 23245 }, { "epoch": 0.9634050312901488, "grad_norm": 0.4083646833896637, "learning_rate": 1.8318206307762445e-07, "loss": 0.6659, "step": 23246 }, { "epoch": 0.9634464751958225, "grad_norm": 0.4411815106868744, "learning_rate": 1.829748435492561e-07, "loss": 0.6304, "step": 23247 }, { "epoch": 0.9634879191014961, "grad_norm": 0.41600650548934937, "learning_rate": 1.8276762402088775e-07, "loss": 0.6564, "step": 23248 }, { "epoch": 0.9635293630071698, "grad_norm": 0.44136688113212585, "learning_rate": 1.825604044925194e-07, "loss": 0.6925, "step": 23249 }, { "epoch": 0.9635708069128435, "grad_norm": 0.43774133920669556, "learning_rate": 1.8235318496415105e-07, "loss": 0.7256, "step": 23250 }, { "epoch": 0.9636122508185171, "grad_norm": 0.41175365447998047, "learning_rate": 1.8214596543578268e-07, "loss": 0.699, "step": 23251 }, { "epoch": 0.9636536947241908, "grad_norm": 0.39821356534957886, "learning_rate": 1.8193874590741433e-07, "loss": 0.6346, "step": 23252 }, { "epoch": 0.9636951386298644, "grad_norm": 0.430891752243042, "learning_rate": 1.8173152637904598e-07, "loss": 0.6461, "step": 23253 }, { "epoch": 0.9637365825355382, "grad_norm": 0.3951548635959625, "learning_rate": 1.8152430685067763e-07, "loss": 0.6283, "step": 23254 }, { "epoch": 0.9637780264412118, "grad_norm": 0.41319188475608826, "learning_rate": 1.8131708732230928e-07, "loss": 0.6438, "step": 23255 }, { "epoch": 0.9638194703468855, "grad_norm": 0.38934096693992615, "learning_rate": 1.8110986779394093e-07, "loss": 0.6237, "step": 23256 }, { "epoch": 0.9638609142525592, "grad_norm": 0.42149388790130615, "learning_rate": 1.8090264826557255e-07, "loss": 0.6882, 
"step": 23257 }, { "epoch": 0.9639023581582329, "grad_norm": 0.41719764471054077, "learning_rate": 1.806954287372042e-07, "loss": 0.6344, "step": 23258 }, { "epoch": 0.9639438020639065, "grad_norm": 0.4408019483089447, "learning_rate": 1.8048820920883585e-07, "loss": 0.6823, "step": 23259 }, { "epoch": 0.9639852459695801, "grad_norm": 0.4154941439628601, "learning_rate": 1.802809896804675e-07, "loss": 0.6337, "step": 23260 }, { "epoch": 0.9640266898752539, "grad_norm": 0.41061410307884216, "learning_rate": 1.8007377015209915e-07, "loss": 0.6882, "step": 23261 }, { "epoch": 0.9640681337809275, "grad_norm": 0.4463736414909363, "learning_rate": 1.798665506237308e-07, "loss": 0.6268, "step": 23262 }, { "epoch": 0.9641095776866012, "grad_norm": 0.4064274728298187, "learning_rate": 1.7965933109536243e-07, "loss": 0.6704, "step": 23263 }, { "epoch": 0.9641510215922748, "grad_norm": 0.4128144383430481, "learning_rate": 1.7945211156699408e-07, "loss": 0.6272, "step": 23264 }, { "epoch": 0.9641924654979486, "grad_norm": 0.42582666873931885, "learning_rate": 1.7924489203862573e-07, "loss": 0.6475, "step": 23265 }, { "epoch": 0.9642339094036222, "grad_norm": 0.45964887738227844, "learning_rate": 1.7903767251025738e-07, "loss": 0.6324, "step": 23266 }, { "epoch": 0.9642753533092958, "grad_norm": 0.4536997377872467, "learning_rate": 1.7883045298188903e-07, "loss": 0.7354, "step": 23267 }, { "epoch": 0.9643167972149695, "grad_norm": 0.3932463824748993, "learning_rate": 1.7862323345352068e-07, "loss": 0.6995, "step": 23268 }, { "epoch": 0.9643582411206432, "grad_norm": 0.40317365527153015, "learning_rate": 1.7841601392515233e-07, "loss": 0.6438, "step": 23269 }, { "epoch": 0.9643996850263169, "grad_norm": 0.4453044831752777, "learning_rate": 1.7820879439678398e-07, "loss": 0.7097, "step": 23270 }, { "epoch": 0.9644411289319905, "grad_norm": 0.3931093215942383, "learning_rate": 1.7800157486841563e-07, "loss": 0.6686, "step": 23271 }, { "epoch": 0.9644825728376643, "grad_norm": 
0.42329302430152893, "learning_rate": 1.7779435534004728e-07, "loss": 0.6182, "step": 23272 }, { "epoch": 0.9645240167433379, "grad_norm": 0.4212232530117035, "learning_rate": 1.7758713581167893e-07, "loss": 0.6995, "step": 23273 }, { "epoch": 0.9645654606490116, "grad_norm": 0.4006105065345764, "learning_rate": 1.7737991628331058e-07, "loss": 0.6525, "step": 23274 }, { "epoch": 0.9646069045546852, "grad_norm": 0.39013901352882385, "learning_rate": 1.7717269675494218e-07, "loss": 0.6158, "step": 23275 }, { "epoch": 0.9646483484603589, "grad_norm": 0.43686220049858093, "learning_rate": 1.7696547722657383e-07, "loss": 0.7319, "step": 23276 }, { "epoch": 0.9646897923660326, "grad_norm": 0.3823152780532837, "learning_rate": 1.7675825769820548e-07, "loss": 0.6122, "step": 23277 }, { "epoch": 0.9647312362717062, "grad_norm": 0.42826035618782043, "learning_rate": 1.7655103816983713e-07, "loss": 0.6324, "step": 23278 }, { "epoch": 0.9647726801773799, "grad_norm": 0.4264126121997833, "learning_rate": 1.7634381864146878e-07, "loss": 0.6349, "step": 23279 }, { "epoch": 0.9648141240830536, "grad_norm": 0.4255863130092621, "learning_rate": 1.7613659911310043e-07, "loss": 0.6659, "step": 23280 }, { "epoch": 0.9648555679887273, "grad_norm": 0.442272424697876, "learning_rate": 1.7592937958473208e-07, "loss": 0.6877, "step": 23281 }, { "epoch": 0.9648970118944009, "grad_norm": 0.4512612521648407, "learning_rate": 1.7572216005636373e-07, "loss": 0.6843, "step": 23282 }, { "epoch": 0.9649384558000746, "grad_norm": 0.4233115017414093, "learning_rate": 1.7551494052799538e-07, "loss": 0.679, "step": 23283 }, { "epoch": 0.9649798997057483, "grad_norm": 0.37886670231819153, "learning_rate": 1.7530772099962703e-07, "loss": 0.6379, "step": 23284 }, { "epoch": 0.9650213436114219, "grad_norm": 0.4184572994709015, "learning_rate": 1.7510050147125868e-07, "loss": 0.6436, "step": 23285 }, { "epoch": 0.9650627875170956, "grad_norm": 0.4252547025680542, "learning_rate": 1.7489328194289033e-07, 
"loss": 0.6758, "step": 23286 }, { "epoch": 0.9651042314227692, "grad_norm": 0.40730932354927063, "learning_rate": 1.7468606241452198e-07, "loss": 0.6527, "step": 23287 }, { "epoch": 0.965145675328443, "grad_norm": 0.4110737144947052, "learning_rate": 1.7447884288615358e-07, "loss": 0.6553, "step": 23288 }, { "epoch": 0.9651871192341166, "grad_norm": 0.43789148330688477, "learning_rate": 1.7427162335778523e-07, "loss": 0.6726, "step": 23289 }, { "epoch": 0.9652285631397903, "grad_norm": 0.43218380212783813, "learning_rate": 1.7406440382941688e-07, "loss": 0.6372, "step": 23290 }, { "epoch": 0.965270007045464, "grad_norm": 0.4686080515384674, "learning_rate": 1.7385718430104853e-07, "loss": 0.6636, "step": 23291 }, { "epoch": 0.9653114509511377, "grad_norm": 0.41321247816085815, "learning_rate": 1.7364996477268018e-07, "loss": 0.6456, "step": 23292 }, { "epoch": 0.9653528948568113, "grad_norm": 0.4297012984752655, "learning_rate": 1.7344274524431183e-07, "loss": 0.6809, "step": 23293 }, { "epoch": 0.9653943387624849, "grad_norm": 0.4195738732814789, "learning_rate": 1.7323552571594348e-07, "loss": 0.6797, "step": 23294 }, { "epoch": 0.9654357826681587, "grad_norm": 0.40984293818473816, "learning_rate": 1.7302830618757513e-07, "loss": 0.6268, "step": 23295 }, { "epoch": 0.9654772265738323, "grad_norm": 0.3901982307434082, "learning_rate": 1.7282108665920678e-07, "loss": 0.6401, "step": 23296 }, { "epoch": 0.965518670479506, "grad_norm": 0.4661453366279602, "learning_rate": 1.7261386713083843e-07, "loss": 0.7253, "step": 23297 }, { "epoch": 0.9655601143851796, "grad_norm": 0.41373497247695923, "learning_rate": 1.7240664760247008e-07, "loss": 0.6647, "step": 23298 }, { "epoch": 0.9656015582908534, "grad_norm": 0.3907544016838074, "learning_rate": 1.7219942807410173e-07, "loss": 0.6318, "step": 23299 }, { "epoch": 0.965643002196527, "grad_norm": 0.4454175531864166, "learning_rate": 1.7199220854573336e-07, "loss": 0.6675, "step": 23300 }, { "epoch": 0.9656844461022007, 
"grad_norm": 0.38820400834083557, "learning_rate": 1.71784989017365e-07, "loss": 0.6184, "step": 23301 }, { "epoch": 0.9657258900078743, "grad_norm": 0.39134272933006287, "learning_rate": 1.7157776948899666e-07, "loss": 0.6418, "step": 23302 }, { "epoch": 0.965767333913548, "grad_norm": 0.42655515670776367, "learning_rate": 1.713705499606283e-07, "loss": 0.6655, "step": 23303 }, { "epoch": 0.9658087778192217, "grad_norm": 0.44280579686164856, "learning_rate": 1.7116333043225993e-07, "loss": 0.6492, "step": 23304 }, { "epoch": 0.9658502217248953, "grad_norm": 0.4750725030899048, "learning_rate": 1.7095611090389158e-07, "loss": 0.6542, "step": 23305 }, { "epoch": 0.965891665630569, "grad_norm": 0.4715070426464081, "learning_rate": 1.7074889137552323e-07, "loss": 0.7612, "step": 23306 }, { "epoch": 0.9659331095362427, "grad_norm": 0.41819092631340027, "learning_rate": 1.7054167184715488e-07, "loss": 0.6112, "step": 23307 }, { "epoch": 0.9659745534419164, "grad_norm": 0.42313912510871887, "learning_rate": 1.7033445231878653e-07, "loss": 0.6313, "step": 23308 }, { "epoch": 0.96601599734759, "grad_norm": 0.43265488743782043, "learning_rate": 1.7012723279041818e-07, "loss": 0.7216, "step": 23309 }, { "epoch": 0.9660574412532638, "grad_norm": 0.43604597449302673, "learning_rate": 1.6992001326204984e-07, "loss": 0.6455, "step": 23310 }, { "epoch": 0.9660988851589374, "grad_norm": 0.421562135219574, "learning_rate": 1.6971279373368149e-07, "loss": 0.6637, "step": 23311 }, { "epoch": 0.966140329064611, "grad_norm": 0.39156481623649597, "learning_rate": 1.695055742053131e-07, "loss": 0.6224, "step": 23312 }, { "epoch": 0.9661817729702847, "grad_norm": 0.4224454164505005, "learning_rate": 1.6929835467694476e-07, "loss": 0.6807, "step": 23313 }, { "epoch": 0.9662232168759584, "grad_norm": 0.4051852822303772, "learning_rate": 1.690911351485764e-07, "loss": 0.6702, "step": 23314 }, { "epoch": 0.9662646607816321, "grad_norm": 0.3934318721294403, "learning_rate": 
1.6888391562020806e-07, "loss": 0.6871, "step": 23315 }, { "epoch": 0.9663061046873057, "grad_norm": 0.4254280626773834, "learning_rate": 1.686766960918397e-07, "loss": 0.6562, "step": 23316 }, { "epoch": 0.9663475485929794, "grad_norm": 0.406938374042511, "learning_rate": 1.6846947656347136e-07, "loss": 0.71, "step": 23317 }, { "epoch": 0.9663889924986531, "grad_norm": 0.39998579025268555, "learning_rate": 1.68262257035103e-07, "loss": 0.6763, "step": 23318 }, { "epoch": 0.9664304364043268, "grad_norm": 0.407700777053833, "learning_rate": 1.6805503750673466e-07, "loss": 0.6858, "step": 23319 }, { "epoch": 0.9664718803100004, "grad_norm": 0.45109105110168457, "learning_rate": 1.678478179783663e-07, "loss": 0.6521, "step": 23320 }, { "epoch": 0.966513324215674, "grad_norm": 0.39635810256004333, "learning_rate": 1.6764059844999796e-07, "loss": 0.6857, "step": 23321 }, { "epoch": 0.9665547681213478, "grad_norm": 0.42575880885124207, "learning_rate": 1.674333789216296e-07, "loss": 0.6687, "step": 23322 }, { "epoch": 0.9665962120270214, "grad_norm": 0.39914363622665405, "learning_rate": 1.6722615939326124e-07, "loss": 0.6002, "step": 23323 }, { "epoch": 0.9666376559326951, "grad_norm": 0.4216274917125702, "learning_rate": 1.670189398648929e-07, "loss": 0.6581, "step": 23324 }, { "epoch": 0.9666790998383687, "grad_norm": 0.40114206075668335, "learning_rate": 1.668117203365245e-07, "loss": 0.6343, "step": 23325 }, { "epoch": 0.9667205437440425, "grad_norm": 0.42549943923950195, "learning_rate": 1.6660450080815616e-07, "loss": 0.6473, "step": 23326 }, { "epoch": 0.9667619876497161, "grad_norm": 0.4355872571468353, "learning_rate": 1.663972812797878e-07, "loss": 0.679, "step": 23327 }, { "epoch": 0.9668034315553897, "grad_norm": 0.4368642270565033, "learning_rate": 1.6619006175141946e-07, "loss": 0.7002, "step": 23328 }, { "epoch": 0.9668448754610635, "grad_norm": 0.39656850695610046, "learning_rate": 1.659828422230511e-07, "loss": 0.6362, "step": 23329 }, { "epoch": 
0.9668863193667371, "grad_norm": 0.41979920864105225, "learning_rate": 1.6577562269468276e-07, "loss": 0.6925, "step": 23330 }, { "epoch": 0.9669277632724108, "grad_norm": 0.41821253299713135, "learning_rate": 1.655684031663144e-07, "loss": 0.689, "step": 23331 }, { "epoch": 0.9669692071780844, "grad_norm": 0.37016725540161133, "learning_rate": 1.6536118363794606e-07, "loss": 0.642, "step": 23332 }, { "epoch": 0.9670106510837582, "grad_norm": 0.38707494735717773, "learning_rate": 1.6515396410957771e-07, "loss": 0.6263, "step": 23333 }, { "epoch": 0.9670520949894318, "grad_norm": 0.4342324435710907, "learning_rate": 1.6494674458120936e-07, "loss": 0.7299, "step": 23334 }, { "epoch": 0.9670935388951055, "grad_norm": 0.39876458048820496, "learning_rate": 1.6473952505284101e-07, "loss": 0.6648, "step": 23335 }, { "epoch": 0.9671349828007791, "grad_norm": 0.43323883414268494, "learning_rate": 1.6453230552447266e-07, "loss": 0.6555, "step": 23336 }, { "epoch": 0.9671764267064528, "grad_norm": 0.4499939978122711, "learning_rate": 1.6432508599610426e-07, "loss": 0.6907, "step": 23337 }, { "epoch": 0.9672178706121265, "grad_norm": 0.4446576237678528, "learning_rate": 1.641178664677359e-07, "loss": 0.7026, "step": 23338 }, { "epoch": 0.9672593145178001, "grad_norm": 0.41455352306365967, "learning_rate": 1.6391064693936756e-07, "loss": 0.6632, "step": 23339 }, { "epoch": 0.9673007584234738, "grad_norm": 0.4042210280895233, "learning_rate": 1.6370342741099921e-07, "loss": 0.62, "step": 23340 }, { "epoch": 0.9673422023291475, "grad_norm": 0.4209439754486084, "learning_rate": 1.6349620788263086e-07, "loss": 0.6833, "step": 23341 }, { "epoch": 0.9673836462348212, "grad_norm": 0.40686357021331787, "learning_rate": 1.6328898835426251e-07, "loss": 0.6898, "step": 23342 }, { "epoch": 0.9674250901404948, "grad_norm": 0.4403996169567108, "learning_rate": 1.6308176882589416e-07, "loss": 0.6477, "step": 23343 }, { "epoch": 0.9674665340461686, "grad_norm": 0.4391769468784332, 
"learning_rate": 1.6287454929752581e-07, "loss": 0.6362, "step": 23344 }, { "epoch": 0.9675079779518422, "grad_norm": 0.38516345620155334, "learning_rate": 1.6266732976915747e-07, "loss": 0.6132, "step": 23345 }, { "epoch": 0.9675494218575158, "grad_norm": 0.4932960271835327, "learning_rate": 1.6246011024078912e-07, "loss": 0.7222, "step": 23346 }, { "epoch": 0.9675908657631895, "grad_norm": 0.4122534394264221, "learning_rate": 1.6225289071242077e-07, "loss": 0.6646, "step": 23347 }, { "epoch": 0.9676323096688632, "grad_norm": 0.3870895206928253, "learning_rate": 1.6204567118405242e-07, "loss": 0.6703, "step": 23348 }, { "epoch": 0.9676737535745369, "grad_norm": 0.41361358761787415, "learning_rate": 1.6183845165568404e-07, "loss": 0.6985, "step": 23349 }, { "epoch": 0.9677151974802105, "grad_norm": 0.41119641065597534, "learning_rate": 1.616312321273157e-07, "loss": 0.6746, "step": 23350 }, { "epoch": 0.9677566413858842, "grad_norm": 0.40768709778785706, "learning_rate": 1.6142401259894731e-07, "loss": 0.6663, "step": 23351 }, { "epoch": 0.9677980852915579, "grad_norm": 0.4416126012802124, "learning_rate": 1.6121679307057896e-07, "loss": 0.6838, "step": 23352 }, { "epoch": 0.9678395291972316, "grad_norm": 0.4883098006248474, "learning_rate": 1.6100957354221061e-07, "loss": 0.6515, "step": 23353 }, { "epoch": 0.9678809731029052, "grad_norm": 0.4071291983127594, "learning_rate": 1.6080235401384227e-07, "loss": 0.694, "step": 23354 }, { "epoch": 0.9679224170085788, "grad_norm": 0.4490273594856262, "learning_rate": 1.6059513448547392e-07, "loss": 0.6887, "step": 23355 }, { "epoch": 0.9679638609142526, "grad_norm": 0.46576401591300964, "learning_rate": 1.6038791495710557e-07, "loss": 0.6078, "step": 23356 }, { "epoch": 0.9680053048199262, "grad_norm": 0.3993131220340729, "learning_rate": 1.6018069542873722e-07, "loss": 0.6732, "step": 23357 }, { "epoch": 0.9680467487255999, "grad_norm": 0.40115588903427124, "learning_rate": 1.5997347590036887e-07, "loss": 0.655, "step": 
23358 }, { "epoch": 0.9680881926312735, "grad_norm": 0.430477112531662, "learning_rate": 1.5976625637200052e-07, "loss": 0.6547, "step": 23359 }, { "epoch": 0.9681296365369473, "grad_norm": 0.436361700296402, "learning_rate": 1.5955903684363217e-07, "loss": 0.6436, "step": 23360 }, { "epoch": 0.9681710804426209, "grad_norm": 0.46355488896369934, "learning_rate": 1.593518173152638e-07, "loss": 0.6467, "step": 23361 }, { "epoch": 0.9682125243482946, "grad_norm": 0.39769119024276733, "learning_rate": 1.5914459778689544e-07, "loss": 0.6144, "step": 23362 }, { "epoch": 0.9682539682539683, "grad_norm": 0.4173765182495117, "learning_rate": 1.589373782585271e-07, "loss": 0.6442, "step": 23363 }, { "epoch": 0.9682954121596419, "grad_norm": 0.47269025444984436, "learning_rate": 1.5873015873015874e-07, "loss": 0.702, "step": 23364 }, { "epoch": 0.9683368560653156, "grad_norm": 0.4626195728778839, "learning_rate": 1.585229392017904e-07, "loss": 0.7061, "step": 23365 }, { "epoch": 0.9683782999709892, "grad_norm": 0.4063701927661896, "learning_rate": 1.5831571967342204e-07, "loss": 0.6123, "step": 23366 }, { "epoch": 0.968419743876663, "grad_norm": 0.41015464067459106, "learning_rate": 1.581085001450537e-07, "loss": 0.6926, "step": 23367 }, { "epoch": 0.9684611877823366, "grad_norm": 0.41619399189949036, "learning_rate": 1.5790128061668534e-07, "loss": 0.6803, "step": 23368 }, { "epoch": 0.9685026316880103, "grad_norm": 0.39935314655303955, "learning_rate": 1.57694061088317e-07, "loss": 0.689, "step": 23369 }, { "epoch": 0.9685440755936839, "grad_norm": 0.4308881163597107, "learning_rate": 1.5748684155994862e-07, "loss": 0.6656, "step": 23370 }, { "epoch": 0.9685855194993577, "grad_norm": 0.4899929463863373, "learning_rate": 1.5727962203158027e-07, "loss": 0.6705, "step": 23371 }, { "epoch": 0.9686269634050313, "grad_norm": 0.41016316413879395, "learning_rate": 1.5707240250321192e-07, "loss": 0.6702, "step": 23372 }, { "epoch": 0.9686684073107049, "grad_norm": 
0.4686843156814575, "learning_rate": 1.5686518297484357e-07, "loss": 0.6711, "step": 23373 }, { "epoch": 0.9687098512163786, "grad_norm": 0.4339531362056732, "learning_rate": 1.566579634464752e-07, "loss": 0.6273, "step": 23374 }, { "epoch": 0.9687512951220523, "grad_norm": 0.3982437551021576, "learning_rate": 1.5645074391810684e-07, "loss": 0.6021, "step": 23375 }, { "epoch": 0.968792739027726, "grad_norm": 0.424327552318573, "learning_rate": 1.5624352438973852e-07, "loss": 0.676, "step": 23376 }, { "epoch": 0.9688341829333996, "grad_norm": 0.40400204062461853, "learning_rate": 1.5603630486137014e-07, "loss": 0.6531, "step": 23377 }, { "epoch": 0.9688756268390734, "grad_norm": 0.41770923137664795, "learning_rate": 1.558290853330018e-07, "loss": 0.6454, "step": 23378 }, { "epoch": 0.968917070744747, "grad_norm": 0.4862979054450989, "learning_rate": 1.5562186580463344e-07, "loss": 0.7075, "step": 23379 }, { "epoch": 0.9689585146504207, "grad_norm": 0.43865296244621277, "learning_rate": 1.554146462762651e-07, "loss": 0.6987, "step": 23380 }, { "epoch": 0.9689999585560943, "grad_norm": 0.4065636992454529, "learning_rate": 1.5520742674789675e-07, "loss": 0.671, "step": 23381 }, { "epoch": 0.969041402461768, "grad_norm": 0.42944273352622986, "learning_rate": 1.550002072195284e-07, "loss": 0.6766, "step": 23382 }, { "epoch": 0.9690828463674417, "grad_norm": 0.4478388726711273, "learning_rate": 1.5479298769116002e-07, "loss": 0.7327, "step": 23383 }, { "epoch": 0.9691242902731153, "grad_norm": 0.3859099745750427, "learning_rate": 1.5458576816279167e-07, "loss": 0.6332, "step": 23384 }, { "epoch": 0.969165734178789, "grad_norm": 0.39606973528862, "learning_rate": 1.5437854863442332e-07, "loss": 0.6318, "step": 23385 }, { "epoch": 0.9692071780844627, "grad_norm": 0.41753703355789185, "learning_rate": 1.5417132910605497e-07, "loss": 0.6798, "step": 23386 }, { "epoch": 0.9692486219901364, "grad_norm": 0.447609007358551, "learning_rate": 1.5396410957768662e-07, "loss": 0.6615, 
"step": 23387 }, { "epoch": 0.96929006589581, "grad_norm": 0.4227668344974518, "learning_rate": 1.5375689004931827e-07, "loss": 0.6288, "step": 23388 }, { "epoch": 0.9693315098014836, "grad_norm": 0.42959046363830566, "learning_rate": 1.535496705209499e-07, "loss": 0.6447, "step": 23389 }, { "epoch": 0.9693729537071574, "grad_norm": 0.39226147532463074, "learning_rate": 1.5334245099258155e-07, "loss": 0.6409, "step": 23390 }, { "epoch": 0.969414397612831, "grad_norm": 0.3937316834926605, "learning_rate": 1.531352314642132e-07, "loss": 0.6512, "step": 23391 }, { "epoch": 0.9694558415185047, "grad_norm": 0.4207232594490051, "learning_rate": 1.5292801193584485e-07, "loss": 0.6849, "step": 23392 }, { "epoch": 0.9694972854241783, "grad_norm": 0.45545512437820435, "learning_rate": 1.527207924074765e-07, "loss": 0.6663, "step": 23393 }, { "epoch": 0.9695387293298521, "grad_norm": 0.42424553632736206, "learning_rate": 1.5251357287910815e-07, "loss": 0.662, "step": 23394 }, { "epoch": 0.9695801732355257, "grad_norm": 0.4297972321510315, "learning_rate": 1.5230635335073977e-07, "loss": 0.6929, "step": 23395 }, { "epoch": 0.9696216171411994, "grad_norm": 0.4273180663585663, "learning_rate": 1.5209913382237142e-07, "loss": 0.6865, "step": 23396 }, { "epoch": 0.969663061046873, "grad_norm": 0.42989614605903625, "learning_rate": 1.5189191429400307e-07, "loss": 0.6635, "step": 23397 }, { "epoch": 0.9697045049525467, "grad_norm": 0.4713872969150543, "learning_rate": 1.5168469476563472e-07, "loss": 0.6807, "step": 23398 }, { "epoch": 0.9697459488582204, "grad_norm": 0.3995877802371979, "learning_rate": 1.5147747523726637e-07, "loss": 0.6591, "step": 23399 }, { "epoch": 0.969787392763894, "grad_norm": 0.4467785954475403, "learning_rate": 1.5127025570889802e-07, "loss": 0.6897, "step": 23400 }, { "epoch": 0.9698288366695678, "grad_norm": 0.4269404113292694, "learning_rate": 1.5106303618052965e-07, "loss": 0.6963, "step": 23401 }, { "epoch": 0.9698702805752414, "grad_norm": 
0.40911659598350525, "learning_rate": 1.508558166521613e-07, "loss": 0.6409, "step": 23402 }, { "epoch": 0.9699117244809151, "grad_norm": 0.4664289951324463, "learning_rate": 1.5064859712379295e-07, "loss": 0.6381, "step": 23403 }, { "epoch": 0.9699531683865887, "grad_norm": 0.4427375793457031, "learning_rate": 1.504413775954246e-07, "loss": 0.6588, "step": 23404 }, { "epoch": 0.9699946122922625, "grad_norm": 0.40940624475479126, "learning_rate": 1.5023415806705625e-07, "loss": 0.6373, "step": 23405 }, { "epoch": 0.9700360561979361, "grad_norm": 0.44930630922317505, "learning_rate": 1.500269385386879e-07, "loss": 0.6866, "step": 23406 }, { "epoch": 0.9700775001036097, "grad_norm": 0.41708970069885254, "learning_rate": 1.4981971901031955e-07, "loss": 0.673, "step": 23407 }, { "epoch": 0.9701189440092834, "grad_norm": 0.40876084566116333, "learning_rate": 1.4961249948195117e-07, "loss": 0.7266, "step": 23408 }, { "epoch": 0.9701603879149571, "grad_norm": 0.4588559865951538, "learning_rate": 1.4940527995358282e-07, "loss": 0.7056, "step": 23409 }, { "epoch": 0.9702018318206308, "grad_norm": 0.4186292290687561, "learning_rate": 1.4919806042521447e-07, "loss": 0.6167, "step": 23410 }, { "epoch": 0.9702432757263044, "grad_norm": 0.4015178084373474, "learning_rate": 1.4899084089684612e-07, "loss": 0.6266, "step": 23411 }, { "epoch": 0.9702847196319782, "grad_norm": 0.43719688057899475, "learning_rate": 1.4878362136847777e-07, "loss": 0.6555, "step": 23412 }, { "epoch": 0.9703261635376518, "grad_norm": 0.423287957906723, "learning_rate": 1.4857640184010942e-07, "loss": 0.6404, "step": 23413 }, { "epoch": 0.9703676074433255, "grad_norm": 0.427774578332901, "learning_rate": 1.4836918231174107e-07, "loss": 0.6619, "step": 23414 }, { "epoch": 0.9704090513489991, "grad_norm": 0.40787872672080994, "learning_rate": 1.4816196278337272e-07, "loss": 0.6544, "step": 23415 }, { "epoch": 0.9704504952546728, "grad_norm": 0.4110839366912842, "learning_rate": 1.4795474325500438e-07, 
"loss": 0.6324, "step": 23416 }, { "epoch": 0.9704919391603465, "grad_norm": 0.46905484795570374, "learning_rate": 1.47747523726636e-07, "loss": 0.729, "step": 23417 }, { "epoch": 0.9705333830660201, "grad_norm": 0.41941845417022705, "learning_rate": 1.4754030419826765e-07, "loss": 0.661, "step": 23418 }, { "epoch": 0.9705748269716938, "grad_norm": 0.40946829319000244, "learning_rate": 1.473330846698993e-07, "loss": 0.644, "step": 23419 }, { "epoch": 0.9706162708773675, "grad_norm": 0.4393613636493683, "learning_rate": 1.4712586514153095e-07, "loss": 0.6509, "step": 23420 }, { "epoch": 0.9706577147830412, "grad_norm": 0.41081106662750244, "learning_rate": 1.469186456131626e-07, "loss": 0.6946, "step": 23421 }, { "epoch": 0.9706991586887148, "grad_norm": 0.3804410398006439, "learning_rate": 1.4671142608479425e-07, "loss": 0.6548, "step": 23422 }, { "epoch": 0.9707406025943885, "grad_norm": 0.4108729362487793, "learning_rate": 1.465042065564259e-07, "loss": 0.6477, "step": 23423 }, { "epoch": 0.9707820465000622, "grad_norm": 0.4490480422973633, "learning_rate": 1.4629698702805755e-07, "loss": 0.7046, "step": 23424 }, { "epoch": 0.9708234904057358, "grad_norm": 0.42290663719177246, "learning_rate": 1.460897674996892e-07, "loss": 0.6602, "step": 23425 }, { "epoch": 0.9708649343114095, "grad_norm": 0.400392085313797, "learning_rate": 1.4588254797132083e-07, "loss": 0.6521, "step": 23426 }, { "epoch": 0.9709063782170831, "grad_norm": 0.41102978587150574, "learning_rate": 1.4567532844295248e-07, "loss": 0.6268, "step": 23427 }, { "epoch": 0.9709478221227569, "grad_norm": 0.4439678192138672, "learning_rate": 1.4546810891458413e-07, "loss": 0.6189, "step": 23428 }, { "epoch": 0.9709892660284305, "grad_norm": 0.43098974227905273, "learning_rate": 1.4526088938621578e-07, "loss": 0.6694, "step": 23429 }, { "epoch": 0.9710307099341042, "grad_norm": 0.41941556334495544, "learning_rate": 1.4505366985784743e-07, "loss": 0.6556, "step": 23430 }, { "epoch": 0.9710721538397779, 
"grad_norm": 0.4133436679840088, "learning_rate": 1.4484645032947908e-07, "loss": 0.6183, "step": 23431 }, { "epoch": 0.9711135977454516, "grad_norm": 0.4090350866317749, "learning_rate": 1.446392308011107e-07, "loss": 0.6233, "step": 23432 }, { "epoch": 0.9711550416511252, "grad_norm": 0.39913684129714966, "learning_rate": 1.4443201127274235e-07, "loss": 0.6758, "step": 23433 }, { "epoch": 0.9711964855567988, "grad_norm": 0.3914391100406647, "learning_rate": 1.44224791744374e-07, "loss": 0.6084, "step": 23434 }, { "epoch": 0.9712379294624726, "grad_norm": 0.42958560585975647, "learning_rate": 1.4401757221600565e-07, "loss": 0.6943, "step": 23435 }, { "epoch": 0.9712793733681462, "grad_norm": 0.42146405577659607, "learning_rate": 1.438103526876373e-07, "loss": 0.6588, "step": 23436 }, { "epoch": 0.9713208172738199, "grad_norm": 0.4668481647968292, "learning_rate": 1.4360313315926895e-07, "loss": 0.7119, "step": 23437 }, { "epoch": 0.9713622611794935, "grad_norm": 0.42503249645233154, "learning_rate": 1.4339591363090058e-07, "loss": 0.6243, "step": 23438 }, { "epoch": 0.9714037050851673, "grad_norm": 0.4010186791419983, "learning_rate": 1.4318869410253223e-07, "loss": 0.6362, "step": 23439 }, { "epoch": 0.9714451489908409, "grad_norm": 0.4168928265571594, "learning_rate": 1.4298147457416388e-07, "loss": 0.6802, "step": 23440 }, { "epoch": 0.9714865928965146, "grad_norm": 0.4136193096637726, "learning_rate": 1.4277425504579553e-07, "loss": 0.6711, "step": 23441 }, { "epoch": 0.9715280368021882, "grad_norm": 0.40608352422714233, "learning_rate": 1.4256703551742718e-07, "loss": 0.649, "step": 23442 }, { "epoch": 0.9715694807078619, "grad_norm": 0.4512658715248108, "learning_rate": 1.4235981598905883e-07, "loss": 0.6934, "step": 23443 }, { "epoch": 0.9716109246135356, "grad_norm": 0.40187135338783264, "learning_rate": 1.4215259646069048e-07, "loss": 0.7305, "step": 23444 }, { "epoch": 0.9716523685192092, "grad_norm": 0.40227022767066956, "learning_rate": 
1.419453769323221e-07, "loss": 0.6433, "step": 23445 }, { "epoch": 0.971693812424883, "grad_norm": 0.3988104462623596, "learning_rate": 1.4173815740395375e-07, "loss": 0.6602, "step": 23446 }, { "epoch": 0.9717352563305566, "grad_norm": 0.39173558354377747, "learning_rate": 1.415309378755854e-07, "loss": 0.6198, "step": 23447 }, { "epoch": 0.9717767002362303, "grad_norm": 0.41801536083221436, "learning_rate": 1.4132371834721705e-07, "loss": 0.6556, "step": 23448 }, { "epoch": 0.9718181441419039, "grad_norm": 0.41201284527778625, "learning_rate": 1.411164988188487e-07, "loss": 0.6741, "step": 23449 }, { "epoch": 0.9718595880475775, "grad_norm": 0.41657787561416626, "learning_rate": 1.4090927929048035e-07, "loss": 0.6713, "step": 23450 }, { "epoch": 0.9719010319532513, "grad_norm": 0.43569543957710266, "learning_rate": 1.4070205976211198e-07, "loss": 0.7495, "step": 23451 }, { "epoch": 0.9719424758589249, "grad_norm": 0.43175867199897766, "learning_rate": 1.4049484023374363e-07, "loss": 0.6631, "step": 23452 }, { "epoch": 0.9719839197645986, "grad_norm": 0.43650224804878235, "learning_rate": 1.4028762070537528e-07, "loss": 0.6761, "step": 23453 }, { "epoch": 0.9720253636702723, "grad_norm": 0.40753138065338135, "learning_rate": 1.4008040117700693e-07, "loss": 0.6531, "step": 23454 }, { "epoch": 0.972066807575946, "grad_norm": 0.46421778202056885, "learning_rate": 1.3987318164863858e-07, "loss": 0.6708, "step": 23455 }, { "epoch": 0.9721082514816196, "grad_norm": 0.4288656413555145, "learning_rate": 1.3966596212027023e-07, "loss": 0.6533, "step": 23456 }, { "epoch": 0.9721496953872933, "grad_norm": 0.4824259281158447, "learning_rate": 1.3945874259190185e-07, "loss": 0.6387, "step": 23457 }, { "epoch": 0.972191139292967, "grad_norm": 0.4637094736099243, "learning_rate": 1.392515230635335e-07, "loss": 0.7178, "step": 23458 }, { "epoch": 0.9722325831986406, "grad_norm": 0.43972375988960266, "learning_rate": 1.3904430353516516e-07, "loss": 0.6492, "step": 23459 }, { 
"epoch": 0.9722740271043143, "grad_norm": 0.45529282093048096, "learning_rate": 1.388370840067968e-07, "loss": 0.6975, "step": 23460 }, { "epoch": 0.9723154710099879, "grad_norm": 0.4419008791446686, "learning_rate": 1.3862986447842846e-07, "loss": 0.6423, "step": 23461 }, { "epoch": 0.9723569149156617, "grad_norm": 0.431314617395401, "learning_rate": 1.384226449500601e-07, "loss": 0.6711, "step": 23462 }, { "epoch": 0.9723983588213353, "grad_norm": 0.5047176480293274, "learning_rate": 1.3821542542169176e-07, "loss": 0.7446, "step": 23463 }, { "epoch": 0.972439802727009, "grad_norm": 0.42560112476348877, "learning_rate": 1.380082058933234e-07, "loss": 0.6559, "step": 23464 }, { "epoch": 0.9724812466326826, "grad_norm": 0.4083189070224762, "learning_rate": 1.3780098636495503e-07, "loss": 0.6489, "step": 23465 }, { "epoch": 0.9725226905383564, "grad_norm": 0.4638565182685852, "learning_rate": 1.3759376683658668e-07, "loss": 0.6484, "step": 23466 }, { "epoch": 0.97256413444403, "grad_norm": 0.394010990858078, "learning_rate": 1.3738654730821833e-07, "loss": 0.6648, "step": 23467 }, { "epoch": 0.9726055783497036, "grad_norm": 0.38691356778144836, "learning_rate": 1.3717932777984998e-07, "loss": 0.6201, "step": 23468 }, { "epoch": 0.9726470222553774, "grad_norm": 0.4198537766933441, "learning_rate": 1.3697210825148163e-07, "loss": 0.6251, "step": 23469 }, { "epoch": 0.972688466161051, "grad_norm": 0.4543820917606354, "learning_rate": 1.3676488872311328e-07, "loss": 0.6804, "step": 23470 }, { "epoch": 0.9727299100667247, "grad_norm": 0.43026572465896606, "learning_rate": 1.3655766919474493e-07, "loss": 0.7375, "step": 23471 }, { "epoch": 0.9727713539723983, "grad_norm": 0.4219870865345001, "learning_rate": 1.3635044966637658e-07, "loss": 0.6926, "step": 23472 }, { "epoch": 0.9728127978780721, "grad_norm": 0.46639543771743774, "learning_rate": 1.3614323013800823e-07, "loss": 0.6682, "step": 23473 }, { "epoch": 0.9728542417837457, "grad_norm": 0.3914889395236969, 
"learning_rate": 1.3593601060963986e-07, "loss": 0.6259, "step": 23474 }, { "epoch": 0.9728956856894194, "grad_norm": 0.41712695360183716, "learning_rate": 1.357287910812715e-07, "loss": 0.6748, "step": 23475 }, { "epoch": 0.972937129595093, "grad_norm": 0.4274061620235443, "learning_rate": 1.3552157155290316e-07, "loss": 0.6654, "step": 23476 }, { "epoch": 0.9729785735007667, "grad_norm": 0.4057908058166504, "learning_rate": 1.353143520245348e-07, "loss": 0.6598, "step": 23477 }, { "epoch": 0.9730200174064404, "grad_norm": 0.4555774927139282, "learning_rate": 1.3510713249616646e-07, "loss": 0.7072, "step": 23478 }, { "epoch": 0.973061461312114, "grad_norm": 0.4330986440181732, "learning_rate": 1.348999129677981e-07, "loss": 0.6842, "step": 23479 }, { "epoch": 0.9731029052177878, "grad_norm": 0.42811378836631775, "learning_rate": 1.3469269343942976e-07, "loss": 0.6814, "step": 23480 }, { "epoch": 0.9731443491234614, "grad_norm": 0.3939908444881439, "learning_rate": 1.3448547391106138e-07, "loss": 0.6355, "step": 23481 }, { "epoch": 0.9731857930291351, "grad_norm": 0.43714234232902527, "learning_rate": 1.3427825438269303e-07, "loss": 0.6729, "step": 23482 }, { "epoch": 0.9732272369348087, "grad_norm": 0.4425908327102661, "learning_rate": 1.3407103485432468e-07, "loss": 0.6401, "step": 23483 }, { "epoch": 0.9732686808404825, "grad_norm": 0.38999950885772705, "learning_rate": 1.3386381532595633e-07, "loss": 0.6245, "step": 23484 }, { "epoch": 0.9733101247461561, "grad_norm": 0.46349036693573, "learning_rate": 1.3365659579758798e-07, "loss": 0.6816, "step": 23485 }, { "epoch": 0.9733515686518297, "grad_norm": 0.41009509563446045, "learning_rate": 1.3344937626921964e-07, "loss": 0.6399, "step": 23486 }, { "epoch": 0.9733930125575034, "grad_norm": 0.3979561924934387, "learning_rate": 1.3324215674085129e-07, "loss": 0.6628, "step": 23487 }, { "epoch": 0.9734344564631771, "grad_norm": 0.3908049464225769, "learning_rate": 1.330349372124829e-07, "loss": 0.6212, "step": 23488 
}, { "epoch": 0.9734759003688508, "grad_norm": 0.43977534770965576, "learning_rate": 1.3282771768411456e-07, "loss": 0.6567, "step": 23489 }, { "epoch": 0.9735173442745244, "grad_norm": 0.4015119969844818, "learning_rate": 1.326204981557462e-07, "loss": 0.6228, "step": 23490 }, { "epoch": 0.9735587881801981, "grad_norm": 0.41544198989868164, "learning_rate": 1.3241327862737786e-07, "loss": 0.6439, "step": 23491 }, { "epoch": 0.9736002320858718, "grad_norm": 0.48099830746650696, "learning_rate": 1.322060590990095e-07, "loss": 0.7253, "step": 23492 }, { "epoch": 0.9736416759915455, "grad_norm": 0.42074263095855713, "learning_rate": 1.3199883957064116e-07, "loss": 0.6509, "step": 23493 }, { "epoch": 0.9736831198972191, "grad_norm": 0.4198032021522522, "learning_rate": 1.3179162004227279e-07, "loss": 0.7036, "step": 23494 }, { "epoch": 0.9737245638028927, "grad_norm": 0.3943674862384796, "learning_rate": 1.3158440051390444e-07, "loss": 0.6572, "step": 23495 }, { "epoch": 0.9737660077085665, "grad_norm": 0.4248095154762268, "learning_rate": 1.3137718098553609e-07, "loss": 0.689, "step": 23496 }, { "epoch": 0.9738074516142401, "grad_norm": 0.4173724353313446, "learning_rate": 1.3116996145716774e-07, "loss": 0.6312, "step": 23497 }, { "epoch": 0.9738488955199138, "grad_norm": 0.4483766555786133, "learning_rate": 1.3096274192879939e-07, "loss": 0.6439, "step": 23498 }, { "epoch": 0.9738903394255874, "grad_norm": 0.4117897152900696, "learning_rate": 1.3075552240043104e-07, "loss": 0.6943, "step": 23499 }, { "epoch": 0.9739317833312612, "grad_norm": 0.4290332794189453, "learning_rate": 1.3054830287206266e-07, "loss": 0.6876, "step": 23500 }, { "epoch": 0.9739732272369348, "grad_norm": 0.3797157406806946, "learning_rate": 1.303410833436943e-07, "loss": 0.6178, "step": 23501 }, { "epoch": 0.9740146711426085, "grad_norm": 0.3939172029495239, "learning_rate": 1.3013386381532596e-07, "loss": 0.665, "step": 23502 }, { "epoch": 0.9740561150482822, "grad_norm": 0.4490468204021454, 
"learning_rate": 1.299266442869576e-07, "loss": 0.6869, "step": 23503 }, { "epoch": 0.9740975589539558, "grad_norm": 0.443692684173584, "learning_rate": 1.2971942475858926e-07, "loss": 0.6444, "step": 23504 }, { "epoch": 0.9741390028596295, "grad_norm": 0.47876259684562683, "learning_rate": 1.295122052302209e-07, "loss": 0.629, "step": 23505 }, { "epoch": 0.9741804467653031, "grad_norm": 0.4409225881099701, "learning_rate": 1.2930498570185254e-07, "loss": 0.693, "step": 23506 }, { "epoch": 0.9742218906709769, "grad_norm": 0.454172283411026, "learning_rate": 1.2909776617348419e-07, "loss": 0.6724, "step": 23507 }, { "epoch": 0.9742633345766505, "grad_norm": 0.410898357629776, "learning_rate": 1.2889054664511584e-07, "loss": 0.6686, "step": 23508 }, { "epoch": 0.9743047784823242, "grad_norm": 0.4191705584526062, "learning_rate": 1.286833271167475e-07, "loss": 0.6721, "step": 23509 }, { "epoch": 0.9743462223879978, "grad_norm": 0.4100308120250702, "learning_rate": 1.2847610758837914e-07, "loss": 0.6765, "step": 23510 }, { "epoch": 0.9743876662936715, "grad_norm": 0.40019726753234863, "learning_rate": 1.282688880600108e-07, "loss": 0.632, "step": 23511 }, { "epoch": 0.9744291101993452, "grad_norm": 0.4674622714519501, "learning_rate": 1.280616685316424e-07, "loss": 0.6628, "step": 23512 }, { "epoch": 0.9744705541050188, "grad_norm": 0.410868376493454, "learning_rate": 1.2785444900327406e-07, "loss": 0.6991, "step": 23513 }, { "epoch": 0.9745119980106925, "grad_norm": 0.40889397263526917, "learning_rate": 1.276472294749057e-07, "loss": 0.6384, "step": 23514 }, { "epoch": 0.9745534419163662, "grad_norm": 0.39269715547561646, "learning_rate": 1.2744000994653736e-07, "loss": 0.6394, "step": 23515 }, { "epoch": 0.9745948858220399, "grad_norm": 0.4291469156742096, "learning_rate": 1.2723279041816901e-07, "loss": 0.6755, "step": 23516 }, { "epoch": 0.9746363297277135, "grad_norm": 0.4319342374801636, "learning_rate": 1.2702557088980066e-07, "loss": 0.6724, "step": 23517 }, { 
"epoch": 0.9746777736333873, "grad_norm": 0.41709399223327637, "learning_rate": 1.2681835136143231e-07, "loss": 0.6733, "step": 23518 }, { "epoch": 0.9747192175390609, "grad_norm": 0.40622055530548096, "learning_rate": 1.2661113183306396e-07, "loss": 0.6843, "step": 23519 }, { "epoch": 0.9747606614447345, "grad_norm": 0.431393027305603, "learning_rate": 1.2640391230469561e-07, "loss": 0.6304, "step": 23520 }, { "epoch": 0.9748021053504082, "grad_norm": 0.41044142842292786, "learning_rate": 1.2619669277632724e-07, "loss": 0.637, "step": 23521 }, { "epoch": 0.9748435492560819, "grad_norm": 0.4152747392654419, "learning_rate": 1.259894732479589e-07, "loss": 0.6643, "step": 23522 }, { "epoch": 0.9748849931617556, "grad_norm": 0.39618974924087524, "learning_rate": 1.2578225371959054e-07, "loss": 0.6296, "step": 23523 }, { "epoch": 0.9749264370674292, "grad_norm": 0.398184597492218, "learning_rate": 1.255750341912222e-07, "loss": 0.618, "step": 23524 }, { "epoch": 0.9749678809731029, "grad_norm": 0.4166944622993469, "learning_rate": 1.2536781466285384e-07, "loss": 0.6157, "step": 23525 }, { "epoch": 0.9750093248787766, "grad_norm": 0.4399368464946747, "learning_rate": 1.251605951344855e-07, "loss": 0.6594, "step": 23526 }, { "epoch": 0.9750507687844503, "grad_norm": 0.42428773641586304, "learning_rate": 1.2495337560611714e-07, "loss": 0.6667, "step": 23527 }, { "epoch": 0.9750922126901239, "grad_norm": 0.4176606833934784, "learning_rate": 1.247461560777488e-07, "loss": 0.6824, "step": 23528 }, { "epoch": 0.9751336565957975, "grad_norm": 0.397001713514328, "learning_rate": 1.2453893654938044e-07, "loss": 0.6259, "step": 23529 }, { "epoch": 0.9751751005014713, "grad_norm": 0.47528907656669617, "learning_rate": 1.2433171702101207e-07, "loss": 0.689, "step": 23530 }, { "epoch": 0.9752165444071449, "grad_norm": 0.44263598322868347, "learning_rate": 1.2412449749264372e-07, "loss": 0.6686, "step": 23531 }, { "epoch": 0.9752579883128186, "grad_norm": 0.42034009099006653, 
"learning_rate": 1.2391727796427537e-07, "loss": 0.6261, "step": 23532 }, { "epoch": 0.9752994322184922, "grad_norm": 0.4552428126335144, "learning_rate": 1.2371005843590702e-07, "loss": 0.6697, "step": 23533 }, { "epoch": 0.975340876124166, "grad_norm": 0.4031771719455719, "learning_rate": 1.2350283890753867e-07, "loss": 0.6548, "step": 23534 }, { "epoch": 0.9753823200298396, "grad_norm": 0.3973510265350342, "learning_rate": 1.2329561937917032e-07, "loss": 0.6367, "step": 23535 }, { "epoch": 0.9754237639355133, "grad_norm": 0.4100991189479828, "learning_rate": 1.2308839985080197e-07, "loss": 0.6609, "step": 23536 }, { "epoch": 0.975465207841187, "grad_norm": 0.466200053691864, "learning_rate": 1.228811803224336e-07, "loss": 0.7, "step": 23537 }, { "epoch": 0.9755066517468606, "grad_norm": 0.411411315202713, "learning_rate": 1.2267396079406524e-07, "loss": 0.6377, "step": 23538 }, { "epoch": 0.9755480956525343, "grad_norm": 0.4675273001194, "learning_rate": 1.224667412656969e-07, "loss": 0.7067, "step": 23539 }, { "epoch": 0.9755895395582079, "grad_norm": 0.40684938430786133, "learning_rate": 1.2225952173732854e-07, "loss": 0.7056, "step": 23540 }, { "epoch": 0.9756309834638817, "grad_norm": 0.41750404238700867, "learning_rate": 1.220523022089602e-07, "loss": 0.6707, "step": 23541 }, { "epoch": 0.9756724273695553, "grad_norm": 0.42446863651275635, "learning_rate": 1.2184508268059184e-07, "loss": 0.6744, "step": 23542 }, { "epoch": 0.975713871275229, "grad_norm": 0.4019761383533478, "learning_rate": 1.2163786315222347e-07, "loss": 0.6672, "step": 23543 }, { "epoch": 0.9757553151809026, "grad_norm": 0.41590309143066406, "learning_rate": 1.2143064362385512e-07, "loss": 0.6265, "step": 23544 }, { "epoch": 0.9757967590865764, "grad_norm": 0.4452741742134094, "learning_rate": 1.2122342409548677e-07, "loss": 0.6348, "step": 23545 }, { "epoch": 0.97583820299225, "grad_norm": 0.42487606406211853, "learning_rate": 1.2101620456711842e-07, "loss": 0.663, "step": 23546 }, { 
"epoch": 0.9758796468979236, "grad_norm": 0.43896546959877014, "learning_rate": 1.2080898503875007e-07, "loss": 0.6729, "step": 23547 }, { "epoch": 0.9759210908035973, "grad_norm": 0.40278294682502747, "learning_rate": 1.2060176551038172e-07, "loss": 0.6895, "step": 23548 }, { "epoch": 0.975962534709271, "grad_norm": 0.41020047664642334, "learning_rate": 1.2039454598201334e-07, "loss": 0.6653, "step": 23549 }, { "epoch": 0.9760039786149447, "grad_norm": 0.4382156729698181, "learning_rate": 1.20187326453645e-07, "loss": 0.7051, "step": 23550 }, { "epoch": 0.9760454225206183, "grad_norm": 0.38714325428009033, "learning_rate": 1.1998010692527664e-07, "loss": 0.6046, "step": 23551 }, { "epoch": 0.976086866426292, "grad_norm": 0.4153909385204315, "learning_rate": 1.197728873969083e-07, "loss": 0.6732, "step": 23552 }, { "epoch": 0.9761283103319657, "grad_norm": 0.45773690938949585, "learning_rate": 1.1956566786853994e-07, "loss": 0.7416, "step": 23553 }, { "epoch": 0.9761697542376394, "grad_norm": 0.429177850484848, "learning_rate": 1.193584483401716e-07, "loss": 0.6677, "step": 23554 }, { "epoch": 0.976211198143313, "grad_norm": 0.45945897698402405, "learning_rate": 1.1915122881180323e-07, "loss": 0.6868, "step": 23555 }, { "epoch": 0.9762526420489867, "grad_norm": 0.41973116993904114, "learning_rate": 1.1894400928343488e-07, "loss": 0.6425, "step": 23556 }, { "epoch": 0.9762940859546604, "grad_norm": 0.40328025817871094, "learning_rate": 1.1873678975506652e-07, "loss": 0.6141, "step": 23557 }, { "epoch": 0.976335529860334, "grad_norm": 0.4115675985813141, "learning_rate": 1.1852957022669817e-07, "loss": 0.6982, "step": 23558 }, { "epoch": 0.9763769737660077, "grad_norm": 0.437727689743042, "learning_rate": 1.1832235069832982e-07, "loss": 0.6696, "step": 23559 }, { "epoch": 0.9764184176716814, "grad_norm": 0.43778878450393677, "learning_rate": 1.1811513116996147e-07, "loss": 0.6683, "step": 23560 }, { "epoch": 0.9764598615773551, "grad_norm": 0.4059181809425354, 
"learning_rate": 1.1790791164159311e-07, "loss": 0.6321, "step": 23561 }, { "epoch": 0.9765013054830287, "grad_norm": 0.4220151901245117, "learning_rate": 1.1770069211322476e-07, "loss": 0.6864, "step": 23562 }, { "epoch": 0.9765427493887024, "grad_norm": 0.43281692266464233, "learning_rate": 1.1749347258485641e-07, "loss": 0.6511, "step": 23563 }, { "epoch": 0.9765841932943761, "grad_norm": 0.4162597060203552, "learning_rate": 1.1728625305648806e-07, "loss": 0.6678, "step": 23564 }, { "epoch": 0.9766256372000497, "grad_norm": 0.45709019899368286, "learning_rate": 1.1707903352811971e-07, "loss": 0.7185, "step": 23565 }, { "epoch": 0.9766670811057234, "grad_norm": 0.40548428893089294, "learning_rate": 1.1687181399975135e-07, "loss": 0.6986, "step": 23566 }, { "epoch": 0.976708525011397, "grad_norm": 0.4265049993991852, "learning_rate": 1.16664594471383e-07, "loss": 0.6658, "step": 23567 }, { "epoch": 0.9767499689170708, "grad_norm": 0.3892822265625, "learning_rate": 1.1645737494301463e-07, "loss": 0.6349, "step": 23568 }, { "epoch": 0.9767914128227444, "grad_norm": 0.4082087576389313, "learning_rate": 1.1625015541464628e-07, "loss": 0.6906, "step": 23569 }, { "epoch": 0.9768328567284181, "grad_norm": 0.4802243113517761, "learning_rate": 1.1604293588627793e-07, "loss": 0.7235, "step": 23570 }, { "epoch": 0.9768743006340918, "grad_norm": 0.40916332602500916, "learning_rate": 1.1583571635790958e-07, "loss": 0.6626, "step": 23571 }, { "epoch": 0.9769157445397654, "grad_norm": 0.4045411944389343, "learning_rate": 1.1562849682954123e-07, "loss": 0.6022, "step": 23572 }, { "epoch": 0.9769571884454391, "grad_norm": 0.39164069294929504, "learning_rate": 1.1542127730117288e-07, "loss": 0.7053, "step": 23573 }, { "epoch": 0.9769986323511127, "grad_norm": 0.4402811825275421, "learning_rate": 1.1521405777280451e-07, "loss": 0.61, "step": 23574 }, { "epoch": 0.9770400762567865, "grad_norm": 0.42288073897361755, "learning_rate": 1.1500683824443616e-07, "loss": 0.6365, "step": 
23575 }, { "epoch": 0.9770815201624601, "grad_norm": 0.38923558592796326, "learning_rate": 1.1479961871606781e-07, "loss": 0.6185, "step": 23576 }, { "epoch": 0.9771229640681338, "grad_norm": 0.39798402786254883, "learning_rate": 1.1459239918769946e-07, "loss": 0.6179, "step": 23577 }, { "epoch": 0.9771644079738074, "grad_norm": 0.39141547679901123, "learning_rate": 1.1438517965933111e-07, "loss": 0.6877, "step": 23578 }, { "epoch": 0.9772058518794812, "grad_norm": 0.41581299901008606, "learning_rate": 1.1417796013096276e-07, "loss": 0.6588, "step": 23579 }, { "epoch": 0.9772472957851548, "grad_norm": 0.41499218344688416, "learning_rate": 1.1397074060259438e-07, "loss": 0.6698, "step": 23580 }, { "epoch": 0.9772887396908284, "grad_norm": 0.41559165716171265, "learning_rate": 1.1376352107422603e-07, "loss": 0.6852, "step": 23581 }, { "epoch": 0.9773301835965021, "grad_norm": 0.4036775231361389, "learning_rate": 1.1355630154585769e-07, "loss": 0.6685, "step": 23582 }, { "epoch": 0.9773716275021758, "grad_norm": 0.4006056487560272, "learning_rate": 1.1334908201748934e-07, "loss": 0.688, "step": 23583 }, { "epoch": 0.9774130714078495, "grad_norm": 0.392010897397995, "learning_rate": 1.1314186248912099e-07, "loss": 0.6536, "step": 23584 }, { "epoch": 0.9774545153135231, "grad_norm": 0.41725683212280273, "learning_rate": 1.1293464296075264e-07, "loss": 0.7119, "step": 23585 }, { "epoch": 0.9774959592191969, "grad_norm": 0.42873620986938477, "learning_rate": 1.1272742343238427e-07, "loss": 0.6523, "step": 23586 }, { "epoch": 0.9775374031248705, "grad_norm": 0.45240551233291626, "learning_rate": 1.1252020390401592e-07, "loss": 0.7089, "step": 23587 }, { "epoch": 0.9775788470305442, "grad_norm": 0.3923335671424866, "learning_rate": 1.1231298437564757e-07, "loss": 0.626, "step": 23588 }, { "epoch": 0.9776202909362178, "grad_norm": 0.40390488505363464, "learning_rate": 1.1210576484727922e-07, "loss": 0.6865, "step": 23589 }, { "epoch": 0.9776617348418914, "grad_norm": 
0.3931303918361664, "learning_rate": 1.1189854531891086e-07, "loss": 0.6265, "step": 23590 }, { "epoch": 0.9777031787475652, "grad_norm": 0.4041198790073395, "learning_rate": 1.1169132579054251e-07, "loss": 0.6311, "step": 23591 }, { "epoch": 0.9777446226532388, "grad_norm": 0.42545369267463684, "learning_rate": 1.1148410626217415e-07, "loss": 0.6837, "step": 23592 }, { "epoch": 0.9777860665589125, "grad_norm": 0.41677185893058777, "learning_rate": 1.112768867338058e-07, "loss": 0.6619, "step": 23593 }, { "epoch": 0.9778275104645862, "grad_norm": 0.5093568563461304, "learning_rate": 1.1106966720543745e-07, "loss": 0.6908, "step": 23594 }, { "epoch": 0.9778689543702599, "grad_norm": 0.4546039402484894, "learning_rate": 1.108624476770691e-07, "loss": 0.7075, "step": 23595 }, { "epoch": 0.9779103982759335, "grad_norm": 0.39054110646247864, "learning_rate": 1.1065522814870075e-07, "loss": 0.6074, "step": 23596 }, { "epoch": 0.9779518421816072, "grad_norm": 0.4467930793762207, "learning_rate": 1.104480086203324e-07, "loss": 0.6509, "step": 23597 }, { "epoch": 0.9779932860872809, "grad_norm": 0.39826902747154236, "learning_rate": 1.1024078909196402e-07, "loss": 0.6677, "step": 23598 }, { "epoch": 0.9780347299929545, "grad_norm": 0.42044970393180847, "learning_rate": 1.1003356956359567e-07, "loss": 0.6748, "step": 23599 }, { "epoch": 0.9780761738986282, "grad_norm": 0.40181124210357666, "learning_rate": 1.0982635003522733e-07, "loss": 0.7295, "step": 23600 }, { "epoch": 0.9781176178043018, "grad_norm": 0.4282439053058624, "learning_rate": 1.0961913050685898e-07, "loss": 0.6953, "step": 23601 }, { "epoch": 0.9781590617099756, "grad_norm": 0.3857939541339874, "learning_rate": 1.0941191097849063e-07, "loss": 0.5785, "step": 23602 }, { "epoch": 0.9782005056156492, "grad_norm": 0.4286782741546631, "learning_rate": 1.0920469145012228e-07, "loss": 0.6606, "step": 23603 }, { "epoch": 0.9782419495213229, "grad_norm": 0.43351104855537415, "learning_rate": 1.089974719217539e-07, 
"loss": 0.6189, "step": 23604 }, { "epoch": 0.9782833934269966, "grad_norm": 0.4434877932071686, "learning_rate": 1.0879025239338555e-07, "loss": 0.6821, "step": 23605 }, { "epoch": 0.9783248373326703, "grad_norm": 0.410850465297699, "learning_rate": 1.085830328650172e-07, "loss": 0.6222, "step": 23606 }, { "epoch": 0.9783662812383439, "grad_norm": 0.41938742995262146, "learning_rate": 1.0837581333664885e-07, "loss": 0.7091, "step": 23607 }, { "epoch": 0.9784077251440175, "grad_norm": 0.4288516640663147, "learning_rate": 1.081685938082805e-07, "loss": 0.6318, "step": 23608 }, { "epoch": 0.9784491690496913, "grad_norm": 0.41946277022361755, "learning_rate": 1.0796137427991215e-07, "loss": 0.6326, "step": 23609 }, { "epoch": 0.9784906129553649, "grad_norm": 0.5122292041778564, "learning_rate": 1.077541547515438e-07, "loss": 0.6932, "step": 23610 }, { "epoch": 0.9785320568610386, "grad_norm": 0.3869394063949585, "learning_rate": 1.0754693522317544e-07, "loss": 0.6476, "step": 23611 }, { "epoch": 0.9785735007667122, "grad_norm": 0.42109549045562744, "learning_rate": 1.0733971569480709e-07, "loss": 0.656, "step": 23612 }, { "epoch": 0.978614944672386, "grad_norm": 0.4887112081050873, "learning_rate": 1.0713249616643873e-07, "loss": 0.7107, "step": 23613 }, { "epoch": 0.9786563885780596, "grad_norm": 0.41121748089790344, "learning_rate": 1.0692527663807038e-07, "loss": 0.6124, "step": 23614 }, { "epoch": 0.9786978324837333, "grad_norm": 0.41716399788856506, "learning_rate": 1.0671805710970203e-07, "loss": 0.6892, "step": 23615 }, { "epoch": 0.9787392763894069, "grad_norm": 0.42540186643600464, "learning_rate": 1.0651083758133368e-07, "loss": 0.6877, "step": 23616 }, { "epoch": 0.9787807202950806, "grad_norm": 0.4231995642185211, "learning_rate": 1.0630361805296532e-07, "loss": 0.6741, "step": 23617 }, { "epoch": 0.9788221642007543, "grad_norm": 0.44637176394462585, "learning_rate": 1.0609639852459697e-07, "loss": 0.686, "step": 23618 }, { "epoch": 0.9788636081064279, 
"grad_norm": 0.41902029514312744, "learning_rate": 1.0588917899622862e-07, "loss": 0.6324, "step": 23619 }, { "epoch": 0.9789050520121017, "grad_norm": 0.4379342794418335, "learning_rate": 1.0568195946786027e-07, "loss": 0.6943, "step": 23620 }, { "epoch": 0.9789464959177753, "grad_norm": 0.4473947286605835, "learning_rate": 1.0547473993949192e-07, "loss": 0.66, "step": 23621 }, { "epoch": 0.978987939823449, "grad_norm": 0.431602418422699, "learning_rate": 1.0526752041112357e-07, "loss": 0.6853, "step": 23622 }, { "epoch": 0.9790293837291226, "grad_norm": 0.4170432984828949, "learning_rate": 1.0506030088275519e-07, "loss": 0.6508, "step": 23623 }, { "epoch": 0.9790708276347962, "grad_norm": 0.4146314561367035, "learning_rate": 1.0485308135438684e-07, "loss": 0.6431, "step": 23624 }, { "epoch": 0.97911227154047, "grad_norm": 0.40374454855918884, "learning_rate": 1.0464586182601849e-07, "loss": 0.6909, "step": 23625 }, { "epoch": 0.9791537154461436, "grad_norm": 0.386064738035202, "learning_rate": 1.0443864229765014e-07, "loss": 0.6379, "step": 23626 }, { "epoch": 0.9791951593518173, "grad_norm": 0.4148416519165039, "learning_rate": 1.0423142276928179e-07, "loss": 0.6494, "step": 23627 }, { "epoch": 0.979236603257491, "grad_norm": 0.4147054851055145, "learning_rate": 1.0402420324091344e-07, "loss": 0.6641, "step": 23628 }, { "epoch": 0.9792780471631647, "grad_norm": 0.41580909490585327, "learning_rate": 1.0381698371254507e-07, "loss": 0.6527, "step": 23629 }, { "epoch": 0.9793194910688383, "grad_norm": 0.4065316319465637, "learning_rate": 1.0360976418417672e-07, "loss": 0.6545, "step": 23630 }, { "epoch": 0.979360934974512, "grad_norm": 0.4223558306694031, "learning_rate": 1.0340254465580837e-07, "loss": 0.7079, "step": 23631 }, { "epoch": 0.9794023788801857, "grad_norm": 0.4026843309402466, "learning_rate": 1.0319532512744002e-07, "loss": 0.6715, "step": 23632 }, { "epoch": 0.9794438227858593, "grad_norm": 0.42909160256385803, "learning_rate": 
1.0298810559907167e-07, "loss": 0.6948, "step": 23633 }, { "epoch": 0.979485266691533, "grad_norm": 0.42372986674308777, "learning_rate": 1.0278088607070332e-07, "loss": 0.6466, "step": 23634 }, { "epoch": 0.9795267105972066, "grad_norm": 0.4231073558330536, "learning_rate": 1.0257366654233496e-07, "loss": 0.6748, "step": 23635 }, { "epoch": 0.9795681545028804, "grad_norm": 0.42625677585601807, "learning_rate": 1.023664470139666e-07, "loss": 0.6216, "step": 23636 }, { "epoch": 0.979609598408554, "grad_norm": 0.41383159160614014, "learning_rate": 1.0215922748559824e-07, "loss": 0.6427, "step": 23637 }, { "epoch": 0.9796510423142277, "grad_norm": 0.40653562545776367, "learning_rate": 1.0195200795722989e-07, "loss": 0.6448, "step": 23638 }, { "epoch": 0.9796924862199013, "grad_norm": 0.40527260303497314, "learning_rate": 1.0174478842886154e-07, "loss": 0.6644, "step": 23639 }, { "epoch": 0.9797339301255751, "grad_norm": 0.43669235706329346, "learning_rate": 1.015375689004932e-07, "loss": 0.6689, "step": 23640 }, { "epoch": 0.9797753740312487, "grad_norm": 0.44632866978645325, "learning_rate": 1.0133034937212483e-07, "loss": 0.6846, "step": 23641 }, { "epoch": 0.9798168179369223, "grad_norm": 0.4209052622318268, "learning_rate": 1.0112312984375648e-07, "loss": 0.6644, "step": 23642 }, { "epoch": 0.9798582618425961, "grad_norm": 0.4505915641784668, "learning_rate": 1.0091591031538813e-07, "loss": 0.6555, "step": 23643 }, { "epoch": 0.9798997057482697, "grad_norm": 0.42974236607551575, "learning_rate": 1.0070869078701978e-07, "loss": 0.7119, "step": 23644 }, { "epoch": 0.9799411496539434, "grad_norm": 0.42199140787124634, "learning_rate": 1.0050147125865143e-07, "loss": 0.6677, "step": 23645 }, { "epoch": 0.979982593559617, "grad_norm": 0.39782482385635376, "learning_rate": 1.0029425173028307e-07, "loss": 0.6418, "step": 23646 }, { "epoch": 0.9800240374652908, "grad_norm": 0.4724113345146179, "learning_rate": 1.0008703220191472e-07, "loss": 0.6746, "step": 23647 }, { 
"epoch": 0.9800654813709644, "grad_norm": 0.4095297157764435, "learning_rate": 9.987981267354636e-08, "loss": 0.6406, "step": 23648 }, { "epoch": 0.9801069252766381, "grad_norm": 0.5389772057533264, "learning_rate": 9.967259314517801e-08, "loss": 0.6731, "step": 23649 }, { "epoch": 0.9801483691823117, "grad_norm": 0.42420363426208496, "learning_rate": 9.946537361680966e-08, "loss": 0.7046, "step": 23650 }, { "epoch": 0.9801898130879854, "grad_norm": 0.43187591433525085, "learning_rate": 9.925815408844131e-08, "loss": 0.6914, "step": 23651 }, { "epoch": 0.9802312569936591, "grad_norm": 0.4129951298236847, "learning_rate": 9.905093456007296e-08, "loss": 0.6399, "step": 23652 }, { "epoch": 0.9802727008993327, "grad_norm": 0.4367525279521942, "learning_rate": 9.884371503170461e-08, "loss": 0.6481, "step": 23653 }, { "epoch": 0.9803141448050064, "grad_norm": 0.39493945240974426, "learning_rate": 9.863649550333623e-08, "loss": 0.6892, "step": 23654 }, { "epoch": 0.9803555887106801, "grad_norm": 0.48866474628448486, "learning_rate": 9.842927597496788e-08, "loss": 0.7043, "step": 23655 }, { "epoch": 0.9803970326163538, "grad_norm": 0.4350723922252655, "learning_rate": 9.822205644659953e-08, "loss": 0.6879, "step": 23656 }, { "epoch": 0.9804384765220274, "grad_norm": 0.39586225152015686, "learning_rate": 9.801483691823118e-08, "loss": 0.6335, "step": 23657 }, { "epoch": 0.9804799204277012, "grad_norm": 0.43086689710617065, "learning_rate": 9.780761738986283e-08, "loss": 0.656, "step": 23658 }, { "epoch": 0.9805213643333748, "grad_norm": 0.4141995310783386, "learning_rate": 9.760039786149448e-08, "loss": 0.6152, "step": 23659 }, { "epoch": 0.9805628082390484, "grad_norm": 0.4413281977176666, "learning_rate": 9.739317833312612e-08, "loss": 0.6768, "step": 23660 }, { "epoch": 0.9806042521447221, "grad_norm": 0.39343810081481934, "learning_rate": 9.718595880475776e-08, "loss": 0.672, "step": 23661 }, { "epoch": 0.9806456960503958, "grad_norm": 0.3849329650402069, 
"learning_rate": 9.697873927638941e-08, "loss": 0.6343, "step": 23662 }, { "epoch": 0.9806871399560695, "grad_norm": 0.4134483337402344, "learning_rate": 9.677151974802106e-08, "loss": 0.6394, "step": 23663 }, { "epoch": 0.9807285838617431, "grad_norm": 0.4229297935962677, "learning_rate": 9.656430021965271e-08, "loss": 0.6985, "step": 23664 }, { "epoch": 0.9807700277674168, "grad_norm": 0.4560183584690094, "learning_rate": 9.635708069128436e-08, "loss": 0.6927, "step": 23665 }, { "epoch": 0.9808114716730905, "grad_norm": 0.40779420733451843, "learning_rate": 9.6149861162916e-08, "loss": 0.6293, "step": 23666 }, { "epoch": 0.9808529155787642, "grad_norm": 0.3912232518196106, "learning_rate": 9.594264163454765e-08, "loss": 0.6101, "step": 23667 }, { "epoch": 0.9808943594844378, "grad_norm": 0.46027958393096924, "learning_rate": 9.57354221061793e-08, "loss": 0.6963, "step": 23668 }, { "epoch": 0.9809358033901114, "grad_norm": 0.3983546197414398, "learning_rate": 9.552820257781095e-08, "loss": 0.6353, "step": 23669 }, { "epoch": 0.9809772472957852, "grad_norm": 0.44635459780693054, "learning_rate": 9.532098304944259e-08, "loss": 0.6592, "step": 23670 }, { "epoch": 0.9810186912014588, "grad_norm": 0.4508945643901825, "learning_rate": 9.511376352107424e-08, "loss": 0.6736, "step": 23671 }, { "epoch": 0.9810601351071325, "grad_norm": 0.38556066155433655, "learning_rate": 9.490654399270587e-08, "loss": 0.6107, "step": 23672 }, { "epoch": 0.9811015790128061, "grad_norm": 0.45048484206199646, "learning_rate": 9.469932446433752e-08, "loss": 0.6971, "step": 23673 }, { "epoch": 0.9811430229184799, "grad_norm": 0.44595471024513245, "learning_rate": 9.449210493596917e-08, "loss": 0.7084, "step": 23674 }, { "epoch": 0.9811844668241535, "grad_norm": 0.41767561435699463, "learning_rate": 9.428488540760082e-08, "loss": 0.6799, "step": 23675 }, { "epoch": 0.9812259107298272, "grad_norm": 0.4130467176437378, "learning_rate": 9.407766587923247e-08, "loss": 0.6689, "step": 23676 }, { 
"epoch": 0.9812673546355009, "grad_norm": 0.396087646484375, "learning_rate": 9.387044635086412e-08, "loss": 0.6598, "step": 23677 }, { "epoch": 0.9813087985411745, "grad_norm": 0.45541685819625854, "learning_rate": 9.366322682249575e-08, "loss": 0.6896, "step": 23678 }, { "epoch": 0.9813502424468482, "grad_norm": 0.5333490967750549, "learning_rate": 9.34560072941274e-08, "loss": 0.6093, "step": 23679 }, { "epoch": 0.9813916863525218, "grad_norm": 0.43766605854034424, "learning_rate": 9.324878776575905e-08, "loss": 0.6802, "step": 23680 }, { "epoch": 0.9814331302581956, "grad_norm": 0.45895758271217346, "learning_rate": 9.30415682373907e-08, "loss": 0.6189, "step": 23681 }, { "epoch": 0.9814745741638692, "grad_norm": 0.3990688621997833, "learning_rate": 9.283434870902235e-08, "loss": 0.6887, "step": 23682 }, { "epoch": 0.9815160180695429, "grad_norm": 0.4337676167488098, "learning_rate": 9.2627129180654e-08, "loss": 0.6653, "step": 23683 }, { "epoch": 0.9815574619752165, "grad_norm": 0.42258375883102417, "learning_rate": 9.241990965228562e-08, "loss": 0.6748, "step": 23684 }, { "epoch": 0.9815989058808902, "grad_norm": 0.3940608501434326, "learning_rate": 9.221269012391727e-08, "loss": 0.6575, "step": 23685 }, { "epoch": 0.9816403497865639, "grad_norm": 0.4222395122051239, "learning_rate": 9.200547059554892e-08, "loss": 0.6681, "step": 23686 }, { "epoch": 0.9816817936922375, "grad_norm": 0.42154744267463684, "learning_rate": 9.179825106718058e-08, "loss": 0.6351, "step": 23687 }, { "epoch": 0.9817232375979112, "grad_norm": 0.42230817675590515, "learning_rate": 9.159103153881223e-08, "loss": 0.7095, "step": 23688 }, { "epoch": 0.9817646815035849, "grad_norm": 0.4477296471595764, "learning_rate": 9.138381201044388e-08, "loss": 0.6969, "step": 23689 }, { "epoch": 0.9818061254092586, "grad_norm": 0.415199875831604, "learning_rate": 9.117659248207553e-08, "loss": 0.679, "step": 23690 }, { "epoch": 0.9818475693149322, "grad_norm": 0.39856746792793274, "learning_rate": 
9.096937295370716e-08, "loss": 0.6671, "step": 23691 }, { "epoch": 0.981889013220606, "grad_norm": 0.4418445825576782, "learning_rate": 9.076215342533881e-08, "loss": 0.6207, "step": 23692 }, { "epoch": 0.9819304571262796, "grad_norm": 0.38116884231567383, "learning_rate": 9.055493389697046e-08, "loss": 0.6272, "step": 23693 }, { "epoch": 0.9819719010319532, "grad_norm": 0.37959975004196167, "learning_rate": 9.03477143686021e-08, "loss": 0.6215, "step": 23694 }, { "epoch": 0.9820133449376269, "grad_norm": 0.41990989446640015, "learning_rate": 9.014049484023375e-08, "loss": 0.717, "step": 23695 }, { "epoch": 0.9820547888433006, "grad_norm": 0.43460017442703247, "learning_rate": 8.99332753118654e-08, "loss": 0.6926, "step": 23696 }, { "epoch": 0.9820962327489743, "grad_norm": 0.46710285544395447, "learning_rate": 8.972605578349704e-08, "loss": 0.7186, "step": 23697 }, { "epoch": 0.9821376766546479, "grad_norm": 0.4233827590942383, "learning_rate": 8.951883625512869e-08, "loss": 0.5942, "step": 23698 }, { "epoch": 0.9821791205603216, "grad_norm": 0.3891182541847229, "learning_rate": 8.931161672676034e-08, "loss": 0.6486, "step": 23699 }, { "epoch": 0.9822205644659953, "grad_norm": 0.41781291365623474, "learning_rate": 8.910439719839199e-08, "loss": 0.7029, "step": 23700 }, { "epoch": 0.982262008371669, "grad_norm": 0.3945339024066925, "learning_rate": 8.889717767002364e-08, "loss": 0.6299, "step": 23701 }, { "epoch": 0.9823034522773426, "grad_norm": 0.3898824453353882, "learning_rate": 8.868995814165529e-08, "loss": 0.6118, "step": 23702 }, { "epoch": 0.9823448961830162, "grad_norm": 0.437576562166214, "learning_rate": 8.848273861328691e-08, "loss": 0.6704, "step": 23703 }, { "epoch": 0.98238634008869, "grad_norm": 0.42995303869247437, "learning_rate": 8.827551908491856e-08, "loss": 0.6552, "step": 23704 }, { "epoch": 0.9824277839943636, "grad_norm": 0.43487101793289185, "learning_rate": 8.806829955655022e-08, "loss": 0.7251, "step": 23705 }, { "epoch": 
0.9824692279000373, "grad_norm": 0.42375245690345764, "learning_rate": 8.786108002818187e-08, "loss": 0.6647, "step": 23706 }, { "epoch": 0.982510671805711, "grad_norm": 0.40663692355155945, "learning_rate": 8.765386049981352e-08, "loss": 0.6224, "step": 23707 }, { "epoch": 0.9825521157113847, "grad_norm": 0.3912353515625, "learning_rate": 8.744664097144517e-08, "loss": 0.6328, "step": 23708 }, { "epoch": 0.9825935596170583, "grad_norm": 0.41251981258392334, "learning_rate": 8.723942144307679e-08, "loss": 0.6736, "step": 23709 }, { "epoch": 0.982635003522732, "grad_norm": 0.44015276432037354, "learning_rate": 8.703220191470844e-08, "loss": 0.7104, "step": 23710 }, { "epoch": 0.9826764474284057, "grad_norm": 0.3964208960533142, "learning_rate": 8.682498238634009e-08, "loss": 0.645, "step": 23711 }, { "epoch": 0.9827178913340793, "grad_norm": 0.4139605760574341, "learning_rate": 8.661776285797174e-08, "loss": 0.7083, "step": 23712 }, { "epoch": 0.982759335239753, "grad_norm": 0.40190422534942627, "learning_rate": 8.641054332960339e-08, "loss": 0.6783, "step": 23713 }, { "epoch": 0.9828007791454266, "grad_norm": 0.44008633494377136, "learning_rate": 8.620332380123504e-08, "loss": 0.6918, "step": 23714 }, { "epoch": 0.9828422230511004, "grad_norm": 0.452303946018219, "learning_rate": 8.599610427286668e-08, "loss": 0.6985, "step": 23715 }, { "epoch": 0.982883666956774, "grad_norm": 0.43062764406204224, "learning_rate": 8.578888474449833e-08, "loss": 0.6764, "step": 23716 }, { "epoch": 0.9829251108624477, "grad_norm": 0.39649298787117004, "learning_rate": 8.558166521612997e-08, "loss": 0.6416, "step": 23717 }, { "epoch": 0.9829665547681213, "grad_norm": 0.3983179032802582, "learning_rate": 8.537444568776162e-08, "loss": 0.6473, "step": 23718 }, { "epoch": 0.9830079986737951, "grad_norm": 0.45764994621276855, "learning_rate": 8.516722615939327e-08, "loss": 0.6895, "step": 23719 }, { "epoch": 0.9830494425794687, "grad_norm": 0.44658827781677246, "learning_rate": 
8.496000663102492e-08, "loss": 0.6637, "step": 23720 }, { "epoch": 0.9830908864851423, "grad_norm": 0.4045303463935852, "learning_rate": 8.475278710265655e-08, "loss": 0.6318, "step": 23721 }, { "epoch": 0.983132330390816, "grad_norm": 0.436860591173172, "learning_rate": 8.45455675742882e-08, "loss": 0.676, "step": 23722 }, { "epoch": 0.9831737742964897, "grad_norm": 0.4122665524482727, "learning_rate": 8.433834804591986e-08, "loss": 0.672, "step": 23723 }, { "epoch": 0.9832152182021634, "grad_norm": 0.42395463585853577, "learning_rate": 8.41311285175515e-08, "loss": 0.7058, "step": 23724 }, { "epoch": 0.983256662107837, "grad_norm": 0.41648733615875244, "learning_rate": 8.392390898918316e-08, "loss": 0.6709, "step": 23725 }, { "epoch": 0.9832981060135108, "grad_norm": 0.4401293396949768, "learning_rate": 8.37166894608148e-08, "loss": 0.6324, "step": 23726 }, { "epoch": 0.9833395499191844, "grad_norm": 0.40695586800575256, "learning_rate": 8.350946993244644e-08, "loss": 0.6571, "step": 23727 }, { "epoch": 0.9833809938248581, "grad_norm": 0.4133570194244385, "learning_rate": 8.330225040407808e-08, "loss": 0.598, "step": 23728 }, { "epoch": 0.9834224377305317, "grad_norm": 0.4219667911529541, "learning_rate": 8.309503087570973e-08, "loss": 0.689, "step": 23729 }, { "epoch": 0.9834638816362054, "grad_norm": 0.4425598382949829, "learning_rate": 8.288781134734138e-08, "loss": 0.6871, "step": 23730 }, { "epoch": 0.9835053255418791, "grad_norm": 0.37690967321395874, "learning_rate": 8.268059181897303e-08, "loss": 0.6892, "step": 23731 }, { "epoch": 0.9835467694475527, "grad_norm": 0.43040478229522705, "learning_rate": 8.247337229060468e-08, "loss": 0.7042, "step": 23732 }, { "epoch": 0.9835882133532264, "grad_norm": 0.40907347202301025, "learning_rate": 8.226615276223633e-08, "loss": 0.6614, "step": 23733 }, { "epoch": 0.9836296572589001, "grad_norm": 0.4230988621711731, "learning_rate": 8.205893323386796e-08, "loss": 0.6285, "step": 23734 }, { "epoch": 
0.9836711011645738, "grad_norm": 0.4477046728134155, "learning_rate": 8.185171370549961e-08, "loss": 0.6409, "step": 23735 }, { "epoch": 0.9837125450702474, "grad_norm": 0.46720707416534424, "learning_rate": 8.164449417713126e-08, "loss": 0.6227, "step": 23736 }, { "epoch": 0.9837539889759211, "grad_norm": 0.38927161693573, "learning_rate": 8.143727464876291e-08, "loss": 0.5819, "step": 23737 }, { "epoch": 0.9837954328815948, "grad_norm": 0.4213904142379761, "learning_rate": 8.123005512039456e-08, "loss": 0.668, "step": 23738 }, { "epoch": 0.9838368767872684, "grad_norm": 0.45237278938293457, "learning_rate": 8.102283559202621e-08, "loss": 0.7325, "step": 23739 }, { "epoch": 0.9838783206929421, "grad_norm": 0.3955310881137848, "learning_rate": 8.081561606365785e-08, "loss": 0.646, "step": 23740 }, { "epoch": 0.9839197645986157, "grad_norm": 0.4115259349346161, "learning_rate": 8.060839653528948e-08, "loss": 0.6987, "step": 23741 }, { "epoch": 0.9839612085042895, "grad_norm": 0.46165499091148376, "learning_rate": 8.040117700692113e-08, "loss": 0.6699, "step": 23742 }, { "epoch": 0.9840026524099631, "grad_norm": 0.43582576513290405, "learning_rate": 8.019395747855278e-08, "loss": 0.6699, "step": 23743 }, { "epoch": 0.9840440963156368, "grad_norm": 0.45321768522262573, "learning_rate": 7.998673795018443e-08, "loss": 0.6429, "step": 23744 }, { "epoch": 0.9840855402213105, "grad_norm": 0.4111030399799347, "learning_rate": 7.977951842181608e-08, "loss": 0.7114, "step": 23745 }, { "epoch": 0.9841269841269841, "grad_norm": 0.4149567186832428, "learning_rate": 7.957229889344772e-08, "loss": 0.6812, "step": 23746 }, { "epoch": 0.9841684280326578, "grad_norm": 0.42554032802581787, "learning_rate": 7.936507936507937e-08, "loss": 0.6606, "step": 23747 }, { "epoch": 0.9842098719383314, "grad_norm": 0.47971871495246887, "learning_rate": 7.915785983671102e-08, "loss": 0.7273, "step": 23748 }, { "epoch": 0.9842513158440052, "grad_norm": 0.46155983209609985, "learning_rate": 
7.895064030834267e-08, "loss": 0.6792, "step": 23749 }, { "epoch": 0.9842927597496788, "grad_norm": 0.4305484890937805, "learning_rate": 7.874342077997431e-08, "loss": 0.6665, "step": 23750 }, { "epoch": 0.9843342036553525, "grad_norm": 0.47075098752975464, "learning_rate": 7.853620125160596e-08, "loss": 0.6619, "step": 23751 }, { "epoch": 0.9843756475610261, "grad_norm": 0.41983652114868164, "learning_rate": 7.83289817232376e-08, "loss": 0.6744, "step": 23752 }, { "epoch": 0.9844170914666999, "grad_norm": 0.41290122270584106, "learning_rate": 7.812176219486926e-08, "loss": 0.6736, "step": 23753 }, { "epoch": 0.9844585353723735, "grad_norm": 0.437527596950531, "learning_rate": 7.79145426665009e-08, "loss": 0.6599, "step": 23754 }, { "epoch": 0.9844999792780471, "grad_norm": 0.42158234119415283, "learning_rate": 7.770732313813255e-08, "loss": 0.6318, "step": 23755 }, { "epoch": 0.9845414231837208, "grad_norm": 0.43469369411468506, "learning_rate": 7.75001036097642e-08, "loss": 0.6401, "step": 23756 }, { "epoch": 0.9845828670893945, "grad_norm": 0.44089460372924805, "learning_rate": 7.729288408139583e-08, "loss": 0.7079, "step": 23757 }, { "epoch": 0.9846243109950682, "grad_norm": 0.45520657300949097, "learning_rate": 7.708566455302749e-08, "loss": 0.6923, "step": 23758 }, { "epoch": 0.9846657549007418, "grad_norm": 0.42664265632629395, "learning_rate": 7.687844502465914e-08, "loss": 0.631, "step": 23759 }, { "epoch": 0.9847071988064156, "grad_norm": 0.45176950097084045, "learning_rate": 7.667122549629077e-08, "loss": 0.7144, "step": 23760 }, { "epoch": 0.9847486427120892, "grad_norm": 0.4436737298965454, "learning_rate": 7.646400596792242e-08, "loss": 0.7117, "step": 23761 }, { "epoch": 0.9847900866177629, "grad_norm": 0.4233611524105072, "learning_rate": 7.625678643955407e-08, "loss": 0.6462, "step": 23762 }, { "epoch": 0.9848315305234365, "grad_norm": 0.40636399388313293, "learning_rate": 7.604956691118571e-08, "loss": 0.6628, "step": 23763 }, { "epoch": 
0.9848729744291101, "grad_norm": 0.429129034280777, "learning_rate": 7.584234738281736e-08, "loss": 0.6664, "step": 23764 }, { "epoch": 0.9849144183347839, "grad_norm": 0.42212092876434326, "learning_rate": 7.563512785444901e-08, "loss": 0.6696, "step": 23765 }, { "epoch": 0.9849558622404575, "grad_norm": 0.44828087091445923, "learning_rate": 7.542790832608065e-08, "loss": 0.6832, "step": 23766 }, { "epoch": 0.9849973061461312, "grad_norm": 0.4451351761817932, "learning_rate": 7.52206887977123e-08, "loss": 0.6769, "step": 23767 }, { "epoch": 0.9850387500518049, "grad_norm": 0.42323923110961914, "learning_rate": 7.501346926934395e-08, "loss": 0.6631, "step": 23768 }, { "epoch": 0.9850801939574786, "grad_norm": 0.44780468940734863, "learning_rate": 7.480624974097559e-08, "loss": 0.6793, "step": 23769 }, { "epoch": 0.9851216378631522, "grad_norm": 0.4005405008792877, "learning_rate": 7.459903021260724e-08, "loss": 0.6614, "step": 23770 }, { "epoch": 0.9851630817688259, "grad_norm": 0.4254232347011566, "learning_rate": 7.439181068423889e-08, "loss": 0.6946, "step": 23771 }, { "epoch": 0.9852045256744996, "grad_norm": 0.4308984577655792, "learning_rate": 7.418459115587054e-08, "loss": 0.6669, "step": 23772 }, { "epoch": 0.9852459695801732, "grad_norm": 0.4412676990032196, "learning_rate": 7.397737162750219e-08, "loss": 0.7084, "step": 23773 }, { "epoch": 0.9852874134858469, "grad_norm": 0.4421842098236084, "learning_rate": 7.377015209913382e-08, "loss": 0.6201, "step": 23774 }, { "epoch": 0.9853288573915205, "grad_norm": 0.4159706234931946, "learning_rate": 7.356293257076548e-08, "loss": 0.6732, "step": 23775 }, { "epoch": 0.9853703012971943, "grad_norm": 0.41001251339912415, "learning_rate": 7.335571304239713e-08, "loss": 0.6621, "step": 23776 }, { "epoch": 0.9854117452028679, "grad_norm": 0.44088247418403625, "learning_rate": 7.314849351402878e-08, "loss": 0.7172, "step": 23777 }, { "epoch": 0.9854531891085416, "grad_norm": 0.4310210645198822, "learning_rate": 
7.294127398566041e-08, "loss": 0.6521, "step": 23778 }, { "epoch": 0.9854946330142152, "grad_norm": 0.48532834649086, "learning_rate": 7.273405445729206e-08, "loss": 0.7316, "step": 23779 }, { "epoch": 0.985536076919889, "grad_norm": 0.43172118067741394, "learning_rate": 7.252683492892371e-08, "loss": 0.6495, "step": 23780 }, { "epoch": 0.9855775208255626, "grad_norm": 0.4010343551635742, "learning_rate": 7.231961540055535e-08, "loss": 0.6432, "step": 23781 }, { "epoch": 0.9856189647312362, "grad_norm": 0.4331321716308594, "learning_rate": 7.2112395872187e-08, "loss": 0.658, "step": 23782 }, { "epoch": 0.98566040863691, "grad_norm": 0.391863077878952, "learning_rate": 7.190517634381865e-08, "loss": 0.652, "step": 23783 }, { "epoch": 0.9857018525425836, "grad_norm": 0.46277379989624023, "learning_rate": 7.169795681545029e-08, "loss": 0.6967, "step": 23784 }, { "epoch": 0.9857432964482573, "grad_norm": 0.4070708453655243, "learning_rate": 7.149073728708194e-08, "loss": 0.6899, "step": 23785 }, { "epoch": 0.9857847403539309, "grad_norm": 0.42039719223976135, "learning_rate": 7.128351775871359e-08, "loss": 0.6412, "step": 23786 }, { "epoch": 0.9858261842596047, "grad_norm": 0.38501501083374023, "learning_rate": 7.107629823034524e-08, "loss": 0.6182, "step": 23787 }, { "epoch": 0.9858676281652783, "grad_norm": 0.44314178824424744, "learning_rate": 7.086907870197688e-08, "loss": 0.7062, "step": 23788 }, { "epoch": 0.985909072070952, "grad_norm": 0.4155070185661316, "learning_rate": 7.066185917360853e-08, "loss": 0.6428, "step": 23789 }, { "epoch": 0.9859505159766256, "grad_norm": 0.43992865085601807, "learning_rate": 7.045463964524018e-08, "loss": 0.6962, "step": 23790 }, { "epoch": 0.9859919598822993, "grad_norm": 0.4030715823173523, "learning_rate": 7.024742011687181e-08, "loss": 0.6366, "step": 23791 }, { "epoch": 0.986033403787973, "grad_norm": 0.37775757908821106, "learning_rate": 7.004020058850346e-08, "loss": 0.6199, "step": 23792 }, { "epoch": 0.9860748476936466, 
"grad_norm": 0.4076269268989563, "learning_rate": 6.983298106013512e-08, "loss": 0.6511, "step": 23793 }, { "epoch": 0.9861162915993203, "grad_norm": 0.39976966381073, "learning_rate": 6.962576153176675e-08, "loss": 0.6215, "step": 23794 }, { "epoch": 0.986157735504994, "grad_norm": 0.4103187322616577, "learning_rate": 6.94185420033984e-08, "loss": 0.7004, "step": 23795 }, { "epoch": 0.9861991794106677, "grad_norm": 0.44598817825317383, "learning_rate": 6.921132247503005e-08, "loss": 0.7129, "step": 23796 }, { "epoch": 0.9862406233163413, "grad_norm": 0.44460245966911316, "learning_rate": 6.90041029466617e-08, "loss": 0.6985, "step": 23797 }, { "epoch": 0.9862820672220151, "grad_norm": 0.4153294265270233, "learning_rate": 6.879688341829334e-08, "loss": 0.6163, "step": 23798 }, { "epoch": 0.9863235111276887, "grad_norm": 0.43037402629852295, "learning_rate": 6.858966388992499e-08, "loss": 0.6541, "step": 23799 }, { "epoch": 0.9863649550333623, "grad_norm": 0.4054703414440155, "learning_rate": 6.838244436155664e-08, "loss": 0.6107, "step": 23800 }, { "epoch": 0.986406398939036, "grad_norm": 0.4539373815059662, "learning_rate": 6.817522483318829e-08, "loss": 0.6816, "step": 23801 }, { "epoch": 0.9864478428447097, "grad_norm": 0.4354526102542877, "learning_rate": 6.796800530481993e-08, "loss": 0.7031, "step": 23802 }, { "epoch": 0.9864892867503834, "grad_norm": 0.41053515672683716, "learning_rate": 6.776078577645158e-08, "loss": 0.6602, "step": 23803 }, { "epoch": 0.986530730656057, "grad_norm": 0.402578204870224, "learning_rate": 6.755356624808323e-08, "loss": 0.6365, "step": 23804 }, { "epoch": 0.9865721745617307, "grad_norm": 0.40409743785858154, "learning_rate": 6.734634671971488e-08, "loss": 0.6306, "step": 23805 }, { "epoch": 0.9866136184674044, "grad_norm": 0.4055984318256378, "learning_rate": 6.713912719134652e-08, "loss": 0.6482, "step": 23806 }, { "epoch": 0.986655062373078, "grad_norm": 0.4101327359676361, "learning_rate": 6.693190766297817e-08, "loss": 
0.6266, "step": 23807 }, { "epoch": 0.9866965062787517, "grad_norm": 0.4070189297199249, "learning_rate": 6.672468813460982e-08, "loss": 0.6558, "step": 23808 }, { "epoch": 0.9867379501844253, "grad_norm": 0.4296228885650635, "learning_rate": 6.651746860624145e-08, "loss": 0.688, "step": 23809 }, { "epoch": 0.9867793940900991, "grad_norm": 0.40965697169303894, "learning_rate": 6.63102490778731e-08, "loss": 0.6365, "step": 23810 }, { "epoch": 0.9868208379957727, "grad_norm": 0.390299916267395, "learning_rate": 6.610302954950476e-08, "loss": 0.6155, "step": 23811 }, { "epoch": 0.9868622819014464, "grad_norm": 0.40320396423339844, "learning_rate": 6.589581002113639e-08, "loss": 0.6552, "step": 23812 }, { "epoch": 0.98690372580712, "grad_norm": 0.38492876291275024, "learning_rate": 6.568859049276804e-08, "loss": 0.616, "step": 23813 }, { "epoch": 0.9869451697127938, "grad_norm": 0.4260987937450409, "learning_rate": 6.548137096439969e-08, "loss": 0.6617, "step": 23814 }, { "epoch": 0.9869866136184674, "grad_norm": 0.4066937267780304, "learning_rate": 6.527415143603133e-08, "loss": 0.668, "step": 23815 }, { "epoch": 0.987028057524141, "grad_norm": 0.3814796805381775, "learning_rate": 6.506693190766298e-08, "loss": 0.6317, "step": 23816 }, { "epoch": 0.9870695014298148, "grad_norm": 0.4391738772392273, "learning_rate": 6.485971237929463e-08, "loss": 0.6689, "step": 23817 }, { "epoch": 0.9871109453354884, "grad_norm": 0.42578840255737305, "learning_rate": 6.465249285092627e-08, "loss": 0.6863, "step": 23818 }, { "epoch": 0.9871523892411621, "grad_norm": 0.40593814849853516, "learning_rate": 6.444527332255792e-08, "loss": 0.6791, "step": 23819 }, { "epoch": 0.9871938331468357, "grad_norm": 0.43989697098731995, "learning_rate": 6.423805379418957e-08, "loss": 0.7062, "step": 23820 }, { "epoch": 0.9872352770525095, "grad_norm": 0.40679386258125305, "learning_rate": 6.40308342658212e-08, "loss": 0.6855, "step": 23821 }, { "epoch": 0.9872767209581831, "grad_norm": 
0.4051612317562103, "learning_rate": 6.382361473745286e-08, "loss": 0.6072, "step": 23822 }, { "epoch": 0.9873181648638568, "grad_norm": 0.4297308325767517, "learning_rate": 6.361639520908451e-08, "loss": 0.6119, "step": 23823 }, { "epoch": 0.9873596087695304, "grad_norm": 0.4561173617839813, "learning_rate": 6.340917568071616e-08, "loss": 0.6948, "step": 23824 }, { "epoch": 0.9874010526752041, "grad_norm": 0.41341331601142883, "learning_rate": 6.320195615234781e-08, "loss": 0.6167, "step": 23825 }, { "epoch": 0.9874424965808778, "grad_norm": 0.43012991547584534, "learning_rate": 6.299473662397944e-08, "loss": 0.587, "step": 23826 }, { "epoch": 0.9874839404865514, "grad_norm": 0.43469130992889404, "learning_rate": 6.27875170956111e-08, "loss": 0.6733, "step": 23827 }, { "epoch": 0.9875253843922251, "grad_norm": 0.40984296798706055, "learning_rate": 6.258029756724275e-08, "loss": 0.6591, "step": 23828 }, { "epoch": 0.9875668282978988, "grad_norm": 0.4504980146884918, "learning_rate": 6.23730780388744e-08, "loss": 0.6689, "step": 23829 }, { "epoch": 0.9876082722035725, "grad_norm": 0.4669979214668274, "learning_rate": 6.216585851050603e-08, "loss": 0.6942, "step": 23830 }, { "epoch": 0.9876497161092461, "grad_norm": 0.4514341950416565, "learning_rate": 6.195863898213768e-08, "loss": 0.676, "step": 23831 }, { "epoch": 0.9876911600149199, "grad_norm": 0.4008638262748718, "learning_rate": 6.175141945376933e-08, "loss": 0.6338, "step": 23832 }, { "epoch": 0.9877326039205935, "grad_norm": 0.40999457240104675, "learning_rate": 6.154419992540098e-08, "loss": 0.6442, "step": 23833 }, { "epoch": 0.9877740478262671, "grad_norm": 0.42545345425605774, "learning_rate": 6.133698039703262e-08, "loss": 0.6472, "step": 23834 }, { "epoch": 0.9878154917319408, "grad_norm": 0.4226754605770111, "learning_rate": 6.112976086866427e-08, "loss": 0.6689, "step": 23835 }, { "epoch": 0.9878569356376145, "grad_norm": 0.4396931231021881, "learning_rate": 6.092254134029592e-08, "loss": 0.5957, 
"step": 23836 }, { "epoch": 0.9878983795432882, "grad_norm": 0.41814103722572327, "learning_rate": 6.071532181192756e-08, "loss": 0.6963, "step": 23837 }, { "epoch": 0.9879398234489618, "grad_norm": 0.4251059889793396, "learning_rate": 6.050810228355921e-08, "loss": 0.626, "step": 23838 }, { "epoch": 0.9879812673546355, "grad_norm": 0.46041062474250793, "learning_rate": 6.030088275519086e-08, "loss": 0.7013, "step": 23839 }, { "epoch": 0.9880227112603092, "grad_norm": 0.42614808678627014, "learning_rate": 6.00936632268225e-08, "loss": 0.6521, "step": 23840 }, { "epoch": 0.9880641551659829, "grad_norm": 0.41670337319374084, "learning_rate": 5.988644369845415e-08, "loss": 0.7168, "step": 23841 }, { "epoch": 0.9881055990716565, "grad_norm": 0.40100279450416565, "learning_rate": 5.96792241700858e-08, "loss": 0.6604, "step": 23842 }, { "epoch": 0.9881470429773301, "grad_norm": 0.46226972341537476, "learning_rate": 5.947200464171744e-08, "loss": 0.7183, "step": 23843 }, { "epoch": 0.9881884868830039, "grad_norm": 0.4439721703529358, "learning_rate": 5.9264785113349085e-08, "loss": 0.6741, "step": 23844 }, { "epoch": 0.9882299307886775, "grad_norm": 0.4113834500312805, "learning_rate": 5.9057565584980735e-08, "loss": 0.6625, "step": 23845 }, { "epoch": 0.9882713746943512, "grad_norm": 0.4051987826824188, "learning_rate": 5.885034605661238e-08, "loss": 0.629, "step": 23846 }, { "epoch": 0.9883128186000248, "grad_norm": 0.3764939308166504, "learning_rate": 5.864312652824403e-08, "loss": 0.6257, "step": 23847 }, { "epoch": 0.9883542625056986, "grad_norm": 0.4414096474647522, "learning_rate": 5.843590699987567e-08, "loss": 0.6697, "step": 23848 }, { "epoch": 0.9883957064113722, "grad_norm": 0.45066481828689575, "learning_rate": 5.8228687471507317e-08, "loss": 0.6675, "step": 23849 }, { "epoch": 0.9884371503170459, "grad_norm": 0.4306630492210388, "learning_rate": 5.802146794313897e-08, "loss": 0.6279, "step": 23850 }, { "epoch": 0.9884785942227196, "grad_norm": 
0.428249716758728, "learning_rate": 5.781424841477062e-08, "loss": 0.6395, "step": 23851 }, { "epoch": 0.9885200381283932, "grad_norm": 0.39646971225738525, "learning_rate": 5.7607028886402254e-08, "loss": 0.6494, "step": 23852 }, { "epoch": 0.9885614820340669, "grad_norm": 0.3952701985836029, "learning_rate": 5.7399809358033905e-08, "loss": 0.6848, "step": 23853 }, { "epoch": 0.9886029259397405, "grad_norm": 0.4247997999191284, "learning_rate": 5.7192589829665555e-08, "loss": 0.6814, "step": 23854 }, { "epoch": 0.9886443698454143, "grad_norm": 0.4270767569541931, "learning_rate": 5.698537030129719e-08, "loss": 0.7244, "step": 23855 }, { "epoch": 0.9886858137510879, "grad_norm": 0.40708184242248535, "learning_rate": 5.677815077292884e-08, "loss": 0.6334, "step": 23856 }, { "epoch": 0.9887272576567616, "grad_norm": 0.4274381101131439, "learning_rate": 5.657093124456049e-08, "loss": 0.6943, "step": 23857 }, { "epoch": 0.9887687015624352, "grad_norm": 0.3978058099746704, "learning_rate": 5.6363711716192137e-08, "loss": 0.6088, "step": 23858 }, { "epoch": 0.988810145468109, "grad_norm": 0.40732327103614807, "learning_rate": 5.615649218782379e-08, "loss": 0.672, "step": 23859 }, { "epoch": 0.9888515893737826, "grad_norm": 0.4291216731071472, "learning_rate": 5.594927265945543e-08, "loss": 0.693, "step": 23860 }, { "epoch": 0.9888930332794562, "grad_norm": 0.39499691128730774, "learning_rate": 5.5742053131087074e-08, "loss": 0.6505, "step": 23861 }, { "epoch": 0.98893447718513, "grad_norm": 0.37826603651046753, "learning_rate": 5.5534833602718725e-08, "loss": 0.6272, "step": 23862 }, { "epoch": 0.9889759210908036, "grad_norm": 0.3967103064060211, "learning_rate": 5.5327614074350375e-08, "loss": 0.6718, "step": 23863 }, { "epoch": 0.9890173649964773, "grad_norm": 0.43328383564949036, "learning_rate": 5.512039454598201e-08, "loss": 0.673, "step": 23864 }, { "epoch": 0.9890588089021509, "grad_norm": 0.3892805278301239, "learning_rate": 5.491317501761366e-08, "loss": 0.6353, 
"step": 23865 }, { "epoch": 0.9891002528078247, "grad_norm": 0.4255126416683197, "learning_rate": 5.470595548924531e-08, "loss": 0.6968, "step": 23866 }, { "epoch": 0.9891416967134983, "grad_norm": 0.4232417941093445, "learning_rate": 5.449873596087695e-08, "loss": 0.6812, "step": 23867 }, { "epoch": 0.9891831406191719, "grad_norm": 0.44028931856155396, "learning_rate": 5.42915164325086e-08, "loss": 0.6375, "step": 23868 }, { "epoch": 0.9892245845248456, "grad_norm": 0.41984230279922485, "learning_rate": 5.408429690414025e-08, "loss": 0.6682, "step": 23869 }, { "epoch": 0.9892660284305193, "grad_norm": 0.4386623799800873, "learning_rate": 5.38770773757719e-08, "loss": 0.6237, "step": 23870 }, { "epoch": 0.989307472336193, "grad_norm": 0.3988359272480011, "learning_rate": 5.3669857847403545e-08, "loss": 0.5978, "step": 23871 }, { "epoch": 0.9893489162418666, "grad_norm": 0.420605331659317, "learning_rate": 5.346263831903519e-08, "loss": 0.7124, "step": 23872 }, { "epoch": 0.9893903601475403, "grad_norm": 0.44066599011421204, "learning_rate": 5.325541879066684e-08, "loss": 0.6589, "step": 23873 }, { "epoch": 0.989431804053214, "grad_norm": 0.40938159823417664, "learning_rate": 5.304819926229848e-08, "loss": 0.6804, "step": 23874 }, { "epoch": 0.9894732479588877, "grad_norm": 0.41471728682518005, "learning_rate": 5.284097973393013e-08, "loss": 0.6776, "step": 23875 }, { "epoch": 0.9895146918645613, "grad_norm": 0.3953459560871124, "learning_rate": 5.2633760205561783e-08, "loss": 0.6382, "step": 23876 }, { "epoch": 0.9895561357702349, "grad_norm": 0.42707890272140503, "learning_rate": 5.242654067719342e-08, "loss": 0.6348, "step": 23877 }, { "epoch": 0.9895975796759087, "grad_norm": 0.4542788863182068, "learning_rate": 5.221932114882507e-08, "loss": 0.6445, "step": 23878 }, { "epoch": 0.9896390235815823, "grad_norm": 0.4172305762767792, "learning_rate": 5.201210162045672e-08, "loss": 0.682, "step": 23879 }, { "epoch": 0.989680467487256, "grad_norm": 
0.41861143708229065, "learning_rate": 5.180488209208836e-08, "loss": 0.677, "step": 23880 }, { "epoch": 0.9897219113929296, "grad_norm": 0.43870046734809875, "learning_rate": 5.159766256372001e-08, "loss": 0.6703, "step": 23881 }, { "epoch": 0.9897633552986034, "grad_norm": 0.4483950734138489, "learning_rate": 5.139044303535166e-08, "loss": 0.6929, "step": 23882 }, { "epoch": 0.989804799204277, "grad_norm": 0.44413384795188904, "learning_rate": 5.11832235069833e-08, "loss": 0.6893, "step": 23883 }, { "epoch": 0.9898462431099507, "grad_norm": 0.411482572555542, "learning_rate": 5.0976003978614947e-08, "loss": 0.6399, "step": 23884 }, { "epoch": 0.9898876870156244, "grad_norm": 0.42969369888305664, "learning_rate": 5.07687844502466e-08, "loss": 0.6177, "step": 23885 }, { "epoch": 0.989929130921298, "grad_norm": 0.4268505275249481, "learning_rate": 5.056156492187824e-08, "loss": 0.6855, "step": 23886 }, { "epoch": 0.9899705748269717, "grad_norm": 0.4469764232635498, "learning_rate": 5.035434539350989e-08, "loss": 0.6772, "step": 23887 }, { "epoch": 0.9900120187326453, "grad_norm": 0.40855875611305237, "learning_rate": 5.0147125865141535e-08, "loss": 0.6421, "step": 23888 }, { "epoch": 0.9900534626383191, "grad_norm": 0.49218034744262695, "learning_rate": 4.993990633677318e-08, "loss": 0.6447, "step": 23889 }, { "epoch": 0.9900949065439927, "grad_norm": 0.42165106534957886, "learning_rate": 4.973268680840483e-08, "loss": 0.6567, "step": 23890 }, { "epoch": 0.9901363504496664, "grad_norm": 0.43486782908439636, "learning_rate": 4.952546728003648e-08, "loss": 0.6763, "step": 23891 }, { "epoch": 0.99017779435534, "grad_norm": 0.4308215081691742, "learning_rate": 4.9318247751668116e-08, "loss": 0.7178, "step": 23892 }, { "epoch": 0.9902192382610138, "grad_norm": 0.4345095455646515, "learning_rate": 4.9111028223299767e-08, "loss": 0.6533, "step": 23893 }, { "epoch": 0.9902606821666874, "grad_norm": 0.4309340715408325, "learning_rate": 4.890380869493142e-08, "loss": 0.6874, 
"step": 23894 }, { "epoch": 0.990302126072361, "grad_norm": 0.3916526138782501, "learning_rate": 4.869658916656306e-08, "loss": 0.6406, "step": 23895 }, { "epoch": 0.9903435699780347, "grad_norm": 0.42533913254737854, "learning_rate": 4.8489369638194704e-08, "loss": 0.616, "step": 23896 }, { "epoch": 0.9903850138837084, "grad_norm": 0.45141837000846863, "learning_rate": 4.8282150109826355e-08, "loss": 0.6383, "step": 23897 }, { "epoch": 0.9904264577893821, "grad_norm": 0.4341018795967102, "learning_rate": 4.8074930581458e-08, "loss": 0.6533, "step": 23898 }, { "epoch": 0.9904679016950557, "grad_norm": 0.4130069315433502, "learning_rate": 4.786771105308965e-08, "loss": 0.6445, "step": 23899 }, { "epoch": 0.9905093456007295, "grad_norm": 0.44865158200263977, "learning_rate": 4.766049152472129e-08, "loss": 0.637, "step": 23900 }, { "epoch": 0.9905507895064031, "grad_norm": 0.43999144434928894, "learning_rate": 4.7453271996352936e-08, "loss": 0.6555, "step": 23901 }, { "epoch": 0.9905922334120768, "grad_norm": 0.38872820138931274, "learning_rate": 4.724605246798459e-08, "loss": 0.6653, "step": 23902 }, { "epoch": 0.9906336773177504, "grad_norm": 0.42373907566070557, "learning_rate": 4.703883293961624e-08, "loss": 0.6724, "step": 23903 }, { "epoch": 0.990675121223424, "grad_norm": 0.4237865209579468, "learning_rate": 4.6831613411247874e-08, "loss": 0.6896, "step": 23904 }, { "epoch": 0.9907165651290978, "grad_norm": 0.4095766246318817, "learning_rate": 4.6624393882879525e-08, "loss": 0.6765, "step": 23905 }, { "epoch": 0.9907580090347714, "grad_norm": 0.41105422377586365, "learning_rate": 4.6417174354511175e-08, "loss": 0.6429, "step": 23906 }, { "epoch": 0.9907994529404451, "grad_norm": 0.4342929720878601, "learning_rate": 4.620995482614281e-08, "loss": 0.6572, "step": 23907 }, { "epoch": 0.9908408968461188, "grad_norm": 0.4210394322872162, "learning_rate": 4.600273529777446e-08, "loss": 0.6763, "step": 23908 }, { "epoch": 0.9908823407517925, "grad_norm": 
0.4643417298793793, "learning_rate": 4.579551576940611e-08, "loss": 0.6959, "step": 23909 }, { "epoch": 0.9909237846574661, "grad_norm": 0.4214555621147156, "learning_rate": 4.558829624103776e-08, "loss": 0.6649, "step": 23910 }, { "epoch": 0.9909652285631398, "grad_norm": 0.4132906198501587, "learning_rate": 4.538107671266941e-08, "loss": 0.6086, "step": 23911 }, { "epoch": 0.9910066724688135, "grad_norm": 0.41022923588752747, "learning_rate": 4.517385718430105e-08, "loss": 0.6366, "step": 23912 }, { "epoch": 0.9910481163744871, "grad_norm": 0.3995725214481354, "learning_rate": 4.49666376559327e-08, "loss": 0.7126, "step": 23913 }, { "epoch": 0.9910895602801608, "grad_norm": 0.4443933069705963, "learning_rate": 4.4759418127564345e-08, "loss": 0.6436, "step": 23914 }, { "epoch": 0.9911310041858344, "grad_norm": 0.38557112216949463, "learning_rate": 4.4552198599195995e-08, "loss": 0.6539, "step": 23915 }, { "epoch": 0.9911724480915082, "grad_norm": 0.45564794540405273, "learning_rate": 4.4344979070827645e-08, "loss": 0.6544, "step": 23916 }, { "epoch": 0.9912138919971818, "grad_norm": 0.46011221408843994, "learning_rate": 4.413775954245928e-08, "loss": 0.6785, "step": 23917 }, { "epoch": 0.9912553359028555, "grad_norm": 0.4133033752441406, "learning_rate": 4.393054001409093e-08, "loss": 0.6919, "step": 23918 }, { "epoch": 0.9912967798085291, "grad_norm": 0.4311780631542206, "learning_rate": 4.372332048572258e-08, "loss": 0.6326, "step": 23919 }, { "epoch": 0.9913382237142029, "grad_norm": 0.40672388672828674, "learning_rate": 4.351610095735422e-08, "loss": 0.6003, "step": 23920 }, { "epoch": 0.9913796676198765, "grad_norm": 0.39109909534454346, "learning_rate": 4.330888142898587e-08, "loss": 0.6613, "step": 23921 }, { "epoch": 0.9914211115255501, "grad_norm": 0.39611127972602844, "learning_rate": 4.310166190061752e-08, "loss": 0.6422, "step": 23922 }, { "epoch": 0.9914625554312239, "grad_norm": 0.3867829740047455, "learning_rate": 4.2894442372249165e-08, "loss": 
0.6772, "step": 23923 }, { "epoch": 0.9915039993368975, "grad_norm": 0.4738997519016266, "learning_rate": 4.268722284388081e-08, "loss": 0.6718, "step": 23924 }, { "epoch": 0.9915454432425712, "grad_norm": 0.38147035241127014, "learning_rate": 4.248000331551246e-08, "loss": 0.6273, "step": 23925 }, { "epoch": 0.9915868871482448, "grad_norm": 0.40971487760543823, "learning_rate": 4.22727837871441e-08, "loss": 0.6577, "step": 23926 }, { "epoch": 0.9916283310539186, "grad_norm": 0.40269848704338074, "learning_rate": 4.206556425877575e-08, "loss": 0.6544, "step": 23927 }, { "epoch": 0.9916697749595922, "grad_norm": 0.3963625729084015, "learning_rate": 4.18583447304074e-08, "loss": 0.6517, "step": 23928 }, { "epoch": 0.9917112188652658, "grad_norm": 0.4332420825958252, "learning_rate": 4.165112520203904e-08, "loss": 0.6876, "step": 23929 }, { "epoch": 0.9917526627709395, "grad_norm": 0.4320157766342163, "learning_rate": 4.144390567367069e-08, "loss": 0.6746, "step": 23930 }, { "epoch": 0.9917941066766132, "grad_norm": 0.4295324981212616, "learning_rate": 4.123668614530234e-08, "loss": 0.6685, "step": 23931 }, { "epoch": 0.9918355505822869, "grad_norm": 0.4232003092765808, "learning_rate": 4.102946661693398e-08, "loss": 0.6641, "step": 23932 }, { "epoch": 0.9918769944879605, "grad_norm": 0.4093596339225769, "learning_rate": 4.082224708856563e-08, "loss": 0.6355, "step": 23933 }, { "epoch": 0.9919184383936342, "grad_norm": 0.4533938467502594, "learning_rate": 4.061502756019728e-08, "loss": 0.6539, "step": 23934 }, { "epoch": 0.9919598822993079, "grad_norm": 0.44928279519081116, "learning_rate": 4.040780803182892e-08, "loss": 0.6487, "step": 23935 }, { "epoch": 0.9920013262049816, "grad_norm": 0.4099031388759613, "learning_rate": 4.0200588503460566e-08, "loss": 0.6631, "step": 23936 }, { "epoch": 0.9920427701106552, "grad_norm": 0.43626055121421814, "learning_rate": 3.9993368975092217e-08, "loss": 0.7251, "step": 23937 }, { "epoch": 0.9920842140163288, "grad_norm": 
0.4466804563999176, "learning_rate": 3.978614944672386e-08, "loss": 0.6403, "step": 23938 }, { "epoch": 0.9921256579220026, "grad_norm": 0.3976042866706848, "learning_rate": 3.957892991835551e-08, "loss": 0.6429, "step": 23939 }, { "epoch": 0.9921671018276762, "grad_norm": 0.4415148198604584, "learning_rate": 3.9371710389987154e-08, "loss": 0.6615, "step": 23940 }, { "epoch": 0.9922085457333499, "grad_norm": 0.40020158886909485, "learning_rate": 3.91644908616188e-08, "loss": 0.6219, "step": 23941 }, { "epoch": 0.9922499896390236, "grad_norm": 0.41308656334877014, "learning_rate": 3.895727133325045e-08, "loss": 0.6434, "step": 23942 }, { "epoch": 0.9922914335446973, "grad_norm": 0.39947307109832764, "learning_rate": 3.87500518048821e-08, "loss": 0.6427, "step": 23943 }, { "epoch": 0.9923328774503709, "grad_norm": 0.43069303035736084, "learning_rate": 3.854283227651374e-08, "loss": 0.6299, "step": 23944 }, { "epoch": 0.9923743213560446, "grad_norm": 0.4048948287963867, "learning_rate": 3.8335612748145386e-08, "loss": 0.6355, "step": 23945 }, { "epoch": 0.9924157652617183, "grad_norm": 0.40478643774986267, "learning_rate": 3.812839321977704e-08, "loss": 0.6652, "step": 23946 }, { "epoch": 0.9924572091673919, "grad_norm": 0.4193378686904907, "learning_rate": 3.792117369140868e-08, "loss": 0.6855, "step": 23947 }, { "epoch": 0.9924986530730656, "grad_norm": 0.41007861495018005, "learning_rate": 3.7713954163040324e-08, "loss": 0.6731, "step": 23948 }, { "epoch": 0.9925400969787392, "grad_norm": 0.4058210849761963, "learning_rate": 3.7506734634671975e-08, "loss": 0.6681, "step": 23949 }, { "epoch": 0.992581540884413, "grad_norm": 0.40121230483055115, "learning_rate": 3.729951510630362e-08, "loss": 0.6497, "step": 23950 }, { "epoch": 0.9926229847900866, "grad_norm": 0.40425416827201843, "learning_rate": 3.709229557793527e-08, "loss": 0.6248, "step": 23951 }, { "epoch": 0.9926644286957603, "grad_norm": 0.45725902915000916, "learning_rate": 3.688507604956691e-08, "loss": 
0.6616, "step": 23952 }, { "epoch": 0.992705872601434, "grad_norm": 0.41074100136756897, "learning_rate": 3.667785652119856e-08, "loss": 0.6538, "step": 23953 }, { "epoch": 0.9927473165071077, "grad_norm": 0.4123689532279968, "learning_rate": 3.6470636992830206e-08, "loss": 0.6968, "step": 23954 }, { "epoch": 0.9927887604127813, "grad_norm": 0.4388999342918396, "learning_rate": 3.626341746446186e-08, "loss": 0.6357, "step": 23955 }, { "epoch": 0.9928302043184549, "grad_norm": 0.4298550486564636, "learning_rate": 3.60561979360935e-08, "loss": 0.6831, "step": 23956 }, { "epoch": 0.9928716482241287, "grad_norm": 0.4490863084793091, "learning_rate": 3.5848978407725144e-08, "loss": 0.6755, "step": 23957 }, { "epoch": 0.9929130921298023, "grad_norm": 0.4286145865917206, "learning_rate": 3.5641758879356795e-08, "loss": 0.6887, "step": 23958 }, { "epoch": 0.992954536035476, "grad_norm": 0.4001103937625885, "learning_rate": 3.543453935098844e-08, "loss": 0.6658, "step": 23959 }, { "epoch": 0.9929959799411496, "grad_norm": 0.434372216463089, "learning_rate": 3.522731982262009e-08, "loss": 0.6318, "step": 23960 }, { "epoch": 0.9930374238468234, "grad_norm": 0.43184077739715576, "learning_rate": 3.502010029425173e-08, "loss": 0.7129, "step": 23961 }, { "epoch": 0.993078867752497, "grad_norm": 0.41406378149986267, "learning_rate": 3.4812880765883376e-08, "loss": 0.6743, "step": 23962 }, { "epoch": 0.9931203116581707, "grad_norm": 0.4166565239429474, "learning_rate": 3.4605661237515027e-08, "loss": 0.7332, "step": 23963 }, { "epoch": 0.9931617555638443, "grad_norm": 0.42120128870010376, "learning_rate": 3.439844170914667e-08, "loss": 0.6836, "step": 23964 }, { "epoch": 0.993203199469518, "grad_norm": 0.37605899572372437, "learning_rate": 3.419122218077832e-08, "loss": 0.599, "step": 23965 }, { "epoch": 0.9932446433751917, "grad_norm": 0.39245766401290894, "learning_rate": 3.3984002652409964e-08, "loss": 0.6328, "step": 23966 }, { "epoch": 0.9932860872808653, "grad_norm": 
0.5243255496025085, "learning_rate": 3.3776783124041615e-08, "loss": 0.6299, "step": 23967 }, { "epoch": 0.993327531186539, "grad_norm": 0.38965901732444763, "learning_rate": 3.356956359567326e-08, "loss": 0.6766, "step": 23968 }, { "epoch": 0.9933689750922127, "grad_norm": 0.3873787820339203, "learning_rate": 3.336234406730491e-08, "loss": 0.7056, "step": 23969 }, { "epoch": 0.9934104189978864, "grad_norm": 0.44185787439346313, "learning_rate": 3.315512453893655e-08, "loss": 0.6628, "step": 23970 }, { "epoch": 0.99345186290356, "grad_norm": 0.4705636501312256, "learning_rate": 3.2947905010568196e-08, "loss": 0.7012, "step": 23971 }, { "epoch": 0.9934933068092338, "grad_norm": 0.44281303882598877, "learning_rate": 3.2740685482199847e-08, "loss": 0.6128, "step": 23972 }, { "epoch": 0.9935347507149074, "grad_norm": 0.43493175506591797, "learning_rate": 3.253346595383149e-08, "loss": 0.6769, "step": 23973 }, { "epoch": 0.993576194620581, "grad_norm": 0.4483121633529663, "learning_rate": 3.2326246425463134e-08, "loss": 0.6819, "step": 23974 }, { "epoch": 0.9936176385262547, "grad_norm": 0.42820483446121216, "learning_rate": 3.2119026897094784e-08, "loss": 0.6864, "step": 23975 }, { "epoch": 0.9936590824319284, "grad_norm": 0.39589545130729675, "learning_rate": 3.191180736872643e-08, "loss": 0.6355, "step": 23976 }, { "epoch": 0.9937005263376021, "grad_norm": 0.41566401720046997, "learning_rate": 3.170458784035808e-08, "loss": 0.6631, "step": 23977 }, { "epoch": 0.9937419702432757, "grad_norm": 0.4182901084423065, "learning_rate": 3.149736831198972e-08, "loss": 0.6381, "step": 23978 }, { "epoch": 0.9937834141489494, "grad_norm": 0.4169228971004486, "learning_rate": 3.129014878362137e-08, "loss": 0.6652, "step": 23979 }, { "epoch": 0.9938248580546231, "grad_norm": 0.4141540229320526, "learning_rate": 3.1082929255253016e-08, "loss": 0.6592, "step": 23980 }, { "epoch": 0.9938663019602967, "grad_norm": 0.39370259642601013, "learning_rate": 3.087570972688467e-08, "loss": 
0.6199, "step": 23981 }, { "epoch": 0.9939077458659704, "grad_norm": 0.40079283714294434, "learning_rate": 3.066849019851631e-08, "loss": 0.6102, "step": 23982 }, { "epoch": 0.993949189771644, "grad_norm": 0.4259001314640045, "learning_rate": 3.046127067014796e-08, "loss": 0.6472, "step": 23983 }, { "epoch": 0.9939906336773178, "grad_norm": 0.4223737418651581, "learning_rate": 3.0254051141779605e-08, "loss": 0.6648, "step": 23984 }, { "epoch": 0.9940320775829914, "grad_norm": 0.3827546238899231, "learning_rate": 3.004683161341125e-08, "loss": 0.6326, "step": 23985 }, { "epoch": 0.9940735214886651, "grad_norm": 0.4522757828235626, "learning_rate": 2.98396120850429e-08, "loss": 0.7242, "step": 23986 }, { "epoch": 0.9941149653943387, "grad_norm": 0.45495134592056274, "learning_rate": 2.9632392556674542e-08, "loss": 0.6852, "step": 23987 }, { "epoch": 0.9941564093000125, "grad_norm": 0.4497627317905426, "learning_rate": 2.942517302830619e-08, "loss": 0.6965, "step": 23988 }, { "epoch": 0.9941978532056861, "grad_norm": 0.4223647713661194, "learning_rate": 2.9217953499937836e-08, "loss": 0.6885, "step": 23989 }, { "epoch": 0.9942392971113597, "grad_norm": 0.42440611124038696, "learning_rate": 2.9010733971569483e-08, "loss": 0.6421, "step": 23990 }, { "epoch": 0.9942807410170335, "grad_norm": 0.4231124222278595, "learning_rate": 2.8803514443201127e-08, "loss": 0.7598, "step": 23991 }, { "epoch": 0.9943221849227071, "grad_norm": 0.41109099984169006, "learning_rate": 2.8596294914832778e-08, "loss": 0.6433, "step": 23992 }, { "epoch": 0.9943636288283808, "grad_norm": 0.414800763130188, "learning_rate": 2.838907538646442e-08, "loss": 0.6938, "step": 23993 }, { "epoch": 0.9944050727340544, "grad_norm": 0.4483809173107147, "learning_rate": 2.8181855858096068e-08, "loss": 0.6476, "step": 23994 }, { "epoch": 0.9944465166397282, "grad_norm": 0.43550732731819153, "learning_rate": 2.7974636329727715e-08, "loss": 0.6639, "step": 23995 }, { "epoch": 0.9944879605454018, "grad_norm": 
0.39853668212890625, "learning_rate": 2.7767416801359362e-08, "loss": 0.6302, "step": 23996 }, { "epoch": 0.9945294044510755, "grad_norm": 0.48836749792099, "learning_rate": 2.7560197272991006e-08, "loss": 0.7449, "step": 23997 }, { "epoch": 0.9945708483567491, "grad_norm": 0.4014059603214264, "learning_rate": 2.7352977744622657e-08, "loss": 0.6176, "step": 23998 }, { "epoch": 0.9946122922624228, "grad_norm": 0.4268745183944702, "learning_rate": 2.71457582162543e-08, "loss": 0.6741, "step": 23999 }, { "epoch": 0.9946537361680965, "grad_norm": 0.3827657997608185, "learning_rate": 2.693853868788595e-08, "loss": 0.6486, "step": 24000 }, { "epoch": 0.9946951800737701, "grad_norm": 0.4078461527824402, "learning_rate": 2.6731319159517594e-08, "loss": 0.6929, "step": 24001 }, { "epoch": 0.9947366239794438, "grad_norm": 0.42268165946006775, "learning_rate": 2.652409963114924e-08, "loss": 0.6471, "step": 24002 }, { "epoch": 0.9947780678851175, "grad_norm": 0.4636204242706299, "learning_rate": 2.6316880102780892e-08, "loss": 0.5714, "step": 24003 }, { "epoch": 0.9948195117907912, "grad_norm": 0.405472069978714, "learning_rate": 2.6109660574412535e-08, "loss": 0.6396, "step": 24004 }, { "epoch": 0.9948609556964648, "grad_norm": 0.39924898743629456, "learning_rate": 2.590244104604418e-08, "loss": 0.6373, "step": 24005 }, { "epoch": 0.9949023996021386, "grad_norm": 0.4534502923488617, "learning_rate": 2.569522151767583e-08, "loss": 0.6558, "step": 24006 }, { "epoch": 0.9949438435078122, "grad_norm": 0.3909415900707245, "learning_rate": 2.5488001989307473e-08, "loss": 0.6665, "step": 24007 }, { "epoch": 0.9949852874134858, "grad_norm": 0.43003329634666443, "learning_rate": 2.528078246093912e-08, "loss": 0.6794, "step": 24008 }, { "epoch": 0.9950267313191595, "grad_norm": 0.4201449751853943, "learning_rate": 2.5073562932570767e-08, "loss": 0.6494, "step": 24009 }, { "epoch": 0.9950681752248332, "grad_norm": 0.3922904133796692, "learning_rate": 2.4866343404202414e-08, "loss": 
0.6698, "step": 24010 }, { "epoch": 0.9951096191305069, "grad_norm": 0.4540359079837799, "learning_rate": 2.4659123875834058e-08, "loss": 0.6941, "step": 24011 }, { "epoch": 0.9951510630361805, "grad_norm": 0.41348156332969666, "learning_rate": 2.445190434746571e-08, "loss": 0.5879, "step": 24012 }, { "epoch": 0.9951925069418542, "grad_norm": 0.43994447588920593, "learning_rate": 2.4244684819097352e-08, "loss": 0.6561, "step": 24013 }, { "epoch": 0.9952339508475279, "grad_norm": 0.4196043610572815, "learning_rate": 2.4037465290729e-08, "loss": 0.6593, "step": 24014 }, { "epoch": 0.9952753947532016, "grad_norm": 0.40524864196777344, "learning_rate": 2.3830245762360646e-08, "loss": 0.7095, "step": 24015 }, { "epoch": 0.9953168386588752, "grad_norm": 0.40045714378356934, "learning_rate": 2.3623026233992293e-08, "loss": 0.6552, "step": 24016 }, { "epoch": 0.9953582825645488, "grad_norm": 0.4123160243034363, "learning_rate": 2.3415806705623937e-08, "loss": 0.7189, "step": 24017 }, { "epoch": 0.9953997264702226, "grad_norm": 0.43043577671051025, "learning_rate": 2.3208587177255587e-08, "loss": 0.6654, "step": 24018 }, { "epoch": 0.9954411703758962, "grad_norm": 0.41473039984703064, "learning_rate": 2.300136764888723e-08, "loss": 0.651, "step": 24019 }, { "epoch": 0.9954826142815699, "grad_norm": 0.4346679747104645, "learning_rate": 2.279414812051888e-08, "loss": 0.7111, "step": 24020 }, { "epoch": 0.9955240581872435, "grad_norm": 0.4852639436721802, "learning_rate": 2.2586928592150525e-08, "loss": 0.6508, "step": 24021 }, { "epoch": 0.9955655020929173, "grad_norm": 0.41961541771888733, "learning_rate": 2.2379709063782172e-08, "loss": 0.6517, "step": 24022 }, { "epoch": 0.9956069459985909, "grad_norm": 0.47137436270713806, "learning_rate": 2.2172489535413823e-08, "loss": 0.6381, "step": 24023 }, { "epoch": 0.9956483899042646, "grad_norm": 0.4609004855155945, "learning_rate": 2.1965270007045466e-08, "loss": 0.6813, "step": 24024 }, { "epoch": 0.9956898338099383, 
"grad_norm": 0.3929833173751831, "learning_rate": 2.175805047867711e-08, "loss": 0.6216, "step": 24025 }, { "epoch": 0.9957312777156119, "grad_norm": 0.43735677003860474, "learning_rate": 2.155083095030876e-08, "loss": 0.6576, "step": 24026 }, { "epoch": 0.9957727216212856, "grad_norm": 0.392007052898407, "learning_rate": 2.1343611421940404e-08, "loss": 0.6396, "step": 24027 }, { "epoch": 0.9958141655269592, "grad_norm": 0.5145682692527771, "learning_rate": 2.113639189357205e-08, "loss": 0.7427, "step": 24028 }, { "epoch": 0.995855609432633, "grad_norm": 0.39346325397491455, "learning_rate": 2.09291723652037e-08, "loss": 0.6226, "step": 24029 }, { "epoch": 0.9958970533383066, "grad_norm": 0.4105379283428192, "learning_rate": 2.0721952836835345e-08, "loss": 0.7173, "step": 24030 }, { "epoch": 0.9959384972439803, "grad_norm": 0.4506531059741974, "learning_rate": 2.051473330846699e-08, "loss": 0.6978, "step": 24031 }, { "epoch": 0.9959799411496539, "grad_norm": 0.40263912081718445, "learning_rate": 2.030751378009864e-08, "loss": 0.6287, "step": 24032 }, { "epoch": 0.9960213850553277, "grad_norm": 0.43123677372932434, "learning_rate": 2.0100294251730283e-08, "loss": 0.6704, "step": 24033 }, { "epoch": 0.9960628289610013, "grad_norm": 0.41216036677360535, "learning_rate": 1.989307472336193e-08, "loss": 0.6636, "step": 24034 }, { "epoch": 0.9961042728666749, "grad_norm": 0.4459758698940277, "learning_rate": 1.9685855194993577e-08, "loss": 0.6793, "step": 24035 }, { "epoch": 0.9961457167723486, "grad_norm": 0.3996075689792633, "learning_rate": 1.9478635666625224e-08, "loss": 0.6106, "step": 24036 }, { "epoch": 0.9961871606780223, "grad_norm": 0.41113999485969543, "learning_rate": 1.927141613825687e-08, "loss": 0.6396, "step": 24037 }, { "epoch": 0.996228604583696, "grad_norm": 0.39087584614753723, "learning_rate": 1.906419660988852e-08, "loss": 0.6481, "step": 24038 }, { "epoch": 0.9962700484893696, "grad_norm": 0.42288389801979065, "learning_rate": 
1.8856977081520162e-08, "loss": 0.694, "step": 24039 }, { "epoch": 0.9963114923950434, "grad_norm": 0.4320143759250641, "learning_rate": 1.864975755315181e-08, "loss": 0.6553, "step": 24040 }, { "epoch": 0.996352936300717, "grad_norm": 0.43176206946372986, "learning_rate": 1.8442538024783456e-08, "loss": 0.6361, "step": 24041 }, { "epoch": 0.9963943802063906, "grad_norm": 0.4584255516529083, "learning_rate": 1.8235318496415103e-08, "loss": 0.6851, "step": 24042 }, { "epoch": 0.9964358241120643, "grad_norm": 0.41714248061180115, "learning_rate": 1.802809896804675e-08, "loss": 0.667, "step": 24043 }, { "epoch": 0.996477268017738, "grad_norm": 0.43292784690856934, "learning_rate": 1.7820879439678397e-08, "loss": 0.6361, "step": 24044 }, { "epoch": 0.9965187119234117, "grad_norm": 0.4160439968109131, "learning_rate": 1.7613659911310044e-08, "loss": 0.6814, "step": 24045 }, { "epoch": 0.9965601558290853, "grad_norm": 0.46269482374191284, "learning_rate": 1.7406440382941688e-08, "loss": 0.691, "step": 24046 }, { "epoch": 0.996601599734759, "grad_norm": 0.3964201509952545, "learning_rate": 1.7199220854573335e-08, "loss": 0.6453, "step": 24047 }, { "epoch": 0.9966430436404327, "grad_norm": 0.47848013043403625, "learning_rate": 1.6992001326204982e-08, "loss": 0.7058, "step": 24048 }, { "epoch": 0.9966844875461064, "grad_norm": 0.45621258020401, "learning_rate": 1.678478179783663e-08, "loss": 0.6702, "step": 24049 }, { "epoch": 0.99672593145178, "grad_norm": 0.3942944407463074, "learning_rate": 1.6577562269468276e-08, "loss": 0.6686, "step": 24050 }, { "epoch": 0.9967673753574536, "grad_norm": 0.40179166197776794, "learning_rate": 1.6370342741099923e-08, "loss": 0.6965, "step": 24051 }, { "epoch": 0.9968088192631274, "grad_norm": 0.4035951793193817, "learning_rate": 1.6163123212731567e-08, "loss": 0.6327, "step": 24052 }, { "epoch": 0.996850263168801, "grad_norm": 0.42603084444999695, "learning_rate": 1.5955903684363214e-08, "loss": 0.6746, "step": 24053 }, { "epoch": 
0.9968917070744747, "grad_norm": 0.4199628233909607, "learning_rate": 1.574868415599486e-08, "loss": 0.644, "step": 24054 }, { "epoch": 0.9969331509801483, "grad_norm": 0.38900476694107056, "learning_rate": 1.5541464627626508e-08, "loss": 0.6449, "step": 24055 }, { "epoch": 0.9969745948858221, "grad_norm": 0.4637569487094879, "learning_rate": 1.5334245099258155e-08, "loss": 0.6786, "step": 24056 }, { "epoch": 0.9970160387914957, "grad_norm": 0.39429667592048645, "learning_rate": 1.5127025570889802e-08, "loss": 0.7134, "step": 24057 }, { "epoch": 0.9970574826971694, "grad_norm": 0.4038718640804291, "learning_rate": 1.491980604252145e-08, "loss": 0.6216, "step": 24058 }, { "epoch": 0.997098926602843, "grad_norm": 0.4547617733478546, "learning_rate": 1.4712586514153095e-08, "loss": 0.6256, "step": 24059 }, { "epoch": 0.9971403705085167, "grad_norm": 0.37967246770858765, "learning_rate": 1.4505366985784742e-08, "loss": 0.6302, "step": 24060 }, { "epoch": 0.9971818144141904, "grad_norm": 0.4694235920906067, "learning_rate": 1.4298147457416389e-08, "loss": 0.7017, "step": 24061 }, { "epoch": 0.997223258319864, "grad_norm": 0.4229969084262848, "learning_rate": 1.4090927929048034e-08, "loss": 0.699, "step": 24062 }, { "epoch": 0.9972647022255378, "grad_norm": 0.42660489678382874, "learning_rate": 1.3883708400679681e-08, "loss": 0.6633, "step": 24063 }, { "epoch": 0.9973061461312114, "grad_norm": 0.45063167810440063, "learning_rate": 1.3676488872311328e-08, "loss": 0.64, "step": 24064 }, { "epoch": 0.9973475900368851, "grad_norm": 0.39679086208343506, "learning_rate": 1.3469269343942975e-08, "loss": 0.644, "step": 24065 }, { "epoch": 0.9973890339425587, "grad_norm": 0.42337507009506226, "learning_rate": 1.326204981557462e-08, "loss": 0.6392, "step": 24066 }, { "epoch": 0.9974304778482325, "grad_norm": 0.41460925340652466, "learning_rate": 1.3054830287206268e-08, "loss": 0.6244, "step": 24067 }, { "epoch": 0.9974719217539061, "grad_norm": 0.4437163770198822, "learning_rate": 
1.2847610758837915e-08, "loss": 0.6842, "step": 24068 }, { "epoch": 0.9975133656595797, "grad_norm": 0.4410180151462555, "learning_rate": 1.264039123046956e-08, "loss": 0.7001, "step": 24069 }, { "epoch": 0.9975548095652534, "grad_norm": 0.39628365635871887, "learning_rate": 1.2433171702101207e-08, "loss": 0.6118, "step": 24070 }, { "epoch": 0.9975962534709271, "grad_norm": 0.40094804763793945, "learning_rate": 1.2225952173732854e-08, "loss": 0.6682, "step": 24071 }, { "epoch": 0.9976376973766008, "grad_norm": 0.3953889012336731, "learning_rate": 1.20187326453645e-08, "loss": 0.623, "step": 24072 }, { "epoch": 0.9976791412822744, "grad_norm": 0.49310731887817383, "learning_rate": 1.1811513116996147e-08, "loss": 0.6781, "step": 24073 }, { "epoch": 0.9977205851879482, "grad_norm": 0.4177588224411011, "learning_rate": 1.1604293588627794e-08, "loss": 0.6173, "step": 24074 }, { "epoch": 0.9977620290936218, "grad_norm": 0.4271541237831116, "learning_rate": 1.139707406025944e-08, "loss": 0.6985, "step": 24075 }, { "epoch": 0.9978034729992955, "grad_norm": 0.4241659641265869, "learning_rate": 1.1189854531891086e-08, "loss": 0.6936, "step": 24076 }, { "epoch": 0.9978449169049691, "grad_norm": 0.41232186555862427, "learning_rate": 1.0982635003522733e-08, "loss": 0.6555, "step": 24077 }, { "epoch": 0.9978863608106427, "grad_norm": 0.40220117568969727, "learning_rate": 1.077541547515438e-08, "loss": 0.6223, "step": 24078 }, { "epoch": 0.9979278047163165, "grad_norm": 0.45000794529914856, "learning_rate": 1.0568195946786026e-08, "loss": 0.649, "step": 24079 }, { "epoch": 0.9979692486219901, "grad_norm": 0.417976975440979, "learning_rate": 1.0360976418417673e-08, "loss": 0.7122, "step": 24080 }, { "epoch": 0.9980106925276638, "grad_norm": 0.41776904463768005, "learning_rate": 1.015375689004932e-08, "loss": 0.6847, "step": 24081 }, { "epoch": 0.9980521364333375, "grad_norm": 0.4293898344039917, "learning_rate": 9.946537361680965e-09, "loss": 0.6667, "step": 24082 }, { "epoch": 
0.9980935803390112, "grad_norm": 0.41448596119880676, "learning_rate": 9.739317833312612e-09, "loss": 0.6235, "step": 24083 }, { "epoch": 0.9981350242446848, "grad_norm": 0.40281033515930176, "learning_rate": 9.53209830494426e-09, "loss": 0.6519, "step": 24084 }, { "epoch": 0.9981764681503585, "grad_norm": 0.5352064371109009, "learning_rate": 9.324878776575905e-09, "loss": 0.645, "step": 24085 }, { "epoch": 0.9982179120560322, "grad_norm": 0.4493749737739563, "learning_rate": 9.117659248207552e-09, "loss": 0.6775, "step": 24086 }, { "epoch": 0.9982593559617058, "grad_norm": 0.43947115540504456, "learning_rate": 8.910439719839199e-09, "loss": 0.65, "step": 24087 }, { "epoch": 0.9983007998673795, "grad_norm": 0.4529394805431366, "learning_rate": 8.703220191470844e-09, "loss": 0.6775, "step": 24088 }, { "epoch": 0.9983422437730531, "grad_norm": 0.4309327304363251, "learning_rate": 8.496000663102491e-09, "loss": 0.6674, "step": 24089 }, { "epoch": 0.9983836876787269, "grad_norm": 0.42833074927330017, "learning_rate": 8.288781134734138e-09, "loss": 0.7041, "step": 24090 }, { "epoch": 0.9984251315844005, "grad_norm": 0.42104312777519226, "learning_rate": 8.081561606365784e-09, "loss": 0.6232, "step": 24091 }, { "epoch": 0.9984665754900742, "grad_norm": 0.47261059284210205, "learning_rate": 7.87434207799743e-09, "loss": 0.6831, "step": 24092 }, { "epoch": 0.9985080193957478, "grad_norm": 0.43331441283226013, "learning_rate": 7.667122549629078e-09, "loss": 0.6389, "step": 24093 }, { "epoch": 0.9985494633014216, "grad_norm": 0.41058290004730225, "learning_rate": 7.459903021260725e-09, "loss": 0.6838, "step": 24094 }, { "epoch": 0.9985909072070952, "grad_norm": 0.388836145401001, "learning_rate": 7.252683492892371e-09, "loss": 0.6003, "step": 24095 }, { "epoch": 0.9986323511127688, "grad_norm": 0.4189140200614929, "learning_rate": 7.045463964524017e-09, "loss": 0.6917, "step": 24096 }, { "epoch": 0.9986737950184426, "grad_norm": 0.3993547558784485, "learning_rate": 
6.838244436155664e-09, "loss": 0.7163, "step": 24097 }, { "epoch": 0.9987152389241162, "grad_norm": 0.40462109446525574, "learning_rate": 6.63102490778731e-09, "loss": 0.6322, "step": 24098 }, { "epoch": 0.9987566828297899, "grad_norm": 0.41604816913604736, "learning_rate": 6.423805379418957e-09, "loss": 0.6713, "step": 24099 }, { "epoch": 0.9987981267354635, "grad_norm": 0.44099119305610657, "learning_rate": 6.216585851050604e-09, "loss": 0.6675, "step": 24100 }, { "epoch": 0.9988395706411373, "grad_norm": 0.4236350357532501, "learning_rate": 6.00936632268225e-09, "loss": 0.6332, "step": 24101 }, { "epoch": 0.9988810145468109, "grad_norm": 0.43383583426475525, "learning_rate": 5.802146794313897e-09, "loss": 0.6389, "step": 24102 }, { "epoch": 0.9989224584524845, "grad_norm": 0.3844117522239685, "learning_rate": 5.594927265945543e-09, "loss": 0.6332, "step": 24103 }, { "epoch": 0.9989639023581582, "grad_norm": 0.43639302253723145, "learning_rate": 5.38770773757719e-09, "loss": 0.6434, "step": 24104 }, { "epoch": 0.9990053462638319, "grad_norm": 0.4110928475856781, "learning_rate": 5.180488209208836e-09, "loss": 0.6632, "step": 24105 }, { "epoch": 0.9990467901695056, "grad_norm": 0.41302281618118286, "learning_rate": 4.9732686808404825e-09, "loss": 0.7277, "step": 24106 }, { "epoch": 0.9990882340751792, "grad_norm": 0.4217560291290283, "learning_rate": 4.76604915247213e-09, "loss": 0.6917, "step": 24107 }, { "epoch": 0.999129677980853, "grad_norm": 0.39875221252441406, "learning_rate": 4.558829624103776e-09, "loss": 0.6223, "step": 24108 }, { "epoch": 0.9991711218865266, "grad_norm": 0.4087948501110077, "learning_rate": 4.351610095735422e-09, "loss": 0.6492, "step": 24109 }, { "epoch": 0.9992125657922003, "grad_norm": 0.4066776633262634, "learning_rate": 4.144390567367069e-09, "loss": 0.6851, "step": 24110 }, { "epoch": 0.9992540096978739, "grad_norm": 0.4268956780433655, "learning_rate": 3.937171038998715e-09, "loss": 0.703, "step": 24111 }, { "epoch": 
0.9992954536035475, "grad_norm": 0.4181530177593231, "learning_rate": 3.729951510630362e-09, "loss": 0.6836, "step": 24112 }, { "epoch": 0.9993368975092213, "grad_norm": 0.40263479948043823, "learning_rate": 3.5227319822620085e-09, "loss": 0.655, "step": 24113 }, { "epoch": 0.9993783414148949, "grad_norm": 0.4511946439743042, "learning_rate": 3.315512453893655e-09, "loss": 0.6863, "step": 24114 }, { "epoch": 0.9994197853205686, "grad_norm": 0.4255143702030182, "learning_rate": 3.108292925525302e-09, "loss": 0.7095, "step": 24115 }, { "epoch": 0.9994612292262423, "grad_norm": 0.38710400462150574, "learning_rate": 2.9010733971569484e-09, "loss": 0.6963, "step": 24116 }, { "epoch": 0.999502673131916, "grad_norm": 0.45806679129600525, "learning_rate": 2.693853868788595e-09, "loss": 0.7203, "step": 24117 }, { "epoch": 0.9995441170375896, "grad_norm": 0.4411233961582184, "learning_rate": 2.4866343404202413e-09, "loss": 0.6963, "step": 24118 }, { "epoch": 0.9995855609432633, "grad_norm": 0.47997546195983887, "learning_rate": 2.279414812051888e-09, "loss": 0.6624, "step": 24119 }, { "epoch": 0.999627004848937, "grad_norm": 0.40546709299087524, "learning_rate": 2.0721952836835345e-09, "loss": 0.6761, "step": 24120 }, { "epoch": 0.9996684487546106, "grad_norm": 0.4429965317249298, "learning_rate": 1.864975755315181e-09, "loss": 0.67, "step": 24121 }, { "epoch": 0.9997098926602843, "grad_norm": 0.4323263168334961, "learning_rate": 1.6577562269468276e-09, "loss": 0.6592, "step": 24122 }, { "epoch": 0.9997513365659579, "grad_norm": 0.42195117473602295, "learning_rate": 1.4505366985784742e-09, "loss": 0.6602, "step": 24123 }, { "epoch": 0.9997927804716317, "grad_norm": 0.4603728652000427, "learning_rate": 1.2433171702101206e-09, "loss": 0.687, "step": 24124 }, { "epoch": 0.9998342243773053, "grad_norm": 0.41849106550216675, "learning_rate": 1.0360976418417673e-09, "loss": 0.6497, "step": 24125 }, { "epoch": 0.999875668282979, "grad_norm": 0.4541945457458496, "learning_rate": 
8.288781134734138e-10, "loss": 0.7271, "step": 24126 }, { "epoch": 0.9999171121886526, "grad_norm": 0.4299638271331787, "learning_rate": 6.216585851050603e-10, "loss": 0.6252, "step": 24127 }, { "epoch": 0.9999585560943264, "grad_norm": 0.4042789340019226, "learning_rate": 4.144390567367069e-10, "loss": 0.6179, "step": 24128 }, { "epoch": 1.0, "grad_norm": 0.42805585265159607, "learning_rate": 2.0721952836835345e-10, "loss": 0.5836, "step": 24129 } ], "logging_steps": 1.0, "max_steps": 24129, "num_input_tokens_seen": 0, "num_train_epochs": 1, "save_steps": 8043, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 1.5173164088108148e+20, "train_batch_size": 1, "trial_name": null, "trial_params": null }